
Sometimes, when returning to our app from an activity such as importing a photo or watching a video, the app locks up and goes to a black screen, although some functionality, such as toasts, keeps running. The end of the log consistently says:

I/Choreographer(17165): Skipped ## frames!  The application may be doing too much work on its main thread.
12-21 23:59:15.620 W/libEGL  (17165): EGLNativeWindowType 0x77323f0010 disconnect failed

This is an inconsistent, intermittent problem, so it is hard to pin down. Any ideas would be greatly appreciated. I assume some threading issue is locking up the UI thread, but because it happens rarely and isn't easy to reproduce, I really don't know what is going on.

EDIT: This only happens when returning to the app from some intent activity (sending an email, picking a photo, etc.). I have managed to make it reproducible with a few well-placed breakpoints.

Here is the C# code I believe is responsible. I have added REF(1), (2) and (3) to mark where my breakpoints are. When it freezes, REF(2) is apparently never reached, so the destroyed surface is sometimes never recreated and rendering never resumes... We are using a RecordableSurfaceView, and this is a subclass of it; please forgive how long it is (a minimal sketch after the class illustrates why that catch is timing-dependent):

private class ARRenderThread : Thread, ISurfaceHolderCallback2
        {
            RecordableSurfaceView mSurfaceView;

            EGLDisplay mEGLDisplay;

            EGLContext mEGLContext;

            EGLSurface mEGLSurface;

            EGLSurface mEGLSurfaceMedia;

            public LinkedList<Runnable> mRunnableQueue = new LinkedList<Runnable>();

            int[] config = new int[] {
                    EGL14.EglRedSize, 8,
                    EGL14.EglGreenSize, 8,
                    EGL14.EglBlueSize, 8,
                    EGL14.EglAlphaSize, 8,
                    EGL14.EglRenderableType, EGL14.EglOpenglEs2Bit,
                    EGLExt.EglRecordableAndroid, 1,
//                    EGL14.EglSurfaceType, EGL14.EglPbufferBit,
                    EGL14.EglDepthSize, 16,
                    EGL14.EglNone
            };


            public ARRenderThread(RecordableSurfaceView surfaceView)
            {
                this.mSurfaceView = surfaceView;
                if (Build.VERSION.SdkInt >= Build.VERSION_CODES.O)
                {
                    config[10] = EGLExt.EglRecordableAndroid;
                }
            }

            private AtomicBoolean mLoop = new AtomicBoolean(false);

            EGLConfig chooseEglConfig(EGLDisplay eglDisplay)
            {
                int[] configsCount = new int[] { 0 };
                EGLConfig[] configs = new EGLConfig[1];
                EGL14.EglChooseConfig(eglDisplay, config, 0, configs, 0, configs.Length, configsCount,
                        0);
                return configs[0];
            }
            public override void Run()
            {
                if (mSurfaceView.mHasGLContext.Get())
                {
                    return;
                }
                mEGLDisplay = EGL14.EglGetDisplay(EGL14.EglDefaultDisplay);
                int[] version = new int[2];
                EGL14.EglInitialize(mEGLDisplay, version, 0, version, 1);
                EGLConfig eglConfig = chooseEglConfig(mEGLDisplay);
                mEGLContext = EGL14
                        .EglCreateContext(mEGLDisplay, eglConfig, EGL14.EglNoContext,
                                new int[] { EGL14.EglContextClientVersion, 2, EGL14.EglNone }, 0);

                int[] surfaceAttribs = {
                            EGL14.EglNone
                    };

                mEGLSurface = EGL14
                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView,
                                surfaceAttribs, 0);
                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);

                // guarantee to only report surface as created once GL context
                // associated with the surface has been created, and call on the GL thread
                // NOT the main thread but BEFORE the codec surface is attached to the GL context
                RendererCallbacks result;
                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onSurfaceCreated();

                }

                mSurfaceView.mMediaSurfaceCreated.Set(false);

                GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 1.0f);

                mSurfaceView.mHasGLContext.Set(true);

                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onContextCreated();

                }

                mLoop.Set(true); // REF(1)

                while (mLoop.Get())
                {

                    if (!mSurfaceView.mPaused)
                    {
                        bool shouldRender = false;

                        //we're just rendering when requested, so check that no one
                        //has requested and if not, just continue
                        if (mSurfaceView.mRenderMode.Get() == (int)Rendermode.WhenDirty)
                        {

                            if (mSurfaceView.mRenderRequested.Get())
                            {
                                mSurfaceView.mRenderRequested.Set(false);
                                shouldRender = true;
                            }

                        }
                        else
                        {
                            shouldRender = true;
                        }

                        if (mSurfaceView.mSizeChange.Get())
                        {

                            GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onSurfaceChanged(mSurfaceView.mWidth, mSurfaceView.mHeight);

                            }

                            mSurfaceView.mSizeChange.Set(false);
                        }

                        if (shouldRender)
                        {

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onPreDrawFrame();

                            }

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onDrawScreen();

                            }

                            EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurface);

                            if (mSurfaceView.mIsRecording.Get())
                            {
                                if (!mSurfaceView.mMediaSurfaceCreated.Get())
                                {
                                    mEGLSurfaceMedia = EGL14
                                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView.mSurface,
                                                surfaceAttribs, 0);
                                    mSurfaceView.mMediaSurfaceCreated.Set(true);
                                }

                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurfaceMedia, mEGLSurfaceMedia,
                                        mEGLContext);

                                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                                {

                                    GLES20.GlViewport(0, 0, mSurfaceView.mOutWidth, mSurfaceView.mOutHeight);
                                    //EGLExt.EglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceMedia, (JavaSystem.CurrentTimeMillis() - RecordableSurfaceView.mStartTimeMillisecs) * 1000L *1000L);
                                    result.onDrawRecording();
                                    GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);
                                }

                                EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurfaceMedia);
                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface,
                                        mEGLContext);
                            }
                        }

                        while (mRunnableQueue.Count > 0)
                        {
                            Runnable ev = mRunnableQueue.First.Value;
                            mRunnableQueue.RemoveFirst();
                            ev.Run();
                        }
                    }

                    try
                    {
                        Thread.Sleep((long)(1f / 120.0f * 1000f));
                    }
                    catch (InterruptedException intex)
                    {
                        if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result)) // REF(2)
                        {
                            result.onSurfaceDestroyed();
                        }

                        if (mEGLDisplay != null)
                        {
                            EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                                    EGL14.EglNoSurface,
                                    EGL14.EglNoContext);

                            if (mEGLSurface != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                            }

                            if (mEGLSurfaceMedia != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                            }

                            EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                            mSurfaceView.mHasGLContext.Set(false);
                            EGL14.EglReleaseThread();
                            EGL14.EglTerminate(mEGLDisplay);
                            mSurfaceView.mSurface.Release();

                        }
                        return;
                    }
                }
            }

            public void SurfaceRedrawNeeded(ISurfaceHolder surfaceHolder)
            {

            }

            public void SurfaceCreated(ISurfaceHolder surfaceHolder)
            {

                if (!this.IsAlive && !this.IsInterrupted && this.GetState() != State.Terminated)
                {
                    this.Start();
                }
            }

            public void SurfaceChanged(ISurfaceHolder surfaceHolder, Android.Graphics.Format i, int width, int height)
            {

                if (mSurfaceView.mWidth != width)
                {
                    mSurfaceView.mWidth = width;
                    mSurfaceView.mSizeChange.Set(true);
                }

                if (mSurfaceView.mHeight != height)
                {
                    mSurfaceView.mHeight = height;
                    mSurfaceView.mSizeChange.Set(true);
                }


            }

            public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
            {
                mLoop.Set(false); //REF(3)
                this.Interrupt();
                mSurfaceView.Holder.RemoveCallback(this);
            }
        }
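
To spell out why REF(2) can be skipped: the EGL teardown lives inside the catch for InterruptedException, so it only runs if the render thread is (or ends up) blocked in Thread.Sleep after Interrupt() is called. Because SurfaceDestroyed sets mLoop to false first, the loop can observe the flag and return before the interrupt is ever observed, in which case the catch, and with it the teardown and the mHasGLContext reset, never executes. Below is a minimal sketch of that race; the class and member names (FlagLoopThread, Loop, CleanupRan) are hypothetical and only stand in for the pattern, not the app's code.

using Java.Lang;
using Java.Util.Concurrent.Atomic;

// Minimal sketch of the shutdown race: the teardown lives in the catch, so it only
// runs if the thread is inside Sleep (or re-enters it) after Interrupt(). If the
// thread observes Loop == false first, Run() simply returns and the catch, i.e.
// the teardown, never executes.
class FlagLoopThread : Thread
{
    // Analogue of mLoop in ARRenderThread.
    public readonly AtomicBoolean Loop = new AtomicBoolean(true);

    // Records whether the "teardown" path (the analogue of REF(2)) ever ran.
    public bool CleanupRan { get; private set; }

    public override void Run()
    {
        // If Loop flips to false and is read here before the interrupt is
        // observed, the loop exits cleanly and the catch below is skipped.
        while (Loop.Get())
        {
            // ... per-frame work would happen here ...
            try
            {
                Sleep(8); // Interrupt() is observed here as an InterruptedException
            }
            catch (InterruptedException)
            {
                CleanupRan = true; // analogue of the EGL teardown at REF(2)
                return;
            }
        }
        // Exiting this way skips the catch entirely: CleanupRan stays false,
        // which mirrors mHasGLContext never being reset in the code above.
    }
}

// Caller analogue of SurfaceDestroyed (REF(3)):
//     thread.Loop.Set(false);   // the loop may exit on this alone...
//     thread.Interrupt();       // ...before this interrupt is ever observed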

Here is a longer version of the log as it fails; I have three logs that produce nearly identical output, like the following:

12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 output.delay.value = 2
12-21 23:47:40.384 D/CCodecConfig(17165):   string output.media-type.value = "audio/raw"
12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 raw.channel-count.value = 1
12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 raw.sample-rate.value = 44100
12-21 23:47:40.384 D/CCodecConfig(17165): }
12-21 23:47:40.385 D/CCodecConfig(17165): no c2 equivalents for language
12-21 23:47:40.385 D/CCodecConfig(17165): config failed => CORRUPTED
12-21 23:47:40.386 D/CCodecConfig(17165): c2 config diff is   c2::u32 raw.channel-count.value = 2
12-21 23:47:40.386 W/Codec2Client(17165): query -- param skipped: index = 1107298332.
12-21 23:47:40.386 D/CCodec  (17165): client requested max input size 469, which is smaller than what component recommended (8192); overriding with component recommendation.
12-21 23:47:40.386 W/CCodec  (17165): This behavior is subject to change. It is recommended that app developers double check whether the requested max input size is in reasonable range.
12-21 23:47:40.386 D/CCodec  (17165): setup formats input: AMessage(what = 0x00000000) = {
12-21 23:47:40.386 D/CCodec  (17165):   int32_t channel-count = 2
12-21 23:47:40.386 D/CCodec  (17165):   int32_t level = 0
12-21 23:47:40.386 D/CCodec  (17165):   int32_t max-input-size = 8192
12-21 23:47:40.386 D/CCodec  (17165):   string mime = "audio/mp4a-latm"
12-21 23:47:40.386 D/CCodec  (17165):   int32_t profile = 2
12-21 23:47:40.386 D/CCodec  (17165):   int32_t sample-rate = 44100
12-21 23:47:40.386 D/CCodec  (17165): } and output: AMessage(what = 0x00000000) = {
12-21 23:47:40.386 D/CCodec  (17165):   int32_t channel-count = 2
12-21 23:47:40.386 D/CCodec  (17165):   string mime = "audio/raw"
12-21 23:47:40.386 D/CCodec  (17165):   int32_t sample-rate = 44100
12-21 23:47:40.386 D/CCodec  (17165): }
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 1342179345.
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 2415921170.
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 1610614798.
12-21 23:47:40.390 D/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Created input block pool with allocatorID 16 => poolID 139 - OK (0)
12-21 23:47:40.391 D/BufferPoolAccessor(17165): bufferpool2 0x7827bc0c20 : 0(0 size) total buffers - 0(0 size) used buffers - 1/7 (recycle/alloc) - 6/25 (fetch/transfer)
12-21 23:47:40.391 D/BufferPoolAccessor(17165): Destruction - bufferpool2 0x7827bc0c20 cached: 0/0M, 0/0% in use; allocs: 7, 14% recycled; transfers: 25, 76% unfetced
12-21 23:47:40.391 I/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Created output block pool with allocatorID 16 => poolID 578 - OK
12-21 23:47:40.392 D/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Configured output block pool ids 578 => OK
12-21 23:47:40.404 D/CCodec  (17165): allocate(c2.qti.avc.decoder)
12-21 23:47:40.405 I/Codec2Client(17165): Creating a Codec2 client to service "default"
12-21 23:47:40.407 I/Codec2Client(17165): Client to Codec2 service "default" created
12-21 23:47:40.407 I/CCodec  (17165): setting up 'default' as default (vendor) store
12-21 23:47:40.410 I/CCodec  (17165): Created component [c2.qti.avc.decoder]
12-21 23:47:40.411 D/CCodecConfig(17165): read media type: video/avc
12-21 23:47:40.412 D/ReflectedParamUpdater(17165): extent() != 1 for single value type: output.buffers.pool-ids.values
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.size (0xd2001800) as it is already supported
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.color (0xd2001809) as it is already supported
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.hdr-static-info (0xd200180a) as it is already supported
12-21 23:47:40.417 I/CCodecConfig(17165): query failed after returning 17 values (BAD_INDEX)
12-21 23:47:40.418 D/CCodecConfig(17165): c2 config diff is Dict {
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::i32 algo.priority.value = -1
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float algo.rate.value = 4.2039e-44
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 algo.secure-mode.value = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float coded.frame-rate.value = 30
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.pl.level = 20480
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.pl.profile = 20480
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.primaries = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.range = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.primaries = 3
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.range = 2
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 input.buffers.max-size.value = 13271040
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 input.delay.value = 4
12-21 23:47:40.418 D/CCodecConfig(17165):   string input.media-type.value = "video/avc"
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 output.delay.value = 18
12-21 23:47:40.418 D/CCodecConfig(17165):   string output.media-type.value = "video/raw"
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.primaries = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.range = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-static-info.mastering.blue.x = 1.4013e-45
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-static-info.mastering.blue.y = 1.4013e-45
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-
12-21 23:47:40.418 W/ColorUtils(17165): expected specified color aspects (0:0:0:0)
12-21 23:47:40.423 D/SurfaceUtils(17165): connecting to surface 0x7796e34010, reason connectToSurface
12-21 23:47:40.423 I/MediaCodec(17165): [c2.qti.avc.decoder] setting surface generation to 17577022
12-21 23:47:40.423 D/SurfaceUtils(17165): disconnecting from surface 0x7796e34010, reason connectToSurface(reconnect)
12-21 23:47:40.423 D/SurfaceUtils(17165): connecting to surface 0x7796e34010, reason connectToSurface(reconnect)
12-21 23:47:40.423 D/CCodecConfig(17165): no c2 equivalents for csd-1
12-21 23:47:40.423 D/CCodecConfig(17165): no c2 equivalents for native-window
12-21 23:47:40.424 D/CCodecConfig(17165): c2 config diff is   c2::u32 input.buffers.max-size.value = 7077888
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.max-size.height = 360
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.max-size.width = 640
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.pixel-format.value = 34
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.size.height = 360
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.size.width = 640
12-21 23:47:40.425 W/Codec2Client(17165): query -- param skipped: index = 1107298332.
12-21 23:47:40.425 D/CCodec  (17165): client requested max input size 21629, which is smaller than what component recommended (7077888); overriding with component recommendation.
12-21 23:47:40.425 W/CCodec  (17165): This behavior is subject to change. It is recommended that app developers double check whether the requested max input size is in reasonable range.
12-21 23:47:40.425 D/CCodec  (17165): setup formats input: AMessage(what = 0x00000000) = {
12-21 23:47:40.425 D/CCodec  (17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.425 D/CCodec  (17165):   int32_t frame-rate = 30
12-21 23:47:40.425 D/CCodec  (17165):   int32_t height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t level = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-input-size = 7077888
12-21 23:47:40.425 D/CCodec  (17165):   string mime = "video/avc"
12-21 23:47:40.425 D/CCodec  (17165):   int32_t priority = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t profile = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t width = 640
12-21 23:47:40.425 D/CCodec  (17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.425 D/CCodec  (17165): } and output: AMessage(what = 0x00000000) = {
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._video-scaling = 1
12-21 23:47:40.425 D/CCodec  (17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-standard = 4
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-range = 2
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-transfer = 3
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._dataspace = 259
12-21 23:47:40.425 D/CCodec  (17165):   int32_t width = 640
12-21 23:47:40.425 D/CCodec  (17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.425 D/CCodec  (17165):   int32_t frame-rate = 30
12-21 23:47:40.425 D/CCodec  (17165):   int32_t height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-width = 640
12-21 23:47:40.425 D/CCodec  (17165):   string mime = "video/raw"
12-21 23:47:40.425 D/CCodec  (17165):   int32_t priority = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t rotation-degrees = 0
12-21 23:47:40.425 D/CCodec  (17165):   Buffer hdr-static-info = {
12-21 23:47:40.425 D/CCodec  (17165):     00000000:  00 00 00 00 00 00 00 00  00 00 00 00 00 00 00 00  ................
12-21 23:47:40.425 D/CCodec  (17165):     00000010:  00 00 00 00 00 00 00 00  00                       .........
12-21 23:47:40.425 D/CCodec  (17165):   }
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._color-format = 2130708
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 1342179345.
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 2415921170.
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 1610614798.
12-21 23:47:40.438 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query input allocators returned 0 params => BAD_INDEX (6)
12-21 23:47:40.439 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Created input block pool with allocatorID 16 => poolID 140 - OK (0)
12-21 23:47:40.439 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query output allocators returned 0 params => BAD_INDEX (6)
12-21 23:47:40.440 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query output surface allocator returned 0 params => BAD_INDEX (6)
12-21 23:47:40.446 I/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Created output block pool with allocatorID 18 => poolID 795 - OK
12-21 23:47:40.447 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Configured output block pool ids 795 => OK
12-21 23:47:40.447 D/Codec2-block_helper(17165): remote graphic buffer migration 0/0
12-21 23:47:40.447 D/Codec2Client(17165): generation remote change 17577022
12-21 23:47:40.459 D/BufferPoolAccessor(17165): bufferpool2 0x7797ca5420 : 0(0 size) total buffers - 0(0 size) used buffers - 0/8 (recycle/alloc) - 8/31 (fetch/transfer)
12-21 23:47:40.459 D/BufferPoolAccessor(17165): Destruction - bufferpool2 0x7797ca5420 cached: 0/0M, 0/0% in use; allocs: 8, 0% recycled; transfers: 31, 74% unfetced
12-21 23:47:40.482 D/CCodecConfig(17165): c2 config diff is   c2::u32 raw.color.matrix = 1
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.primaries = 1
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.range = 2
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.transfer = 3
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.height = 360
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.left = 0
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.top = 0
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] onWorkDone: output format changed to AMessage(what = 0x00000000) = {
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._video-scaling = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-standard = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-range = 2
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-transfer = 3
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._dataspace = 260
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t frame-rate = 30
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t height = 360
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t max-height = 360
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t max-width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   string mime = "video/raw"
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t priority = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t rotation-degrees = 0
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   Buffer hdr-static-info = {
12-21 23:47:40.482 D/CCodecBufferChannel(17165):     00000000:  00 00 00 00 00 00 00 00  00 00 00 00 00 00 00 00  ................
12-21 23:47:40.482 D/CCodecBufferChannel(17165):     00000010:  00 00 00 00 00 00 00 00  00                       .........
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   }
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._color-format = 2130708361
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-format = 2130708361
12-21 23:47:40.482 D/CCodecBufferChannel(17165): }
12-21 23:47:41.069 D/Mono    (17165): GC_TAR_BRIDGE bridges 0 objects 0 opaque 0 colors 0 colors-bridged 0 colors-visible 119 xref 4 cache-hit 0 cache-semihit 0 cache-miss 0 setup 0.01ms tarjan 0.03ms scc-setup 0.05ms gather-xref 0.00ms xref-setup 0.00ms cleanup 0.00ms
12-21 23:47:41.069 D/Mono    (17165): GC_BRIDGE: Complete, was running for 0.13ms
12-21 23:47:41.069 D/Mono    (17165): GC_MINOR: (Nursery full) time 2.87ms, stw 4.31ms promoted 0K major size: 2576K in use: 1497K los size: 17004K in use: 15497K
12-21 23:47:41.384 W/AudioTrack(17165): Use of stream types is deprecated for operations other than volume control
12-21 23:47:41.384 W/AudioTrack(17165): See the documentation of AudioTrack() for what to use instead with android.media.AudioAttributes to qualify your playback use case
12-21 23:47:42.116 D/Mono    (17165): GC_BRIDGE waiting for bridge processing to finish
12-21 23:47:42.121 I/ame.DoodleSmas(17165): Explicit concurrent copying GC freed 19087(932KB) AllocSpace objects, 13(692KB) LOS objects, 49% free, 4691KB/9383KB, paused 46us total 23.431ms
12-21 23:47:42.122 D/Mono    (17165): GC_TAR_BRIDGE bridges 157 objects 157 opaque 0 colors 157 colors-bridged 157 colors-visible 157 xref 0 cache-hit 0 cache-semihit 0 cache-miss 0 setup 0.00ms tarjan 0.03ms scc-setup 0.03ms gather-xref 0.00ms xref-setup 0.00ms cleanup 0.02ms
12-21 23:47:42.122 D/Mono    (17165): GC_BRIDGE: Complete, was running for 24.91ms
12-21 23:47:42.122 D/Mono    (17165): GC_MINOR: (Nursery full) time 4.00ms, stw 4.73ms promoted 74K major size: 2576K in use: 1572K los size: 24172K in use: 21766K
12-21 23:47:42.563 I/Choreographer(17165): Skipped 72 frames!  The application may be doing too much work on its main thread.
12-21 23:47:42.567 W/libEGL  (17165): EGLNativeWindowType 0x77924dc010 disconnect failed

1 Answer


In case anyone runs into this in the future: in its previous form, the recordable surface's teardown code only ran intermittently when returning to the app. I moved it into the code that executes when the app's surface is destroyed (i.e., when the app is paused), and it now works. A sketch of the activity-side wiring follows the class below.

public class ARRenderThread : Thread, ISurfaceHolderCallback2
        {
            RecordableSurfaceView mSurfaceView;

            EGLDisplay mEGLDisplay;

            EGLContext mEGLContext;

            EGLSurface mEGLSurface;

            EGLSurface mEGLSurfaceMedia;

            public LinkedList<Runnable> mRunnableQueue = new LinkedList<Runnable>();

            int[] config = new int[] {
                    EGL14.EglRedSize, 8,
                    EGL14.EglGreenSize, 8,
                    EGL14.EglBlueSize, 8,
                    EGL14.EglAlphaSize, 8,
                    EGL14.EglRenderableType, EGL14.EglOpenglEs2Bit,
                    EGLExt.EglRecordableAndroid, 1,
//                    EGL14.EglSurfaceType, EGL14.EglPbufferBit,
                    EGL14.EglDepthSize, 16,
                    EGL14.EglNone
            };


            public ARRenderThread(RecordableSurfaceView surfaceView)
            {
                this.mSurfaceView = surfaceView;
                if (Build.VERSION.SdkInt >= Build.VERSION_CODES.O)
                {
                    config[10] = EGLExt.EglRecordableAndroid;
                }
            }

            public AtomicBoolean mLoop = new AtomicBoolean(false);

            EGLConfig chooseEglConfig(EGLDisplay eglDisplay)
            {
                int[] configsCount = new int[] { 0 };
                EGLConfig[] configs = new EGLConfig[1];
                EGL14.EglChooseConfig(eglDisplay, config, 0, configs, 0, configs.Length, configsCount,
                        0);
                return configs[0];
            }
            public override void Run()
            {
                if (mSurfaceView.mHasGLContext.Get())
                {
                    return;
                }
                mEGLDisplay = EGL14.EglGetDisplay(EGL14.EglDefaultDisplay);
                int[] version = new int[2];
                EGL14.EglInitialize(mEGLDisplay, version, 0, version, 1);
                EGLConfig eglConfig = chooseEglConfig(mEGLDisplay);
                mEGLContext = EGL14
                        .EglCreateContext(mEGLDisplay, eglConfig, EGL14.EglNoContext,
                                new int[] { EGL14.EglContextClientVersion, 2, EGL14.EglNone }, 0);

                int[] surfaceAttribs = {
                            EGL14.EglNone
                    };

                mEGLSurface = EGL14
                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView,
                                surfaceAttribs, 0);
                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);

                // guarantee to only report surface as created once GL context
                // associated with the surface has been created, and call on the GL thread
                // NOT the main thread but BEFORE the codec surface is attached to the GL context
                RendererCallbacks result;
                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onSurfaceCreated();

                }

                mSurfaceView.mMediaSurfaceCreated.Set(false);

                GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 1.0f);

                mSurfaceView.mHasGLContext.Set(true);

                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onContextCreated();

                }

                mLoop.Set(true);

                while (mLoop.Get())
                {

                    if (!mSurfaceView.mPaused)
                    {
                        bool shouldRender = false;

                        //we're just rendering when requested, so check that no one
                        //has requested and if not, just continue
                        if (mSurfaceView.mRenderMode.Get() == (int)Rendermode.WhenDirty)
                        {

                            if (mSurfaceView.mRenderRequested.Get())
                            {
                                mSurfaceView.mRenderRequested.Set(false);
                                shouldRender = true;
                            }

                        }
                        else
                        {
                            shouldRender = true;
                        }

                        if (mSurfaceView.mSizeChange.Get())
                        {

                            GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onSurfaceChanged(mSurfaceView.mWidth, mSurfaceView.mHeight);

                            }

                            mSurfaceView.mSizeChange.Set(false);
                        }

                        if (shouldRender)
                        {

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onPreDrawFrame();

                            }

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onDrawScreen();

                            }

                            EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurface);

                            if (mSurfaceView.mIsRecording.Get())
                            {
                                if (!mSurfaceView.mMediaSurfaceCreated.Get())
                                {
                                    mEGLSurfaceMedia = EGL14
                                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView.mSurface,
                                                surfaceAttribs, 0);
                                    mSurfaceView.mMediaSurfaceCreated.Set(true);
                                }

                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurfaceMedia, mEGLSurfaceMedia,
                                        mEGLContext);

                                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                                {

                                    GLES20.GlViewport(0, 0, mSurfaceView.mOutWidth, mSurfaceView.mOutHeight);
                                    //EGLExt.EglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceMedia, (JavaSystem.CurrentTimeMillis() - RecordableSurfaceView.mStartTimeMillisecs) * 1000L *1000L);
                                    result.onDrawRecording();
                                    GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);
                                }

                                EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurfaceMedia);
                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface,
                                        mEGLContext);
                            }
                        }

                        while (mRunnableQueue.Count > 0)
                        {
                            Runnable ev = mRunnableQueue.First.Value;
                            mRunnableQueue.RemoveFirst();
                            ev.Run();
                        }
                    }

                    /*
                    try
                    {
                        Thread.Sleep((long)(1f / 120.0f * 1000f));
                    }
                    catch (InterruptedException intex) // THIS IS KEY TO BLACKOUT BUG, THIS CATCH NEVER HAPPENS AND SO THE OLD SURFACE IS NEVER NUKED / REMADE mHasGLContext NEVER SET TO FALSE
                    {
                        if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                        {
                            result.onSurfaceDestroyed();
                        }

                        if (mEGLDisplay != null)
                        {
                            EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                                    EGL14.EglNoSurface,
                                    EGL14.EglNoContext);

                            if (mEGLSurface != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                            }

                            if (mEGLSurfaceMedia != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                            }

                            EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                            mSurfaceView.mHasGLContext.Set(false);
                            EGL14.EglReleaseThread();
                            EGL14.EglTerminate(mEGLDisplay);
                            mSurfaceView.mSurface.Release();

                        }
                        return;
                    }*/
                }
            }

            public void SurfaceRedrawNeeded(ISurfaceHolder surfaceHolder)
            {

            }

            public void SurfaceCreated(ISurfaceHolder surfaceHolder)
            {

                if (!this.IsAlive && !this.IsInterrupted && this.GetState() != State.Terminated)
                {
                    this.Start();
                }
            }

            public void SurfaceChanged(ISurfaceHolder surfaceHolder, Android.Graphics.Format i, int width, int height)
            {

                if (mSurfaceView.mWidth != width)
                {
                    mSurfaceView.mWidth = width;
                    mSurfaceView.mSizeChange.Set(true);
                }

                if (mSurfaceView.mHeight != height)
                {
                    mSurfaceView.mHeight = height;
                    mSurfaceView.mSizeChange.Set(true);
                }


            }

            public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
            {
                mLoop.Set(false);
                this.Interrupt();
                mSurfaceView.Holder.RemoveCallback(this);

                //MOVED SURFACE DESTROYING CODE TO FUNCTION CALLED WHEN APP IS PAUSED INSTEAD OF UNSTABLE CATCH UPON RETURN_______
                if (mEGLDisplay != null)
                {
                    EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                            EGL14.EglNoSurface,
                            EGL14.EglNoContext);

                    if (mEGLSurface != null)
                    {
                        EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                    }

                    if (mEGLSurfaceMedia != null)
                    {
                        EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                    }

                    EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                    mSurfaceView.mHasGLContext.Set(false);
                    EGL14.EglReleaseThread();
                    EGL14.EglTerminate(mEGLDisplay);
                    mSurfaceView.mSurface.Release();

                }
                //______________________________________________________________________________________________________________
            }
        }

    }
}
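
For completeness, a rough sketch of the activity-side wiring that drives this path, so that SurfaceDestroyed (and with it the EGL teardown above) runs deterministically whenever another activity, such as the e-mail or photo-picker intents from the question, covers the app. Pause() and Resume() here are assumed helpers on the RecordableSurfaceView port (presumably toggling mPaused); they are not shown in the post, so treat this only as a sketch.

using Android.App;

// Hypothetical wiring; RecordableSurfaceView.Pause()/Resume() are assumed helpers,
// not part of the code shown above.
[Activity(Label = "ARActivity")]
public class ARActivity : Activity
{
    RecordableSurfaceView mSurfaceView; // assumed to be created in OnCreate

    protected override void OnPause()
    {
        base.OnPause();
        // Stop issuing frames; the framework then calls SurfaceDestroyed on the
        // holder callback, which now performs the EGL teardown shown above.
        mSurfaceView?.Pause();
    }

    protected override void OnResume()
    {
        base.OnResume();
        mSurfaceView?.Resume(); // assumed counterpart that lets the render loop run again
    }
}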
answered 2020-01-16T02:52:18.260