
Occasional I/Choreographer(17165): Skipped ## frames black screen lock up, Android Xamarin c#

Occasionally, when returning to the app from an activity such as importing a photo or watching a video, our app locks up with a black screen, although some things, like toasts, keep working underneath. The end of the log consistently says:

I/Choreographer(17165): Skipped ## frames!  The application may be doing too much work on its main thread.
12-21 23:59:15.620 W/libEGL  (17165): EGLNativeWindowType 0x77323f0010 disconnect failed

It's an inconsistent, intermittent problem, so it is hard to pin down. Any ideas would be appreciated. I assume some threading issue is locking up the UI thread, but because it happens rarely and isn't easily reproduced, I really don't know what's going on.

EDIT: This only happens when returning to the app from certain intent activities (sending an email, picking a photo, etc.). I've managed to make it reproducible with a few well-placed breakpoints.

Here is the C# code I believe is responsible. I've added REF(1), (2) and (3) to show where my breakpoints are. REF(2) never seems to be reached when it freezes, so the destroyed surface is sometimes never recreated and rendering never resumes. We are using a RecordableSurfaceView (this is subclassed from it), so forgive how long this is:

private class ARRenderThread : Thread, ISurfaceHolderCallback2
        {
            RecordableSurfaceView mSurfaceView;

            EGLDisplay mEGLDisplay;

            EGLContext mEGLContext;

            EGLSurface mEGLSurface;

            EGLSurface mEGLSurfaceMedia;

            public LinkedList<Runnable> mRunnableQueue = new LinkedList<Runnable>();

            int[] config = new int[] {
                    EGL14.EglRedSize, 8,
                    EGL14.EglGreenSize, 8,
                    EGL14.EglBlueSize, 8,
                    EGL14.EglAlphaSize, 8,
                    EGL14.EglRenderableType, EGL14.EglOpenglEs2Bit,
                    EGLExt.EglRecordableAndroid, 1,
//                    EGL14.EglSurfaceType, EGL14.EglPbufferBit,
                    EGL14.EglDepthSize, 16,
                    EGL14.EglNone
            };


            public ARRenderThread(RecordableSurfaceView surfaceView)
            {
                this.mSurfaceView = surfaceView;
                if (Build.VERSION.SdkInt >= Build.VERSION_CODES.O)
                {
                    config[10] = EGLExt.EglRecordableAndroid;
                }
            }

            private AtomicBoolean mLoop = new AtomicBoolean(false);

            EGLConfig chooseEglConfig(EGLDisplay eglDisplay)
            {
                int[] configsCount = new int[] { 0 };
                EGLConfig[] configs = new EGLConfig[1];
                EGL14.EglChooseConfig(eglDisplay, config, 0, configs, 0, configs.Length, configsCount,
                        0);
                return configs[0];
            }
            public override void Run()
            {
                if (mSurfaceView.mHasGLContext.Get())
                {
                    return;
                }
                mEGLDisplay = EGL14.EglGetDisplay(EGL14.EglDefaultDisplay);
                int[] version = new int[2];
                EGL14.EglInitialize(mEGLDisplay, version, 0, version, 1);
                EGLConfig eglConfig = chooseEglConfig(mEGLDisplay);
                mEGLContext = EGL14
                        .EglCreateContext(mEGLDisplay, eglConfig, EGL14.EglNoContext,
                                new int[] { EGL14.EglContextClientVersion, 2, EGL14.EglNone }, 0);

                int[] surfaceAttribs = {
                            EGL14.EglNone
                    };

                mEGLSurface = EGL14
                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView,
                                surfaceAttribs, 0);
                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);

                // guarantee to only report surface as created once GL context
                // associated with the surface has been created, and call on the GL thread
                // NOT the main thread but BEFORE the codec surface is attached to the GL context
                RendererCallbacks result;
                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onSurfaceCreated();

                }

                mSurfaceView.mMediaSurfaceCreated.Set(false);

                GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 1.0f);

                mSurfaceView.mHasGLContext.Set(true);

                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onContextCreated();

                }

                mLoop.Set(true); // REF(1)

                while (mLoop.Get())
                {

                    if (!mSurfaceView.mPaused)
                    {
                        bool shouldRender = false;

                        //we're just rendering when requested, so check that no one
                        //has requested and if not, just continue
                        if (mSurfaceView.mRenderMode.Get() == (int)Rendermode.WhenDirty)
                        {

                            if (mSurfaceView.mRenderRequested.Get())
                            {
                                mSurfaceView.mRenderRequested.Set(false);
                                shouldRender = true;
                            }

                        }
                        else
                        {
                            shouldRender = true;
                        }

                        if (mSurfaceView.mSizeChange.Get())
                        {

                            GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onSurfaceChanged(mSurfaceView.mWidth, mSurfaceView.mHeight);

                            }

                            mSurfaceView.mSizeChange.Set(false);
                        }

                        if (shouldRender)
                        {

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onPreDrawFrame();

                            }

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onDrawScreen();

                            }

                            EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurface);

                            if (mSurfaceView.mIsRecording.Get())
                            {
                                if (!mSurfaceView.mMediaSurfaceCreated.Get())
                                {
                                    mEGLSurfaceMedia = EGL14
                                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView.mSurface,
                                                surfaceAttribs, 0);
                                    mSurfaceView.mMediaSurfaceCreated.Set(true);
                                }

                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurfaceMedia, mEGLSurfaceMedia,
                                        mEGLContext);

                                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                                {

                                    GLES20.GlViewport(0, 0, mSurfaceView.mOutWidth, mSurfaceView.mOutHeight);
                                    //EGLExt.EglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceMedia, (JavaSystem.CurrentTimeMillis() - RecordableSurfaceView.mStartTimeMillisecs) * 1000L *1000L);
                                    result.onDrawRecording();
                                    GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);
                                }

                                EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurfaceMedia);
                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface,
                                        mEGLContext);
                            }
                        }

                        while (mRunnableQueue.Count > 0)
                        {
                            Runnable ev = mRunnableQueue.First.Value;
                            mRunnableQueue.RemoveFirst();
                            ev.Run();
                        }
                    }

                    try
                    {
                        Thread.Sleep((long)(1f / 120.0f * 1000f));
                    }
                    catch (InterruptedException intex)
                    {
                        if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result)) // REF(2)
                        {
                            result.onSurfaceDestroyed();
                        }

                        if (mEGLDisplay != null)
                        {
                            EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                                    EGL14.EglNoSurface,
                                    EGL14.EglNoContext);

                            if (mEGLSurface != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                            }

                            if (mEGLSurfaceMedia != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                            }

                            EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                            mSurfaceView.mHasGLContext.Set(false);
                            EGL14.EglReleaseThread();
                            EGL14.EglTerminate(mEGLDisplay);
                            mSurfaceView.mSurface.Release();

                        }
                        return;
                    }
                }
            }

            public void SurfaceRedrawNeeded(ISurfaceHolder surfaceHolder)
            {

            }

            public void SurfaceCreated(ISurfaceHolder surfaceHolder)
            {

                if (!this.IsAlive && !this.IsInterrupted && this.GetState() != State.Terminated)
                {
                    this.Start();
                }
            }

            public void SurfaceChanged(ISurfaceHolder surfaceHolder, Android.Graphics.Format i, int width, int height)
            {

                if (mSurfaceView.mWidth != width)
                {
                    mSurfaceView.mWidth = width;
                    mSurfaceView.mSizeChange.Set(true);
                }

                if (mSurfaceView.mHeight != height)
                {
                    mSurfaceView.mHeight = height;
                    mSurfaceView.mSizeChange.Set(true);
                }


            }

            public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
            {
                mLoop.Set(false); //REF(3)
                this.Interrupt();
                mSurfaceView.Holder.RemoveCallback(this);
            }
        }
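One plausible explanation for why REF(2) is skipped: SurfaceDestroyed at REF(3) first sets mLoop to false and then calls Interrupt(). If the render thread happens to be busy drawing (or between iterations) rather than inside Thread.Sleep when the interrupt arrives, it simply observes mLoop == false and falls out of the while loop, so the InterruptedException catch, and the EGL teardown inside it, never runs. Below is a minimal, platform-free sketch of that race using plain .NET threading; the names are illustrative stand-ins, not the Xamarin bindings above.

using System;
using System.Threading;

// Plain .NET analogue of the render loop: the teardown in the catch block
// only runs if the interrupt lands while the thread is actually sleeping.
class InterruptedTeardownSketch
{
    static volatile bool loop = true;        // stands in for mLoop
    static volatile bool teardownRan = false;

    static void RenderLoop()
    {
        while (loop)
        {
            Thread.SpinWait(5_000_000);      // simulated frame work

            try
            {
                Thread.Sleep(8);             // ~1/120 s pacing, as in Run()
            }
            catch (ThreadInterruptedException)
            {
                teardownRan = true;          // EGL teardown lives here (REF(2))
                return;
            }
        }
        // If 'loop' was cleared while we were busy above, we exit here without
        // ever sleeping again, and the catch block never executes.
    }

    static void Main()
    {
        var t = new Thread(RenderLoop);
        t.Start();
        Thread.Sleep(50);

        // Same order as SurfaceDestroyed (REF(3)): clear the flag, then interrupt.
        loop = false;
        t.Interrupt();
        t.Join();

        Console.WriteLine($"Teardown ran: {teardownRan}"); // sometimes False
    }
}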

Here is a longer version of the log as it fails; I have three logs that all produce nearly identical output like this:

12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 output.delay.value = 2
12-21 23:47:40.384 D/CCodecConfig(17165):   string output.media-type.value = "audio/raw"
12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 raw.channel-count.value = 1
12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 raw.sample-rate.value = 44100
12-21 23:47:40.384 D/CCodecConfig(17165): }
12-21 23:47:40.385 D/CCodecConfig(17165): no c2 equivalents for language
12-21 23:47:40.385 D/CCodecConfig(17165): config failed => CORRUPTED
12-21 23:47:40.386 D/CCodecConfig(17165): c2 config diff is   c2::u32 raw.channel-count.value = 2
12-21 23:47:40.386 W/Codec2Client(17165): query -- param skipped: index = 1107298332.
12-21 23:47:40.386 D/CCodec  (17165): client requested max input size 469, which is smaller than what component recommended (8192); overriding with component recommendation.
12-21 23:47:40.386 W/CCodec  (17165): This behavior is subject to change. It is recommended that app developers double check whether the requested max input size is in reasonable range.
12-21 23:47:40.386 D/CCodec  (17165): setup formats input: AMessage(what = 0x00000000) = {
12-21 23:47:40.386 D/CCodec  (17165):   int32_t channel-count = 2
12-21 23:47:40.386 D/CCodec  (17165):   int32_t level = 0
12-21 23:47:40.386 D/CCodec  (17165):   int32_t max-input-size = 8192
12-21 23:47:40.386 D/CCodec  (17165):   string mime = "audio/mp4a-latm"
12-21 23:47:40.386 D/CCodec  (17165):   int32_t profile = 2
12-21 23:47:40.386 D/CCodec  (17165):   int32_t sample-rate = 44100
12-21 23:47:40.386 D/CCodec  (17165): } and output: AMessage(what = 0x00000000) = {
12-21 23:47:40.386 D/CCodec  (17165):   int32_t channel-count = 2
12-21 23:47:40.386 D/CCodec  (17165):   string mime = "audio/raw"
12-21 23:47:40.386 D/CCodec  (17165):   int32_t sample-rate = 44100
12-21 23:47:40.386 D/CCodec  (17165): }
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 1342179345.
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 2415921170.
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 1610614798.
12-21 23:47:40.390 D/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Created input block pool with allocatorID 16 => poolID 139 - OK (0)
12-21 23:47:40.391 D/BufferPoolAccessor(17165): bufferpool2 0x7827bc0c20 : 0(0 size) total buffers - 0(0 size) used buffers - 1/7 (recycle/alloc) - 6/25 (fetch/transfer)
12-21 23:47:40.391 D/BufferPoolAccessor(17165): Destruction - bufferpool2 0x7827bc0c20 cached: 0/0M, 0/0% in use; allocs: 7, 14% recycled; transfers: 25, 76% unfetced
12-21 23:47:40.391 I/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Created output block pool with allocatorID 16 => poolID 578 - OK
12-21 23:47:40.392 D/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Configured output block pool ids 578 => OK
12-21 23:47:40.404 D/CCodec  (17165): allocate(c2.qti.avc.decoder)
12-21 23:47:40.405 I/Codec2Client(17165): Creating a Codec2 client to service "default"
12-21 23:47:40.407 I/Codec2Client(17165): Client to Codec2 service "default" created
12-21 23:47:40.407 I/CCodec  (17165): setting up 'default' as default (vendor) store
12-21 23:47:40.410 I/CCodec  (17165): Created component [c2.qti.avc.decoder]
12-21 23:47:40.411 D/CCodecConfig(17165): read media type: video/avc
12-21 23:47:40.412 D/ReflectedParamUpdater(17165): extent() != 1 for single value type: output.buffers.pool-ids.values
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.size (0xd2001800) as it is already supported
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.color (0xd2001809) as it is already supported
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.hdr-static-info (0xd200180a) as it is already supported
12-21 23:47:40.417 I/CCodecConfig(17165): query failed after returning 17 values (BAD_INDEX)
12-21 23:47:40.418 D/CCodecConfig(17165): c2 config diff is Dict {
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::i32 algo.priority.value = -1
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float algo.rate.value = 4.2039e-44
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 algo.secure-mode.value = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float coded.frame-rate.value = 30
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.pl.level = 20480
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.pl.profile = 20480
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.primaries = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.range = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.primaries = 3
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.range = 2
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 input.buffers.max-size.value = 13271040
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 input.delay.value = 4
12-21 23:47:40.418 D/CCodecConfig(17165):   string input.media-type.value = "video/avc"
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 output.delay.value = 18
12-21 23:47:40.418 D/CCodecConfig(17165):   string output.media-type.value = "video/raw"
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.primaries = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.range = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-static-info.mastering.blue.x = 1.4013e-45
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-static-info.mastering.blue.y = 1.4013e-45
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-
12-21 23:47:40.418 W/ColorUtils(17165): expected specified color aspects (0:0:0:0)
12-21 23:47:40.423 D/SurfaceUtils(17165): connecting to surface 0x7796e34010, reason connectToSurface
12-21 23:47:40.423 I/MediaCodec(17165): [c2.qti.avc.decoder] setting surface generation to 17577022
12-21 23:47:40.423 D/SurfaceUtils(17165): disconnecting from surface 0x7796e34010, reason connectToSurface(reconnect)
12-21 23:47:40.423 D/SurfaceUtils(17165): connecting to surface 0x7796e34010, reason connectToSurface(reconnect)
12-21 23:47:40.423 D/CCodecConfig(17165): no c2 equivalents for csd-1
12-21 23:47:40.423 D/CCodecConfig(17165): no c2 equivalents for native-window
12-21 23:47:40.424 D/CCodecConfig(17165): c2 config diff is   c2::u32 input.buffers.max-size.value = 7077888
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.max-size.height = 360
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.max-size.width = 640
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.pixel-format.value = 34
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.size.height = 360
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.size.width = 640
12-21 23:47:40.425 W/Codec2Client(17165): query -- param skipped: index = 1107298332.
12-21 23:47:40.425 D/CCodec  (17165): client requested max input size 21629, which is smaller than what component recommended (7077888); overriding with component recommendation.
12-21 23:47:40.425 W/CCodec  (17165): This behavior is subject to change. It is recommended that app developers double check whether the requested max input size is in reasonable range.
12-21 23:47:40.425 D/CCodec  (17165): setup formats input: AMessage(what = 0x00000000) = {
12-21 23:47:40.425 D/CCodec  (17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.425 D/CCodec  (17165):   int32_t frame-rate = 30
12-21 23:47:40.425 D/CCodec  (17165):   int32_t height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t level = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-input-size = 7077888
12-21 23:47:40.425 D/CCodec  (17165):   string mime = "video/avc"
12-21 23:47:40.425 D/CCodec  (17165):   int32_t priority = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t profile = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t width = 640
12-21 23:47:40.425 D/CCodec  (17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.425 D/CCodec  (17165): } and output: AMessage(what = 0x00000000) = {
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._video-scaling = 1
12-21 23:47:40.425 D/CCodec  (17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-standard = 4
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-range = 2
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-transfer = 3
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._dataspace = 259
12-21 23:47:40.425 D/CCodec  (17165):   int32_t width = 640
12-21 23:47:40.425 D/CCodec  (17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.425 D/CCodec  (17165):   int32_t frame-rate = 30
12-21 23:47:40.425 D/CCodec  (17165):   int32_t height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-width = 640
12-21 23:47:40.425 D/CCodec  (17165):   string mime = "video/raw"
12-21 23:47:40.425 D/CCodec  (17165):   int32_t priority = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t rotation-degrees = 0
12-21 23:47:40.425 D/CCodec  (17165):   Buffer hdr-static-info = {
12-21 23:47:40.425 D/CCodec  (17165):     00000000:  00 00 00 00 00 00 00 00  00 00 00 00 00 00 00 00  ................
12-21 23:47:40.425 D/CCodec  (17165):     00000010:  00 00 00 00 00 00 00 00  00                       .........
12-21 23:47:40.425 D/CCodec  (17165):   }
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._color-format = 2130708
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 1342179345.
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 2415921170.
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 1610614798.
12-21 23:47:40.438 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query input allocators returned 0 params => BAD_INDEX (6)
12-21 23:47:40.439 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Created input block pool with allocatorID 16 => poolID 140 - OK (0)
12-21 23:47:40.439 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query output allocators returned 0 params => BAD_INDEX (6)
12-21 23:47:40.440 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query output surface allocator returned 0 params => BAD_INDEX (6)
12-21 23:47:40.446 I/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Created output block pool with allocatorID 18 => poolID 795 - OK
12-21 23:47:40.447 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Configured output block pool ids 795 => OK
12-21 23:47:40.447 D/Codec2-block_helper(17165): remote graphic buffer migration 0/0
12-21 23:47:40.447 D/Codec2Client(17165): generation remote change 17577022
12-21 23:47:40.459 D/BufferPoolAccessor(17165): bufferpool2 0x7797ca5420 : 0(0 size) total buffers - 0(0 size) used buffers - 0/8 (recycle/alloc) - 8/31 (fetch/transfer)
12-21 23:47:40.459 D/BufferPoolAccessor(17165): Destruction - bufferpool2 0x7797ca5420 cached: 0/0M, 0/0% in use; allocs: 8, 0% recycled; transfers: 31, 74% unfetced
12-21 23:47:40.482 D/CCodecConfig(17165): c2 config diff is   c2::u32 raw.color.matrix = 1
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.primaries = 1
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.range = 2
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.transfer = 3
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.height = 360
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.left = 0
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.top = 0
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] onWorkDone: output format changed to AMessage(what = 0x00000000) = {
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._video-scaling = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-standard = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-range = 2
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-transfer = 3
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._dataspace = 260
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t frame-rate = 30
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t height = 360
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t max-height = 360
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t max-width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   string mime = "video/raw"
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t priority = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t rotation-degrees = 0
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   Buffer hdr-static-info = {
12-21 23:47:40.482 D/CCodecBufferChannel(17165):     00000000:  00 00 00 00 00 00 00 00  00 00 00 00 00 00 00 00  ................
12-21 23:47:40.482 D/CCodecBufferChannel(17165):     00000010:  00 00 00 00 00 00 00 00  00                       .........
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   }
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._color-format = 2130708361
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-format = 2130708361
12-21 23:47:40.482 D/CCodecBufferChannel(17165): }
12-21 23:47:41.069 D/Mono    (17165): GC_TAR_BRIDGE bridges 0 objects 0 opaque 0 colors 0 colors-bridged 0 colors-visible 119 xref 4 cache-hit 0 cache-semihit 0 cache-miss 0 setup 0.01ms tarjan 0.03ms scc-setup 0.05ms gather-xref 0.00ms xref-setup 0.00ms cleanup 0.00ms
12-21 23:47:41.069 D/Mono    (17165): GC_BRIDGE: Complete, was running for 0.13ms
12-21 23:47:41.069 D/Mono    (17165): GC_MINOR: (Nursery full) time 2.87ms, stw 4.31ms promoted 0K major size: 2576K in use: 1497K los size: 17004K in use: 15497K
12-21 23:47:41.384 W/AudioTrack(17165): Use of stream types is deprecated for operations other than volume control
12-21 23:47:41.384 W/AudioTrack(17165): See the documentation of AudioTrack() for what to use instead with android.media.AudioAttributes to qualify your playback use case
12-21 23:47:42.116 D/Mono    (17165): GC_BRIDGE waiting for bridge processing to finish
12-21 23:47:42.121 I/ame.DoodleSmas(17165): Explicit concurrent copying GC freed 19087(932KB) AllocSpace objects, 13(692KB) LOS objects, 49% free, 4691KB/9383KB, paused 46us total 23.431ms
12-21 23:47:42.122 D/Mono    (17165): GC_TAR_BRIDGE bridges 157 objects 157 opaque 0 colors 157 colors-bridged 157 colors-visible 157 xref 0 cache-hit 0 cache-semihit 0 cache-miss 0 setup 0.00ms tarjan 0.03ms scc-setup 0.03ms gather-xref 0.00ms xref-setup 0.00ms cleanup 0.02ms
12-21 23:47:42.122 D/Mono    (17165): GC_BRIDGE: Complete, was running for 24.91ms
12-21 23:47:42.122 D/Mono    (17165): GC_MINOR: (Nursery full) time 4.00ms, stw 4.73ms promoted 74K major size: 2576K in use: 1572K los size: 24172K in use: 21766K
12-21 23:47:42.563 I/Choreographer(17165): Skipped 72 frames!  The application may be doing too much work on its main thread.
12-21 23:47:42.567 W/libEGL  (17165): EGLNativeWindowType 0x77924dc010 disconnect failed

In case anyone hits this in the future: the surface-destroying code in the previous version only ran intermittently on return. I moved it into the code that runs when the app's surface is destroyed on pause, and now it works.

public class ARRenderThread : Thread, ISurfaceHolderCallback2
        {
            RecordableSurfaceView mSurfaceView;

            EGLDisplay mEGLDisplay;

            EGLContext mEGLContext;

            EGLSurface mEGLSurface;

            EGLSurface mEGLSurfaceMedia;

            public LinkedList<Runnable> mRunnableQueue = new LinkedList<Runnable>();

            int[] config = new int[] {
                    EGL14.EglRedSize, 8,
                    EGL14.EglGreenSize, 8,
                    EGL14.EglBlueSize, 8,
                    EGL14.EglAlphaSize, 8,
                    EGL14.EglRenderableType, EGL14.EglOpenglEs2Bit,
                    EGLExt.EglRecordableAndroid, 1,
//                    EGL14.EglSurfaceType, EGL14.EglPbufferBit,
                    EGL14.EglDepthSize, 16,
                    EGL14.EglNone
            };


            public ARRenderThread(RecordableSurfaceView surfaceView)
            {
                this.mSurfaceView = surfaceView;
                if (Build.VERSION.SdkInt >= Build.VERSION_CODES.O)
                {
                    config[10] = EGLExt.EglRecordableAndroid;
                }
            }

            public AtomicBoolean mLoop = new AtomicBoolean(false);

            EGLConfig chooseEglConfig(EGLDisplay eglDisplay)
            {
                int[] configsCount = new int[] { 0 };
                EGLConfig[] configs = new EGLConfig[1];
                EGL14.EglChooseConfig(eglDisplay, config, 0, configs, 0, configs.Length, configsCount,
                        0);
                return configs[0];
            }
            public override void Run()
            {
                if (mSurfaceView.mHasGLContext.Get())
                {
                    return;
                }
                mEGLDisplay = EGL14.EglGetDisplay(EGL14.EglDefaultDisplay);
                int[] version = new int[2];
                EGL14.EglInitialize(mEGLDisplay, version, 0, version, 1);
                EGLConfig eglConfig = chooseEglConfig(mEGLDisplay);
                mEGLContext = EGL14
                        .EglCreateContext(mEGLDisplay, eglConfig, EGL14.EglNoContext,
                                new int[] { EGL14.EglContextClientVersion, 2, EGL14.EglNone }, 0);

                int[] surfaceAttribs = {
                            EGL14.EglNone
                    };

                mEGLSurface = EGL14
                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView,
                                surfaceAttribs, 0);
                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);

                // guarantee to only report surface as created once GL context
                // associated with the surface has been created, and call on the GL thread
                // NOT the main thread but BEFORE the codec surface is attached to the GL context
                RendererCallbacks result;
                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onSurfaceCreated();

                }

                mSurfaceView.mMediaSurfaceCreated.Set(false);

                GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 1.0f);

                mSurfaceView.mHasGLContext.Set(true);

                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onContextCreated();

                }

                mLoop.Set(true);

                while (mLoop.Get())
                {

                    if (!mSurfaceView.mPaused)
                    {
                        bool shouldRender = false;

                        //we're just rendering when requested, so check that no one
                        //has requested and if not, just continue
                        if (mSurfaceView.mRenderMode.Get() == (int)Rendermode.WhenDirty)
                        {

                            if (mSurfaceView.mRenderRequested.Get())
                            {
                                mSurfaceView.mRenderRequested.Set(false);
                                shouldRender = true;
                            }

                        }
                        else
                        {
                            shouldRender = true;
                        }

                        if (mSurfaceView.mSizeChange.Get())
                        {

                            GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onSurfaceChanged(mSurfaceView.mWidth, mSurfaceView.mHeight);

                            }

                            mSurfaceView.mSizeChange.Set(false);
                        }

                        if (shouldRender)
                        {

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onPreDrawFrame();

                            }

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onDrawScreen();

                            }

                            EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurface);

                            if (mSurfaceView.mIsRecording.Get())
                            {
                                if (!mSurfaceView.mMediaSurfaceCreated.Get())
                                {
                                    mEGLSurfaceMedia = EGL14
                                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView.mSurface,
                                                surfaceAttribs, 0);
                                    mSurfaceView.mMediaSurfaceCreated.Set(true);
                                }

                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurfaceMedia, mEGLSurfaceMedia,
                                        mEGLContext);

                                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                                {

                                    GLES20.GlViewport(0, 0, mSurfaceView.mOutWidth, mSurfaceView.mOutHeight);
                                    //EGLExt.EglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceMedia, (JavaSystem.CurrentTimeMillis() - RecordableSurfaceView.mStartTimeMillisecs) * 1000L *1000L);
                                    result.onDrawRecording();
                                    GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);
                                }

                                EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurfaceMedia);
                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface,
                                        mEGLContext);
                            }
                        }

                        while (mRunnableQueue.Count > 0)
                        {
                            Runnable ev = mRunnableQueue.First.Value;
                            mRunnableQueue.RemoveFirst();
                            ev.Run();
                        }
                    }

                    /*
                    try
                    {
                        Thread.Sleep((long)(1f / 120.0f * 1000f));
                    }
                    catch (InterruptedException intex) // THIS IS KEY TO BLACKOUT BUG, THIS CATCH NEVER HAPPENS AND SO THE OLD SURFACE IS NEVER NUKED / REMADE mHasGLContext NEVER SET TO FALSE
                    {
                        if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                        {
                            result.onSurfaceDestroyed();
                        }

                        if (mEGLDisplay != null)
                        {
                            EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                                    EGL14.EglNoSurface,
                                    EGL14.EglNoContext);

                            if (mEGLSurface != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                            }

                            if (mEGLSurfaceMedia != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                            }

                            EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                            mSurfaceView.mHasGLContext.Set(false);
                            EGL14.EglReleaseThread();
                            EGL14.EglTerminate(mEGLDisplay);
                            mSurfaceView.mSurface.Release();

                        }
                        return;
                    }*/
                }
            }

            public void SurfaceRedrawNeeded(ISurfaceHolder surfaceHolder)
            {

            }

            public void SurfaceCreated(ISurfaceHolder surfaceHolder)
            {

                if (!this.IsAlive && !this.IsInterrupted && this.GetState() != State.Terminated)
                {
                    this.Start();
                }
            }

            public void SurfaceChanged(ISurfaceHolder surfaceHolder, Android.Graphics.Format i, int width, int height)
            {

                if (mSurfaceView.mWidth != width)
                {
                    mSurfaceView.mWidth = width;
                    mSurfaceView.mSizeChange.Set(true);
                }

                if (mSurfaceView.mHeight != height)
                {
                    mSurfaceView.mHeight = height;
                    mSurfaceView.mSizeChange.Set(true);
                }


            }

            public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
            {
                mLoop.Set(false);
                this.Interrupt();
                mSurfaceView.Holder.RemoveCallback(this);

                //MOVED SURFACE DESTROYING CODE TO FUNCTION CALLED WHEN APP IS PAUSED INSTEAD OF UNSTABLE CATCH UPON RETURN_______
                if (mEGLDisplay != null)
                {
                    EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                            EGL14.EglNoSurface,
                            EGL14.EglNoContext);

                    if (mEGLSurface != null)
                    {
                        EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                    }

                    if (mEGLSurfaceMedia != null)
                    {
                        EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                    }

                    EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                    mSurfaceView.mHasGLContext.Set(false);
                    EGL14.EglReleaseThread();
                    EGL14.EglTerminate(mEGLDisplay);
                    mSurfaceView.mSurface.Release();

                }
                //______________________________________________________________________________________________________________
            }
        }

    }
}
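The essential change is that the teardown now runs on a path that is guaranteed to execute when the surface goes away (SurfaceDestroyed), instead of inside a catch that may never fire. Below is a simplified, platform-free illustration of that pattern with hypothetical names and plain .NET threading; it is an analogue only, and unlike the Xamarin code above it joins the loop thread rather than interrupting it.

using System;
using System.Threading;

// Analogue of the fixed pattern: whoever stops the loop also performs the
// teardown, so cleanup no longer depends on an interrupt being observed.
class DeterministicTeardownSketch
{
    static volatile bool loop = true;
    static volatile bool teardownRan = false;

    static void RenderLoop()
    {
        while (loop)
        {
            Thread.SpinWait(5_000_000);  // simulated frame work
            Thread.Sleep(8);             // pacing; no teardown hidden in a catch
        }
    }

    // Plays the role of SurfaceDestroyed: stop the loop, then clean up unconditionally.
    static void StopAndTearDown(Thread renderThread)
    {
        loop = false;
        renderThread.Join();             // let the current iteration finish
        teardownRan = true;              // EGL surfaces/context would be released here
    }

    static void Main()
    {
        var t = new Thread(RenderLoop);
        t.Start();
        Thread.Sleep(50);

        StopAndTearDown(t);
        Console.WriteLine($"Teardown ran: {teardownRan}"); // always True
    }
}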
