Occasional I/Choreographer: Skipped ## frames, black-screen lock-up (Android, Xamarin, C#)


Occasionally, when importing photos or returning to the app from an activity such as watching videos, our app locks up and shows a black screen, although some things still run underneath it (toasts still appear, for example). The end of the log consistently says:

I/Choreographer(17165): Skipped ## frames!  The application may be doing too much work on its main thread.
12-21 23:59:15.620 W/libEGL  (17165): EGLNativeWindowType 0x77323f0010 disconnect failed

It is an inconsistent, intermittent issue, so it has been very hard to pin down; any ideas would be appreciated. I assume some threading issue is locking up the UI thread, but because it happens so rarely and isn't easily reproducible, I really have no idea what is happening.

EDIT: This only happens when coming back to the app from an intent activity such as sending an email or picking a photo. I've managed to make it reproducible with a few well-placed breakpoints.

Here is the C# code that I believe is responsible. I added REF(1), REF(2) and REF(3) comments to mark where my breakpoints were. When it freezes, REF(2) is never reached, so the teardown in that catch never runs, the destroyed surface is never recreated, and rendering never resumes. We are using a RecordableSurfaceView; this is a subclass of it, forgive me for how long it is:

private class ARRenderThread : Thread, ISurfaceHolderCallback2
        {
            RecordableSurfaceView mSurfaceView;

            EGLDisplay mEGLDisplay;

            EGLContext mEGLContext;

            EGLSurface mEGLSurface;

            EGLSurface mEGLSurfaceMedia;

            public LinkedList<Runnable> mRunnableQueue = new LinkedList<Runnable>();

            int[] config = new int[] {
                    EGL14.EglRedSize, 8,
                    EGL14.EglGreenSize, 8,
                    EGL14.EglBlueSize, 8,
                    EGL14.EglAlphaSize, 8,
                    EGL14.EglRenderableType, EGL14.EglOpenglEs2Bit,
                    EGLExt.EglRecordableAndroid, 1,
//                    EGL14.EglSurfaceType, EGL14.EglPbufferBit,
                    EGL14.EglDepthSize, 16,
                    EGL14.EglNone
            };


            public ARRenderThread(RecordableSurfaceView surfaceView)
            {
                this.mSurfaceView = surfaceView;
                if (Build.VERSION.SdkInt >= Build.VERSION_CODES.O)
                {
                    config[10] = EGLExt.EglRecordableAndroid;
                }
            }

            private AtomicBoolean mLoop = new AtomicBoolean(false);

            EGLConfig chooseEglConfig(EGLDisplay eglDisplay)
            {
                int[] configsCount = new int[] { 0 };
                EGLConfig[] configs = new EGLConfig[1];
                EGL14.EglChooseConfig(eglDisplay, config, 0, configs, 0, configs.Length, configsCount,
                        0);
                return configs[0];
            }
            public override void Run()
            {
                if (mSurfaceView.mHasGLContext.Get())
                {
                    return;
                }
                mEGLDisplay = EGL14.EglGetDisplay(EGL14.EglDefaultDisplay);
                int[] version = new int[2];
                EGL14.EglInitialize(mEGLDisplay, version, 0, version, 1);
                EGLConfig eglConfig = chooseEglConfig(mEGLDisplay);
                mEGLContext = EGL14
                        .EglCreateContext(mEGLDisplay, eglConfig, EGL14.EglNoContext,
                                new int[] { EGL14.EglContextClientVersion, 2, EGL14.EglNone }, 0);

                int[] surfaceAttribs = {
                            EGL14.EglNone
                    };

                mEGLSurface = EGL14
                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView,
                                surfaceAttribs, 0);
                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);

                // guarantee to only report surface as created once GL context
                // associated with the surface has been created, and call on the GL thread
                // NOT the main thread but BEFORE the codec surface is attached to the GL context
                RendererCallbacks result;
                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onSurfaceCreated();

                }

                mSurfaceView.mMediaSurfaceCreated.Set(false);

                GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 1.0f);

                mSurfaceView.mHasGLContext.Set(true);

                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onContextCreated();

                }

                mLoop.Set(true); // REF(1)

                while (mLoop.Get())
                {

                    if (!mSurfaceView.mPaused)
                    {
                        bool shouldRender = false;

                        //we're just rendering when requested, so check that no one
                        //has requested and if not, just continue
                        if (mSurfaceView.mRenderMode.Get() == (int)Rendermode.WhenDirty)
                        {

                            if (mSurfaceView.mRenderRequested.Get())
                            {
                                mSurfaceView.mRenderRequested.Set(false);
                                shouldRender = true;
                            }

                        }
                        else
                        {
                            shouldRender = true;
                        }

                        if (mSurfaceView.mSizeChange.Get())
                        {

                            GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onSurfaceChanged(mSurfaceView.mWidth, mSurfaceView.mHeight);

                            }

                            mSurfaceView.mSizeChange.Set(false);
                        }

                        if (shouldRender)
                        {

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onPreDrawFrame();

                            }

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onDrawScreen();

                            }

                            EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurface);

                            if (mSurfaceView.mIsRecording.Get())
                            {
                                if (!mSurfaceView.mMediaSurfaceCreated.Get())
                                {
                                    mEGLSurfaceMedia = EGL14
                                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView.mSurface,
                                                surfaceAttribs, 0);
                                    mSurfaceView.mMediaSurfaceCreated.Set(true);
                                }

                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurfaceMedia, mEGLSurfaceMedia,
                                        mEGLContext);

                                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                                {

                                    GLES20.GlViewport(0, 0, mSurfaceView.mOutWidth, mSurfaceView.mOutHeight);
                                    //EGLExt.EglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceMedia, (JavaSystem.CurrentTimeMillis() - RecordableSurfaceView.mStartTimeMillisecs) * 1000L *1000L);
                                    result.onDrawRecording();
                                    GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);
                                }

                                EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurfaceMedia);
                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface,
                                        mEGLContext);
                            }
                        }

                        while (mRunnableQueue.Count > 0)
                        {
                            Runnable ev = mRunnableQueue.First.Value;
                            mRunnableQueue.RemoveFirst();
                            ev.Run();
                        }
                    }

                    try
                    {
                        Thread.Sleep((long)(1f / 120.0f * 1000f));
                    }
                    catch (InterruptedException intex)
                    {
                        if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result)) // REF(2)
                        {
                            result.onSurfaceDestroyed();
                        }

                        if (mEGLDisplay != null)
                        {
                            EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                                    EGL14.EglNoSurface,
                                    EGL14.EglNoContext);

                            if (mEGLSurface != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                            }

                            if (mEGLSurfaceMedia != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                            }

                            EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                            mSurfaceView.mHasGLContext.Set(false);
                            EGL14.EglReleaseThread();
                            EGL14.EglTerminate(mEGLDisplay);
                            mSurfaceView.mSurface.Release();

                        }
                        return;
                    }
                }
            }

            public void SurfaceRedrawNeeded(ISurfaceHolder surfaceHolder)
            {

            }

            public void SurfaceCreated(ISurfaceHolder surfaceHolder)
            {

                if (!this.IsAlive && !this.IsInterrupted && this.GetState() != State.Terminated)
                {
                    this.Start();
                }
            }

            public void SurfaceChanged(ISurfaceHolder surfaceHolder, Android.Graphics.Format i, int width, int height)
            {

                if (mSurfaceView.mWidth != width)
                {
                    mSurfaceView.mWidth = width;
                    mSurfaceView.mSizeChange.Set(true);
                }

                if (mSurfaceView.mHeight != height)
                {
                    mSurfaceView.mHeight = height;
                    mSurfaceView.mSizeChange.Set(true);
                }


            }

            public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
            {
                mLoop.Set(false); //REF(3)
                this.Interrupt();
                mSurfaceView.Holder.RemoveCallback(this);
            }
        }
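
To make the suspected failure concrete, here is a stripped-down sketch of the same structure using plain System.Threading (illustration only; the real class extends Java.Lang.Thread, and the exact window there is harder to pin down — it could be the gap between Sleep returning and the next mLoop check, the interrupt landing before the loop flag is even read, or the pending interrupt being consumed by something inside the frame work). The structural hazard is the same either way: the only teardown path is the catch around the sleep, so any exit where the interrupt is never observed while blocked skips the teardown, leaves mHasGLContext set, and makes the next Run() bail out immediately.

using System;
using System.Threading;

class InterruptTeardownSketch
{
    static volatile bool loop = true;          // stands in for mLoop
    static volatile bool hasGlContext = true;  // stands in for mHasGLContext

    static void RenderLoop()
    {
        while (loop)
        {
            try
            {
                // Frame pacing, as in the real loop. The catch below is the
                // ONLY teardown path, and it fires only if the interrupt is
                // observed while the thread is blocked here.
                Thread.Sleep(8);
            }
            catch (ThreadInterruptedException)
            {
                // In the real class this is where the EGL surfaces/context are
                // destroyed and mHasGLContext is reset to false.
                hasGlContext = false;
                return;
            }

            Thread.SpinWait(50_000_000);       // stands in for the per-frame GL work
        }
        // Falling out here means the catch never ran: nothing was torn down,
        // hasGlContext is still true, and the next Run() would return
        // immediately, so the surface is never recreated -> black screen.
    }

    static void Main()
    {
        var worker = new Thread(RenderLoop);
        worker.Start();
        Thread.Sleep(100);

        // SurfaceDestroyed() equivalent: clear the flag, then interrupt. If the
        // worker happens to be mid-"frame" rather than sleeping, the while check
        // fails before it ever blocks again and the teardown is skipped.
        loop = false;
        worker.Interrupt();
        worker.Join();

        // Typically prints True, but it is timing-dependent -- just like the real bug.
        Console.WriteLine("GL context still marked alive: " + hasGlContext);
    }
}
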

Here is a longer version of the log as it fails; I have three logs that produce near-identical output like this:

12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 output.delay.value = 2
12-21 23:47:40.384 D/CCodecConfig(17165):   string output.media-type.value = "audio/raw"
12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 raw.channel-count.value = 1
12-21 23:47:40.384 D/CCodecConfig(17165):   c2::u32 raw.sample-rate.value = 44100
12-21 23:47:40.384 D/CCodecConfig(17165): }
12-21 23:47:40.385 D/CCodecConfig(17165): no c2 equivalents for language
12-21 23:47:40.385 D/CCodecConfig(17165): config failed => CORRUPTED
12-21 23:47:40.386 D/CCodecConfig(17165): c2 config diff is   c2::u32 raw.channel-count.value = 2
12-21 23:47:40.386 W/Codec2Client(17165): query -- param skipped: index = 1107298332.
12-21 23:47:40.386 D/CCodec  (17165): client requested max input size 469, which is smaller than what component recommended (8192); overriding with component recommendation.
12-21 23:47:40.386 W/CCodec  (17165): This behavior is subject to change. It is recommended that app developers double check whether the requested max input size is in reasonable range.
12-21 23:47:40.386 D/CCodec  (17165): setup formats input: AMessage(what = 0x00000000) = {
12-21 23:47:40.386 D/CCodec  (17165):   int32_t channel-count = 2
12-21 23:47:40.386 D/CCodec  (17165):   int32_t level = 0
12-21 23:47:40.386 D/CCodec  (17165):   int32_t max-input-size = 8192
12-21 23:47:40.386 D/CCodec  (17165):   string mime = "audio/mp4a-latm"
12-21 23:47:40.386 D/CCodec  (17165):   int32_t profile = 2
12-21 23:47:40.386 D/CCodec  (17165):   int32_t sample-rate = 44100
12-21 23:47:40.386 D/CCodec  (17165): } and output: AMessage(what = 0x00000000) = {
12-21 23:47:40.386 D/CCodec  (17165):   int32_t channel-count = 2
12-21 23:47:40.386 D/CCodec  (17165):   string mime = "audio/raw"
12-21 23:47:40.386 D/CCodec  (17165):   int32_t sample-rate = 44100
12-21 23:47:40.386 D/CCodec  (17165): }
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 1342179345.
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 2415921170.
12-21 23:47:40.388 W/Codec2Client(17165): query -- param skipped: index = 1610614798.
12-21 23:47:40.390 D/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Created input block pool with allocatorID 16 => poolID 139 - OK (0)
12-21 23:47:40.391 D/BufferPoolAccessor(17165): bufferpool2 0x7827bc0c20 : 0(0 size) total buffers - 0(0 size) used buffers - 1/7 (recycle/alloc) - 6/25 (fetch/transfer)
12-21 23:47:40.391 D/BufferPoolAccessor(17165): Destruction - bufferpool2 0x7827bc0c20 cached: 0/0M, 0/0% in use; allocs: 7, 14% recycled; transfers: 25, 76% unfetced
12-21 23:47:40.391 I/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Created output block pool with allocatorID 16 => poolID 578 - OK
12-21 23:47:40.392 D/CCodecBufferChannel(17165): [c2.android.aac.decoder#945] Configured output block pool ids 578 => OK
12-21 23:47:40.404 D/CCodec  (17165): allocate(c2.qti.avc.decoder)
12-21 23:47:40.405 I/Codec2Client(17165): Creating a Codec2 client to service "default"
12-21 23:47:40.407 I/Codec2Client(17165): Client to Codec2 service "default" created
12-21 23:47:40.407 I/CCodec  (17165): setting up 'default' as default (vendor) store
12-21 23:47:40.410 I/CCodec  (17165): Created component [c2.qti.avc.decoder]
12-21 23:47:40.411 D/CCodecConfig(17165): read media type: video/avc
12-21 23:47:40.412 D/ReflectedParamUpdater(17165): extent() != 1 for single value type: output.buffers.pool-ids.values
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.size (0xd2001800) as it is already supported
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.color (0xd2001809) as it is already supported
12-21 23:47:40.416 D/CCodecConfig(17165): ignoring local param raw.hdr-static-info (0xd200180a) as it is already supported
12-21 23:47:40.417 I/CCodecConfig(17165): query failed after returning 17 values (BAD_INDEX)
12-21 23:47:40.418 D/CCodecConfig(17165): c2 config diff is Dict {
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::i32 algo.priority.value = -1
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float algo.rate.value = 4.2039e-44
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 algo.secure-mode.value = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float coded.frame-rate.value = 30
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.pl.level = 20480
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.pl.profile = 20480
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.primaries = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.range = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 coded.vui.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.primaries = 3
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.range = 2
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 default.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 input.buffers.max-size.value = 13271040
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 input.delay.value = 4
12-21 23:47:40.418 D/CCodecConfig(17165):   string input.media-type.value = "video/avc"
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 output.delay.value = 18
12-21 23:47:40.418 D/CCodecConfig(17165):   string output.media-type.value = "video/raw"
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.matrix = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.primaries = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.range = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::u32 raw.color.transfer = 0
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-static-info.mastering.blue.x = 1.4013e-45
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-static-info.mastering.blue.y = 1.4013e-45
12-21 23:47:40.418 D/CCodecConfig(17165):   c2::float raw.hdr-
12-21 23:47:40.418 W/ColorUtils(17165): expected specified color aspects (0:0:0:0)
12-21 23:47:40.423 D/SurfaceUtils(17165): connecting to surface 0x7796e34010, reason connectToSurface
12-21 23:47:40.423 I/MediaCodec(17165): [c2.qti.avc.decoder] setting surface generation to 17577022
12-21 23:47:40.423 D/SurfaceUtils(17165): disconnecting from surface 0x7796e34010, reason connectToSurface(reconnect)
12-21 23:47:40.423 D/SurfaceUtils(17165): connecting to surface 0x7796e34010, reason connectToSurface(reconnect)
12-21 23:47:40.423 D/CCodecConfig(17165): no c2 equivalents for csd-1
12-21 23:47:40.423 D/CCodecConfig(17165): no c2 equivalents for native-window
12-21 23:47:40.424 D/CCodecConfig(17165): c2 config diff is   c2::u32 input.buffers.max-size.value = 7077888
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.max-size.height = 360
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.max-size.width = 640
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.pixel-format.value = 34
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.size.height = 360
12-21 23:47:40.424 D/CCodecConfig(17165):   c2::u32 raw.size.width = 640
12-21 23:47:40.425 W/Codec2Client(17165): query -- param skipped: index = 1107298332.
12-21 23:47:40.425 D/CCodec  (17165): client requested max input size 21629, which is smaller than what component recommended (7077888); overriding with component recommendation.
12-21 23:47:40.425 W/CCodec  (17165): This behavior is subject to change. It is recommended that app developers double check whether the requested max input size is in reasonable range.
12-21 23:47:40.425 D/CCodec  (17165): setup formats input: AMessage(what = 0x00000000) = {
12-21 23:47:40.425 D/CCodec  (17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.425 D/CCodec  (17165):   int32_t frame-rate = 30
12-21 23:47:40.425 D/CCodec  (17165):   int32_t height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t level = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-input-size = 7077888
12-21 23:47:40.425 D/CCodec  (17165):   string mime = "video/avc"
12-21 23:47:40.425 D/CCodec  (17165):   int32_t priority = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t profile = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t width = 640
12-21 23:47:40.425 D/CCodec  (17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.425 D/CCodec  (17165): } and output: AMessage(what = 0x00000000) = {
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._video-scaling = 1
12-21 23:47:40.425 D/CCodec  (17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-standard = 4
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-range = 2
12-21 23:47:40.425 D/CCodec  (17165):   int32_t color-transfer = 3
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._dataspace = 259
12-21 23:47:40.425 D/CCodec  (17165):   int32_t width = 640
12-21 23:47:40.425 D/CCodec  (17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.425 D/CCodec  (17165):   int32_t frame-rate = 30
12-21 23:47:40.425 D/CCodec  (17165):   int32_t height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-height = 360
12-21 23:47:40.425 D/CCodec  (17165):   int32_t max-width = 640
12-21 23:47:40.425 D/CCodec  (17165):   string mime = "video/raw"
12-21 23:47:40.425 D/CCodec  (17165):   int32_t priority = 1
12-21 23:47:40.425 D/CCodec  (17165):   int32_t rotation-degrees = 0
12-21 23:47:40.425 D/CCodec  (17165):   Buffer hdr-static-info = {
12-21 23:47:40.425 D/CCodec  (17165):     00000000:  00 00 00 00 00 00 00 00  00 00 00 00 00 00 00 00  ................
12-21 23:47:40.425 D/CCodec  (17165):     00000010:  00 00 00 00 00 00 00 00  00                       .........
12-21 23:47:40.425 D/CCodec  (17165):   }
12-21 23:47:40.425 D/CCodec  (17165):   int32_t android._color-format = 2130708
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 1342179345.
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 2415921170.
12-21 23:47:40.437 W/Codec2Client(17165): query -- param skipped: index = 1610614798.
12-21 23:47:40.438 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query input allocators returned 0 params => BAD_INDEX (6)
12-21 23:47:40.439 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Created input block pool with allocatorID 16 => poolID 140 - OK (0)
12-21 23:47:40.439 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query output allocators returned 0 params => BAD_INDEX (6)
12-21 23:47:40.440 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Query output surface allocator returned 0 params => BAD_INDEX (6)
12-21 23:47:40.446 I/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Created output block pool with allocatorID 18 => poolID 795 - OK
12-21 23:47:40.447 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] Configured output block pool ids 795 => OK
12-21 23:47:40.447 D/Codec2-block_helper(17165): remote graphic buffer migration 0/0
12-21 23:47:40.447 D/Codec2Client(17165): generation remote change 17577022
12-21 23:47:40.459 D/BufferPoolAccessor(17165): bufferpool2 0x7797ca5420 : 0(0 size) total buffers - 0(0 size) used buffers - 0/8 (recycle/alloc) - 8/31 (fetch/transfer)
12-21 23:47:40.459 D/BufferPoolAccessor(17165): Destruction - bufferpool2 0x7797ca5420 cached: 0/0M, 0/0% in use; allocs: 8, 0% recycled; transfers: 31, 74% unfetced
12-21 23:47:40.482 D/CCodecConfig(17165): c2 config diff is   c2::u32 raw.color.matrix = 1
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.primaries = 1
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.range = 2
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.color.transfer = 3
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.height = 360
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.left = 0
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.top = 0
12-21 23:47:40.482 D/CCodecConfig(17165):   c2::u32 raw.crop.width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165): [c2.qti.avc.decoder#185] onWorkDone: output format changed to AMessage(what = 0x00000000) = {
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._video-scaling = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   Rect crop(0, 0, 639, 359)
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-standard = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-range = 2
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-transfer = 3
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._dataspace = 260
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t feature-secure-playback = 0
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t frame-rate = 30
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t height = 360
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t max-height = 360
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t max-width = 640
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   string mime = "video/raw"
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t priority = 1
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t rotation-degrees = 0
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   Buffer hdr-static-info = {
12-21 23:47:40.482 D/CCodecBufferChannel(17165):     00000000:  00 00 00 00 00 00 00 00  00 00 00 00 00 00 00 00  ................
12-21 23:47:40.482 D/CCodecBufferChannel(17165):     00000010:  00 00 00 00 00 00 00 00  00                       .........
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   }
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t android._color-format = 2130708361
12-21 23:47:40.482 D/CCodecBufferChannel(17165):   int32_t color-format = 2130708361
12-21 23:47:40.482 D/CCodecBufferChannel(17165): }
12-21 23:47:41.069 D/Mono    (17165): GC_TAR_BRIDGE bridges 0 objects 0 opaque 0 colors 0 colors-bridged 0 colors-visible 119 xref 4 cache-hit 0 cache-semihit 0 cache-miss 0 setup 0.01ms tarjan 0.03ms scc-setup 0.05ms gather-xref 0.00ms xref-setup 0.00ms cleanup 0.00ms
12-21 23:47:41.069 D/Mono    (17165): GC_BRIDGE: Complete, was running for 0.13ms
12-21 23:47:41.069 D/Mono    (17165): GC_MINOR: (Nursery full) time 2.87ms, stw 4.31ms promoted 0K major size: 2576K in use: 1497K los size: 17004K in use: 15497K
12-21 23:47:41.384 W/AudioTrack(17165): Use of stream types is deprecated for operations other than volume control
12-21 23:47:41.384 W/AudioTrack(17165): See the documentation of AudioTrack() for what to use instead with android.media.AudioAttributes to qualify your playback use case
12-21 23:47:42.116 D/Mono    (17165): GC_BRIDGE waiting for bridge processing to finish
12-21 23:47:42.121 I/ame.DoodleSmas(17165): Explicit concurrent copying GC freed 19087(932KB) AllocSpace objects, 13(692KB) LOS objects, 49% free, 4691KB/9383KB, paused 46us total 23.431ms
12-21 23:47:42.122 D/Mono    (17165): GC_TAR_BRIDGE bridges 157 objects 157 opaque 0 colors 157 colors-bridged 157 colors-visible 157 xref 0 cache-hit 0 cache-semihit 0 cache-miss 0 setup 0.00ms tarjan 0.03ms scc-setup 0.03ms gather-xref 0.00ms xref-setup 0.00ms cleanup 0.02ms
12-21 23:47:42.122 D/Mono    (17165): GC_BRIDGE: Complete, was running for 24.91ms
12-21 23:47:42.122 D/Mono    (17165): GC_MINOR: (Nursery full) time 4.00ms, stw 4.73ms promoted 74K major size: 2576K in use: 1572K los size: 24172K in use: 21766K
12-21 23:47:42.563 I/Choreographer(17165): Skipped 72 frames!  The application may be doing too much work on its main thread.
12-21 23:47:42.567 W/libEGL  (17165): EGLNativeWindowType 0x77924dc010 disconnect failed
Tags: c#, android, xamarin, graphics, android-threading
asked on Stack Overflow Jan 13, 2020 by rnisson • edited Jan 14, 2020 by rnisson

1 Answer


In case anyone has this problem in the future: in the original version above, the code that destroys the recordable surface only ran when returning to the app, from the InterruptedException catch around Thread.Sleep, and it ran only intermittently. I moved that teardown into SurfaceDestroyed, which executes on pause when the app's surface is destroyed, and it works now. The change in isolation is below, followed by the full class.
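
The relevant change in isolation (names and logic exactly as in the full class that follows): the EGL teardown now runs from SurfaceDestroyed, and the old InterruptedException catch in Run() is commented out.

public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
{
    mLoop.Set(false);
    this.Interrupt();
    mSurfaceView.Holder.RemoveCallback(this);

    // EGL teardown moved here from the InterruptedException catch around
    // Thread.Sleep in Run() (that catch is commented out below).
    if (mEGLDisplay != null)
    {
        EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface, EGL14.EglNoSurface, EGL14.EglNoContext);

        if (mEGLSurface != null)
        {
            EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
        }

        if (mEGLSurfaceMedia != null)
        {
            EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
        }

        EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
        mSurfaceView.mHasGLContext.Set(false); // critical: lets the next Run() rebuild everything
        EGL14.EglReleaseThread();
        EGL14.EglTerminate(mEGLDisplay);
        mSurfaceView.mSurface.Release();
    }
}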

public class ARRenderThread : Thread, ISurfaceHolderCallback2
        {
            RecordableSurfaceView mSurfaceView;

            EGLDisplay mEGLDisplay;

            EGLContext mEGLContext;

            EGLSurface mEGLSurface;

            EGLSurface mEGLSurfaceMedia;

            public LinkedList<Runnable> mRunnableQueue = new LinkedList<Runnable>();

            int[] config = new int[] {
                    EGL14.EglRedSize, 8,
                    EGL14.EglGreenSize, 8,
                    EGL14.EglBlueSize, 8,
                    EGL14.EglAlphaSize, 8,
                    EGL14.EglRenderableType, EGL14.EglOpenglEs2Bit,
                    EGLExt.EglRecordableAndroid, 1,
//                    EGL14.EglSurfaceType, EGL14.EglPbufferBit,
                    EGL14.EglDepthSize, 16,
                    EGL14.EglNone
            };


            public ARRenderThread(RecordableSurfaceView surfaceView)
            {
                this.mSurfaceView = surfaceView;
                if (Build.VERSION.SdkInt >= Build.VERSION_CODES.O)
                {
                    config[10] = EGLExt.EglRecordableAndroid;
                }
            }

            public AtomicBoolean mLoop = new AtomicBoolean(false);

            EGLConfig chooseEglConfig(EGLDisplay eglDisplay)
            {
                int[] configsCount = new int[] { 0 };
                EGLConfig[] configs = new EGLConfig[1];
                EGL14.EglChooseConfig(eglDisplay, config, 0, configs, 0, configs.Length, configsCount,
                        0);
                return configs[0];
            }
            public override void Run()
            {
                if (mSurfaceView.mHasGLContext.Get())
                {
                    return;
                }
                mEGLDisplay = EGL14.EglGetDisplay(EGL14.EglDefaultDisplay);
                int[] version = new int[2];
                EGL14.EglInitialize(mEGLDisplay, version, 0, version, 1);
                EGLConfig eglConfig = chooseEglConfig(mEGLDisplay);
                mEGLContext = EGL14
                        .EglCreateContext(mEGLDisplay, eglConfig, EGL14.EglNoContext,
                                new int[] { EGL14.EglContextClientVersion, 2, EGL14.EglNone }, 0);

                int[] surfaceAttribs = {
                            EGL14.EglNone
                    };

                mEGLSurface = EGL14
                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView,
                                surfaceAttribs, 0);
                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);

                // guarantee to only report surface as created once GL context
                // associated with the surface has been created, and call on the GL thread
                // NOT the main thread but BEFORE the codec surface is attached to the GL context
                RendererCallbacks result;
                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onSurfaceCreated();

                }

                mSurfaceView.mMediaSurfaceCreated.Set(false);

                GLES20.GlClearColor(0.1f, 0.1f, 0.1f, 1.0f);

                mSurfaceView.mHasGLContext.Set(true);

                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                {

                    result.onContextCreated();

                }

                mLoop.Set(true);

                while (mLoop.Get())
                {

                    if (!mSurfaceView.mPaused)
                    {
                        bool shouldRender = false;

                        //we're just rendering when requested, so check that no one
                        //has requested and if not, just continue
                        if (mSurfaceView.mRenderMode.Get() == (int)Rendermode.WhenDirty)
                        {

                            if (mSurfaceView.mRenderRequested.Get())
                            {
                                mSurfaceView.mRenderRequested.Set(false);
                                shouldRender = true;
                            }

                        }
                        else
                        {
                            shouldRender = true;
                        }

                        if (mSurfaceView.mSizeChange.Get())
                        {

                            GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onSurfaceChanged(mSurfaceView.mWidth, mSurfaceView.mHeight);

                            }

                            mSurfaceView.mSizeChange.Set(false);
                        }

                        if (shouldRender)
                        {

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onPreDrawFrame();

                            }

                            if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                            {

                                result.onDrawScreen();

                            }

                            EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurface);

                            if (mSurfaceView.mIsRecording.Get())
                            {
                                if (!mSurfaceView.mMediaSurfaceCreated.Get())
                                {
                                    mEGLSurfaceMedia = EGL14
                                        .EglCreateWindowSurface(mEGLDisplay, eglConfig, mSurfaceView.mSurface,
                                                surfaceAttribs, 0);
                                    mSurfaceView.mMediaSurfaceCreated.Set(true);
                                }

                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurfaceMedia, mEGLSurfaceMedia,
                                        mEGLContext);

                                if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                                {

                                    GLES20.GlViewport(0, 0, mSurfaceView.mOutWidth, mSurfaceView.mOutHeight);
                                    //EGLExt.EglPresentationTimeANDROID(mEGLDisplay, mEGLSurfaceMedia, (JavaSystem.CurrentTimeMillis() - RecordableSurfaceView.mStartTimeMillisecs) * 1000L *1000L);
                                    result.onDrawRecording();
                                    GLES20.GlViewport(0, 0, mSurfaceView.mWidth, mSurfaceView.mHeight);
                                }

                                EGL14.EglSwapBuffers(mEGLDisplay, mEGLSurfaceMedia);
                                EGL14.EglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface,
                                        mEGLContext);
                            }
                        }

                        while (mRunnableQueue.Count > 0)
                        {
                            Runnable ev = mRunnableQueue.First.Value;
                            mRunnableQueue.RemoveFirst();
                            ev.Run();
                        }
                    }

                    /*
                    try
                    {
                        Thread.Sleep((long)(1f / 120.0f * 1000f));
                    }
                    catch (InterruptedException intex) // KEY TO THE BLACK-SCREEN BUG: this catch sometimes never fires, so the old surface is never destroyed/recreated and mHasGLContext is never set back to false
                    {
                        if (mSurfaceView.mRendererCallbacksWeakReference != null && mSurfaceView.mRendererCallbacksWeakReference.TryGetTarget(out result))
                        {
                            result.onSurfaceDestroyed();
                        }

                        if (mEGLDisplay != null)
                        {
                            EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                                    EGL14.EglNoSurface,
                                    EGL14.EglNoContext);

                            if (mEGLSurface != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                            }

                            if (mEGLSurfaceMedia != null)
                            {
                                EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                            }

                            EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                            mSurfaceView.mHasGLContext.Set(false);
                            EGL14.EglReleaseThread();
                            EGL14.EglTerminate(mEGLDisplay);
                            mSurfaceView.mSurface.Release();

                        }
                        return;
                    }*/
                }
            }

            public void SurfaceRedrawNeeded(ISurfaceHolder surfaceHolder)
            {

            }

            public void SurfaceCreated(ISurfaceHolder surfaceHolder)
            {

                if (!this.IsAlive && !this.IsInterrupted && this.GetState() != State.Terminated)
                {
                    this.Start();
                }
            }

            public void SurfaceChanged(ISurfaceHolder surfaceHolder, Android.Graphics.Format i, int width, int height)
            {

                if (mSurfaceView.mWidth != width)
                {
                    mSurfaceView.mWidth = width;
                    mSurfaceView.mSizeChange.Set(true);
                }

                if (mSurfaceView.mHeight != height)
                {
                    mSurfaceView.mHeight = height;
                    mSurfaceView.mSizeChange.Set(true);
                }


            }

            public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
            {
                mLoop.Set(false);
                this.Interrupt();
                mSurfaceView.Holder.RemoveCallback(this);

                // Moved the surface-destroying code here (runs when the app is paused and the surface is destroyed) instead of the unreliable catch that only ran on return _______
                if (mEGLDisplay != null)
                {
                    EGL14.EglMakeCurrent(mEGLDisplay, EGL14.EglNoSurface,
                            EGL14.EglNoSurface,
                            EGL14.EglNoContext);

                    if (mEGLSurface != null)
                    {
                        EGL14.EglDestroySurface(mEGLDisplay, mEGLSurface);
                    }

                    if (mEGLSurfaceMedia != null)
                    {
                        EGL14.EglDestroySurface(mEGLDisplay, mEGLSurfaceMedia);
                    }

                    EGL14.EglDestroyContext(mEGLDisplay, mEGLContext);
                    mSurfaceView.mHasGLContext.Set(false);
                    EGL14.EglReleaseThread();
                    EGL14.EglTerminate(mEGLDisplay);
                    mSurfaceView.mSurface.Release();

                }
                //______________________________________________________________________________________________________________
            }
        }

    }
}
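
One thing to keep in mind with this layout: SurfaceDestroyed runs on the UI thread, so the EGL objects are now destroyed while the render thread may still be mid-frame. The fix above works for us as-is, but if you run into trouble there, an untested variation is to wait briefly for the loop to wind down before tearing EGL down (Join(long) here is Java.Lang.Thread's timed join):

public void SurfaceDestroyed(ISurfaceHolder surfaceHolder)
{
    mLoop.Set(false);
    this.Interrupt();
    mSurfaceView.Holder.RemoveCallback(this);

    // Give the render loop a moment to notice mLoop == false and finish its
    // current frame before the EGL objects are destroyed from the UI thread.
    try
    {
        this.Join(500);
    }
    catch (InterruptedException)
    {
        // ignore; fall through to the teardown either way
    }

    // ... EGL teardown exactly as in SurfaceDestroyed above ...
}
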
answered on Stack Overflow Jan 16, 2020 by rnisson

User contributions licensed under CC BY-SA 3.0