Tags: java, android, libvlc, android-textureview, vlc-android

Are there any fast alternatives to the TextureView.getBitmap() function?


I'm using VLC for Android (LibVLC) together with a TextureView to play a live RTSP stream in my Android application. Everything works fine, however I need to grab the currently playing frame as often as possible for some object detection tasks, and I'm using the getBitmap() function to do so. The problem is that this function is too slow, and it takes longer and longer as the size of the image rendered in the TextureView increases.

So is there any other way to do this faster?

Please note that I tried the getDrawingCache() function on both TextureView and SurfaceView, but it always returned a transparent bitmap; after a little research I realized this is because VLC uses hardware acceleration to render the frames onto the surface texture.

I also found many similar solutions pointing to this answer by fadden, which suggests using the glReadPixels() function and points to grafika as a reference for code samples. However (and unfortunately) I have almost no experience with OpenGL. So if you can validate the linked answer, could you please point me to a simple, direct code sample for my case?
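
If I understood fadden's suggestion correctly, the core of it would look something like the sketch below. I'm not sure this is right (which is part of why I'm asking); it assumes the call runs on a thread that owns a current EGL context (e.g. inside a renderer callback), that the video frame has already been drawn into the currently bound framebuffer, and that width and height hold the frame dimensions:

// Read the currently bound framebuffer back into a Bitmap.
// Must run on the thread that owns the EGL context.
ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(width * height * 4)
        .order(ByteOrder.nativeOrder());
GLES20.glReadPixels(0, 0, width, height,
        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
pixelBuffer.rewind();

Bitmap frame = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
frame.copyPixelsFromBuffer(pixelBuffer);
// Note: glReadPixels returns rows bottom-up, so the bitmap comes out vertically
// flipped and may need a Matrix flip before further processing.

For reference, here is my current player code: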

public class MainActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener,
    org.videolan.libvlc.media.MediaPlayer.OnBufferingUpdateListener,
    org.videolan.libvlc.media.MediaPlayer.OnCompletionListener,
    org.videolan.libvlc.media.MediaPlayer.OnPreparedListener,
    org.videolan.libvlc.media.MediaPlayer.OnVideoSizeChangedListener {

private AppCompatActivity me = this;
private MediaPlayer mMediaPlayer;
private TextureView mTextureView;
private String mUrl = "/storage/emulated/0/videos/test.mp4";
private static final String TAG = "MainActivity";

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.content_main);

    mMediaPlayer = new MediaPlayer(VLCInstance.get());
    mTextureView = (TextureView) findViewById(R.id.player);
    mTextureView.setSurfaceTextureListener(this);
}

private void attachViewSurface() {
        final IVLCVout vlcVout = mMediaPlayer.getVLCVout();
        mMediaPlayer.setScale(0);
        vlcVout.detachViews();
        vlcVout.setVideoView(mTextureView);
        vlcVout.setWindowSize(mTextureView.getWidth(), mTextureView.getHeight());
        vlcVout.attachViews();
        mTextureView.setKeepScreenOn(true);
}


private void play(String path) {
   try {
        Media media;
        if (new File(path).exists()) {
            media = new Media(VLCInstance.get(), path);
        } else {
            media = new Media(VLCInstance.get(), Uri.parse(path));
        }

        mMediaPlayer.setMedia(media);
        mMediaPlayer.play();
    } catch (Exception e) {
        Log.e(TAG, e.getMessage());
    }
}

@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
   attachViewSurface();

   if (mMediaPlayer.hasMedia())
       mMediaPlayer.play();
   else
       play(mUrl);
}

public Bitmap getImage() {
   return mTextureView.getBitmap();
}

}

Solution

  • After this long time, I decided to post this answer describing what I'm using as an alternative for now. I found that the FFmpegFrameGrabber class from JavaCV (Bytedeco) can be used to play both RTSP streams and video files, however you run into two problems here:

    1. FFmpegFrameGrabber.grab(), which reads the next available frame, is too slow: I was not able to grab more than 6 frames per second on my device (CPU: 1.5 GHz 64-bit octa-core ARM Cortex-A53).
    2. FFmpegFrameGrabber has no rendering abilities; it just grabs the current video frame into an OpenCV Mat object or a JavaCV Frame object (you can use the AndroidFrameConverter class of the same library to convert the Frame object into a Bitmap).

    Concerning the first problem, I can live with it, since I don't need more than 5 fps in my case.

    For the second one, I developed an OpenGL bitmap-based renderer that can render the bitmap images produced by the grabber almost instantly (it is very fast). Here is my code (a small wiring example follows at the end):

    app.gradle:

    implementation group: 'org.bytedeco', name: 'javacv-platform', version: '1.4.3'
    implementation group: 'org.bytedeco', name: 'javacv', version: '1.4.3'
    

    grabber:

    import android.graphics.Bitmap;
    import android.os.AsyncTask;
    import android.util.Log;
    
    import org.bytedeco.javacpp.opencv_core;
    import org.bytedeco.javacv.AndroidFrameConverter;
    import org.bytedeco.javacv.FFmpegFrameGrabber;
    import org.bytedeco.javacv.Frame;
    import org.bytedeco.javacv.FrameGrabber;
    import org.bytedeco.javacv.OpenCVFrameConverter;
    
    import static org.bytedeco.javacpp.opencv_imgcodecs.imwrite;
    
    // Grabs frames off the UI thread and publishes each decoded Bitmap to the renderer.
    class Player extends AsyncTask<BitmapRenderer, Bitmap, Object> {
        BitmapRenderer glRenderer;
        FFmpegFrameGrabber grabber = null;
    
        @Override
        protected Bitmap doInBackground(BitmapRenderer... objects) {
            glRenderer = objects[0];
    
            try {
                grabber = new FFmpegFrameGrabber("/storage/emulated/0/Download/test.mp4");
                grabber.start();
                OpenCVFrameConverter.ToMat converter = new OpenCVFrameConverter.ToMat();
                Frame grabbedImage;
                while ((grabbedImage = grabber.grabImage()) != null) {
                    Log.e("Android", "Frame Grabbed " + grabbedImage.imageWidth + "x" + grabbedImage.imageHeight);
                    AndroidFrameConverter frameConverter = new AndroidFrameConverter();
                    Bitmap bitmap = frameConverter.convert(grabbedImage);
                    publishProgress(bitmap);
    
                    opencv_core.Mat grabbedMat = converter.convert(grabbedImage);
                    if (grabbedMat != null)
                        imwrite("/storage/emulated/0/Download/videoplayback.jpg", grabbedMat);
                }
    
            } catch (FrameGrabber.Exception e) {
                e.printStackTrace();
                Log.e("Android", e.getMessage(), e);
            }
            return null;
        }
    
        @Override
        protected void onProgressUpdate(Bitmap... values) {
            super.onProgressUpdate(values);
            glRenderer.draw(values[0]);
        }
    
        @Override
        protected void onPostExecute(Object objects) {
            super.onPostExecute(objects);
            try {
                grabber.stop();
                grabber.release();
            } catch (FrameGrabber.Exception e1) {
                // ignore errors while shutting the grabber down
            }
        }
    }
    

    Renderer:

    package com.example.gphspc.javacvtest;
    
    import android.graphics.Bitmap;
    import android.opengl.GLSurfaceView;
    import android.opengl.GLUtils;
    import android.view.ViewGroup;
    
    import java.nio.Buffer;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    
    import javax.microedition.khronos.egl.EGLConfig;
    import javax.microedition.khronos.opengles.GL10;
    
    public class BitmapRenderer implements GLSurfaceView.Renderer {
    
        private int[] textures;
        private Bitmap bitmap;
        private GLSurfaceView glSurfaceView;
        private int parentWidth, parentHeight;
        private boolean sizeModified = false;
    
        public BitmapRenderer(GLSurfaceView glSurfaceView) {
            this.glSurfaceView = glSurfaceView;
            this.glSurfaceView.setEGLContextClientVersion(1);
            this.glSurfaceView.setRenderer(this);
            this.glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        }
    
        private static final float[] VERTEX_COORDINATES = new float[]{
                -1.0f, +1.0f, 0.0f,
                +1.0f, +1.0f, 0.0f,
                -1.0f, -1.0f, 0.0f,
                +1.0f, -1.0f, 0.0f
        };
    
        private static final float[] TEXTURE_COORDINATES = new float[]{
                0.0f, 0.0f,
                1.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 1.0f
        };
    
        private static final Buffer TEXCOORD_BUFFER = ByteBuffer.allocateDirect(TEXTURE_COORDINATES.length * 4)
                .order(ByteOrder.nativeOrder()).asFloatBuffer().put(TEXTURE_COORDINATES).rewind();
        private static final Buffer VERTEX_BUFFER = ByteBuffer.allocateDirect(VERTEX_COORDINATES.length * 4)
                .order(ByteOrder.nativeOrder()).asFloatBuffer().put(VERTEX_COORDINATES).rewind();
    
        public void draw(Bitmap bitmap) {
            if (bitmap == null)
                return;
    
            this.bitmap = bitmap;
    
            if (!sizeModified) {
                ViewGroup.LayoutParams layoutParams = glSurfaceView.getLayoutParams();
                Dimension newDims = getRelativeSize(new Dimension(bitmap.getWidth(), bitmap.getHeight()), glSurfaceView.getWidth(), glSurfaceView.getHeight());
                layoutParams.width = newDims.getWidth();
                layoutParams.height = newDims.getHeight();
                glSurfaceView.setLayoutParams(layoutParams);
                sizeModified = true;
            }
    
            glSurfaceView.requestRender();
        }
    
        public static Dimension getRelativeSize(Dimension dimension, int width, int height) {
            int toWidth = width, toHeight = height;
    
            int imgWidth = (int) dimension.getWidth();
            int imgHeight = (int) dimension.getHeight();
    
            if (imgWidth > imgHeight) {
                toWidth = (int) ((double) height / ((double) imgHeight / imgWidth));
                if (toWidth > width)
                    toWidth = width;
                toHeight = (int) (toWidth * ((double) imgHeight / imgWidth));
            } else if (imgWidth < imgHeight) {
                toHeight = (int) ((double) width / ((double) imgWidth / imgHeight));
                if (toHeight > height)
                    toHeight = height;
                toWidth = (int) (toHeight * ((double) imgWidth / imgHeight));
            }
    
            return new Dimension(toWidth, toHeight);
        }
    
        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            textures = new int[1];
            gl.glEnable(GL10.GL_TEXTURE_2D);
            gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
            gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    
            ViewGroup.LayoutParams layoutParams = glSurfaceView.getLayoutParams();
            parentWidth = layoutParams.width;
            parentHeight = layoutParams.height;
        }
    
        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            gl.glViewport(0, 0, width, height);
    //        gl.glOrthof(0f, width, 0f, height, -1f, 1f);
        }
    
        @Override
        public void onDrawFrame(GL10 gl) {
            if (bitmap != null) {
    
                // Release the texture uploaded for the previous frame (a 0 id is ignored)
                // so repeated uploads don't leak GPU memory, then create a fresh texture.
                gl.glDeleteTextures(1, textures, 0);
                gl.glGenTextures(1, textures, 0);
                gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
    
                gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
                gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
                gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
                gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    
                GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
    
                gl.glActiveTexture(GL10.GL_TEXTURE0);
                gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
    
                gl.glVertexPointer(3, GL10.GL_FLOAT, 0, VERTEX_BUFFER);
                gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, TEXCOORD_BUFFER);
                gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
            }
        }
    }
    
    class Dimension {
        int width = 0, height = 0;
    
        public Dimension(int width, int height) {
            this.width = width;
            this.height = height;
        }
    
        public int getWidth() {
            return width;
        }
    
        public void setWidth(int width) {
            this.width = width;
        }
    
        public int getHeight() {
            return height;
        }
    
        public void setHeight(int height) {
            this.height = height;
        }
    }
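
    Finally, to wire the two pieces together, something along these lines should work (a rough sketch; R.id.gl_view is just a placeholder id for a GLSurfaceView declared in the activity layout):

    // In the activity's onCreate(): attach the renderer to a GLSurfaceView from the
    // layout (R.id.gl_view is a placeholder id) and start the grabber task.
    GLSurfaceView glView = (GLSurfaceView) findViewById(R.id.gl_view);
    BitmapRenderer renderer = new BitmapRenderer(glView);
    new Player().execute(renderer);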