java android opencv jcodec

java - Recording Live OpenCV Processing on Android




My goal is to do a couple of things:

  1. Use OpenCV and JavaCameraView to process frames from the phone's camera feed
  2. Enable recording of that processed video as it happens

I have both of them working, but the way I had to implement number 2 is ridiculous:

  1. For each frame, write the processed Mat out as an image file.
  2. When recording stops, use the JCodec Android library to stitch the images together into a video file.

That works, but it has a pile of drawbacks: the frame rate drops unbearably during a recording, the stitching step takes about half a second per frame, and it runs out of memory for videos more than a couple of seconds long, and that's after lowering my camera's resolution to keep the images as small as possible. Even then, the video's frame rate is out of sync with reality, and the video looks absurdly sped up.

This seems ridiculous for a lot of reasons, so my question is: is there a better way to do this?

Here's a small example if anyone wants to run it. It requires the OpenCV for Android project, available here, and the JCodec for Android project, available here.

Manifest.xml:

<uses-sdk android:minSdkVersion="8" android:targetSdkVersion="22" />

<application
    android:allowBackup="true"
    android:icon="@drawable/ic_launcher"
    android:label="@string/app_name"
    android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >
    <activity
        android:name=".MainActivity"
        android:screenOrientation="landscape"
        android:configChanges="orientation|keyboardHidden|screenSize"
        android:label="@string/app_name" >
        <intent-filter>
            <action android:name="android.intent.action.MAIN" />
            <category android:name="android.intent.category.LAUNCHER" />
        </intent-filter>
    </activity>
</application>

<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

MainActivity:

package com.example.videotest;

import java.io.File;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import android.app.Activity;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2 {

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0, 255, 0);
    private int resolutionIndex = 0;
    private MatVideoWriter matVideoWriter = new MatVideoWriter();

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i("VideoTest", "OpenCV loaded successfully");
                    cameraView.enableView();
                } break;
                default: {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);
        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (cameraView != null) {
            cameraView.disableView();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();
        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        // get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        // create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);

        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);
        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();
        rgbaInnerWindow.release();

        if (matVideoWriter.isRecording()) {
            matVideoWriter.write(rgba);
        }

        return rgba;
    }

    public void changeResolution(View v) {
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if (resolutionIndex >= cameraResolutionList.size()) {
            resolutionIndex = 0;
        }

        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x"
                + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
    }

    public void startVideo(View v) {
        if (matVideoWriter.isRecording()) {
            matVideoWriter.stop();

            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            for (String img : file.list()) {
                String scanMe = new File(file, img).getAbsolutePath();
                MediaScannerConnection.scanFile(this, new String[]{scanMe}, null, null);
                Log.i("VideoTest", "Scanning: " + scanMe);
            }

            file = new File(file, "video.mp4");
            MediaScannerConnection.scanFile(this, new String[]{file.getAbsolutePath()}, null, null);
        } else {
            String state = Environment.getExternalStorageState();
            Log.i("VideoTest", "state: " + state);

            File ext = getExternalFilesDir(null);
            Log.i("VideoTest", "ext: " + ext.getAbsolutePath());

            File file = new File(getExternalFilesDir(null), "VideoTest/images/");
            if (!file.exists()) {
                boolean success = file.mkdirs();
                Log.i("VideoTest", "mkdirs: " + success);
            } else {
                Log.i("VideoTest", "file exists.");
            }

            Log.i("VideoTest", "starting recording: " + file.getAbsolutePath());
            matVideoWriter.start(file);
        }
    }
}

CameraView:

package com.example.videotest;

import java.io.FileOutputStream;
import java.util.List;

import org.opencv.android.JavaCameraView;

import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.util.AttributeSet;
import android.util.Log;

public class CameraView extends JavaCameraView {

    private String mPictureFileName;

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public List<String> getEffectList() {
        return mCamera.getParameters().getSupportedColorEffects();
    }

    public boolean isEffectSupported() {
        return (mCamera.getParameters().getColorEffect() != null);
    }

    public String getEffect() {
        return mCamera.getParameters().getColorEffect();
    }

    public void setEffect(String effect) {
        Camera.Parameters params = mCamera.getParameters();
        params.setColorEffect(effect);
        mCamera.setParameters(params);
    }

    public List<android.hardware.Camera.Size> getResolutionList() {
        return mCamera.getParameters().getSupportedPreviewSizes();
    }

    public void setResolution(int width, int height) {
        disconnectCamera();
        mMaxHeight = height;
        mMaxWidth = width;
        connectCamera(getWidth(), getHeight());
    }

    public android.hardware.Camera.Size getResolution() {
        return mCamera.getParameters().getPreviewSize();
    }
}

MatVideoWriter:

package com.example.videotest;

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.jcodec.api.android.SequenceEncoder;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;

public class MatVideoWriter {

    boolean recording;
    File dir;
    int imageIndex = 0;

    public void start(File dir) {
        this.dir = dir;
        recording = true;
    }

    public void stop() {
        recording = false;
        try {
            File file = new File(dir, "video.mp4");
            SequenceEncoder encoder = new SequenceEncoder(file);

            List<File> files = Arrays.asList(dir.listFiles());
            Collections.sort(files, new Comparator<File>() {
                @Override
                public int compare(File lhs, File rhs) {
                    return lhs.getName().compareTo(rhs.getName());
                }
            });

            for (File f : files) {
                Log.i("VideoTest", "Encoding image: " + f.getAbsolutePath());
                try {
                    Bitmap frame = BitmapFactory.decodeFile(f.getAbsolutePath());
                    encoder.encodeImage(frame);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            encoder.finish();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void write(Mat mat) {
        // convert from BGR to RGB
        Mat rgbMat = new Mat();
        Imgproc.cvtColor(mat, rgbMat, Imgproc.COLOR_BGR2RGB);

        File file = new File(dir, "img" + imageIndex + ".png");
        String filename = file.toString();
        boolean success = Highgui.imwrite(filename, rgbMat);
        Log.i("VideoTest", "Success writing img" + imageIndex + ".png: " + success);
        imageIndex++;
    }

    public boolean isRecording() {
        return recording;
    }
}
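(A likely culprit for the sped-up playback described above: as far as I can tell, the JCodec SequenceEncoder of that era encoded at a fixed frame rate and had no notion of per-frame timestamps, so whenever the camera delivered fewer frames per second than the encoder's fixed rate, the result played back faster than real time.)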

Edit: I haven't received any comments or answers, so I've also posted this on the OpenCV forum here.


I solved a similar problem by creating a MediaRecorder and passing it into the OpenCV CameraBridgeViewBase, which I modified as follows.

protected MediaRecorder mRecorder;
protected Surface mSurface = null;

public void setRecorder(MediaRecorder rec) {
    mRecorder = rec;
    if (mRecorder != null) {
        mSurface = mRecorder.getSurface();
    }
}

and

protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
    Mat modified;

    if (mListener != null) {
        modified = mListener.onCameraFrame(frame);
    } else {
        modified = frame.rgba();
    }

    boolean bmpValid = true;
    if (modified != null) {
        try {
            Utils.matToBitmap(modified, mCacheBitmap);
        } catch (Exception e) {
            Log.e(TAG, "Mat type: " + modified);
            Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmpValid = false;
        }
    }

    if (bmpValid && mCacheBitmap != null) {
        Canvas canvas;
        if (mRecorder != null) {
            canvas = mSurface.lockCanvas(null);
            canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
            Log.d(TAG, "mStretch value: " + mScale);

            if (mScale != 0) {
                canvas.drawBitmap(mCacheBitmap,
                        new Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((int)((canvas.getWidth() - mScale * mCacheBitmap.getWidth()) / 2),
                                 (int)((canvas.getHeight() - mScale * mCacheBitmap.getHeight()) / 2),
                                 (int)((canvas.getWidth() - mScale * mCacheBitmap.getWidth()) / 2 + mScale * mCacheBitmap.getWidth()),
                                 (int)((canvas.getHeight() - mScale * mCacheBitmap.getHeight()) / 2 + mScale * mCacheBitmap.getHeight())),
                        null);
            } else {
                canvas.drawBitmap(mCacheBitmap,
                        new Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
                        new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                                 (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                                 (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                                 (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()),
                        null);
            }

            if (mFpsMeter != null) {
                mFpsMeter.measure();
                mFpsMeter.draw(canvas, 20, 30);
            }

            mSurface.unlockCanvasAndPost(canvas);
        }
    }
    ...
}

I left the original part of deliverAndDrawFrame as it was, so it keeps displaying the output on the original surface. This way I can process images from the camera by implementing onCameraFrame in MainActivity and record the resulting images into a video, without needing ffmpeg. Since the MediaRecorder encodes whatever gets drawn onto its input Surface, there is no per-frame file I/O at all.

EDIT I set up the MediaRecorder as follows (note that MediaRecorder.VideoSource.SURFACE requires API level 21 or higher):

recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);

CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
recorder.setProfile(cpHigh);
recorder.setOutputFile("out.mp4");
recorder.setVideoSize(mOpenCvCameraView.mFrameWidth, mOpenCvCameraView.mFrameHeight);
recorder.setOnInfoListener(this);
recorder.setOnErrorListener(this);
recorder.prepare();

register it with the OpenCvCameraView

mOpenCvCameraView.setRecorder(recorder);

and start recording

recorder.start();
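The answer doesn't show how recording is stopped; here is a minimal sketch of a teardown sequence, assuming the setRecorder() modification shown above (the exact ordering is my assumption, not from the original answer):

// Detach the recorder from the camera view first, so deliverAndDrawFrame
// stops drawing into the recorder's Surface, then stop and release it.
mOpenCvCameraView.setRecorder(null);
recorder.stop();
recorder.release();
recorder = null;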


@HaDang pointed me to these links:

http://www.walking-productions.com/notslop/2013/01/16/android-live-streaming-courtesy-of-javacv-and-ffmpeg/

https://code.google.com/p/javacv/source/browse/samples/RecordActivity.java

That example uses a Java wrapper around FFmpeg to do the video recording. This project is a pretty useful starting point for anyone wanting to do the same thing: https://github.com/vanevery/JavaCV_0.3_stream_test

I took that project and worked it into my example. It's very messy, but it works:

package com.example.videotest;

import java.io.File;
import java.io.IOException;
import java.nio.ShortBuffer;
import java.util.List;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.FrameRecorder.Exception;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;

public class MainActivity extends Activity implements CvCameraViewListener2 {

    private CameraView cameraView;
    private Mat edgesMat;
    private final Scalar greenScalar = new Scalar(0, 255, 0);
    private int resolutionIndex = 0;

    private IplImage videoImage = null;

    boolean recording = false;
    private volatile FFmpegFrameRecorder recorder;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private String ffmpeg_link;

    long startTime = 0;

    private String LOG_TAG = "VideoTest";

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                    Log.i("VideoTest", "OpenCV loaded successfully");
                    cameraView.enableView();
                    break;
                default:
                    super.onManagerConnected(status);
                    break;
            }
        }
    };

    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);
        cameraView = (CameraView) findViewById(R.id.cameraView);
        cameraView.setVisibility(SurfaceView.VISIBLE);
        cameraView.setCvCameraViewListener(this);
    }

    private void initRecorder() {
        Log.w(LOG_TAG, "initRecorder");

        int depth = com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;
        int channels = 4;

        // if (yuvIplimage == null) {
        // Recreated after frame size is set in surface change method
        videoImage = IplImage.create(imageWidth, imageHeight, depth, channels);
        //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
        Log.v(LOG_TAG, "IplImage.create");
        // }

        File videoFile = new File(getExternalFilesDir(null), "VideoTest/images/video.mp4");
        boolean mk = videoFile.getParentFile().mkdirs();
        Log.v(LOG_TAG, "Mkdir: " + mk);

        boolean del = videoFile.delete();
        Log.v(LOG_TAG, "del: " + del);

        try {
            boolean created = videoFile.createNewFile();
            Log.v(LOG_TAG, "Created: " + created);
        } catch (IOException e) {
            e.printStackTrace();
        }

        ffmpeg_link = videoFile.getAbsolutePath();
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);

        recorder.setFormat("mp4");
        Log.v(LOG_TAG, "recorder.setFormat(\"mp4\")");

        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

        // re-set in the surface changed method as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

        // Create audio recording thread
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (cameraView != null) {
            cameraView.disableView();
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (cameraView != null)
            cameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        edgesMat = new Mat();
    }

    public void onCameraViewStopped() {
        if (edgesMat != null)
            edgesMat.release();
        edgesMat = null;
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        org.opencv.core.Size sizeRgba = rgba.size();

        int rows = (int) sizeRgba.height;
        int cols = (int) sizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;
        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        // get sub-image
        Mat rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);

        // create edgesMat from sub-image
        Imgproc.Canny(rgbaInnerWindow, edgesMat, 100, 100);

        Mat colorEdges = new Mat();
        Mat killMe = colorEdges;
        edgesMat.copyTo(colorEdges);
        Imgproc.cvtColor(colorEdges, colorEdges, Imgproc.COLOR_GRAY2BGRA);
        colorEdges = colorEdges.setTo(greenScalar, edgesMat);
        colorEdges.copyTo(rgbaInnerWindow, edgesMat);

        killMe.release();
        colorEdges.release();
        rgbaInnerWindow.release();

        if (recording) {
            byte[] byteFrame = new byte[(int) (rgba.total() * rgba.channels())];
            rgba.get(0, 0, byteFrame);
            onFrame(byteFrame);
        }

        return rgba;
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;

        if (recorder != null) {
            Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }

        MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
    }

    public void changeResolution(View v) {
        List<android.hardware.Camera.Size> cameraResolutionList = cameraView.getResolutionList();
        resolutionIndex++;
        if (resolutionIndex >= cameraResolutionList.size()) {
            resolutionIndex = 0;
        }

        android.hardware.Camera.Size resolution = cameraResolutionList.get(resolutionIndex);
        cameraView.setResolution(resolution.width, resolution.height);
        resolution = cameraView.getResolution();
        String caption = Integer.valueOf(resolution.width).toString() + "x"
                + Integer.valueOf(resolution.height).toString();
        Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();

        imageWidth = resolution.width;
        imageHeight = resolution.height;
        frameRate = cameraView.getFrameRate();
        initRecorder();
    }

    int frames = 0;

    private void onFrame(byte[] data) {
        if (videoImage != null && recording) {
            long videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

            // Put the camera preview frame right into the videoImage object
            videoImage.getByteBuffer().put(data);

            try {
                // Get the correct time
                recorder.setTimestamp(videoTimestamp);

                // Record the image into FFmpegFrameRecorder
                recorder.record(videoImage);

                frames++;
                Log.i(LOG_TAG, "Wrote Frame: " + frames);
            } catch (FFmpegFrameRecorder.Exception e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }
    }

    public void startVideo(View v) {
        recording = !recording;
        Log.i(LOG_TAG, "Recording: " + recording);

        if (recording) {
            startTime = System.currentTimeMillis();
            try {
                recorder.start();
                Log.i(LOG_TAG, "STARTED RECORDING.");
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else {
            stopRecording();
        }
    }

    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            // Set the thread priority
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio Capture/Encoding Loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    //Log.v(LOG_TAG, "audioRecord bufferReadResult: " + bufferReadResult);

                    // Changes in this variable may not be picked up despite it being "volatile"
                    if (recording) {
                        try {
                            // Write to FFmpegFrameRecorder
                            recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG, "AudioThread Finished");

            /* Capture/Encoding finished, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                MediaScannerConnection.scanFile(MainActivity.this, new String[]{ffmpeg_link}, null, null);
                Log.v(LOG_TAG, "audioRecord released");
            }
        }
    }
}
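Note that in this listing initRecorder() is only called from changeResolution(), so the recorder isn't initialized until the resolution has been cycled at least once; if you try to run it as-is, you'll want to call initRecorder() during startup too.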


Another option, which only works on Lollipop and Marshmallow, is to use the new MediaProjectionManager to capture and record whatever happens on your device's screen. A great example is here:

http://www.mattsnider.com/video-recording-with-mediaprojectionmanager/

It's completely independent of your device's camera and doesn't need any access to it, or to whatever you're doing with OpenCV. It just records whatever you've displayed on your screen.
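For reference, a minimal sketch of how that API fits together, assuming a MediaRecorder already prepared with VideoSource.SURFACE like the one above (REQUEST_SCREEN_CAPTURE is a hypothetical request code; error handling omitted):

// Ask the user for permission to capture the screen (API 21+).
MediaProjectionManager mpm =
        (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
startActivityForResult(mpm.createScreenCaptureIntent(), REQUEST_SCREEN_CAPTURE);

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == REQUEST_SCREEN_CAPTURE && resultCode == RESULT_OK) {
        MediaProjection projection = mpm.getMediaProjection(resultCode, data);
        DisplayMetrics metrics = getResources().getDisplayMetrics();

        // Mirror the screen into the MediaRecorder's input Surface;
        // everything drawn on screen ends up in the encoded video.
        projection.createVirtualDisplay("ScreenRecording",
                metrics.widthPixels, metrics.heightPixels, metrics.densityDpi,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                recorder.getSurface(), null, null);

        recorder.start();
    }
}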