Unable to do low-level video decoding on Android 4.2 without using MediaExtractor
try {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    FileInputStream fis = new FileInputStream(new File("ur file path"));
    byte[] buf = new byte[1024];
    int n;
    while (-1 != (n = fis.read(buf))) {
        baos.write(buf, 0, n);
    }
    fis.close();
    byte[] videoBytes = baos.toByteArray();
    // use these videoBytes, which are the raw bytes of the original video file
} catch (Exception e) {
    e.printStackTrace();
}
I wanted to decode video frames without using the extractor. So I tried a small sample where I still create the MediaExtractor but do not call extractor.readSampleData() to copy the bitstream into the input buffer; instead I fill the input byte buffers myself (through FFmpeg, see below) and then queue the input buffer. But when I call decoder.dequeueOutputBuffer(info, 10000), it returns MediaCodec.INFO_TRY_AGAIN_LATER, while it works fine if I use extractor.readSampleData().
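For contrast, the extractor-driven input path that does work fills the same buffer with extractor.readSampleData() and takes its timestamp from extractor.getSampleTime(). A minimal sketch of that loop body, using the same decoder, extractor, and inputBuffers fields as in the full listing below:

int inIndex = decoder.dequeueInputBuffer(10000);
if (inIndex >= 0) {
    ByteBuffer buffer = inputBuffers[inIndex];
    // readSampleData() copies one complete access unit into the buffer
    // and returns its size, or -1 at end of stream.
    int sampleSize = extractor.readSampleData(buffer, 0);
    if (sampleSize < 0) {
        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    } else {
        // The extractor also supplies the presentation timestamp in microseconds.
        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
        extractor.advance();
    }
}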
Please find the code snippet below:
Java side:
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.app.Activity;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class VideoBrowser extends Activity implements SurfaceHolder.Callback {
private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/obama.mp4";
private PlayerThread mPlayer = null;
private static native int AVinitializecntxt(String strl, int[] arr);
private native int AVREADVIDEO(byte[] array);
public int FLAG = 0;
public int jk = 0;
File f1;
FileOutputStream f;
static {
Log.i("ABCD", "BEFORE");
System.loadLibrary("ffmpeg");
System.loadLibrary("ffmpeg-test-jni");
Log.i("ABCD", "Success");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
SurfaceView sv = new SurfaceView(this);
sv.getHolder().addCallback(this);
setContentView(sv);
int val;
int[] array = new int[6];
int END_OF_FILE = 0;
int aud_stream = 0;
int vid_stream = 0;
String urlString = "/mnt/sdcard/obama.mp4";
f1 = new File("/mnt/sdcard/t.h264");
try {
f = new FileOutputStream(f1);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// This is where I call the function to initialize the ffmpeg inside JNI
val = AVinitializecntxt(urlString, array);
FLAG = val;
Log.i("ABCD", "FLAG : "+ FLAG + val);
}
protected void onDestroy() {
super.onDestroy();
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (mPlayer == null) {
mPlayer = new PlayerThread(holder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (mPlayer != null) {
mPlayer.interrupt();
}
}
private class PlayerThread extends Thread {
private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
// private VideoPlayer VideoPlayerAPIInterfaceClass = new VideoPlayer();
public PlayerThread(Surface surface) {
this.surface = surface;
}
@Override
public void run() {
if(FLAG == 1){
extractor = new MediaExtractor();
try {
    extractor.setDataSource(SAMPLE);
} catch (IOException e) {
    // setDataSource(String) throws IOException, which run() cannot rethrow
    e.printStackTrace();
    return;
}
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
decoder = MediaCodec.createDecoderByType("video/avc");
// Log.i("ABCD", "MIME : " + mime);
decoder.configure(format, surface, null, 0);
break;
}
}
if (decoder == null) {
Log.e("DecodeActivity", "Can''t find video info!");
return;
}
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
BufferInfo info = new BufferInfo();
boolean isEOS = false;
long startMs = System.currentTimeMillis();
int outIndex1 = -1 ;
while(outIndex1 < 0){
outIndex1 = decoder.dequeueOutputBuffer(info, 10000);
Log.i("ABCD", "etgeuieoy");
}
while (!Thread.interrupted()) {
if (!isEOS) {
int inIndex = decoder.dequeueInputBuffer(10000);
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
// int sampleSize = extractor.readSampleData(buffer, 0);
byte[] bytes = new byte[buffer.capacity()];
// This is where we call the JNI function to memcpy the encoded bitstream into the input buffer
int sampleSize = AVREADVIDEO(bytes);
if (sampleSize < 0) {
    // We shouldn't stop the playback at this point; just pass the EOS
    // flag to the decoder, we will get it again from
    // dequeueOutputBuffer
    // Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
    decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    isEOS = true;
} else {
    buffer.clear();
    buffer.put(bytes, 0, sampleSize);
    decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);
    extractor.advance();
}
}
}
int outIndex = decoder.dequeueOutputBuffer(info, 10000);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
break;
default:
ByteBuffer buffer = outputBuffers[outIndex];
Log.v("DecodeActivity", "We can''t use this buffer but render it due to the API limit, " + buffer);
// We use a very simple clock to keep the video FPS, or the video
// playback will be too fast
while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
try {
sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}
// Log.i("ABCD", "RELEASING OUTPUT BUFFER");
decoder.releaseOutputBuffer(outIndex, true);
//decoder.releaseOutputBuffer(outIndex, false);
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}
}
decoder.stop();
decoder.release();
extractor.release();
}
}
}
}
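One more detail in the listing above: every queueInputBuffer() call passes 0 as presentationTimeUs, so BufferInfo.presentationTimeUs is 0 for every decoded frame and the pacing loop around sleep(10) never waits. A minimal sketch of how the timestamp could be threaded through, assuming a hypothetical native variant AVREADVIDEOPTS that also reports the packet's pts (converted to microseconds) via an out-parameter:

// Hypothetical JNI signature: fills 'array' and stores the packet pts
// (in microseconds) into ptsUs[0]; returns the sample size or -1 at EOS.
// private native int AVREADVIDEOPTS(byte[] array, long[] ptsUs);
long[] ptsUs = new long[1];
int sampleSize = AVREADVIDEOPTS(bytes, ptsUs);
if (sampleSize >= 0) {
    buffer.clear();
    buffer.put(bytes, 0, sampleSize);
    // A real timestamp makes the render-pacing comparison against
    // System.currentTimeMillis() meaningful.
    decoder.queueInputBuffer(inIndex, 0, sampleSize, ptsUs[0], 0);
}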
JNI side:
JNIEXPORT jint JNICALL
Java_com_alldigital_videoplayer_VideoBrowser_AVREADVIDEO(JNIEnv *pEnv,
        jobject pObj, jbyteArray array) {
    AV_ctxt *avctxt = &aud_vid_ctxt;
    jbyte *buf = (*pEnv)->GetByteArrayElements(pEnv, array, NULL);
    if (buf == NULL) {
        LOGERR(10, "AVVIDEOREAD", "Bytes null");
        return -1;
    }
    AVPacket *packet = av_malloc(sizeof(AVPacket));
    av_init_packet(packet);
    int size = -1; // -1 signals end of stream (or error) to the Java side
    // Skip non-video packets instead of returning their size, otherwise the
    // Java side would queue audio data as video samples.
    while (av_read_frame(avctxt->gFormatCtx, packet) >= 0) {
        if (packet->stream_index == avctxt->gVideoStreamIndex) {
            // Copy the encoded bitstream of this packet into the Java array
            memcpy(buf, (char *) packet->data, packet->size);
            size = packet->size;
            av_free_packet(packet);
            break;
        }
        av_free_packet(packet);
    }
    (*pEnv)->ReleaseByteArrayElements(pEnv, array, buf, 0);
    av_free(packet);
    packet = NULL;
    return size;
}
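Since the buffers are now filled outside MediaExtractor, it is worth dumping what the decoder actually receives: MediaExtractor delivers samples in the layout the configured codec expects, while av_read_frame() on an MP4 typically yields length-prefixed AVCC NAL units, with the SPS/PPS kept in the codec extradata rather than in the packets themselves. A small Java-side debugging helper, a sketch only, assuming it is called from the input loop above with the selected track's format and the bytes filled by AVREADVIDEO:

// Debugging sketch: compare the configured codec-specific data with the
// first bytes of a buffer about to be queued. Annex-B data starts with
// 00 00 00 01; AVCC packets start with a 4-byte big-endian NAL length.
private static void dumpInputLayout(MediaFormat format, byte[] bytes, int sampleSize) {
    ByteBuffer csd0 = format.getByteBuffer("csd-0"); // SPS, if the format carries one
    Log.d("DecodeActivity", "csd-0: " + (csd0 == null ? "absent" : csd0.remaining() + " bytes"));
    StringBuilder head = new StringBuilder();
    for (int i = 0; i < Math.min(8, sampleSize); i++) {
        head.append(String.format("%02x ", bytes[i]));
    }
    Log.d("DecodeActivity", "first input bytes: " + head);
}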
Can someone help me with this quickly? I am copying the encoded data of each frame through FFmpeg without calling the extractor, so why do I get this output buffer timeout?
Thanks