MediaCodec.dequeueOutputBuffer taking very long when encoding h264 on Android
I'm trying to do real-time video streaming from Android, encoding H.264 video with MediaCodec, but dequeueOutputBuffer keeps taking very long. Actually, sometimes it is very fast, but at other times it is very slow; see the log output below. I've seen it take up to 200 ms for an output buffer to be ready. Is there something wrong with my code, or do you think this is an issue with the OMX.Nvidia.h264.encoder?
Maybe I need to downsample the image from 1280x720 to something smaller? Or maybe I need to dequeue and queue more input buffers while waiting for the output buffer? (There are 6 input and 6 output buffers available.) I'm using Android API 19, so I can't use the asynchronous MediaCodec processing methods. I'm actually streaming images from a Google Project Tango tablet, so my other suspicion is that the Tango's background operations might take too long and slow down the encoder. Any ideas what could be slowing this down so much?
01-20 23:36:30.728 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.400666ms.
01-20 23:36:30.855 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 94.290667ms.
01-20 23:36:30.880 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.57ms.
01-20 23:36:30.929 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 4.878417ms.
01-20 23:36:31.042 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 77.495417ms.
01-20 23:36:31.064 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.3225ms.
01-20 23:36:31.182 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 74.777583ms.
01-20 23:36:31.195 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.23ms.
01-20 23:36:31.246 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 17.243583ms.
01-20 23:36:31.350 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 80.14725ms.
01-20 23:36:31.373 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 2.493834ms.
01-20 23:36:31.421 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.273ms.
01-20 23:36:31.546 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 93.543667ms.
01-20 23:36:31.576 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 5.309334ms.
01-20 23:36:31.619 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.402583ms.
01-20 23:36:31.686 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 22.5485ms.
01-20 23:36:31.809 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 91.392083ms.
The relevant parts of my code are below:
public class StreamingThread extends Thread {
    ...
    // encoding
    private MediaCodec mVideoEncoder = null;
    private ByteBuffer[] mEncoderInputBuffers = null;
    private ByteBuffer[] mEncoderOutputBuffers = null;
    private NV21Convertor mNV21Converter = null;

    public static native VideoFrame getNewFrame();

    public StreamingThread()
    {
        this.setPriority(MAX_PRIORITY);
    }

    @Override
    public void run()
    {
        Looper.prepare();
        init();
        Looper.loop();
    }

    private void init()
    {
        mHandler = new Handler() {
            public void handleMessage(Message msg) {
                // process incoming messages here
                switch (msg.what)
                {
                    case HAVE_NEW_FRAME: // new frame has arrived (signaled from main thread)
                        processBufferedFrames();
                        break;
                    case CLOSE_THREAD:
                        close();
                        break;
                    default:
                        Log.e(LOGTAG, "received unknown message!");
                }
            }
        };

        try {
            ...
            // set up video encoding
            final String mime = "video/avc"; // H.264/AVC
            listAvailableEncoders(mime); // (this creates some debug output only)
            String codec = "OMX.Nvidia.h264.encoder"; // instead, hard-code the codec we want to use for now
            mVideoEncoder = MediaCodec.createByCodecName(codec);
            if (mVideoEncoder == null)
                Log.e(LOGTAG, "Media codec " + codec + " is not available!");

            // TODO: change, based on what we're streaming...
            int FRAME_WIDTH = 1280;
            int FRAME_HEIGHT = 720;

            // https://github.com/fyhertz/libstreaming/blob/ac44416d88ed3112869ef0f7eab151a184bbb78d/src/net/majorkernelpanic/streaming/hw/EncoderDebugger.java
            mNV21Converter = new NV21Convertor();
            mNV21Converter.setSize(FRAME_WIDTH, FRAME_HEIGHT);
            mNV21Converter.setEncoderColorFormat(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
            mNV21Converter.setColorPanesReversed(true);
            mNV21Converter.setYPadding(0);

            MediaFormat format = MediaFormat.createVideoFormat(mime, FRAME_WIDTH, FRAME_HEIGHT);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
            // TODO: optimize bit rate
            format.setInteger(MediaFormat.KEY_BIT_RATE, 250000); // 250 kbit/s = ~0.03 MB/s
            mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mVideoEncoder.start();

            mEncoderInputBuffers = mVideoEncoder.getInputBuffers();
            mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
            Log.d(LOGTAG, "Number of input buffers " + mEncoderInputBuffers.length);
            Log.d(LOGTAG, "Number of output buffers " + mEncoderOutputBuffers.length);

            initialized = true;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void close()
    {
        Looper.myLooper().quit();
        mVideoEncoder.stop();
        mVideoEncoder.release();
        mVideoEncoder = null;
    }

    private void processBufferedFrames()
    {
        if (!initialized)
            return;
        VideoFrame frame = getNewFrame();
        try {
            sendTCPFrame(frame);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void sendTCPFrame(VideoFrame frame)
    {
        long start = System.nanoTime();
        long start2 = System.nanoTime();

        int inputBufferIndex = -1;
        while ((inputBufferIndex = mVideoEncoder.dequeueInputBuffer(-1)) < 0) { // -1: wait indefinitely for the buffer
            switch (inputBufferIndex) {
                default:
                    Log.e(LOGTAG, "dequeueInputBuffer returned unknown value: " + inputBufferIndex);
            }
        }
        // fill in input (raw) data:
        mEncoderInputBuffers[inputBufferIndex].clear();
        long stop2 = System.nanoTime();
        Log.d(LOGTAG, "dequeueInputBuffer took " + (stop2 - start2) / 1e6 + "ms.");

        start2 = System.nanoTime();
        byte[] pixels = mNV21Converter.convert(frame.pixels);
        stop2 = System.nanoTime();
        Log.d(LOGTAG, "mNV21Converter.convert took " + (stop2 - start2) / 1e6 + "ms.");

        start2 = System.nanoTime();
        mEncoderInputBuffers[inputBufferIndex].put(pixels);
        stop2 = System.nanoTime();
        Log.d(LOGTAG, "mEncoderInputBuffers[inputBufferIndex].put(pixels) took " + (stop2 - start2) / 1e6 + "ms.");

        start2 = System.nanoTime();
        // note: the presentation timestamp is expected in microseconds, not nanoseconds
        mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, pixels.length, System.nanoTime() / 1000, 0);
        stop2 = System.nanoTime();
        Log.d(LOGTAG, "queueInputBuffer took " + (stop2 - start2) / 1e6 + "ms.");

        start2 = System.nanoTime();
        // wait for encoded data to become available:
        int outputBufferIndex = -1;
        MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo();
        long timeoutUs = -1; //10000; // microseconds
        while ((outputBufferIndex = mVideoEncoder.dequeueOutputBuffer(bufInfo, timeoutUs)) < 0) { // -1: wait indefinitely for the buffer
            Log.i(LOGTAG, "dequeueOutputBuffer returned value: " + outputBufferIndex);
            switch (outputBufferIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    // output buffers have changed, move reference
                    mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    // Subsequent data will conform to new format.
                    //MediaFormat format = codec.getOutputFormat();
                    Log.e(LOGTAG, "dequeueOutputBuffer returned INFO_OUTPUT_FORMAT_CHANGED ?!");
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    Log.w(LOGTAG, "dequeueOutputBuffer returned INFO_TRY_AGAIN_LATER");
                    break;
                default:
                    Log.e(LOGTAG, "dequeueOutputBuffer returned unknown value: " + outputBufferIndex);
            }
        }
        stop2 = System.nanoTime();
        Log.d(LOGTAG, "dequeueOutputBuffer took " + (stop2 - start2) / 1e6 + "ms.");

        // output (encoded) data available!
        Log.d(LOGTAG, "encoded buffer info: size = " + bufInfo.size + ", offset = " + bufInfo.offset + ", presentationTimeUs = " + bufInfo.presentationTimeUs + ", flags = " + bufInfo.flags);
        ByteBuffer encodedData = mEncoderOutputBuffers[outputBufferIndex];
        final int sizeOfImageData = bufInfo.size;

        long stop = System.nanoTime();
        Log.d(LOGTAG, "Encoding image took " + (stop - start) / 1e6 + "ms.");

        start = System.nanoTime();
        // assemble header:
        ...

        encodedData.rewind();
        // copy (!) the encoded data into a direct buffer for sending:
        ByteBuffer imageBuffer = ByteBuffer.allocateDirect(encodedData.remaining());
        imageBuffer.put(encodedData); // TODO: can this copy be avoided?
        stop = System.nanoTime();
        Log.d(LOGTAG, "Preparing content for streaming took " + (stop - start) / 1e6 + "ms.");

        // do streaming via TCP
        ...

        mVideoEncoder.releaseOutputBuffer(outputBufferIndex, false);
    }

    // see http://developer.android.com/reference/android/media/MediaCodecInfo.html
    private void listAvailableEncoders(String mimeType)
    {
        Log.d(LOGTAG, "Available encoders for mime type " + mimeType + ":");
        for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
            MediaCodecInfo codec = MediaCodecList.getCodecInfoAt(i);
            if (!codec.isEncoder())
                continue;
            String[] types = codec.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                //if (types[j].equalsIgnoreCase(mimeType)) {
                String msg = "- name: " + codec.getName() + ", supported color formats for " + mimeType + ":";
                MediaCodecInfo.CodecCapabilities cap = codec.getCapabilitiesForType(mimeType);
                for (int k = 0; k < cap.colorFormats.length; ++k) msg = msg + " " + cap.colorFormats[k];
                Log.d(LOGTAG, msg);
                // break;
                //}
            }
        }
    }
}
Yes, there is a problem with your code - you are waiting for the current frame to come out of the encoder before moving on to the next frame. Most hardware codecs have a bit more latency than you might expect, and to get the throughput the encoder is actually capable of, you need to use it asynchronously.
That is, after submitting one input buffer for encoding, you should not wait for the corresponding encoded output buffer, but only check whether any output happens to be available. Then you should go ahead and queue the next input buffer, and again check for available output. Only once you can't get a free input buffer right away should you start waiting on the output side. That way the encoder always has more than one input buffer queued up to work on, which keeps it busy enough to actually reach the frame rate it is capable of.
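A minimal sketch of that feed pattern, against the synchronous API 19 interface used in the question. NonBlockingEncoderLoop and sendEncoded are hypothetical names of mine, and the encoder is assumed to already be configured and started as in your init():
import java.nio.ByteBuffer;
import android.media.MediaCodec;

class NonBlockingEncoderLoop {
    private final MediaCodec encoder; // configured and started elsewhere
    private ByteBuffer[] inputBuffers;
    private ByteBuffer[] outputBuffers;
    private final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    NonBlockingEncoderLoop(MediaCodec startedEncoder) {
        encoder = startedEncoder;
        inputBuffers = encoder.getInputBuffers();
        outputBuffers = encoder.getOutputBuffers();
    }

    // Called once per captured frame.
    void encodeFrame(byte[] pixels, long ptsUs) {
        int inIndex;
        // Poll for a free input buffer (timeout 0); block on the output side
        // only while the encoder has no room for more input.
        while ((inIndex = encoder.dequeueInputBuffer(0)) < 0) {
            drainOutput(10000); // wait up to 10 ms for output, then retry input
        }
        ByteBuffer input = inputBuffers[inIndex];
        input.clear();
        input.put(pixels);
        encoder.queueInputBuffer(inIndex, 0, pixels.length, ptsUs, 0);

        // Ship whatever output is ready, but do NOT wait for this frame's output.
        while (drainOutput(0)) { }
    }

    // Returns true if an encoded buffer was dequeued and sent.
    private boolean drainOutput(long timeoutUs) {
        int outIndex = encoder.dequeueOutputBuffer(info, timeoutUs);
        if (outIndex >= 0) {
            sendEncoded(outputBuffers[outIndex], info); // hypothetical TCP send
            encoder.releaseOutputBuffer(outIndex, false);
            return true;
        }
        if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            outputBuffers = encoder.getOutputBuffers();
        }
        // INFO_TRY_AGAIN_LATER / INFO_OUTPUT_FORMAT_CHANGED: nothing to send yet.
        return false;
    }

    private void sendEncoded(ByteBuffer encoded, MediaCodec.BufferInfo bufInfo) {
        // ... assemble header and write bufInfo.size bytes to the TCP socket ...
    }
}
The point is that encodeFrame() only ever blocks when the encoder has no free input buffer, so several frames are in flight at once and the per-frame dequeueOutputBuffer latency no longer caps your throughput.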
(If you can use Android 5.0, you could take a look at MediaCodec.setCallback, which makes it even easier to work with asynchronously.)
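For reference, a rough sketch of that API 21+ callback style; fillWithNextFrame() and sendEncoded() are again hypothetical stand-ins for the NV21 conversion and the TCP sending, and the callback has to be installed before start():
import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaFormat;

class CallbackEncoderSetup {
    static void setUpAsyncEncoding(MediaCodec encoder) {
        encoder.setCallback(new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
                ByteBuffer input = codec.getInputBuffer(index);
                int size = fillWithNextFrame(input); // hypothetical
                codec.queueInputBuffer(index, 0, size, System.nanoTime() / 1000, 0);
            }

            @Override
            public void onOutputBufferAvailable(MediaCodec codec, int index,
                                                MediaCodec.BufferInfo info) {
                sendEncoded(codec.getOutputBuffer(index), info); // hypothetical
                codec.releaseOutputBuffer(index, false);
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
                // codec config (e.g. SPS/PPS) becomes available here
            }

            @Override
            public void onError(MediaCodec codec, MediaCodec.CodecException e) {
                e.printStackTrace();
            }
        });
        encoder.start(); // start only after the callback is set
    }

    static int fillWithNextFrame(ByteBuffer input) {
        // hypothetical: put the next converted NV21 frame into 'input',
        // return the number of bytes written
        return 0;
    }

    static void sendEncoded(ByteBuffer encoded, MediaCodec.BufferInfo info) {
        // hypothetical: assemble header and write to the TCP socket
    }
}
With this style the framework pushes buffers to you, so the feed/drain interleaving described above happens naturally.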
There are even some codecs (mainly decoders, if my memory serves me right) that won't output the first buffer at all until you have passed them more than a handful of input buffers.