这是我的第一个问题,所以如果我错过了什么,请告诉我!
使用 Android API 16 的新媒体编解码器实现尝试解码视频,以便我可以发送要作为纹理应用的帧(纹理部分已经完成)。因此,我在堆栈外提供了一些帮助,但在runOutputBuffer()
我outputBufIndex
回来时-1
(或在我提供的无限循环中作为-1
超时),任何人都可以帮助解决这个问题,和/或提供关于去哪里的任何建议从那里?
感谢您的帮助,这是我的代码:
public MediaDecoder( BPRenderView bpview )
{
surface = bpview;
extractor = new MediaExtractor( );
extractor.setDataSource( filePath );
format = extractor.getTrackFormat( 0 );
mime = format.getString( MediaFormat.KEY_MIME );
createDecoder( );
runInputBuffer( );
}
public void createDecoder( )
{
codec = MediaCodec.createDecoderByType( "video/avc" );
// format =extractor.getTrackFormat( 0 );
Log.d( LOG_TAG, "Track Format: " + mime );
// format.setInteger( MediaFormat.KEY_BIT_RATE, 125000 );
// format.setInteger( MediaFormat.KEY_FRAME_RATE, 15 );
// format.setInteger( MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar );
// format.setInteger( MediaFormat.KEY_I_FRAME_INTERVAL, 5 );
codec.configure( format, null, null, 0 );
codec.start( );
codecInputBuffers = codec.getInputBuffers( );
codecOutputBuffers = codec.getOutputBuffers( );
extractor.selectTrack( 0 );
}
public void runInputBuffer( )
{
// This should take in the entire video and put it in the input buffer
int inputBufIndex = codec.dequeueInputBuffer( -1 );
if( inputBufIndex >= 0 )
{
ByteBuffer dstBuf = codecInputBuffers[ inputBufIndex ];
int sampleSize = extractor.readSampleData( dstBuf, 0 );
Log.d( "Sample Size", String.valueOf( sampleSize ) );
long presentationTimeUs = 0;
if( sampleSize < 0 )
{
sawInputEOS = true;
sampleSize = 0;
}
else
{
presentationTimeUs = extractor.getSampleTime( );
}
Log.d( LOG_TAG, "Input Buffer" );
Log.d( "InputBufIndex:", String.valueOf( inputBufIndex ) );
Log.d( "PresentationTimeUS", String.valueOf( presentationTimeUs ) );
codec.queueInputBuffer( inputBufIndex, 0, // offset
sampleSize, presentationTimeUs, sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0 );
if( !sawInputEOS )
{
Log.d( "Extractor", " Advancing" );
extractor.advance( );
}
}
runOutputBuffer( );
}
public void runOutputBuffer( )
{
BufferInfo info = new BufferInfo( );
final int res = codec.dequeueOutputBuffer( info, -1 );
Log.d( "RES: ", String.valueOf( res ) );
if( res >= 0 )
{
int outputBufIndex = res;
ByteBuffer buf = codecOutputBuffers[ outputBufIndex ];
final byte[ ] chunk = new byte[ info.size ];
buf.get( chunk ); // Read the buffer all at once
buf.clear( ); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
if( chunk.length > 0 )
{
Log.d( "Chunk: ", String.valueOf( chunk.length ) );
surface.setTexture( chunk, 320, 240 );
// mAudioTrack.write( chunk, 0, chunk.length );
// do the things
}
codec.releaseOutputBuffer( outputBufIndex, false /* render */);
if( ( info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM ) != 0 )
{
sawOutputEOS = true;
}
}
else if( res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED )
{
codecOutputBuffers = codec.getOutputBuffers( );
}
else if( res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED )
{
final MediaFormat oformat = codec.getOutputFormat( );
Log.d( LOG_TAG, "Output format has changed to " + oformat );
// mAudioTrack.setPlaybackRate( oformat.getInteger( MediaFormat.KEY_SAMPLE_RATE ) );
}
}
}