I am trying to create a video from a series of images using MediaCodec and MediaMuxer, and I also want to apply an Effect to each Bitmap before encoding it.
The video is created with the correct duration, but the output is only a black screen.
I have been looking at the EncodeAndMux example at bigflake.com (I modified generateFrame() and added a shared context when creating the CodecInputSurface) and at Google's HelloEffects sample to set up the EffectFactory.
My idea was to create two EGLContexts, one for rendering the effect and one for the MediaCodec encoder, and to swap buffers between them; the per-frame flow I am aiming for is sketched below.
I am new to OpenGL ES and Android, so please tell me if I am going about this the wrong way. Any help understanding what the problem is would be greatly appreciated.
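To make it clearer, this is roughly the per-frame sequence I am aiming for (just a sketch using my own wrappers, not working code; names like w, h, bitmap and presentationTimeNs are placeholders, and drawing the output texture into the encoder surface is the part I am least sure about):

    // Intended per-frame flow (sketch, not working code):
    mEnv.makeCurrent();                              // offscreen "effect" context
    int texId = loadTextures(bitmap);                // upload the Bitmap to mTextures[0]
    mEffect.apply(mTextures[0], w, h, mTextures[1]); // render effect into mTextures[1]
    mInputSurface.makeCurrent();                     // encoder context (shares textures)
    // ...somehow draw mTextures[1] here? this is where I am stuck...
    mInputSurface.setPresentationTime(presentationTimeNs);
    mInputSurface.swapBuffers();                     // submit the frame to MediaCodec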
Here is my code:
public class WriteMovieActivity extends Activity {
private static final String TAG = "WRITE MOVIE";
private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory();
private int BPM;
private int numberOfPhotos;
private List<Integer> durations;
private int durationInSec;
private Cursor mCursor;
private MyApplication mApp;
private String filterName;
private String size;
private int firstImageIndex;
private int lastImageIndex;
private String albumName;
private String orientation;
private int[] imageIDs;
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 10; // 10fps
private static final int IFRAME_INTERVAL = 5; // 5 seconds between I-frames
private ImageLoader loader;
private CodecInputSurface mInputSurface;
private MediaMuxer mMuxer;
private MediaCodec mEncoder;
private MediaCodec.BufferInfo mBufferInfo;
private int mBitRate = -1;
private int mTrackIndex;
private boolean mMuxerStarted;
private int mWidth;
private int mHeight;
private int mImageWidth;
private int mImageHeight;
private DisplayImageOptions options;
private long presentationTime;
private long durationInNanosec;
private int[] mTextures = new int[2];
private int texId = -1;
private EffectContext mEffectContext;
private Effect mEffect;
private TextureRenderer mTexRenderer = new TextureRenderer();
private GLEnv mEnv;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_write_movie);
mApp = (MyApplication) getApplication();
firstImageIndex = mApp.getFirstImageIndex();
lastImageIndex = mApp.getLastImageIndex();
numberOfPhotos = lastImageIndex - firstImageIndex + 1;
Log.v(TAG, "FirstImage " + firstImageIndex + " ------ lastImage " + lastImageIndex);
albumName = mApp.getAlbumName();
filterName = mApp.getFilter();
size = mApp.getSize();
orientation = mApp.getOrientation();
loader = ImageLoader.getInstance();
if (size == "small") {
mBitRate = 1000000;
if (orientation == "portrait") {
mWidth = 144;
mHeight = 176;
} else {
mWidth = 176;
mHeight = 144;
}
} else if (size == "default") {
mBitRate = 2000000;
if (orientation == "portrait") {
mWidth = 240;
mHeight = 320;
} else {
mWidth = 320;
mHeight = 240;
}
} else {
mBitRate = 6000000;
if (orientation == "portrait") {
mWidth = 720;
mHeight = 1280;
} else {
mWidth = 1280;
mHeight = 720;
}
}
//Get the image ids for bucket
imageIDs = getImagesFromBucket(albumName);
BitmapFactory.Options resizeOptions = new BitmapFactory.Options();
resizeOptions.inSampleSize = 1; // 1 = no downsampling, decode at full size
resizeOptions.inScaled = true;
options = new DisplayImageOptions.Builder()
.cacheOnDisk(true)
.considerExifParams(true)
.bitmapConfig(Bitmap.Config.RGB_565)
.decodingOptions(resizeOptions)
.postProcessor(new BitmapProcessor() {
@Override
public Bitmap process(Bitmap bmp) {
return scaleBitmap(bmp);
}
})
.build();
BPM = Math.round(mApp.getBPM());
durationInSec = mApp.getDuration();
if (durationInSec == 0) {
//TODO: Get the duration of the song
}
durations = getDurationsArray(durationInSec);
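// GLEnv is my own small helper that sets up an offscreen EGL context for the
// effect pass; makeCurrent() binds it on this thread so the EffectContext
// can be created against it.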
mEnv = new GLEnv();
mEnv.makeCurrent();
mEffectContext = EffectContext.createWithCurrentGlContext();
mTexRenderer.init();
}
private Bitmap scaleBitmap(Bitmap bm) {
int width = bm.getWidth();
int height = bm.getHeight();
//Log.v("Pictures", "Width and height are " + width + "--" + height);
if (width > height) {
// landscape
float ratio = (float) width / mWidth;
width = mWidth;
height = (int)(height / ratio);
} else if (height > width) {
// portrait
float ratio = (float) height / mHeight;
height = mHeight;
width = (int)(width / ratio);
} else {
// square
height = mHeight;
width = mWidth;
}
//Log.v("Pictures", "after scaling Width and height are " + width + "--" + height);
bm = Bitmap.createScaledBitmap(bm, width, height, true);
return bm;
}
public void createMovie(View view) {
Log.v(TAG,"CREATING MOVIE");
try {
prepareEncoder();
presentationTime = 0;
int j = 0;
for (int i = firstImageIndex ; i <= lastImageIndex; i++) {
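// Drain any pending encoder output first, so the codec never stalls
// waiting for us to consume it.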
drainEncoder(false);
generateSurfaceFrame(i);
mInputSurface.setPresentationTime(presentationTime);
Log.v(TAG,"sending frame " + i + " to encoder");
mInputSurface.swapBuffers();
//Convert frame duration from milliseconds to nanoseconds
durationInNanosec = (long)((float)durations.get(j) * 1000000);
presentationTime += durationInNanosec;
j++;
}
drainEncoder(true);
} catch(Throwable e) {
Log.e(TAG,e.getMessage(),e);
} finally {
// release encoder, muxer, and input Surface
releaseEncoder();
Log.v(TAG,"VIDEO CREATED");
Toast.makeText(this, "Video Created!",
Toast.LENGTH_LONG).show();
}
}
private void generateSurfaceFrame(int frameIndex) {
mEnv.makeCurrent();
if (texId >= 0){
mEnv.releaseTexture(texId);
}
int imageID = imageIDs[frameIndex];
Uri imageURI = Uri.withAppendedPath(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, "" + imageID);
texId = loadTextures(loader.loadImageSync(imageURI.toString()));
initAndApplyEffect();
renderResult();
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
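// Read back the pixels that renderResult() just drew.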
ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(mImageWidth * mImageHeight * 4).order(ByteOrder.nativeOrder());
GLES20.glReadPixels(0, 0, mImageWidth, mImageHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
mEnv.checkForEGLErrors("store Pixels");
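// Switch to the encoder's EGL surface/context; createMovie() calls swapBuffers() next.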
mInputSurface.makeCurrent();
}
private void initAndApplyEffect()
{
EffectFactory effectFactory = mEffectContext.getFactory();
if (mEffect != null)
{
mEffect.release();
}
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_BRIGHTNESS);
mEffect.setParameter("brightness", 2.0f);
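// apply() renders the effect from mTextures[0] (input) into mTextures[1] (output).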
mEffect.apply(mTextures[0], mWidth, mHeight, mTextures[1]);
}
private int loadTextures(Bitmap bitmap)
{
// Generate textures
GLES20.glGenTextures(2, mTextures, 0);
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
mTexRenderer.updateTextureSize(mImageWidth, mImageHeight);
// Upload to texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Set texture parameters
GLToolbox.initTexParams();
return mTextures[0];
}
private void renderResult()
{
mTexRenderer.renderTexture(mTextures[1]);
//mTexRenderer.renderTexture(mTextures[0]);
}
private void prepareEncoder() {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
Log.v(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
//
// If you want to have two EGL contexts -- one for display, one for recording --
// you will likely want to defer instantiation of CodecInputSurface until after the
// "display" EGL context is created, then modify the eglCreateContext call to
// take eglGetCurrentContext() as the share_context argument.
try {
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
} catch (IOException e) {
throw new RuntimeException("failed to create encoder for " + MIME_TYPE, e);
}
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
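// createInputSurface() must be called after configure() and before start().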
mInputSurface = new CodecInputSurface(mEncoder.createInputSurface());
mEncoder.start();
// Output filename. Ideally this would use Context.getFilesDir() rather than a
// hard-coded output directory.
String outputPath = new File(OUTPUT_DIR,
"test." + mWidth + "x" + mHeight + ".mp4").toString();
Log.d(TAG, "output file is " + outputPath);
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
try {
mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException ioe) {
throw new RuntimeException("MediaMuxer creation failed", ioe);
}
mTrackIndex = -1;
mMuxerStarted = false;
}
private void releaseEncoder() {
Log.v(TAG, "releasing encoder objects");
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mMuxer != null) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
}
private void drainEncoder(boolean endOfStream) {
final int TIMEOUT_USEC = 10000;
Log.v(TAG, "drainEncoder(" + endOfStream + ")");
if (endOfStream) {
Log.v(TAG, "sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!endOfStream) {
break; // out of while
} else {
Log.v(TAG, "no output available, spinning to await EOS");
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + newFormat);
// now that we have the Magic Goodies, start the muxer
mTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
Log.v(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
Log.v(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
if (!endOfStream) {
Log.w(TAG, "reached end of stream unexpectedly");
} else {
Log.v(TAG, "end of stream reached");
}
break; // out of while
}
}
}
}
private static class CodecInputSurface {
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
private Surface mSurface;
/**
* Creates a CodecInputSurface from a Surface.
*/
public CodecInputSurface(Surface surface) {
if (surface == null) {
throw new NullPointerException();
}
mSurface = surface;
eglSetup();
}
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for recording and OpenGL ES 2.0.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
numConfigs, 0);
checkEglError("eglCreateContext RGB888+recordable ES2");
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
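// Passing eglGetCurrentContext() as share_context makes this context share
// texture objects with the effect context that is current on this thread.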
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.eglGetCurrentContext(),
attrib_list, 0);
checkEglError("eglCreateContext");
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
}
/**
* Discards all resources held by this class, notably the EGL context. Also releases the
* Surface that was passed to our constructor.
*/
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mSurface.release();
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
mEGLSurface = EGL14.EGL_NO_SURFACE;
mSurface = null;
}
/**
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
checkEglError("eglMakeCurrent");
}
/**
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*/
public boolean swapBuffers() {
boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
checkEglError("eglSwapBuffers");
return result;
}
/**
* Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
*/
public void setPresentationTime(long nsecs) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
checkEglError("eglPresentationTimeANDROID");
}
/**
* Checks for EGL errors. Throws an exception if one is found.
*/
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
}
}
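(getImagesFromBucket() and getDurationsArray() are omitted for brevity. getDurationsArray() just produces one duration in milliseconds per photo from the BPM; simplified, it does roughly this:)

    private List<Integer> getDurationsArray(int totalDurationSec) {
        // Simplified version: one beat per photo.
        int beatMs = Math.round(60000f / BPM); // milliseconds per beat
        List<Integer> result = new ArrayList<Integer>();
        for (int i = 0; i < numberOfPhotos; i++) {
            result.add(beatMs);
        }
        return result;
    }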
This is the logcat:
05-06 17:27:19.871 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ drainEncoder(false)
05-06 17:27:19.876 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ sent 10 bytes to muxer
05-06 17:27:19.926 17114-17114/com.example.andreaskaitis.myapplication D/dalvikvm﹕ GC_FOR_ALLOC freed 51250K, 55% free 24821K/54524K, paused 31ms, total 31ms
05-06 17:27:20.011 17114-17114/com.example.andreaskaitis.myapplication I/dalvikvm-heap﹕ Grow heap (frag case) to 62.052MB for 38340880-byte allocation
05-06 17:27:20.021 17114-17124/com.example.andreaskaitis.myapplication D/dalvikvm﹕ GC_FOR_ALLOC freed <1K, 33% free 62263K/91968K, paused 13ms, total 13ms
05-06 17:27:20.286 17114-17114/com.example.andreaskaitis.myapplication D/dalvikvm﹕ GC_FOR_ALLOC freed 48K, 33% free 62215K/91968K, paused 9ms, total 10ms
05-06 17:27:20.336 17114-17114/com.example.andreaskaitis.myapplication I/dalvikvm-heap﹕ Grow heap (frag case) to 87.003MB for 26211856-byte allocation
05-06 17:27:20.346 17114-17124/com.example.andreaskaitis.myapplication D/dalvikvm﹕ GC_FOR_ALLOC freed <1K, 26% free 87813K/117568K, paused 12ms, total 12ms
05-06 17:27:20.491 17114-17114/com.example.andreaskaitis.myapplication D/dalvikvm﹕ GC_FOR_ALLOC freed 37454K, 58% free 50370K/117568K, paused 15ms, total 15ms
05-06 17:27:20.506 17114-17114/com.example.andreaskaitis.myapplication I/dalvikvm-heap﹕ Grow heap (frag case) to 75.435MB for 26211856-byte allocation
05-06 17:27:20.521 17114-17114/com.example.andreaskaitis.myapplication D/dalvikvm﹕ GC_FOR_ALLOC freed <1K, 36% free 75967K/117568K, paused 13ms, total 13ms
05-06 17:27:20.521 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ sending frame 58 to encoder
05-06 17:27:20.521 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ drainEncoder(true)
05-06 17:27:20.521 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ sending EOS to encoder
05-06 17:27:20.526 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ sent 10 bytes to muxer
05-06 17:27:20.531 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ end of stream reached
05-06 17:27:20.531 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ releasing encoder objects
05-06 17:27:20.531 17114-17709/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now Executing->Idle
05-06 17:27:20.541 17114-17709/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now Idle->Loaded
05-06 17:27:20.541 17114-17709/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now Loaded
05-06 17:27:20.541 17114-17709/com.example.andreaskaitis.myapplication I/ACodec﹕ [OMX.Exynos.AVC.Encoder] Now uninitialized
05-06 17:27:20.546 17114-17114/com.example.andreaskaitis.myapplication D/MPEG4Writer﹕ Stopping Video track
05-06 17:27:20.546 17114-17724/com.example.andreaskaitis.myapplication I/MPEG4Writer﹕ Received total/0-length (59/0) buffers and encoded 59 frames. - video
05-06 17:27:20.546 17114-17114/com.example.andreaskaitis.myapplication D/MPEG4Writer﹕ Stopping Video track source
05-06 17:27:20.546 17114-17114/com.example.andreaskaitis.myapplication D/MPEG4Writer﹕ Video track stopped
05-06 17:27:20.546 17114-17114/com.example.andreaskaitis.myapplication D/MPEG4Writer﹕ Stopping writer thread
05-06 17:27:20.546 17114-17723/com.example.andreaskaitis.myapplication D/MPEG4Writer﹕ 0 chunks are written in the last batch
05-06 17:27:20.546 17114-17114/com.example.andreaskaitis.myapplication D/MPEG4Writer﹕ Writer thread stopped
05-06 17:27:20.551 17114-17114/com.example.andreaskaitis.myapplication I/MPEG4Writer﹕ The mp4 file will not be streamable.
05-06 17:27:20.551 17114-17114/com.example.andreaskaitis.myapplication D/MPEG4Writer﹕ Stopping Video track
05-06 17:27:20.551 17114-17114/com.example.andreaskaitis.myapplication V/WRITE MOVIE﹕ VIDEO CREATED