I want to use ffmpeg to play audio on Android, but when I run the project an error occurs.

What should I do?

Java side:

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Bundle;

public class FFmpegBasic extends Activity
{
    private AudioTrack track;
    private FileOutputStream os;
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        createEngine();

        // Minimum buffer size for 44.1 kHz, 16-bit stereo PCM.
        // CHANNEL_OUT_STEREO replaces the deprecated CHANNEL_CONFIGURATION_STEREO.
        int bufSize = AudioTrack.getMinBufferSize(44100,
                                                  AudioFormat.CHANNEL_OUT_STEREO,
                                                  AudioFormat.ENCODING_PCM_16BIT);

        track = new AudioTrack(AudioManager.STREAM_MUSIC,
                               44100,
                               AudioFormat.CHANNEL_OUT_STEREO,
                               AudioFormat.ENCODING_PCM_16BIT,
                               bufSize,
                               AudioTrack.MODE_STREAM);

        // Buffer that the native decoder fills with PCM samples.
        byte[] bytes = new byte[bufSize];

        try {
            // Raw PCM dump for debugging; writing here needs WRITE_EXTERNAL_STORAGE.
            os = new FileOutputStream("/sdcard/a.out", false);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }

        // Decode the whole file in native code (blocks the UI thread in this demo).
        String result = "/mnt/sdcard/Wildlife.mp3";
        loadFile(result, bytes);

        try {
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Called back from the native decoder with one buffer of decoded PCM.
    void playSound(byte[] buf, int size) {
        if(track.getPlayState()!=AudioTrack.PLAYSTATE_PLAYING)
            track.play();
        track.write(buf, 0, size);

        try {
            os.write(buf, 0, size);   // mirror the played PCM into the debug dump file
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    private native void createEngine();
    private native int loadFile(String file, byte[] array);

    /** Load the native libraries when the class is loaded. */
    static {
         System.loadLibrary("avutil"); 
         System.loadLibrary("avcore"); 
         System.loadLibrary("avcodec");
         System.loadLibrary("avformat");
         System.loadLibrary("avdevice");
         System.loadLibrary("swscale");
         System.loadLibrary("avfilter");
         System.loadLibrary("ffmpeg");
         System.loadLibrary("basicplayer");
    }
}

C side:


#include <assert.h>
#include <jni.h>
#include <string.h>
#include <android/log.h>

#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "avcodec.h"
#include "avformat.h"




void Java_net_jbong_FFmpegBasic_FFmpegBasic_createEngine(JNIEnv* env, jobject thiz)
{
    /* Register all demuxers, codecs and protocols. */
    av_register_all();
}

jint Java_net_jbong_FFmpegBasic_FFmpegBasic_loadFile(JNIEnv* env, jobject obj, jstring file, jbyteArray array)
{

    AVFormatContext *gFormatCtx = NULL;
    AVCodecContext *gAudioCodecCtx = NULL;
    AVCodec *gAudioCodec = NULL;
    int gAudioStreamIdx = -1;
    char *gAudioBuffer = NULL;
    int i;
    AVPacket packet;

    const char *str = (*env)->GetStringUTFChars(env, file, NULL);
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID play = (*env)->GetMethodID(env, cls, "playSound", "([BI)V");

    /* Open the input and read the stream information. */
    int ret = av_open_input_file(&gFormatCtx, str, NULL, 0, NULL);
    (*env)->ReleaseStringUTFChars(env, file, str);
    if (ret != 0)
        return -2;
    if (av_find_stream_info(gFormatCtx) < 0)
        return -3;

    /* Find the first audio stream. */
    for (i = 0; i < gFormatCtx->nb_streams; i++)
    {
        if (gFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            gAudioStreamIdx = i;
            break;
        }
    }

    if (gAudioStreamIdx == -1)
        return -4;

    /* Open a decoder for the audio stream. */
    gAudioCodecCtx = gFormatCtx->streams[gAudioStreamIdx]->codec;
    gAudioCodec = avcodec_find_decoder(gAudioCodecCtx->codec_id);
    if (gAudioCodec == NULL)
        return -5;
    if (avcodec_open(gAudioCodecCtx, gAudioCodec) < 0)
        return -6;

    /* One output buffer for decoded PCM, allocated once (not per packet). */
    gAudioBuffer = (char *)av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE * 2);
    jsize arrayLen = (*env)->GetArrayLength(env, array);

    while (av_read_frame(gFormatCtx, &packet) >= 0)
    {
        if (packet.stream_index == gAudioStreamIdx)
        {
            /* Walk through the packet with a copy so av_free_packet() still
             * sees the original data pointer. */
            AVPacket pkt = packet;
            while (pkt.size > 0)
            {
                /* data_size is in/out: reset it to the buffer size before each call. */
                int data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE * 2;
                int len = avcodec_decode_audio3(gAudioCodecCtx,
                                                (short *) gAudioBuffer, &data_size, &pkt);
                if (len < 0)
                    break;                      /* decode error: drop the rest of the packet */

                if (data_size > 0)
                {
                    /* Copy the decoded PCM into the Java array (never past its end)
                     * and hand it to playSound(). */
                    int copy = data_size > arrayLen ? (int)arrayLen : data_size;
                    jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL);
                    memcpy(bytes, gAudioBuffer, copy);
                    (*env)->ReleaseByteArrayElements(env, array, bytes, 0);
                    (*env)->CallVoidMethod(env, obj, play, array, copy);
                }
                pkt.data += len;
                pkt.size -= len;
            }
        }
        av_free_packet(&packet);
    }

    /* Release decoder and demuxer resources. */
    av_free(gAudioBuffer);
    avcodec_close(gAudioCodecCtx);
    av_close_input_file(gFormatCtx);
    return 0;
}

Why does my Android logcat show me this message: "Error opening trace file: No such file or directory (2)"?


1 Answer


What URL are you playing? Can you check your ffmpeg protocols to see whether it is supported? You can list them with ffmpeg -protocols.
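The same check can also be done on the device from the native library and written to logcat. Below is a minimal sketch, assuming an FFmpeg build of the same generation as the code above (one that still exposes av_protocol_next() and a public URLProtocol struct); the helper name log_supported_protocols is made up here and would be called from createEngine() after av_register_all():

#include <android/log.h>
#include "libavformat/avformat.h"   /* pulls in avio.h / av_protocol_next() */

#define LOG_TAG "basicplayer"

/* Print every protocol compiled into this libavformat build to logcat.
 * Call it after av_register_all(), e.g. at the end of createEngine(). */
static void log_supported_protocols(void)
{
    URLProtocol *p = NULL;
    while ((p = av_protocol_next(p)) != NULL)
        __android_log_print(ANDROID_LOG_INFO, LOG_TAG, "protocol: %s", p->name);
}

If the scheme of the path you pass to loadFile() (file, http, rtmp, ...) is not in that list, av_open_input_file() will fail and the FFmpeg libraries have to be rebuilt with that protocol enabled.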

answered 2013-09-22T14:51:59.207