
I'm having performance trouble in my application because of the GC, and I don't have much experience in figuring out what is going on. Here is a detailed account of what is happening.

I'm trying to build an application that processes audio in real time by applying an STFT (quick explanation follows).

Basically, I take each buffer (in my case the minimum buffer size is 1148 bytes), apply a window function, and obtain a matrix of frames; then I apply an FFT to each frame; finally, I can apply some gain to each frame depending on frequency and time. I then go back the same way (inverse FFT and overlap-add) to get a modified version of the buffer.
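
To make those steps concrete, here is a minimal sketch of how one buffer flows through the processing path (it just strings together the calls of my STFT class, shown in full below; the per-frequency gain step is still a placeholder inside fourierAnalysis()):

// Sketch: one buffer through the STFT path (method names match STFT.java below)
void processOneBuffer(STFT stft, AudioTrack at, byte[] buffer, int count) {
    stft.frameBuffer(buffer);     // windowing: split the PCM bytes into overlapping frames
    stft.fourierAnalysis();       // FFT per frame, (later) gains in the spectrum, then IFFT
    stft.buildBuffer(buffer);     // overlap-add the frames back into the same byte buffer
    at.write(buffer, 0, count);   // hand the modified buffer to the AudioTrack
}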

Since my sampling frequency is 8000 Hz, the processing of each buffer has to take less than 1148/8000 = 144 ms. Using System.currentTimeMillis() I measured that each buffer is usually processed in 70 to 100 ms, so that should be fine.

But the problem comes from the garbage collector: my memory seems to be full, as the picture below shows, and the GC activity sometimes breaks up my audio.

Screenshot: my application's log

The thing is, I have noticed two things:

  • If I skip the FFT and IFFT and just leave the frames as they are, no GC_CONCURRENT messages show up. That is because the FFT produces a lot of data (arrays of complex numbers). (A small allocation-counting sketch for checking which step allocates is shown after the screenshots below.)

  • All the processing is done in a separate thread; so I tried to trigger a GC manually from the DDMS perspective right after the application starts. As you can see from the code below, in the onCreate method the application only loads the layout. But when I trigger the GC, I see that my heap is already more than 90% used! I looked at the heap dump and generated the Leak Suspects report, and most of the memory is taken up by the classes 'android.content.res.Resources' and 'android.graphics.Bitmap'... (screenshots below)

Screenshot: heap usage right after the application starts

Screenshot: Leak Suspects report pie chart
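
To pin down which of the steps above is producing the garbage that triggers GC_CONCURRENT, something along these lines could be wrapped around each call (a rough sketch of mine; it relies on the global allocation counters in android.os.Debug, and logAllocations is just an illustrative helper name):

// Sketch: count the allocations made by one processing step (e.g. a Runnable
// that calls stft.fourierAnalysis()) using android.os.Debug's global counters.
void logAllocations(String label, Runnable step) {
    android.os.Debug.resetGlobalAllocCount();
    android.os.Debug.resetGlobalAllocSize();
    android.os.Debug.startAllocCounting();
    step.run();
    android.os.Debug.stopAllocCounting();
    android.util.Log.d("ALLOC", label + ": " + android.os.Debug.getGlobalAllocCount()
            + " objects, " + android.os.Debug.getGlobalAllocSize() + " bytes");
}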

So, do you have any suggestions? I find it strange that my memory is already 90% used right from the start, and that my heap doesn't grow a bit to accommodate what I need.

Code

MainActivity.java

package com.example.fileoutjava;

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;

public class MainActivity extends Activity {

    static final int BUFFER_FACTOR = 1; 

    DataInputStream dis;
    static final int FREQ = 8000;
    static final int FRAME_LENGHT = 32;
    static final int FRAME_SHIFT = 16;
    boolean isMusicStopped = true;
    AudioTrack at;
    Thread playThread;
    long time;


    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

    }


    public void playMusic(View v) {
        if (at == null) {
            Log.d("PLAY MUSIC", "LAUNCHING NEW PLAYER");
            playThread = new Thread(musicPlayerThread);
            playThread.start();
        }    
    }

    public void stopMusic(View v) {
        isMusicStopped = true;
        playThread = null;
    }


    Runnable musicPlayerThread = new Runnable() {       
        public void run() {
            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);

            /* eg: 8000 bytes per second, 1000 bytes = 125 ms */

            InputStream is = null;
            DataInputStream dis = null;

            try {
                is = MainActivity.this.getApplicationContext().getAssets().open("test.wav");
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (is!=null)
                dis = new DataInputStream(is); //dis = new DataInputStream(new BufferedInputStream(is,bSize));

            isMusicStopped = false;

            int min_bSize = AudioTrack.getMinBufferSize(FREQ, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
            int bSize = min_bSize*BUFFER_FACTOR;

            STFT stft = new STFT(FRAME_SHIFT,FRAME_LENGHT,FREQ,bSize);

            at = new AudioTrack(AudioManager.STREAM_MUSIC, FREQ, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bSize, AudioTrack.MODE_STREAM);            
            at.play();

            int count = 0;
            byte[] buffer = new byte[bSize];

            time = System.currentTimeMillis();

            try {
                while (!isMusicStopped && (count = dis.read(buffer, 0, bSize)) >= 0) {


                    Log.d("TIME ELAPSED", ""+(System.currentTimeMillis()-time));
                    time = System.currentTimeMillis();

                    //Windowing
                    stft.frameBuffer(buffer);

                    //fourier transform and inverse
                    stft.fourierAnalysis();

                    // Overlapp-Add
                    stft.buildBuffer(buffer);

                    at.write(buffer, 0, count);
                }

                if (at != null) {
                    at.stop();
                    at.flush();
                    at.release();
                    at = null;
                }

                if (dis != null) {
                    dis.close();
                    dis = null;
                }

                if (is != null) {
                    is.close();
                    is = null;
                }

                if (stft != null) stft = null;

            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    };


    private void stop() {
        isMusicStopped = true;
        playThread = null;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    protected void onPause() {
        this.stop();
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        this.stop();
        super.onDestroy();
    }

}

STFT.java

package com.example.fileoutjava;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import android.util.Log;

import com.badlogic.gdx.audio.analysis.FFT;

public class STFT {

    private int fs, fl; //frame shift and frame length in ms
    private int n_fs, n_fl; //frame shift and length in samples
    private int buf_len; //length of the buffer array (bytes)
    private int data_len; //length of the buffer array (converted to short)
    private int padded_data_len; //put 0 padding before and after the buffer short[] data
    private float n_segs; //number of frames that can be taken from one buffer array
    private float[][] stft_matrix;
    private float[] window; //Hamming coefficient
    private float norm_factor = 0;
    private boolean search_norm_factor = true;
    private FFT fft;
    private int i,j,k; //index for loops
    private ByteBuffer bb;
    private float[] tmp_buf;
    private float[] tmp_fft;
    private float[] tmp_ifft;

    public STFT(int frame_shift, int frame_length, int freq, int buf_len) {
        fs = frame_shift;
        fl = frame_length;
        this.buf_len = buf_len;
        this.data_len = buf_len/2;      

        //compute values from ms to samples
        n_fs = (int) Math.floor(fs*freq/1000);
        n_fl = (int) Math.floor(fl*freq/1000);

        padded_data_len = 2*n_fl + data_len;

        //create coefficients
        window = hamming(n_fl);

        tmp_buf = new float[padded_data_len];
        bb = ByteBuffer.allocateDirect(2);
        bb.order(ByteOrder.LITTLE_ENDIAN);

        //compute how many frames can be extracted from the buffer
        n_segs = 1 + (float) (Math.ceil((this.padded_data_len-n_fl)/n_fs));

        //data matrix: size of frame (with padding from previous frame) * number of segments
        stft_matrix = new float[n_fl][(int)n_segs];

        Log.d("STFT STATS", "BufLen:"+this.buf_len+" // Flen:"+n_fl+" // FSh:"+n_fs+
                " // Nsegs:"+n_segs);

        //Initialize the FFT object
        fft = new FFT(n_fl*2,freq);

        //buffers for FFT data, with zero padding
        tmp_fft= new float[n_fl*2];
        tmp_ifft = new float[n_fl];

        for (int i=0; i<n_fl*2; i++) {
            tmp_fft[i] = 0;
            tmp_ifft[i/2] = 0;
        }


    }

    //frames the whole buffer into the stft matrix
    public void frameBuffer(byte[] buf) {

        //initialize tmp_buffer and add 0 padding
        for (k=0; k<padded_data_len; k++)
            tmp_buf[k] = 0;

        //fill the short[] buffer converting from byte[] buffer

        for (i=0; i<buf_len; i+=2) {
            bb.position(0);
            bb.put(buf[i]);
            bb.put(buf[i+1]);
            tmp_buf[n_fl+i/2] = (float) bb.getShort(0);
        }


        //frame the short[] buffer into the matrix using windowing
        for (j=0; j<n_segs; j++) {
            for (int i=0; i<n_fl; i++) {
                stft_matrix[i][j] = tmp_buf[j*n_fs+i]*window[i];

                //NORMALIZATION FACTOR RETRIEVAL: only the first time
                if (search_norm_factor && (j*n_fs+i) == 512)
                    norm_factor+=window[i];
            }
        }

        if (search_norm_factor)
            norm_factor *= 1.2;
        //retrieve the norm factor only the first time
        search_norm_factor = false;

    }

    //sums all frames from STFT matrix into one buffer
    public void buildBuffer(byte[] output) {

        //initialize tmp_buffer and add 0 padding
        for (k=0; k<padded_data_len; k++)
            tmp_buf[k] = 0;

        //Overlap-Add
        for (j=0; j<n_segs; j++) {
            for (i=0; i<n_fl; i++) {
                tmp_buf[j*n_fs+i] += stft_matrix[i][j];
            }
        }

        //convert from short[] to byte[] (with normalization)
        for (i=0; i<buf_len; i+=2) {
            bb.position(0);
            bb.putShort( (short) (tmp_buf[n_fl+i/2]/norm_factor) );
            output[i] = bb.get(0);
            output[i+1] = bb.get(1);
        }

    }

    //FFT and IFFT of the buffer
    public void fourierAnalysis() {

        for (j=0; j<n_segs;j++) {

            for (i=0; i<n_fl; i++) {
                tmp_fft[i] = stft_matrix[i][j];
            }

            fft.forward(tmp_fft);
            //OPERATIONS ON THE SPECTRUM ?
            fft.inverse(tmp_ifft);


            for (int i=0; i<n_fl; i++) {
                stft_matrix[i][j] = tmp_ifft[i];
            }
        }   
    }


    //utility method for Hamming coefficients
    private float[] hamming(int len){
        float[] win = new float[len];
        for (i=0; i<len; i++){
            win[i] = (float) (0.54-0.46*Math.cos((2*Math.PI*i)/(len-1)));
        }

        return win;
    }

}

1 Answer


The beginning of this answer, https://stackoverflow.com/a/10679174/987358, explains it: the heap grows as needed, and at application startup it simply doesn't need any more than it already has.
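
To check this from inside the app, a minimal sketch (my addition, not part of the linked answer) is to log the Runtime figures: totalMemory() is only what the VM has reserved so far, and it can grow on demand up to maxMemory(), so "90% of the current heap used" right after startup does not mean the app is out of memory:

// Sketch: log how much headroom the heap really has.
static void logHeapHeadroom() {
    Runtime rt = Runtime.getRuntime();
    long used = rt.totalMemory() - rt.freeMemory();
    android.util.Log.d("HEAP", "used=" + used + " B, reservedSoFar=" + rt.totalMemory()
            + " B, hardLimit=" + rt.maxMemory() + " B");
}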

answered 2013-05-13T17:41:39.510