
I'm trying to write a simple Android app that displays the various pitches in a .wav file as it plays. I'm using TarsosDSP for the pitch processing and an AudioTrack to play the file.

Before diving into the code here: I'm running Android Studio 3.4.2 with JRE 1.8.0, and my minSdkVersion is 23.

As I understand how TarsosDSP works, I connect the wav stream to an AudioDispatcher object, attach processors (the player itself and the pitch evaluator), then hand the dispatcher to a thread and start it to kick everything off. I also understand that I may well be doing something silly (somewhere...).
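In code, the pipeline I have in mind looks roughly like this (a minimal sketch, assuming my 16 kHz mono PCM format; the full code is further down):

TarsosDSPAudioInputStream audioStream = new UniversalAudioInputStream(fileInputStream, tarsosDSPAudioFormat);

// Read the stream in 2048-sample blocks with no overlap between blocks.
AudioDispatcher dispatcher = new AudioDispatcher(audioStream, 2048, 0);

// Processors run in order on every block: play it, then estimate its pitch.
dispatcher.addAudioProcessor(new AndroidAudioPlayer(tarsosDSPAudioFormat, 16000, AudioManager.STREAM_MUSIC));
dispatcher.addAudioProcessor(new PitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, 16000, 2048, pitchDetectionHandler));

// The dispatcher is a Runnable, so hand it to a thread to keep it off the UI thread.
new Thread(dispatcher, "Audio Thread").start();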

I ran into problems with the AudioTrack Builder, because a lot of the examples I found use the now-deprecated constructor that takes AudioManager.STREAM_MUSIC.
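For reference, this is my understanding of the non-deprecated way to build the player (a sketch only; the format values mirror the 16 kHz mono test file I'm using):

AudioTrack audioTrack = new AudioTrack.Builder()
        .setAudioAttributes(new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_MEDIA)              // replaces AudioManager.STREAM_MUSIC
                .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                .build())
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(16000)
                .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
                .build())
        .setTransferMode(AudioTrack.MODE_STREAM)
        .setBufferSizeInBytes(AudioTrack.getMinBufferSize(16000,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT))
        .build();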

Update: I managed to find someone doing more or less what I'm after (I just had to have Google translate it from Korean): https://junyoung-jamong.github.io/signal/processing/2019/02/09/How-to-use-tarsosDSP-in-Android.html

After refactoring, I was able to move the AudioPlayer work I was doing into my AudioMethods class.

Update: After updating the code, the file plays correctly and the pitch evaluation seems to be working, but I only get two samples, and they show up in logcat before I actually hear the audio play. Can someone explain this? Also, I'd like to know where the number of samples is set/calculated, so I can get it to take more than two.

D/EBB Inside Run: Pitch:372.05637
D/EBB Inside Run: Pitch:412.30508
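My working assumption is that the number of pitch callbacks comes from the dispatcher's buffer size and overlap rather than from the PitchProcessor, so at 16 kHz a 2048-sample buffer with zero overlap should give roughly one estimate every 128 ms (if that assumption is wrong, that's part of my question):

// Rough expectation, assuming handlePitch fires once per dispatcher buffer.
float sampleRate = 16000f; // from my TarsosDSPAudioFormat
int bufferSize = 2048;     // second argument to the AudioDispatcher constructor
int overlap = 0;           // third argument; raising it should mean more callbacks
float secondsPerCallback = (bufferSize - overlap) / sampleRate;       // ~0.128 s
float expectedCallbacks = (millSecond / 1000f) / secondsPerCallback;  // duration / hop
Log.d("EBB", "Expecting ~" + expectedCallbacks + " pitch samples");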

MainActivity

public class MainActivity extends AppCompatActivity {
private TextView local_NoteText;
private TextView local_PitchText;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    local_PitchText = findViewById(R.id.pitchText);
    local_NoteText = findViewById(R.id.noteText);

}

@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.menu_main, menu);
    return true;
}

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Handle action bar item clicks here. The action bar will
    // automatically handle clicks on the Home/Up button, so long
    // as you specify a parent activity in AndroidManifest.xml.
    int id = item.getItemId();

    //noinspection SimplifiableIfStatement
    if (id == R.id.action_settings) {
        return true;
    }

    return super.onOptionsItemSelected(item);
}

public void ProcessTone(View view) throws IOException {
    //get the test file
    final AssetFileDescriptor afd = getResources().openRawResourceFd(R.raw.avery_test);
    AudioMethods audioMethods = new AudioMethods();

    TarsosDSPAudioFormat tarsosDSPAudioFormat = new TarsosDSPAudioFormat(TarsosDSPAudioFormat.Encoding.PCM_SIGNED,
            16000, // sample rate in Hz
            16,    // bits per sample
            1,     // channels (mono)
            2,     // frame size in bytes (16-bit mono = 2 bytes)
            16000, // frame rate (frames per second)
            ByteOrder.BIG_ENDIAN.equals(ByteOrder.nativeOrder())); // bigEndian flag, matched to the device's native byte order

    audioMethods.getPitchFromFile(afd, MainActivity.this, tarsosDSPAudioFormat, local_NoteText,local_PitchText);
  }

}

AudioMethods

public class AudioMethods {
public static AudioDispatcher dispatcher;
public float pitchInHz;
public int millSecond;

public void getPitchFromFile(final AssetFileDescriptor afd, final Activity activity, TarsosDSPAudioFormat tarsosDSPAudioFormat, final TextView pitchText, final TextView noteText) {
    try {
        releaseDispatcher(dispatcher);

        // The AssetFileDescriptor's FileDescriptor covers the whole resource file,
        // so skip ahead to the offset where this asset actually starts.
        FileInputStream fileInputStream = new FileInputStream(afd.getFileDescriptor());
        fileInputStream.skip(afd.getStartOffset());

        // I only need this to get the duration, in case the dispatcher can't detect when the audio file is finished.
        MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
        mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
        millSecond = Integer.parseInt(durationStr);

        dispatcher = new AudioDispatcher(new UniversalAudioInputStream(fileInputStream, tarsosDSPAudioFormat), 2048, 0);
        final AudioProcessor playerProcessor = new AndroidAudioPlayer(tarsosDSPAudioFormat, 16000, 0);

        dispatcher.addAudioProcessor(playerProcessor);

        PitchDetectionHandler pitchDetectionHandler = new PitchDetectionHandler() {

            @Override
            public void handlePitch(final PitchDetectionResult res, AudioEvent e) {
                pitchInHz = res.getPitch();
                //if(pitchInHz > 0){Log.d("EBB Outside Run","Pitch:" + pitchInHz);}
                activity.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if(pitchInHz > 0){Log.d("EBB Inside Run","Pitch:" + pitchInHz);}
                        pitchText.setText(pitchInHz + "");
                        processPitch(pitchInHz);
                    }
                });
            }

            public void processPitch(float pitchInHz) {

                if(pitchInHz >= 110 && pitchInHz < 123.47) {
                    //A
                    noteText.setText("A");
                }
                else if(pitchInHz >= 123.47 && pitchInHz < 130.81) {
                    //B
                    noteText.setText("B");
                }
                else if(pitchInHz >= 130.81 && pitchInHz < 146.83) {
                    //C
                    noteText.setText("C");
                }
                else if(pitchInHz >= 146.83 && pitchInHz < 164.81) {
                    //D
                    noteText.setText("D");
                }
                else if(pitchInHz >= 164.81 && pitchInHz < 174.61) {
                    //E
                    noteText.setText("E");
                }
                else if(pitchInHz >= 174.61 && pitchInHz < 185) {
                    //F
                    noteText.setText("F");
                }
                else if(pitchInHz >= 185 && pitchInHz < 196) {
                    //G
                    noteText.setText("G");
                }
            }
        };

        // The sample rate here has to match the stream's format (16 kHz above);
        // passing 44100 would scale every reported pitch by 44100/16000.
        AudioProcessor pitchProcessor = new PitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, 16000, 2048, pitchDetectionHandler);
        dispatcher.addAudioProcessor(pitchProcessor);
        // The dispatcher is a Runnable; give it its own thread rather than calling
        // dispatcher.run() here, which would also process the stream on this thread.
        Thread audioThread = new Thread(dispatcher, "Audio Thread");
        audioThread.start();

    } catch (Exception e) {
        e.printStackTrace();
    }

}

public void releaseDispatcher(AudioDispatcher dispatcher)
{
    if(dispatcher != null)
    {
        if(!dispatcher.isStopped())
            dispatcher.stop();

        // Clear the shared static field; assigning null to the parameter alone wouldn't release it.
        AudioMethods.dispatcher = null;
    }
}

protected void onStop(AudioDispatcher dispatcher) {
    //super.onStop();
    releaseDispatcher(dispatcher);
}

 //I don't need these guys yet
 /*public void stopRecording()
{
    releaseDispatcher();
}


@Override
protected void onStop() {
    super.onStop();
    releaseDispatcher();
}*/

}
