我需要在android上实现音频PPM(脉冲位置调制)
参考:http://en.wikipedia.org/wiki/Pulse-position_modulation
我想使用智能手机的音频麦克风输入接收 PPM。
用于无线电控制、无人机等的无线电通常具有 PPM 输出。发射机(和 pc 飞行模拟器)通常具有 PPM 输入。
我想知道你是否可以帮助我完成这项任务。
正如你在这里所读到的,我已经编写了 PPM 编码器类:android PPM 编码器音频库
这些是一些起始文档和工具:
1)SmartPropoPlus是一个Windows软件,接收PPM音频并解码:http://sourceforge.net/p/smartpropoplus/code/HEAD/tree/SPP4/
2)这就是 PPM 的结构:http://www.aerodesign.de/peter/2000/PCM/PCM_PPM_eng.html#Anker144123
3)这是一个简单的图像,解释了信号的结构:http://www.aerodesign.de/peter/2000/PCM/frame_ppm.gif
4) 使用示波器进行 ppm 信号测量:http://www.andrewhazelden.com/blog/2011/08/analyzing-rc-radio-ppm-signals/
编辑: 在等待您的支持时,我开始编写示例应用程序:
这是 PPMdecoder.java 类,它缺少信号解码和校准阶段......但它允许正确地从麦克风获取信号
注意:代码中的“//待完成”注释标识了解决此问题的缺失代码。
类 PPMdecoder.java
package com.tr3ma.PPMtestProject;
import java.util.ArrayList;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.util.Log;
public class PPMDecoder {

    /** Audio sampling rate in Hz. 44100 Hz is the rate every Android device must support. */
    public int SAMPLE_RATE = 44100;

    /** Samples per PPM frame: sample rate (44.1 kHz) * 22.5 ms, the duration of one PPM frame. */
    public int ppmFrameBufferSize = (int) (SAMPLE_RATE * 0.0225);

    /** Size of the AudioRecord capture buffer (twice the reported minimum, for headroom). */
    public int audioBufferSize;

    /** >0 while a calibration pass is in progress; normal decoding is skipped in that state. */
    int calibrationStatus = 0;

    /** Last decoded value for each of the 8 channels; an entry is null until first decoded. */
    private ArrayList<Float> channelValues;

    AudioManager audioManager;
    RecordAudio receivePPMSignalTask;

    /** Written by the UI thread, read by the recording thread — volatile for cross-thread visibility. */
    private volatile boolean started;

    long elapsedTimeSinceLastPublish;
    long lastPublishMilliseconds;

    IAsyncFetchListener fetchListener = null;

    /** Registers the listener that receives periodic channel-value updates on the UI thread. */
    public void setListener(IAsyncFetchListener listener) {
        this.fetchListener = listener;
    }

    /**
     * @param context used to obtain the {@code AudioManager} system service.
     */
    public PPMDecoder(Context context) {
        audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        channelValues = new ArrayList<Float>(8);
        for (int i = 0; i < 8; i++) {
            channelValues.add(null); // no value decoded yet for this channel
        }
    }

    /**
     * Starts capturing audio from the microphone on a background task.
     *
     * @return 0 on success, -2 if the current sample rate is unsupported, -1 on any other error.
     */
    public int startDecoding() {
        try {
            audioBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                    AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT) * 2;
            // getMinBufferSize returns a negative error code when the parameters are unsupported.
            if (audioBufferSize <= 0) return -2;
            started = true;
            receivePPMSignalTask = new RecordAudio();
            receivePPMSignalTask.execute();
            return 0;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return -1;
    }

    /**
     * Stops the capture task. Safe to call when decoding was never started.
     *
     * @return 0 on success, -1 on error.
     */
    public int stopDecoding() {
        try {
            started = false;
            // Guard: the original code threw (and swallowed) an NPE when this was
            // called before startDecoding(); stopping an idle decoder is not an error.
            if (receivePPMSignalTask != null) {
                receivePPMSignalTask.cancel(true);
                receivePPMSignalTask = null;
            }
            return 0;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return -1;
    }

    /** Converts a sample count to a duration in milliseconds at the current sample rate. */
    private float samplesToTime(int samples) {
        return ((float) samples / (float) SAMPLE_RATE) * 1000f;
    }

    /**
     * Returns a channel value scaled and clamped to 0..255.
     *
     * @param channel 1-based channel index (1..8).
     * @return the clamped value, or 0 if the channel has not been decoded yet.
     */
    public int getChannelValue(int channel) {
        Float raw = channelValues.get(channel - 1);
        // Entries start as null; unboxing null would throw NullPointerException.
        if (raw == null) return 0;
        // Map pulse width (ms) to 0..255; 0.68181818 ms is treated as the minimum pulse length.
        float tmpVal = (raw - 0.68181818f) * 255f / 1.0f;
        if (tmpVal < 0) return 0;
        if (tmpVal > 255) return 255;
        return Math.round(tmpVal);
    }

    /**
     * Switches to a different sampling rate (for devices that do not support the default)
     * and restarts the capture task.
     *
     * @param freq new sampling rate in Hz.
     * @return 0 on success, -2 if the rate is unsupported (state left unchanged), -1 otherwise.
     */
    public int setSamplingRate(int freq) {
        try {
            // Validate the new rate BEFORE mutating any state, so an unsupported rate
            // does not leave the decoder half-configured.
            int newBufferSize = AudioRecord.getMinBufferSize(freq,
                    AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT) * 2;
            if (newBufferSize <= 0) return -2;
            SAMPLE_RATE = freq;
            ppmFrameBufferSize = (int) (SAMPLE_RATE * 0.0225); // rate * 22.5 ms frame duration
            audioBufferSize = newBufferSize;
            stopDecoding();
            startDecoding();
            return 0;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return -1;
    }

    /** Background task that reads raw PCM from the microphone and (eventually) decodes PPM. */
    public class RecordAudio extends AsyncTask<Void, Void, Throwable> {

        @Override
        protected Throwable doInBackground(Void... arg0) {
            AudioRecord audioRecord = null;
            try {
                audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, audioBufferSize);
                // The AudioRecord constructor does not throw on failure;
                // the state must be checked explicitly.
                if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
                    throw new IllegalStateException(
                            "AudioRecord failed to initialize (microphone busy or parameters unsupported)");
                }
                short[] buffer = new short[audioBufferSize];
                audioRecord.startRecording();
                lastPublishMilliseconds = System.currentTimeMillis();
                // Also honor cancel(true) from stopDecoding(), not just the started flag.
                while (started && !isCancelled()) {
                    int bufferReadResult = audioRecord.read(buffer, 0, audioBufferSize);
                    if (bufferReadResult == AudioRecord.ERROR_INVALID_OPERATION) {
                        continue; // transient read failure: skip this iteration
                    }
                    if (calibrationStatus > 0) {
                        // Calibration phase — To Be Done
                        continue;
                    }
                    // Signal decoding — To Be Done
                    // Publish the channel values to the listener roughly every 100 ms.
                    elapsedTimeSinceLastPublish = System.currentTimeMillis() - lastPublishMilliseconds;
                    if (elapsedTimeSinceLastPublish > 100) {
                        publishProgress();
                        lastPublishMilliseconds = System.currentTimeMillis();
                    }
                }
            } catch (Throwable t) {
                t.printStackTrace();
                Log.e("audioRecord", "Recording Failed", t);
                return t;
            } finally {
                // Always release the native recording resources, even when an error occurred
                // (the original leaked the AudioRecord on any exception).
                if (audioRecord != null) {
                    try {
                        audioRecord.stop();
                    } catch (IllegalStateException ignored) {
                        // stop() throws if recording never actually started; harmless during cleanup.
                    }
                    audioRecord.release();
                }
            }
            return null;
        }

        @Override
        protected void onProgressUpdate(Void... arg0) {
            // Runs on the UI thread: hand the current channel values to the registered listener.
            if (fetchListener != null) fetchListener.update(channelValues);
        }

        @Override
        protected void onPostExecute(Throwable result) {
            if (result == null) {
                return;
            }
            // A non-null Throwable means recording failed (e.g. sound recorder busy):
            // report the error and reset the acquisition — To be done...
        }
    } // end of RecordAudio (AsyncTask)
}
这是在 UI 线程上创建侦听器所需的接口 IAsyncFetchListener.java,以允许解码器使用通道值定期更新 UI。
package com.tr3ma.PPMtestProject;
import java.util.ArrayList;
import java.util.EventListener;
/**
 * Callback interface implemented on the UI thread so the decoder can
 * periodically deliver the latest channel values (published from the
 * recording task's onProgressUpdate, i.e. on the UI thread).
 */
public interface IAsyncFetchListener extends EventListener {
// Receives the decoder's shared channel-value list: 8 entries, each the last
// decoded value for that channel, or null if the channel has not been decoded yet.
void update(ArrayList<Float> channelValues );
}
这是示例活动类 Test.java :
package com.tr3ma.PPMtestProject;
import java.util.ArrayList;
import android.os.Bundle;
import android.widget.TextView;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import com.tr3ma.PPMtestProject.R;
// Sample activity demonstrating how to wire up PPMDecoder.
// NOTE: the "...."/"....." lines are placeholders from the original post —
// this class is illustrative pseudo-code, not compilable as-is.
public class Test extends Activity {
PPMDecoder ppmdecoder;
....
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_test);
// Create the decoder and register a listener that is invoked on the UI
// thread (roughly every 100 ms) with the current channel values.
ppmdecoder=new PPMDecoder(this);
ppmdecoder.setListener(new IAsyncFetchListener() {
public void update(ArrayList<Float> channelValues ) {
// do something with channelValues
//To Be Done................................
}
});
//start to receive the signal through the microphone
// startDecoding returns 0 on success, -2 for an unsupported sample rate,
// -1 for any other failure.
int result=ppmdecoder.startDecoding();
if (result!=0){
//error occoured, something went wrong
AlertDialog.Builder alert = new AlertDialog.Builder(this);
alert.setTitle("Error");
alert.setMessage("Error during audio signal receiving. Error Number " + result);
alert.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
}
});
alert.show();
}
.....
}
@Override
protected void onDestroy() {
super.onDestroy();
// Stop the capture task so the microphone is released with the activity.
int result=ppmdecoder.stopDecoding();
if (result!=0){
AlertDialog.Builder alert = new AlertDialog.Builder(this);
alert.setTitle("Error");
alert.setMessage("Error while stopping the audio receiving. Error number " + result);
alert.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
}
});
alert.show();
}
....
}
}
这是 AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.tr3ma.PPMtestProject"
android:versionCode="1"
android:versionName="1.0" >
<!-- MODIFY_AUDIO_SETTINGS: lets the app adjust audio routing/settings if needed. -->
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<!-- RECORD_AUDIO: required for AudioRecord to capture the PPM signal from the microphone. -->
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<!-- WRITE_EXTERNAL_STORAGE: not used by the code shown here — presumably for logging/dumps; verify it is needed. -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<!-- NOTE(review): with targetSdkVersion 17 these are install-time permissions;
     targeting API 23+ would require requesting RECORD_AUDIO at runtime. -->
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="17" />
<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme" >
<!-- Single launcher activity; fixed landscape orientation. -->
<activity
android:name="com.tr3ma.PPMtestProject.Test"
android:label="@string/app_name"
android:screenOrientation="landscape"
>
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>