我想要接收语音流并播放它。所以我配置了套接字并且接收工作正常,但是我在正确播放我的流时遇到了问题。我完全确定我的传入流没问题。在安卓和iOS上测试。
接收流
// UDP receive handler: reads the datagram payload and hands it to the audio buffer.
// (async void is acceptable here only because this is an event handler.)
// NOTE(review): reading raw PCM audio through a UTF-8 StreamReader is lossy —
// byte sequences that are not valid UTF-8 get replaced during decoding, and the
// later Encoding.UTF8.GetBytes() round trip in AddBytes cannot restore them, so
// the audio is corrupted before it ever reaches the player. The payload should be
// read as raw bytes (e.g. args.GetDataReader().ReadBytes(...)) end-to-end.
private async void SocketOnMessageReceived(DatagramSocket sender, DatagramSocketMessageReceivedEventArgs args)
{
var result = args.GetDataStream();
var resultStream = result.AsStreamForRead(4096);
using (var reader = new StreamReader(resultStream))
{
var audio = await reader.ReadToEndAsync();
// Marshals the append onto the UI thread; see the question's update below,
// where this dispatch was removed because it added latency for no benefit.
Deployment.Current.Dispatcher.BeginInvoke(() =>
{
Debug.WriteLine("audio.length "+audio.Length);
AudioSteam.AddBytes(audio);
});
}
}
为了播放声音,我参考并修改了一个教程中的部分代码。
我修改后的代码
// MediaStreamSource callback: the pipeline calls this whenever it wants the
// next chunk of audio to play.
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
// Send out the sample
// NOTE(review): this always reports BufferSize bytes starting at offset 0 of
// memoryStream, regardless of how much fresh data AddBytes has actually
// written — stale or partial audio will be replayed, and there is no
// synchronization with the writer thread. A ring buffer with separate
// read/write positions is needed; TODO confirm against the rest of the class.
ReportGetSampleCompleted(new MediaStreamSample(mediaStreamDescription,
memoryStream,
0,
BufferSize,
timestamp,
mediaSampleAttributes));
// Prepare for next sample
// Advance the presentation timestamp by BufferSamples sample periods,
// expressed in 100-ns (HNS) units: samples * 10^7 / sampleRate.
timestamp += BufferSamples * 10000000L / sampleRate;
}
并添加了一个向流提供数据的方法
/// <summary>
/// Appends incoming audio data (received as a string) to the playback buffer.
/// NOTE(review): carrying raw PCM as a string and re-encoding it with UTF-8 is
/// lossy — arbitrary bytes do not survive a UTF-8 decode/encode round trip, so
/// the audio arriving here is already corrupted. Callers should migrate to the
/// byte[] overload below and keep the payload as raw bytes end-to-end.
/// </summary>
public void AddBytes(string bytes) {
    // Preserved for existing callers; delegates to the byte[] overload.
    AddBytes(System.Text.Encoding.UTF8.GetBytes(bytes));
}

/// <summary>
/// Writes raw audio bytes into the shared playback buffer.
/// NOTE(review): seeking to the beginning on every call overwrites audio that
/// has not been played yet, and nothing synchronizes this writer with
/// GetSampleAsync reading the same stream — a ring buffer (or a lock plus
/// separate read/write positions) is required for glitch-free playback.
/// TODO confirm the buffering design against the rest of the class.
/// </summary>
public void AddBytes(byte[] audioBytes) {
    memoryStream.Seek(0, SeekOrigin.Begin);
    memoryStream.Write(audioBytes, 0, audioBytes.Length);
}
问题出在哪里?我是在接收部分还是播放部分搞砸了?
更新:看起来经由 UI 线程并不是读取连续音频流的最佳方式,所以我去掉了它,现在接收方法是这样的:
// Synchronous UDP receive handler (runs off the UI thread): reads the datagram
// payload and hands it straight to the audio buffer without dispatching.
// NOTE(review): decoding raw PCM through a UTF-8 text reader is lossy — invalid
// byte sequences are replaced during decoding and the audio is corrupted. The
// datagram should be read as raw bytes (e.g. args.GetDataReader().ReadBytes(...))
// instead of being round-tripped through a string.
private void SocketOnMessageReceived(DatagramSocket sender, DatagramSocketMessageReceivedEventArgs args)
{
    var result = args.GetDataStream();
    var resultStream = result.AsStreamForRead(4096);
    // Fix: the original leaked the reader and the underlying stream on every
    // datagram; 'using' disposes both (StreamReader owns the stream by default).
    using (var reader = new StreamReader(resultStream))
    {
        var audio = reader.ReadToEnd();
        AudioSteam.AddBytes(audio);
    }
}
更重要的是,问题也可能出在 MediaStreamSource 的错误设置上。目前它是这样设置的:
const int ChannelCount = 1;
// Fix: 16-bit signed PCM, matching the iOS/Android capture side
// (mBitsPerChannel = 16). The original value (2) was the BYTE width, not the
// bit width: it declared a bogus 2-bit format in the wave header and made
// blockAlign = ChannelCount * (2 / 8) = 0, so the decoder could not interpret
// the stream — this is the main reason playback failed.
const int BitsPerSample = 16;
const int BufferSize = 4096; //same as max buffer for iOS and Android
// Number of audio frames in one BufferSize-byte buffer: 4096 / 2 = 2048 for
// 16-bit mono. The original hard-coded 16 made GetSampleAsync advance the
// timestamp far too slowly for the amount of data it reported.
const int BufferSamples = BufferSize / (ChannelCount * (BitsPerSample / 8));
/// <summary>
/// Opens the media source: describes one raw PCM audio stream to the playback
/// pipeline via a WAVEFORMATEX structure serialized as a hex string
/// (the CodecPrivateData attribute), then signals completion.
/// </summary>
protected override void OpenMediaAsync()
{
    // Derived PCM parameters: bytes per second and bytes per sample frame.
    int byteRate = sampleRate * ChannelCount * BitsPerSample / 8;
    Debug.WriteLine("ByteRate: " + byteRate);
    short blockAlign = (short)(ChannelCount * (BitsPerSample / 8));

    // WAVEFORMATEX fields in order, each converted to little-endian hex:
    // wFormatTag, nChannels, nSamplesPerSec, nAvgBytesPerSec,
    // nBlockAlign, wBitsPerSample, cbSize.
    string waveFormat = string.Concat(
        ToLittleEndianString(string.Format("{0:X4}", 1)),            // 1 = PCM
        ToLittleEndianString(string.Format("{0:X4}", ChannelCount)),
        ToLittleEndianString(string.Format("{0:X8}", sampleRate)),
        ToLittleEndianString(string.Format("{0:X8}", byteRate)),
        ToLittleEndianString(string.Format("{0:X4}", blockAlign)),
        ToLittleEndianString(string.Format("{0:X4}", BitsPerSample)),
        ToLittleEndianString(string.Format("{0:X4}", 0)));           // no extra bytes

    // Advertise a single audio stream carrying the format built above.
    var streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
    streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = waveFormat;
    mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
    var availableStreams = new List<MediaStreamDescription> { mediaStreamDescription };

    // Source-level attributes: a live stream with no known duration, no seeking.
    var sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
    sourceAttributes[MediaSourceAttributesKeys.Duration] = "0";
    sourceAttributes[MediaSourceAttributesKeys.CanSeek] = "false";

    // Signal that the open operation is completed.
    ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
我正在尝试将其设置为类似于这样设置的 iOS/Android
/* Capture format configured on the iOS side (Core Audio); the Android side is
 * described as equivalent. This is what the Windows Phone wave header must
 * match: 44.1 kHz linear PCM, packed signed integers, mono, 16 bits per
 * sample, one frame per packet => 2 bytes per frame and per packet. */
AudioStreamBasicDescription audioFormat;
audioFormat.mSampleRate = 44100.00;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger;
audioFormat.mFramesPerPacket = 1;
audioFormat.mChannelsPerFrame = 1;
audioFormat.mBitsPerChannel = 16;
audioFormat.mBytesPerPacket = 2;
audioFormat.mBytesPerFrame = 2;
我希望更新能带来一些启示。