I'm using the SharpPcap library to capture packets from a SIP call. So far, so good. However, when I join those packets (G.711 A-law payloads accumulated in a MemoryStream), decode them with the ALawDecoder DLL I got from https://www.codeproject.com/Articles/14237/Using-the-G711-standard, and write the decoded byte array out as a WAV file, I can hear the recording, but it sounds choppy and slow. I'm new to audio programming, so I don't have much experience here. Below is the code snippet I wrote to accomplish this:
private static void Device_OnPacketArrival(object sender, CaptureEventArgs e)
{
    var time = e.Packet.Timeval.Date.AddHours(-3);
    var len = e.Packet.Data.Length;
    var packet = PacketDotNet.Packet.ParsePacket(e.Packet.LinkLayerType, e.Packet.Data);
    var device = sender as ICaptureDevice;
    var tcpPacket = packet.Extract<PacketDotNet.TcpPacket>();
    var udpPacket = packet.Extract<PacketDotNet.UdpPacket>();

    if (udpPacket != null)
    {
        var ipPacket = (PacketDotNet.IPPacket)udpPacket.ParentPacket;
        System.Net.IPAddress srcIp = ipPacket.SourceAddress;
        System.Net.IPAddress dstIp = ipPacket.DestinationAddress;
        int srcPort = udpPacket.SourcePort;
        int dstPort = udpPacket.DestinationPort;
        byte[] udpHeaderData = udpPacket.HeaderData;
        byte[] udpPayloadData = udpPacket.PayloadData;
        string decodedUdpPayloadData = Encoding.UTF8.GetString(udpPayloadData);

        if (decodedUdpPayloadData.Contains("m=audio"))
        {
            // SDP media description: find out which port the RTP audio will use
            FindRTPAudioPort(device, decodedUdpPayloadData);
        }
        else if (device.Filter != "udp port 5060")
        {
            // Filter has already been switched from SIP to the RTP port,
            // so treat this packet as audio
            RtpPacketsToWave(device, udpPayloadData);
        }
        else
        {
            Console.WriteLine("{0}:{1}:{2},{3} Len={4} {5}:{6} -> {7}:{8} UDP Packet " +
                "\n {9} \n Hex DUMP: {10} \n",
                time.Hour, time.Minute, time.Second, time.Millisecond, len,
                srcIp, srcPort, dstIp, dstPort,
                decodedUdpPayloadData,
                BitConverter.ToString(udpPayloadData));
        }
    }
    else if (tcpPacket != null)
    {
        var ipPacket = (PacketDotNet.IPPacket)tcpPacket.ParentPacket;
        System.Net.IPAddress srcIp = ipPacket.SourceAddress;
        System.Net.IPAddress dstIp = ipPacket.DestinationAddress;
        int srcPort = tcpPacket.SourcePort;
        int dstPort = tcpPacket.DestinationPort;
        Console.WriteLine("{0}:{1}:{2},{3} Len={4} {5}:{6} -> {7}:{8}",
            time.Hour, time.Minute, time.Second, time.Millisecond, len,
            srcIp, srcPort, dstIp, dstPort);
    }
    else
    {
        Console.WriteLine("\n");
    }
}
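In case it helps, my understanding of the RTP framing being captured: each packet starts with a 12-byte fixed header (RFC 3550) before the A-law samples. Here is a minimal sketch of how that header could be read from the UDP payload; ParseRtpHeader is a hypothetical helper of my own, not part of PacketDotNet or SharpPcap, and it assumes no CSRC entries and no header extension:

// Hypothetical helper: reads the 12-byte fixed RTP header (RFC 3550).
// Assumes no CSRC list and no header extension.
private static (int PayloadType, ushort SequenceNumber, uint Timestamp) ParseRtpHeader(byte[] udpPayload)
{
    // Byte 0 holds version (2 bits), padding, extension and CSRC count
    if ((udpPayload[0] >> 6) != 2)
        throw new ArgumentException("Not an RTP v2 packet");
    int payloadType = udpPayload[1] & 0x7F;   // 8 = PCMA (G.711 A-law)
    ushort seq = (ushort)((udpPayload[2] << 8) | udpPayload[3]);
    uint timestamp = ((uint)udpPayload[4] << 24) | ((uint)udpPayload[5] << 16) |
                     ((uint)udpPayload[6] << 8) | udpPayload[7];
    return (payloadType, seq, timestamp);
}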
private static void RtpPacketsToWave(ICaptureDevice dev, byte[] payloadData)
{
    try
    {
        MemoryStreamSingleton memoryStreamSingleton = MemoryStreamSingleton.GetInstance();
        MemoryStream memStream;
        byte[] headlessPayloadData = new byte[160];
        if (payloadData.Length == 172)
        {
            // Skip the first 12 bytes (the RTP fixed header); the remaining
            // 160 bytes are one 20 ms frame of 8 kHz G.711 A-law samples
            headlessPayloadData = payloadData.Skip(12).ToArray();
            memStream = new MemoryStream(headlessPayloadData);
            memStream.CopyTo(memoryStreamSingleton);
        }
        Console.WriteLine("Payload length: {0}", headlessPayloadData.Length);
        Console.WriteLine(memoryStreamSingleton.Length);
        if (memoryStreamSingleton.Length > 600000)
        {
            WaveFileGenerator(memoryStreamSingleton.ToArray());
            dev.StopCapture();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
}
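Since the audio sounds choppy, one thing that may be worth checking is whether RTP packets arrive out of order or get lost before they are appended. A minimal sketch of such a check, using the hypothetical ParseRtpHeader above (lastSequenceNumber is a new field introduced only for this sketch):

// Hypothetical reorder/loss check, built on the ParseRtpHeader sketch above.
private static ushort? lastSequenceNumber;

private static void CheckRtpSequence(byte[] udpPayload)
{
    var (_, seq, _) = ParseRtpHeader(udpPayload);
    if (lastSequenceNumber.HasValue)
    {
        // RTP sequence numbers increment by one per packet and wrap at 65535
        ushort expected = (ushort)(lastSequenceNumber.Value + 1);
        if (seq != expected)
            Console.WriteLine("RTP gap/reorder: expected seq {0}, got {1}", expected, seq);
    }
    lastSequenceNumber = seq;
}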
private static void WaveFileGenerator(byte[] buffer)
{
    try
    {
        Console.WriteLine("Device closed, generating audio file..");
        // Decoded A-law becomes 16-bit linear PCM at 8 kHz, mono
        WaveFormat waveFormat = new WaveFormat(8000, 16, 1);
        short[] pcm16bit = ALawDecoder.ALawDecode(buffer);
        byte[] result1 = new byte[pcm16bit.Length * sizeof(short)];
        Buffer.BlockCopy(pcm16bit, 0, result1, 0, result1.Length);
        var outputWave = new WaveFileWriter(@"tmp/test.wav", waveFormat);
        outputWave.Write(result1, 0, result1.Length);
        outputWave.Close();
        // Re-read the mono file and widen it to stereo for the final output
        var waveFileProvider = new WaveFileReader(@"tmp/test.wav");
        MonoToStereoProvider16 toStereo = new MonoToStereoProvider16(waveFileProvider);
        WaveFileWriter.CreateWaveFile("test.wav", toStereo);
        waveFileProvider.Dispose();
        File.Delete(@"tmp/test.wav");
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
        File.WriteAllText("log.txt", ex.ToString());
    }
}
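As an aside, I understand NAudio can also write the raw A-law bytes directly as a WAV with an A-law WaveFormat (format tag 6), which would skip the manual decode step entirely. A minimal sketch, assuming alawBuffer holds the concatenated 8 kHz mono PCMA payloads:

// Sketch only: write raw A-law bytes as WAV format 6 and let the player decode.
// Assumes alawBuffer is concatenated 8 kHz mono G.711 A-law samples.
private static void WriteALawWave(byte[] alawBuffer)
{
    var alawFormat = WaveFormat.CreateALawFormat(8000, 1);
    using (var writer = new WaveFileWriter("test-alaw.wav", alawFormat))
    {
        writer.Write(alawBuffer, 0, alawBuffer.Length);
    }
}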
I can't figure out what I'm missing...