I have a client and server application. The server requests an audio stream, and the client handles that part fine.
The client goes on to set up an AudioFormat (note: both ends use the same settings) and a TargetDataLine. It then writes the captured data to the socket's output stream through a BufferedOutputStream.
The server receives the data and reads it in a threaded method. On each buffer read it wraps the data in an AudioInputStream, which is passed to the playSound method; that method is threaded and synchronized so playback keeps going.
When I make the playSound method non-threaded it runs fine but is slightly glitchy. I also understand that playing the sound non-threaded can cause the sound frames to stall. Any help is greatly appreciated, and any way I can make this audio streaming more efficient and faster is welcome too.
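For reference, both ends size their buffers as one second of audio. With the 16 kHz, 16-bit, stereo format used below, that works out to 64,000 bytes per read; a throwaway check of the arithmetic (the FormatCheck class is only an illustration, not part of the application):

import javax.sound.sampled.AudioFormat;

public class FormatCheck {
    public static void main(String[] args) {
        AudioFormat fmt = new AudioFormat(16000f, 16, 2, true, true);
        int frameSize = fmt.getFrameSize();                      // 2 bytes per sample * 2 channels = 4 bytes
        int bufferSize = (int) fmt.getSampleRate() * frameSize;  // 16000 frames/s * 4 bytes = 64000 bytes, i.e. one second of audio
        System.out.println(frameSize + " bytes/frame, " + bufferSize + " bytes/buffer");
    }
}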
Client:
private void captureAudio() throws CommandException {
    Socket session;
    try {
        session = new Socket(host_, port_);
    } catch (IOException e) {
        throw new CommandException("Unable to Connect to Server", Command.CONNECTION_ERROR);
    }
    try {
        final AudioFormat format = getFormat();
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
        final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        // One second of audio per read: sampleRate frames * frameSize bytes per frame.
        int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
        byte[] buffer = new byte[bufferSize];
        running = true;
        try {
            // Wrap the socket stream once and reuse it for every captured chunk.
            BufferedOutputStream out_ = new BufferedOutputStream(session.getOutputStream());
            while (running) {
                int count = line.read(buffer, 0, buffer.length);
                if (count > 0) {
                    out_.write(buffer, 0, count);
                    out_.flush();
                }
            }
            out_.close();
            line.close();
        } catch (IOException e) {
            throw new CommandException("I/O problems: " + e, Command.TRANSFER_ERROR);
        }
    } catch (LineUnavailableException e) {
        throw new CommandException("Line unavailable: " + e, Command.ERROR);
    }
}
private AudioFormat getFormat() {
    float sampleRate = 16000;
    int sampleSizeInBits = 16;
    int channels = 2;
    boolean signed = true;
    boolean bigEndian = true;
    return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
}
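For completeness, the capture loop above is stopped from another thread by clearing the flag. stopCapture is a hypothetical name and running is assumed to be a volatile boolean field:

// Hypothetical stop hook; `running` is assumed to be a volatile boolean field on this class.
public void stopCapture() {
    running = false;   // the capture loop exits after the current line.read(...) returns
}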
Server:
public void readSocket(final Socket socket) {
    new Thread() {
        @Override
        public void run() {
            InputStream input;
            try {
                input = socket.getInputStream();
                final AudioFormat format = getFormat();
                // Same sizing as the client: one second of audio per read.
                int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
                byte[] buffer = new byte[bufferSize];
                int bytesRead;
                while ((bytesRead = input.read(buffer, 0, bufferSize)) != -1) {
                    if (bytesRead > 0) {
                        // Copy the chunk so the shared buffer is not overwritten while it is still
                        // being played, then wrap it in an AudioInputStream for play().
                        play(new AudioInputStream(
                                new ByteArrayInputStream(Arrays.copyOf(buffer, bytesRead)),
                                format, bytesRead / format.getFrameSize()));
                    }
                }
                socket.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    }.start();
}
private AudioFormat getFormat() {
    float sampleRate = 16000;
    int sampleSizeInBits = 16;
    int channels = 2;
    boolean signed = true;
    boolean bigEndian = true;
    return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
}
private synchronized void play(final AudioInputStream ais) {
    new Thread() {
        @Override
        public void run() {
            try {
                final AudioFormat format = getFormat();
                DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
                SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
                line.open(format);
                line.start();
                int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
                byte[] buffer = new byte[bufferSize];
                int count;
                // Pour the received chunk into the freshly opened output line.
                while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
                    if (count > 0) {
                        line.write(buffer, 0, count);
                    }
                }
                line.drain();
                line.close();
                ais.close();
            } catch (LineUnavailableException ex) {
                ex.printStackTrace();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }.start();
}
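For comparison, here is a rough sketch of feeding one long-lived SourceDataLine directly from the socket's input stream, so no new line or thread is created per chunk. playFromSocket is only an illustrative name, and getFormat() is assumed to be the same method shown above:

// A minimal sketch, not working code from the app: one playback thread keeps a single
// SourceDataLine open and writes to it straight from the socket stream.
private void playFromSocket(final Socket socket) {
    new Thread() {
        @Override
        public void run() {
            try {
                final AudioFormat format = getFormat();
                DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
                SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
                line.open(format);
                line.start();
                InputStream input = socket.getInputStream();
                byte[] buffer = new byte[(int) format.getSampleRate() * format.getFrameSize()];
                int count;
                while ((count = input.read(buffer, 0, buffer.length)) != -1) {
                    if (count > 0) {
                        line.write(buffer, 0, count);   // blocks while the line's buffer is full, which paces playback
                    }
                }
                line.drain();
                line.close();
                socket.close();
            } catch (LineUnavailableException | IOException ex) {
                ex.printStackTrace();
            }
        }
    }.start();
}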