
I'm using Xuggler to encode a video from images captured with the Java Robot class and sound read from a TargetDataLine. I then try to stream this video data (after writing my headers) over HTTP (a Socket OutputStream) to a Flash client, but no matter what buffer value I use on the client side, playback is choppy and never smooth.

I'm asking for help and showing my Java code, because I suspect the problem is either in how I encode the video or in something I'm not getting about sending the data over the HTTP socket.
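For reference, the code below uses out, line, audioFormat and keepGoing from my surrounding setup. A minimal sketch of that setup follows; the port, the HTTP header values, and the exact audio format are assumptions for illustration, not necessarily what I run:

import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.TargetDataLine;

// Accept one client and write a minimal HTTP response header by hand;
// the FLV bytes produced by the writer follow on the same stream.
ServerSocket server = new ServerSocket(8080); // port is a placeholder
Socket client = server.accept();
OutputStream out = client.getOutputStream();
out.write(("HTTP/1.1 200 OK\r\n"
        + "Content-Type: video/x-flv\r\n"
        + "Connection: close\r\n"
        + "\r\n").getBytes(StandardCharsets.US_ASCII));

// Microphone line the capture loop reads from: 44.1 kHz, 16-bit, mono.
AudioFormat audioFormat = new AudioFormat(44100f, 16, 1, true, false);
TargetDataLine line = AudioSystem.getTargetDataLine(audioFormat);
line.open(audioFormat);
line.start();

boolean keepGoing = true; // cleared from another thread to stop capturing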

ByteArrayURLHandler ba = new ByteArrayURLHandler();
final IRational FRAME_RATE = IRational.make(30); 
final int SECONDS_TO_RUN_FOR = 20; 
final Robot robot = new Robot(); 
final Toolkit toolkit = Toolkit.getDefaultToolkit(); 
final Rectangle screenBounds = new Rectangle(toolkit.getScreenSize()); 
IMediaWriter writer;

// Map the socket's OutputStream to a pseudo-URL so the IMediaWriter
// streams the FLV container straight to the HTTP client.
writer = ToolFactory.makeWriter(
    XugglerIO.map(
        XugglerIO.generateUniqueName(out, ".flv"),
        out
    ));

writer.addListener(new MediaListenerAdapter() {
    public void onAddStream(IAddStreamEvent event) {
        event.getSource().getContainer().setInputBufferLength(1000);
        IStreamCoder coder = event.getSource().getContainer()
                .getStream(event.getStreamIndex()).getStreamCoder();
        if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO) {
            coder.setFlag(IStreamCoder.Flags.FLAG_QSCALE, false); // fixed bit rate rather than quality scale
            coder.setBitRate(32000);
            System.out.println("onAddStream " + coder.getPropertyNames());
        }
        if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
            // coder.setBitRate(64000);
            // coder.setBitRateTolerance(64000);
        }
    }
});

// Stream indices match the encodeVideo(0, ...) and encodeAudio(1, ...)
// calls in the loop below.
final int videoStreamIndex = 0;
final int videoStreamId = 0;
final int audioStreamIndex = 1;
final int audioStreamId = 1;
final int channelCount = 1;

writer.addVideoStream(videoStreamIndex, videoStreamId, 1024, 768);
int audioNumber = writer.addAudioStream(audioStreamIndex, audioStreamId, channelCount, 44100);

byte[] audioBuf;
int i = 0; // frame counter
BufferedImage screen, bgrScreen;
long startTime = System.nanoTime();
while (keepGoing)
{
    i++;

    // Capture one frame of the screen and encode it with an
    // elapsed-wall-clock timestamp.
    screen = robot.createScreenCapture(screenBounds);
    bgrScreen = convertToType(screen, BufferedImage.TYPE_3BYTE_BGR);
    long nanoTs = System.nanoTime() - startTime;
    writer.encodeVideo(0, bgrScreen, nanoTs, TimeUnit.NANOSECONDS);

    // Read whatever audio has accumulated on the line since the last pass.
    audioBuf = new byte[line.available()];
    int nBytesRead = line.read(audioBuf, 0, audioBuf.length);

    IBuffer iBuf = IBuffer.make(null, audioBuf, 0, nBytesRead);
    IAudioSamples smp = IAudioSamples.make(iBuf, 1, IAudioSamples.Format.FMT_S16);
    if (smp == null) {
        return;
    }

    // Use the byte count actually read, not the buffer length.
    long numSamples = nBytesRead / smp.getSampleSize();
    smp.setComplete(true, numSamples,
            (int) audioFormat.getSampleRate(), audioFormat.getChannels(),
            IAudioSamples.Format.FMT_S16, nanoTs / 1000);

    writer.encodeAudio(1, smp);
    writer.flush();
}
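For completeness, once keepGoing is cleared I tear everything down; a minimal sketch, assuming the line and out from the setup above:

// Flush buffered packets, write the FLV trailer, and release the
// native encoder resources before closing the audio line and socket.
writer.close();
line.stop();
line.close();
out.close();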