1

我正在使用 IP CAMERA,我正在尝试在 Android中使用 ffmpeg教程

我得到框架并发送到位图并且运行良好

但是当它从 rtsp 获取帧时,错误的帧已损坏

我发现了这个这个

但它不起作用

有人有解决方案吗?

[h264 @ 0xaacfe20] concealing 977 DC, 977 AC, 977 MV errors
[h264 @ 0xaacfe20] negative number of zero coeffs at 11 9
[h264 @ 0xaacfe20] error while decoding MB 11 9

当我在电脑上运行教程时,我发现

这是我的代码

AVFormatContext *gFormatCtx = NULL;

AVCodecContext *gVideoCodecCtx = NULL;
AVCodec *gVideoCodec = NULL;
int gVideoStreamIdx = -1;

AVFrame *gFrame = NULL;
AVFrame *gFrameRGB = NULL;

struct SwsContext *gImgConvertCtx = NULL;

int gPictureSize = 0;
uint8_t *gVideoBuffer = NULL;


int openMovie(const char filePath[])
{
int i;

if (gFormatCtx != NULL)
    return -1;
/*
if (av_open_input_file(&gFormatCtx, filePath, NULL, 0, NULL) != 0)
    return -2;
*/
gFormatCtx = avformat_alloc_context();

if (avformat_open_input(&gFormatCtx, filePath,NULL,NULL) != 0)
    return -2;

if (av_find_stream_info(gFormatCtx) < 0)
    return -3;

for (i = 0; i < gFormatCtx->nb_streams; i++) {
    if (gFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
        gVideoStreamIdx = i;

        break;
    }
}
if (gVideoStreamIdx == -1)
    return -4;

gVideoCodecCtx = gFormatCtx->streams[gVideoStreamIdx]->codec;

gVideoCodec = avcodec_find_decoder(gVideoCodecCtx->codec_id);
if (gVideoCodec == NULL)
    return -5;

if (avcodec_open(gVideoCodecCtx, gVideoCodec) < 0)
    return -6;

gFrame = avcodec_alloc_frame();
if (gFrame == NULL)
    return -7;

gFrameRGB = avcodec_alloc_frame();
if (gFrameRGB == NULL)
    return -8;

gPictureSize = avpicture_get_size(PIX_FMT_RGB565LE, gVideoCodecCtx->width, gVideoCodecCtx->height);
gVideoBuffer = (uint8_t*)(malloc(sizeof(uint8_t) * gPictureSize));

avpicture_fill((AVPicture*)gFrameRGB, gVideoBuffer, PIX_FMT_RGB565LE, gVideoCodecCtx->width, gVideoCodecCtx->height);

return 0;
}

int decodeFrame()
{
int frameFinished = 0;
AVPacket packet;

gVideoCodecCtx->time_base= (AVRational){1, 10};

while (av_read_frame(gFormatCtx, &packet) >= 0) {
    if (packet.stream_index == gVideoStreamIdx) {
        avcodec_decode_video2(gVideoCodecCtx, gFrame, &frameFinished, &packet);

        if (frameFinished) {
            gImgConvertCtx = sws_getCachedContext(gImgConvertCtx,
                gVideoCodecCtx->width, gVideoCodecCtx->height, gVideoCodecCtx->pix_fmt,
                gVideoCodecCtx->width, gVideoCodecCtx->height, PIX_FMT_RGB565LE, SWS_BICUBIC, NULL, NULL, NULL);

            sws_scale(gImgConvertCtx, gFrame->data, gFrame->linesize, 0, gVideoCodecCtx->height, gFrameRGB->data, gFrameRGB->linesize);

            av_free_packet(&packet);

            return 0;
        }
    }

    av_free_packet(&packet);
}

return -1;
}

void copyPixels(uint8_t *pixels)
{
memcpy(pixels, gFrameRGB->data[0], gPictureSize);
}

int getWidth()
{
return gVideoCodecCtx->width;
}

int getHeight()
{
return gVideoCodecCtx->height;
}

void closeMovie()
{
if (gVideoBuffer != NULL) {
    free(gVideoBuffer);
    gVideoBuffer = NULL;
}

if (gFrame != NULL)
    av_freep(gFrame);
if (gFrameRGB != NULL)
    av_freep(gFrameRGB);

if (gVideoCodecCtx != NULL) {
    avcodec_close(gVideoCodecCtx);
    gVideoCodecCtx = NULL;
}

if (gFormatCtx != NULL) {
    av_close_input_file(gFormatCtx);
    gFormatCtx = NULL;
}

}
4

0 回答 0