os::AutoMutex lock(&mutex);
sws = nullptr;
- swsBuffer = nullptr;
h264WorkBuffer = nullptr;
h264WorkBufferLength = 0;
return false;
}
- int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, rect.width(), rect.height(), 1);
- swsBuffer = new uint8_t[numBytes];
-
initialized = true;
return true;
}
return;
av_parser_close(parser);
avcodec_free_context(&avctx);
+ av_frame_free(&rgbFrame);
av_frame_free(&frame);
- delete[] swsBuffer;
free(h264WorkBuffer);
initialized = false;
}
frame->width, frame->height, AV_PIX_FMT_RGB32,
0, nullptr, nullptr, nullptr);
- int stride;
- pb->getBuffer(rect, &stride);
- int dst_linesize = stride * pb->getPF().bpp/8; // stride is in pixels, linesize is in bytes (stride x4). We need bytes
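+ // Drop the cached RGB frame if the decoded frame's dimensions have changed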
+ if (rgbFrame && (rgbFrame->width != frame->width || rgbFrame->height != frame->height)) {
+ av_frame_free(&rgbFrame);
+ }
+
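+ // Lazily allocate a reusable AVFrame as the destination of the RGB conversion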
+ if (!rgbFrame) {
+ rgbFrame = av_frame_alloc();
+ // TODO: Can we really assume that the pixel format will always be RGB32?
+ rgbFrame->format = AV_PIX_FMT_RGB32;
+ rgbFrame->width = frame->width;
+ rgbFrame->height = frame->height;
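+ // An alignment of 0 lets FFmpeg pick a suitable one for the current CPU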
+ av_frame_get_buffer(rgbFrame, 0);
+ }
- sws_scale(sws, frame->data, frame->linesize, 0, frame->height, &swsBuffer, &dst_linesize);
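+ // Convert the decoded frame straight into the RGB frame's buffers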
+ sws_scale(sws, frame->data, frame->linesize, 0, frame->height, rgbFrame->data,
+ rgbFrame->linesize);
- pb->imageRect(rect, swsBuffer, stride);
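+ // imageRect() expects the stride in pixels; linesize is in bytes (4 bytes per RGB32 pixel)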
+ pb->imageRect(rect, rgbFrame->data[0], rgbFrame->linesize[0] / 4);
}