Encoding frames to video with ffmpeg
I am trying to encode a video in Unreal Engine 4 with C++. I have access to the individual frames. Below is the code that reads the viewport's displayed pixels and stores them in a buffer.
//Safely get render target resource.
FRenderTarget* RenderTarget = TextureRenderTarget->GameThread_GetRenderTargetResource();
FIntPoint Size = RenderTarget->GetSizeXY();
auto ImageBytes = Size.X * Size.Y * static_cast<int32>(sizeof(FColor));
TArray<uint8> RawData;
RawData.AddUninitialized(ImageBytes);

//Get image raw data.
if (!RenderTarget->ReadPixelsPtr((FColor*)RawData.GetData()))
{
    RawData.Empty();
    UE_LOG(ExportRenderTargetBPFLibrary, Error, TEXT("ExportRenderTargetAsImage: Failed to get raw data."));
    return false;
}

Buffer::getInstance().add(RawData);
Unreal Engine has IImageWrapperModule, with which you can get an image from a frame, but nothing for video encoding. What I want is to encode frames in real time for a live-streaming service.
I found this post, Encoding a screenshot into a video using FFMPEG, which is close to what I want, but I have problems adapting that solution to my case. The code is outdated (for example, avcodec_encode_video has been replaced by avcodec_encode_video2 with different parameters).
Below is the code of the encoder.
void Compressor::DoWork()
{
    AVCodec* codec;
    AVCodecContext* c = NULL;
    //uint8_t* outbuf;
    //int /*i, out_size,*/ outbuf_size;

    UE_LOG(LogTemp, Warning, TEXT("encoding"));

    codec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO); // finding the H264 encoder
    if (!codec) {
        UE_LOG(LogTemp, Warning, TEXT("codec not found"));
        exit(1);
    }
    else UE_LOG(LogTemp, Warning, TEXT("codec found"));

    c = avcodec_alloc_context3(codec);
    c->bit_rate = 400000;
    c->width = 1280;                    // resolution must be a multiple of two: (1280x720), (1920x1080), (720x480)
    c->height = 720;
    c->time_base.num = 1;               // framerate numerator
    c->time_base.den = 25;              // framerate denominator
    c->gop_size = 10;                   // emit one intra frame every ten frames
    c->max_b_frames = 1;                // maximum number of b-frames between non-b-frames
    c->keyint_min = 1;                  // minimum GOP size
    c->i_quant_factor = (float)0.71;    // qscale factor between P and I frames
    //c->b_frame_strategy = 20;         ///// find out exactly what this does
    c->qcompress = (float)0.6;          ///// find out exactly what this does
    c->qmin = 20;                       // minimum quantizer
    c->qmax = 51;                       // maximum quantizer
    c->max_qdiff = 4;                   // maximum quantizer difference between frames
    c->refs = 4;                        // number of reference frames
    c->trellis = 1;                     // trellis RD quantization
    c->pix_fmt = AV_PIX_FMT_YUV420P;    // universal pixel format for video encoding
    c->codec_id = AV_CODEC_ID_MPEG1VIDEO;
    c->codec_type = AVMEDIA_TYPE_VIDEO;

    if (avcodec_open2(c, codec, NULL) < 0) {
        UE_LOG(LogTemp, Warning, TEXT("could not open codec")); // opening the codec
        //exit(1);
    }
    else UE_LOG(LogTemp, Warning, TEXT("codec opened"));

    FString FinalFilename = FString("C:/Screen/sample.mpg");
    auto &PlatformFile = FPlatformFileManager::Get().GetPlatformFile();
    auto FileHandle = PlatformFile.OpenWrite(*FinalFilename, true);

    if (FileHandle)
    {
        delete FileHandle; // remove when ready
        UE_LOG(LogTemp, Warning, TEXT("file opened"));
        while (true)
        {
            UE_LOG(LogTemp, Warning, TEXT("removing from buffer"));

            int nbytes = avpicture_get_size(AV_PIX_FMT_YUV420P, c->width, c->height); // allocating outbuffer
            uint8_t* outbuffer = (uint8_t*)av_malloc(nbytes * sizeof(uint8_t));

            AVFrame* inpic = av_frame_alloc();
            AVFrame* outpic = av_frame_alloc();

            outpic->pts = (int64_t)((float)1 * (1000.0 / ((float)(c->time_base.den))) * 90); // setting frame pts
            avpicture_fill((AVPicture*)inpic, (uint8_t*)Buffer::getInstance().remove().GetData(),
                AV_PIX_FMT_PAL8, c->width, c->height);                                       // fill image with input screenshot
            avpicture_fill((AVPicture*)outpic, outbuffer, AV_PIX_FMT_YUV420P, c->width, c->height); // clear output picture for buffer copy
            av_image_alloc(outpic->data, outpic->linesize, c->width, c->height, c->pix_fmt, 1);

            /*
            inpic->data[0] += inpic->linesize[0] * (screenHeight - 1); // flipping frame
            inpic->linesize[0] = -inpic->linesize[0];                  // flipping frame

            struct SwsContext* fooContext = sws_getContext(screenWidth, screenHeight, PIX_FMT_RGB32, c->width, c->height, PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
            sws_scale(fooContext, inpic->data, inpic->linesize, 0, c->height, outpic->data, outpic->linesize); // converting frame size and format

            out_size = avcodec_encode_video(c, outbuf, outbuf_size, outpic); // save in file
            */
        }
        delete FileHandle;
    }
    else
    {
        UE_LOG(LogTemp, Warning, TEXT("Can't open file"));
    }
}
Can someone explain the frame-flipping part (why is it done?) and how to use the avcodec_encode_video2 function instead of avcodec_encode_video?
Not only is avcodec_encode_video outdated, avcodec_encode_video2 has also been tagged as deprecated for a while now. You should use the new avcodec_send_frame and avcodec_receive_packet APIs for encoding.
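Roughly, encoding becomes a send/receive loop, since one submitted frame may yield zero or more packets. A minimal sketch (ctx, pkt and the WritePacket call are placeholders, error handling trimmed):

int Encode(AVCodecContext* ctx, AVFrame* frame, AVPacket* pkt)
{
    // Passing nullptr as frame flushes the encoder at end of stream.
    int ret = avcodec_send_frame(ctx, frame);
    if (ret < 0)
        return ret;

    while (ret >= 0)
    {
        ret = avcodec_receive_packet(ctx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;            // encoder needs more input / is fully drained
        if (ret < 0)
            return ret;          // real error
        WritePacket(pkt);        // e.g. av_interleaved_write_frame(FmtCtx, pkt)
        av_packet_unref(pkt);
    }
    return 0;
}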
The "flipping" part doesn't do any good for encoding, and I strongly suggest don't do that in your code. “翻转”部分对编码没有任何好处,我强烈建议不要在您的代码中这样做。 If you find the output size is not right, just switch swscale
interpolation algorithm flag to SWS_ACCURATE_RND
. 如果发现输出大小不正确,只需将swscale
插值算法标志切换为SWS_ACCURATE_RND
。
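For instance, the flag is OR-ed into the flags argument when creating the scaler context (sizes and formats here are placeholders):

// Sketch: ask swscale for accurate rounding if the converted output looks wrong.
SwsContext* Ctx = sws_getContext(
    SrcWidth, SrcHeight, AV_PIX_FMT_RGBA,      // source size and pixel format
    DstWidth, DstHeight, AV_PIX_FMT_YUV420P,   // destination size and pixel format
    SWS_BILINEAR | SWS_ACCURATE_RND,           // interpolation flag + accurate rounding
    nullptr, nullptr, nullptr);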
Besides the old avcodec_encode_video API, there are several potential risks:

- To use the H264 encoder, find it with AV_CODEC_ID_H264, not AV_CODEC_ID_MPEG1VIDEO; also, the ffmpeg libs should be built with libx264. Or, if you have a valid nvidia card with hardware-encoding support, avcodec_find_encoder_by_name("h264_nvenc") will be much better.
- delete FileHandle is executed twice.
- The avpicture... functions have been deprecated for a long time. Use other functions instead (see the sketch after this list).
- And if performance is critical, move the whole encoding process to an independent thread instead of the game thread.
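For the deprecated avpicture_* calls, a rough sketch of the modern replacements (buffer and size names are placeholders):

// Query the required buffer size and wrap an existing buffer
// (replaces avpicture_get_size / avpicture_fill):
int NumBytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, Width, Height, 1);
uint8_t* OutBuffer = static_cast<uint8_t*>(av_malloc(NumBytes));

AVFrame* OutFrame = av_frame_alloc();
OutFrame->format = AV_PIX_FMT_YUV420P;
OutFrame->width  = Width;
OutFrame->height = Height;
av_image_fill_arrays(OutFrame->data, OutFrame->linesize,
                     OutBuffer, AV_PIX_FMT_YUV420P, Width, Height, 1);

// Or simply let FFmpeg own the buffers, as the code below does:
// av_frame_get_buffer(OutFrame, 32);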
I have some code for encoding the UE4 viewport output in my custom GameViewportClient class, which is similar to the official ffmpeg muxing and encode_video examples.
MyGameViewportClient.h:
UCLASS(Config=Game)
class FUSIONCUT_API UMyGameViewportClient : public UGameViewportClient
{
    GENERATED_BODY()

public:
    virtual void Draw(FViewport* Viewport, FCanvas* SceneCanvas) override;

    void FirstTimeInit();
    void InitCodec();
    void TidyUp();
    void SetAutoRecording(bool val);
    void RecordNextFrame();
    bool CanRecordNextFrame();
    void SetRecording(bool val);
    void SetLevelDelay(int32 delay);
    void SetOver(bool val);
    void SetAbandon(bool val);
    void SetFilePath(FString out_file);
    void SetThumbnail(FString thumbnail_file, int32 thumbnail_frame);
    void SaveThumbnailImage();

private:
    UPROPERTY(Config)
    FString DeviceNum;

    UPROPERTY(Config)
    FString H264Crf;

    UPROPERTY(Config)
    int DeviceIndex;

    UPROPERTY()
    UFunction* ProgressFunc;

    UPROPERTY()
    UFunction* FinishFunc;

    FIntPoint ViewportSize;
    int count;

    TArray<FColor> ColorBuffer;
    TArray<uint8> IMG_Buffer;

    struct OutputStream {
        AVStream* Stream;
        AVCodecContext* Ctx;
        int64_t NextPts;
        AVFrame* Frame;
        struct SwsContext* SwsCtx;
    };

    OutputStream VideoSt = { 0 };
    AVOutputFormat* Fmt;
    AVFormatContext* FmtCtx;
    AVCodec* VideoCodec;
    AVDictionary* Opt = nullptr;
    SwsContext* SwsCtx;
    AVPacket Pkt;

    int GotOutput;
    int InLineSize[1];

    bool Start;
    bool Over;
    bool FirstTime;
    bool Abandon;
    bool AutoRecording;
    bool RecordingNextFrame;
    double LastSendingTime;

    std::string FilePath;
    FString UEFilePath;
    int32 LevelDelay;

    void EncodeAndWrite();
    void CaptureFrame();
    void AddStream(enum AVCodecID CodecID);
    void OpenVideo();
    int WriteFrame(bool need_save_thumbnail = true);
    void CloseStream();
    void AllocPicture();
    int FFmpegEncode(AVFrame *frame);
};
MyGameViewportClient.cpp:
void UMyGameViewportClient::InitCodec()
{
    ViewportSize = Viewport->GetSizeXY();

    av_register_all();
    avformat_alloc_output_context2(&FmtCtx, nullptr, nullptr, FilePath.c_str());
    if (!FmtCtx)
    {
        UE_LOG(LogTemp, Error, TEXT("cannot alloc format context"));
        return;
    }
    Fmt = FmtCtx->oformat;

    //auto codec_id = AV_CODEC_ID_H264;
    const char codec_name[32] = "h264_nvenc";
    //auto codec = avcodec_find_encoder(codec_id);
    auto codec = avcodec_find_encoder_by_name(codec_name);
    av_format_set_video_codec(FmtCtx, codec);

    if (Fmt->video_codec != AV_CODEC_ID_NONE)
    {
        AddStream(Fmt->video_codec);
    }

    OpenVideo();
    VideoSt.NextPts = 0;

    av_dump_format(FmtCtx, 0, FilePath.c_str(), 1);

    if (!(Fmt->flags & AVFMT_NOFILE))
    {
        auto ret = avio_open(&FmtCtx->pb, FilePath.c_str(), AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            auto errstr = FString(av_err2str(ret));
            UE_LOG(LogTemp, Error, TEXT("Could not open %s: %s"), *UEFilePath, *errstr);
            return;
        }
    }

    auto ret = avformat_write_header(FmtCtx, &Opt);
    if (ret < 0)
    {
        UE_LOG(LogTemp, Error, TEXT("Error occurred when writing header to: %s"), *UEFilePath);
        return;
    }

    InLineSize[0] = 4 * VideoSt.Ctx->width;
    SwsCtx = sws_getContext(VideoSt.Ctx->width, VideoSt.Ctx->height, AV_PIX_FMT_RGBA,
                            VideoSt.Ctx->width, VideoSt.Ctx->height, VideoSt.Ctx->pix_fmt,
                            0, nullptr, nullptr, nullptr);
}
void UMyGameViewportClient::OpenVideo()
{
    auto c = VideoSt.Ctx;
    AVDictionary* opt = nullptr;

    av_dict_copy(&opt, Opt, 0);
    auto ret = avcodec_open2(c, VideoCodec, &opt);
    av_dict_free(&opt);
    if (ret < 0)
    {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Error, TEXT("Could not open video codec: %s"), *errstr);
    }

    AllocPicture();
    if (!VideoSt.Frame)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not allocate video frame"));
        return;
    }

    if (avcodec_parameters_from_context(VideoSt.Stream->codecpar, c))
    {
        UE_LOG(LogTemp, Error, TEXT("Could not copy the stream parameters"));
    }
}
void UMyGameViewportClient::AllocPicture()
{
    VideoSt.Frame = av_frame_alloc();
    if (!VideoSt.Frame)
    {
        UE_LOG(LogTemp, Error, TEXT("av_frame_alloc failed."));
        return;
    }
    VideoSt.Frame->format = VideoSt.Ctx->pix_fmt;
    VideoSt.Frame->width = ViewportSize.X;
    VideoSt.Frame->height = ViewportSize.Y;

    if (av_frame_get_buffer(VideoSt.Frame, 32) < 0)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not allocate frame data"));
    }
}
void UMyGameViewportClient::AddStream(enum AVCodecID CodecID)
{
    VideoCodec = avcodec_find_encoder(CodecID);
    if (!VideoCodec)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not find encoder for '%s'"), ANSI_TO_TCHAR(avcodec_get_name(CodecID)));
    }

    VideoSt.Stream = avformat_new_stream(FmtCtx, nullptr);
    if (!VideoSt.Stream)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not allocate stream"));
    }

    VideoSt.Stream->id = FmtCtx->nb_streams - 1;
    VideoSt.Ctx = avcodec_alloc_context3(VideoCodec);
    if (!VideoSt.Ctx)
    {
        UE_LOG(LogTemp, Error, TEXT("Could not alloc an encoding context"));
    }

    VideoSt.Ctx->codec_id = CodecID;
    VideoSt.Ctx->width = ViewportSize.X;
    VideoSt.Ctx->height = ViewportSize.Y;
    VideoSt.Stream->time_base = VideoSt.Ctx->time_base = { 1, FRAMERATE };
    VideoSt.Ctx->gop_size = 10;
    VideoSt.Ctx->max_b_frames = 1;
    VideoSt.Ctx->pix_fmt = AV_PIX_FMT_YUV420P;

    av_opt_set(VideoSt.Ctx->priv_data, "cq", TCHAR_TO_ANSI(*H264Crf), 0);    // change `cq` to `crf` if using libx264
    av_opt_set(VideoSt.Ctx->priv_data, "gpu", TCHAR_TO_ANSI(*DeviceNum), 0); // comment this line out if using libx264

    if (FmtCtx->oformat->flags & AVFMT_GLOBALHEADER)
        VideoSt.Ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
void UMyGameViewportClient::EncodeAndWrite()
{
    Pkt = { nullptr };
    av_init_packet(&Pkt);

    fflush(stdout);

    // Repack the FColor buffer into a tightly packed RGBA byte buffer for swscale.
    IMG_Buffer.SetNum(ColorBuffer.Num() * 4);
    uint8* DestPtr = nullptr;
    for (auto i = 0; i < ColorBuffer.Num(); i++)
    {
        DestPtr = &IMG_Buffer[i * 4];
        auto SrcPtr = ColorBuffer[i];
        *DestPtr++ = SrcPtr.R;
        *DestPtr++ = SrcPtr.G;
        *DestPtr++ = SrcPtr.B;
        *DestPtr++ = SrcPtr.A;
    }

    uint8* inData[1] = { IMG_Buffer.GetData() };
    sws_scale(SwsCtx, inData, InLineSize, 0, VideoSt.Ctx->height, VideoSt.Frame->data, VideoSt.Frame->linesize);

    VideoSt.Frame->pts = VideoSt.NextPts++;
    if (FFmpegEncode(VideoSt.Frame) < 0)
        UE_LOG(LogTemp, Error, TEXT("Error encoding frame %d"), count);

    if (GotOutput) // only mux a packet if the encoder actually produced one
    {
        auto ret = WriteFrame();
        if (ret < 0)
        {
            auto errstr = FString(av_err2str(ret));
            UE_LOG(LogTemp, Error, TEXT("Error while writing video frame: %s"), *errstr);
        }
    }
    av_packet_unref(&Pkt);
}
int UMyGameViewportClient::WriteFrame(bool /*need_save_thumbnail*/) // thumbnail handling omitted in this excerpt
{
    av_packet_rescale_ts(&Pkt, VideoSt.Ctx->time_base, VideoSt.Stream->time_base);
    Pkt.stream_index = VideoSt.Stream->index;
    return av_interleaved_write_frame(FmtCtx, &Pkt);
}
int UMyGameViewportClient::FFmpegEncode(AVFrame *frame)
{
    GotOutput = 0;

    auto ret = avcodec_send_frame(VideoSt.Ctx, frame);
    if (ret < 0 && ret != AVERROR_EOF)
    {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Warning, TEXT("error during sending frame, error : %s"), *errstr);
        return -1;
    }

    ret = avcodec_receive_packet(VideoSt.Ctx, &Pkt);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        return 0;
    if (ret < 0)
    {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Error, TEXT("Error during receiving packet, error : %s"), *errstr);
        av_packet_unref(&Pkt);
        return -1;
    }

    GotOutput = 1;
    return 0;
}
void UMyGameViewportClient::CloseStream()
{
    avcodec_free_context(&VideoSt.Ctx);
    av_frame_free(&VideoSt.Frame);
    sws_freeContext(SwsCtx);

    if (!(Fmt->flags & AVFMT_NOFILE))
    {
        auto ret = avio_closep(&FmtCtx->pb);
        if (ret < 0)
        {
            auto errstr = FString(av_err2str(ret));
            UE_LOG(LogTemp, Error, TEXT("avio close failed: %s"), *errstr);
        }
    }

    avformat_free_context(FmtCtx);
}
void UMyGameViewportClient::TidyUp()
{
    /* get the delayed frames */
    for (GotOutput = 1; GotOutput; count++)
    {
        fflush(stdout);

        FFmpegEncode(nullptr);
        if (GotOutput)
        {
            auto ret = WriteFrame(false);
            if (ret < 0)
            {
                auto errstr = FString(av_err2str(ret));
                UE_LOG(LogTemp, Error, TEXT("Error while writing video frame: %s"), *errstr);
            }
            av_packet_unref(&Pkt);
        }
    }

    auto ret = av_write_trailer(FmtCtx);
    if (ret < 0)
    {
        auto errstr = FString(av_err2str(ret));
        UE_LOG(LogTemp, Error, TEXT("writing trailer error: %s"), *errstr);
    }

    CloseStream();
}
void UMyGameViewportClient::Draw(FViewport* Viewport, FCanvas* SceneCanvas)
{
    Super::Draw(Viewport, SceneCanvas);

    if (Over) // you may need to set this from another class
    {
        Over = false;
        TidyUp();
    }
    else
    {
        CaptureFrame();
    }
}
void UMyGameViewportClient::CaptureFrame()
{
    if (!Viewport)
    {
        UE_LOG(LogTemp, Error, TEXT("No viewport"));
        return;
    }
    if (ViewportSize.X == 0 || ViewportSize.Y == 0)
    {
        UE_LOG(LogTemp, Error, TEXT("Viewport size is 0"));
        return;
    }

    ColorBuffer.Empty();
    if (!Viewport->ReadPixels(ColorBuffer, FReadSurfaceDataFlags(),
                              FIntRect(0, 0, ViewportSize.X, ViewportSize.Y)))
    {
        UE_LOG(LogTemp, Error, TEXT("Cannot read from viewport"));
        return;
    }

    EncodeAndWrite(); // call InitCodec() before this
}
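For completeness, a hypothetical driving sequence from game code (the output path is a placeholder; in my setup the custom viewport client is selected via GameViewportClientClassName in DefaultEngine.ini, and only the functions declared in the header above are assumed to exist):

// Hypothetical driver code: fetch the custom viewport client, start and later stop recording.
if (auto* VC = Cast<UMyGameViewportClient>(GEngine->GameViewport))
{
    VC->SetFilePath(TEXT("C:/Screen/capture.mp4")); // output path (placeholder)
    VC->InitCodec();                                // must run before the first CaptureFrame()
    // ... frames are captured in Draw() every tick ...
    VC->SetOver(true);                              // later: triggers TidyUp() on the next Draw()
}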