KPipewire

encoder.cpp
/*
    SPDX-FileCopyrightText: 2023 Aleix Pol Gonzalez <aleixpol@kde.org>
    SPDX-FileCopyrightText: 2023 Marco Martin <mart@kde.org>
    SPDX-FileCopyrightText: 2023 Arjen Hiemstra <ahiemstra@heimr.nl>

    SPDX-License-Identifier: LGPL-2.1-only OR LGPL-3.0-only OR LicenseRef-KDE-Accepted-LGPL
*/
#include "encoder_p.h"

#include <mutex>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavfilter/avfilter.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
#include <libavutil/avutil.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_drm.h>
#include <libavutil/imgutils.h>
}

#include <libdrm/drm_fourcc.h>

#include "vaapiutils_p.h"

#include "logging_record.h"
#undef av_err2str
// The av_err2str macro provided by libav fails to compile on GCC because it passes data local to the function scope outside of it
char str[AV_ERROR_MAX_STRING_SIZE];
char *av_err2str(int errnum)
{
    return av_make_error_string(str, AV_ERROR_MAX_STRING_SIZE, errnum);
}
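// Maps the QImage formats handed out by the PipeWire stream to the matching FFmpeg pixel formats.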
static AVPixelFormat convertQImageFormatToAVPixelFormat(QImage::Format format)
{
    // Listing the formats handed out by SpaToQImageFormat
    switch (format) {
    case QImage::Format_RGB888:
        return AV_PIX_FMT_RGB24;
    case QImage::Format_BGR888:
        return AV_PIX_FMT_BGR24;
    case QImage::Format_RGBX8888:
    case QImage::Format_RGBA8888_Premultiplied:
        return AV_PIX_FMT_RGBA;
    case QImage::Format_RGB32:
    case QImage::Format_ARGB32:
        return AV_PIX_FMT_RGB32;
    default:
        qDebug() << "Unexpected pixel format" << format;
        return AV_PIX_FMT_RGB32;
    }
}
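// Converts a 0-100 quality percentage into an FFmpeg per-frame lambda value,
// where lower values mean higher quality (clamped to a minimum of 1).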
static int percentageToFrameQuality(quint8 quality)
{
    return std::max(1, int(FF_LAMBDA_MAX - (quality / 100.0) * FF_LAMBDA_MAX));
}
Encoder::Encoder(PipeWireProduce *produce)
    : QObject(nullptr)
    , m_produce(produce)
{
}

Encoder::~Encoder()
{
    if (m_avFilterGraph) {
        avfilter_graph_free(&m_avFilterGraph);
    }

    if (m_avCodecContext) {
        avcodec_free_context(&m_avCodecContext);
    }
}
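// Drains filtered frames from the filter graph and submits them to the encoder,
// discarding frames once the encode queue limit given by maximumFrames is reached.
// Returns how many frames were filtered and how many were queued for encoding.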
std::pair<int, int> Encoder::encodeFrame(int maximumFrames)
{
    auto frame = av_frame_alloc();
    if (!frame) {
        qFatal("Failed to allocate memory");
    }

    int filtered = 0;
    int queued = 0;

    for (;;) {
        if (auto result = av_buffersink_get_frame(m_outputFilter, frame); result < 0) {
            if (result != AVERROR_EOF && result != AVERROR(EAGAIN)) {
                qCWarning(PIPEWIRERECORD_LOGGING) << "Failed receiving filtered frame:" << av_err2str(result);
            }
            break;
        }

        filtered++;

        if (queued + 1 < maximumFrames) {
            auto ret = -1;
            {
                std::lock_guard guard(m_avCodecMutex);
                ret = avcodec_send_frame(m_avCodecContext, frame);
            }
            if (ret < 0) {
                if (ret != AVERROR_EOF && ret != AVERROR(EAGAIN)) {
                    qCWarning(PIPEWIRERECORD_LOGGING) << "Error sending a frame for encoding:" << av_err2str(ret);
                }
                break;
            }
            queued++;
        } else {
            qCWarning(PIPEWIRERECORD_LOGGING) << "Encode queue is full, discarding filtered frame" << frame->pts;
        }
        av_frame_unref(frame);
    }

    av_frame_free(&frame);

    return std::make_pair(filtered, queued);
}
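// Retrieves all packets that the encoder currently has ready and hands them to
// PipeWireProduce::processPacket. Returns the number of packets received.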
int Encoder::receivePacket()
{
    auto packet = av_packet_alloc();
    if (!packet) {
        qFatal("Failed to allocate memory");
    }

    int received = 0;

    for (;;) {
        auto ret = -1;
        {
            std::lock_guard guard(m_avCodecMutex);
            ret = avcodec_receive_packet(m_avCodecContext, packet);
        }
        if (ret < 0) {
            if (ret != AVERROR_EOF && ret != AVERROR(EAGAIN)) {
                qCWarning(PIPEWIRERECORD_LOGGING) << "Error encoding a frame: " << av_err2str(ret);
            }
            av_packet_unref(packet);
            break;
        }

        received++;

        m_produce->processPacket(packet);
        av_packet_unref(packet);
    }

    av_packet_free(&packet);

    return received;
}
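// Signals end of stream to the encoder by sending a null frame, so that any
// delayed packets can be drained through receivePacket().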
void Encoder::finish()
{
    std::lock_guard guard(m_avCodecMutex);
    avcodec_send_frame(m_avCodecContext, nullptr);
}

AVCodecContext *Encoder::avCodecContext() const
{
    return m_avCodecContext;
}
void Encoder::setQuality(std::optional<quint8> quality)
{
    m_quality = quality;
    if (m_avCodecContext) {
        m_avCodecContext->global_quality = percentageToAbsoluteQuality(quality);
    }
}

bool Encoder::supportsHardwareEncoding()
{
    return !VaapiUtils::instance()->devicePath().isEmpty();
}

void Encoder::setEncodingPreference(PipeWireBaseEncodedStream::EncodingPreference preference)
{
    m_encodingPreference = preference;
}
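// Builds the common AVDictionary of encoder options, capping the encoder thread
// count and mapping the requested EncodingPreference onto preset/tune settings.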
AVDictionary *Encoder::buildEncodingOptions()
{
    AVDictionary *options = NULL;

    av_dict_set_int(&options, "threads", qMin(16, QThread::idealThreadCount()), 0);

    switch (m_encodingPreference) {
    case PipeWireBaseEncodedStream::EncodingPreference::NoPreference:
        av_dict_set(&options, "preset", "veryfast", 0);
        break;
    case PipeWireBaseEncodedStream::EncodingPreference::Quality:
        av_dict_set(&options, "preset", "medium", 0);
        break;
    case PipeWireBaseEncodedStream::EncodingPreference::Speed:
        av_dict_set(&options, "preset", "ultrafast", 0);
        av_dict_set(&options, "tune", "zerolatency", 0);
        break;
    case PipeWireBaseEncodedStream::EncodingPreference::Size:
        av_dict_set(&options, "preset", "slow", 0);
        break;
    default: // Same as NoPreference
        av_dict_set(&options, "preset", "veryfast", 0);
        break;
    }

    return options;
}

void Encoder::maybeLogOptions(AVDictionary *options)
{
    if (PIPEWIRERECORD_LOGGING().isInfoEnabled()) {
        char *buffer = NULL;
        av_dict_get_string(options, &buffer, '=', ',');
        qCInfo(PIPEWIRERECORD_LOGGING) << "Using encoding options:" << buffer;
        av_freep(&buffer);
    }
}
SoftwareEncoder::SoftwareEncoder(PipeWireProduce *produce)
    : Encoder(produce)
{
}
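// Converts an incoming PipeWire frame into a QImage (downloading DMA-BUF frames
// through the CPU when necessary), copies the image data into an AVFrame and
// pushes it into the software filter graph.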
bool SoftwareEncoder::filterFrame(const PipeWireFrame &frame)
{
    auto size = m_produce->m_stream->size();

    QImage image;
    if (frame.dmabuf) {
        image = QImage(m_produce->m_stream->size(), QImage::Format_RGBA8888_Premultiplied);
        if (!m_dmaBufHandler.downloadFrame(image, frame)) {
            m_produce->m_stream->renegotiateModifierFailed(frame.format, frame.dmabuf->modifier);
            return false;
        }
    } else if (frame.dataFrame) {
        image = frame.dataFrame->toImage();
    } else {
        return false;
    }

    AVFrame *avFrame = av_frame_alloc();
    if (!avFrame) {
        qFatal("Failed to allocate memory");
    }
    avFrame->format = convertQImageFormatToAVPixelFormat(image.format());
    avFrame->width = size.width();
    avFrame->height = size.height();
    if (m_quality) {
        avFrame->quality = percentageToFrameQuality(m_quality.value());
    }

    av_frame_get_buffer(avFrame, 32);

    const std::uint8_t *buffers[] = {image.constBits(), nullptr};
    const int strides[] = {static_cast<int>(image.bytesPerLine()), 0, 0, 0};

    av_image_copy(avFrame->data, avFrame->linesize, buffers, strides, static_cast<AVPixelFormat>(avFrame->format), size.width(), size.height());

    if (frame.presentationTimestamp) {
        avFrame->pts = m_produce->framePts(frame.presentationTimestamp);
    }

    if (auto result = av_buffersrc_add_frame(m_inputFilter, avFrame); result < 0) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Failed to submit frame for filtering";
    }

    return true;
}
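// Sets up the software filter graph: a "buffer" source whose real size, pixel
// format and time base are supplied through av_buffersrc_parameters_set, the
// filter chain described by m_filterGraphToParse, and a "buffersink" output
// that encodeFrame() reads from.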
bool SoftwareEncoder::createFilterGraph(const QSize &size)
{
    m_avFilterGraph = avfilter_graph_alloc();
    if (!m_avFilterGraph) {
        qFatal("Failed to allocate memory");
    }

    int ret = avfilter_graph_create_filter(&m_inputFilter,
                                           avfilter_get_by_name("buffer"),
                                           "in",
                                           "width=1:height=1:pix_fmt=rgba:time_base=1/1",
                                           nullptr,
                                           m_avFilterGraph);
    if (ret < 0) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Failed to create the buffer filter";
        return false;
    }

    auto parameters = av_buffersrc_parameters_alloc();
    if (!parameters) {
        qFatal("Failed to allocate memory");
    }

    parameters->format = AV_PIX_FMT_RGBA;
    parameters->width = size.width();
    parameters->height = size.height();
    parameters->time_base = {1, 1000};

    av_buffersrc_parameters_set(m_inputFilter, parameters);
    av_free(parameters);
    parameters = nullptr;

    ret = avfilter_graph_create_filter(&m_outputFilter, avfilter_get_by_name("buffersink"), "out", nullptr, nullptr, m_avFilterGraph);
    if (ret < 0) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Could not create buffer output filter";
        return false;
    }

    auto inputs = avfilter_inout_alloc();
    if (!inputs) {
        qFatal("Failed to allocate memory");
    }
    inputs->name = av_strdup("in");
    inputs->filter_ctx = m_inputFilter;
    inputs->pad_idx = 0;
    inputs->next = nullptr;

    auto outputs = avfilter_inout_alloc();
    if (!outputs) {
        qFatal("Failed to allocate memory");
    }
    outputs->name = av_strdup("out");
    outputs->filter_ctx = m_outputFilter;
    outputs->pad_idx = 0;
    outputs->next = nullptr;

    ret = avfilter_graph_parse(m_avFilterGraph, m_filterGraphToParse.toUtf8().data(), outputs, inputs, NULL);
    if (ret < 0) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Failed creating filter graph";
        return false;
    }

    ret = avfilter_graph_config(m_avFilterGraph, nullptr);
    if (ret < 0) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Failed configuring filter graph";
        return false;
    }

    return true;
}
HardwareEncoder::HardwareEncoder(PipeWireProduce *produce)
    : Encoder(produce)
{
}

HardwareEncoder::~HardwareEncoder()
{
    if (m_drmFramesContext) {
        av_free(m_drmFramesContext);
    }

    if (m_drmContext) {
        av_free(m_drmContext);
    }
}
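// Wraps the DMA-BUF planes of an incoming frame in an AVDRMFrameDescriptor and
// submits the resulting DRM_PRIME frame to the filter graph, so the pixel data
// never has to be copied through the CPU.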
bool HardwareEncoder::filterFrame(const PipeWireFrame &frame)
{
    if (!frame.dmabuf) {
        return false;
    }

    auto attribs = frame.dmabuf.value();

    auto drmFrame = av_frame_alloc();
    if (!drmFrame) {
        qFatal("Failed to allocate memory");
    }
    drmFrame->format = AV_PIX_FMT_DRM_PRIME;
    drmFrame->width = attribs.width;
    drmFrame->height = attribs.height;
    if (m_quality) {
        drmFrame->quality = percentageToFrameQuality(m_quality.value());
    }

    AVDRMFrameDescriptor *frameDesc = (AVDRMFrameDescriptor *)av_mallocz(sizeof(AVDRMFrameDescriptor));
    frameDesc->nb_layers = 1;
    frameDesc->layers[0].nb_planes = attribs.planes.count();
    frameDesc->layers[0].format = attribs.format;
    for (int i = 0; i < attribs.planes.count(); ++i) {
        const auto &plane = attribs.planes[i];
        frameDesc->layers[0].planes[i].object_index = 0;
        frameDesc->layers[0].planes[i].offset = plane.offset;
        frameDesc->layers[0].planes[i].pitch = plane.stride;
    }
    frameDesc->nb_objects = 1;
    frameDesc->objects[0].fd = attribs.planes[0].fd;
    frameDesc->objects[0].format_modifier = attribs.modifier;
    frameDesc->objects[0].size = attribs.width * attribs.height * 4;

    drmFrame->data[0] = reinterpret_cast<uint8_t *>(frameDesc);
    drmFrame->buf[0] = av_buffer_create(reinterpret_cast<uint8_t *>(frameDesc), sizeof(*frameDesc), av_buffer_default_free, nullptr, 0);
    if (frame.presentationTimestamp) {
        drmFrame->pts = m_produce->framePts(frame.presentationTimestamp);
    }

    if (auto result = av_buffersrc_add_frame(m_inputFilter, drmFrame); result < 0) {
        qCDebug(PIPEWIRERECORD_LOGGING) << "Failed sending frame for encoding" << av_err2str(result);
        av_frame_unref(drmFrame);
        return false;
    }

    av_frame_free(&drmFrame);
    return true;
}
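// Checks that a VAAPI device is available and that the requested size lies within
// the range supported by the hardware. Returns the DRM device path on success, or
// an empty QByteArray if hardware encoding cannot be used.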
QByteArray HardwareEncoder::checkVaapi(const QSize &size)
{
    auto utils = VaapiUtils::instance();
    if (utils->devicePath().isEmpty()) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Hardware encoding is not supported on this device.";
        return QByteArray{};
    }

    auto minSize = utils->minimumSize();
    if (size.width() < minSize.width() || size.height() < minSize.height()) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Requested size" << size << "less than minimum supported hardware size" << minSize;
        return QByteArray{};
    }

    auto maxSize = utils->maximumSize();
    if (size.width() > maxSize.width() || size.height() > maxSize.height()) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Requested size" << size << "exceeds maximum supported hardware size" << maxSize;
        return QByteArray{};
    }

    return utils->devicePath();
}
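// Creates the DRM hardware device context and the frames context used to import
// DRM_PRIME frames of the given size into the encoder.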
bool HardwareEncoder::createDrmContext(const QSize &size)
{
    auto path = checkVaapi(size);
    if (path.isEmpty()) {
        return false;
    }

    int err = av_hwdevice_ctx_create(&m_drmContext, AV_HWDEVICE_TYPE_DRM, path.data(), NULL, AV_HWFRAME_MAP_READ);
    if (err < 0) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Failed to create DRM device. Error" << av_err2str(err);
        return false;
    }

    m_drmFramesContext = av_hwframe_ctx_alloc(m_drmContext);
    if (!m_drmFramesContext) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Failed to create DRM frames context";
        return false;
    }

    auto framesContext = reinterpret_cast<AVHWFramesContext *>(m_drmFramesContext->data);
    framesContext->format = AV_PIX_FMT_DRM_PRIME;
    framesContext->sw_format = AV_PIX_FMT_0BGR;
    framesContext->width = size.width();
    framesContext->height = size.height();

    if (auto result = av_hwframe_ctx_init(m_drmFramesContext); result < 0) {
        qCWarning(PIPEWIRERECORD_LOGGING) << "Failed initializing DRM frames context" << av_err2str(result);
        av_buffer_unref(&m_drmFramesContext);
        return false;
    }

    return true;
}
#include "moc_encoder_p.cpp"