// SPDX-FileCopyrightText: Copyright 2023 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#pragma once

#include <memory>
#include <optional>
#include <queue>
#include <span>
#include <vector>

#include "common/common_funcs.h"
#include "common/common_types.h"
#include "video_core/host1x/nvdec_common.h"

extern "C" {
#if defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif

#include <libavcodec/avcodec.h>
#include <libavfilter/avfilter.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>

#if defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic pop
#endif
}

namespace FFmpeg {

class Packet;
class Frame;
class Decoder;
class HardwareContext;
class DecoderContext;
class DeinterlaceFilter;

// Wraps an AVPacket, a container for compressed bitstream data.
class Packet {
public:
    YUZU_NON_COPYABLE(Packet);
    YUZU_NON_MOVEABLE(Packet);

    explicit Packet(std::span<const u8> data);
    ~Packet();

    AVPacket* GetPacket() const {
        return m_packet;
    }

private:
    AVPacket* m_packet{};
};
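
// Illustrative usage sketch (not part of this header's implementation): wrapping a
// raw bitstream buffer so it can be handed to a DecoderContext. 'bitstream' is a
// hypothetical std::span<const u8> supplied by the caller.
//
//     FFmpeg::Packet packet(bitstream);
//     decoder_context.SendPacket(packet);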

// Wraps an AVFrame, a container for audio and video stream data.
class Frame {
public:
    YUZU_NON_COPYABLE(Frame);
    YUZU_NON_MOVEABLE(Frame);

    explicit Frame();
    ~Frame();

    int GetWidth() const {
        return m_frame->width;
    }

    int GetHeight() const {
        return m_frame->height;
    }

    AVPixelFormat GetPixelFormat() const {
        return static_cast<AVPixelFormat>(m_frame->format);
    }

    int GetStride(int plane) const {
        return m_frame->linesize[plane];
    }

    int* GetStrides() const {
        return m_frame->linesize;
    }

    u8* GetData(int plane) const {
        return m_frame->data[plane];
    }

    u8** GetPlanes() const {
        return m_frame->data;
    }

    void SetFormat(int format) {
        m_frame->format = format;
    }

    AVFrame* GetFrame() const {
        return m_frame;
    }

private:
    AVFrame* m_frame{};
};
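
// Illustrative sketch (assumption, not taken from this header's implementation):
// walking the first data plane of a decoded frame row by row. The actual plane
// layout depends on the frame's pixel format.
//
//     const u8* plane0 = frame->GetData(0);
//     const int stride0 = frame->GetStride(0);
//     for (int y = 0; y < frame->GetHeight(); ++y) {
//         const u8* row = plane0 + y * stride0;
//         // ... consume frame->GetWidth() samples from row ...
//     }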

// Wraps an AVCodec, a type containing information about a codec.
class Decoder {
public:
    YUZU_NON_COPYABLE(Decoder);
    YUZU_NON_MOVEABLE(Decoder);

    explicit Decoder(Tegra::Host1x::NvdecCommon::VideoCodec codec);
    ~Decoder() = default;

    bool SupportsDecodingOnDevice(AVPixelFormat* out_pix_fmt, AVHWDeviceType type) const;

    const AVCodec* GetCodec() const {
        return m_codec;
    }

private:
    const AVCodec* m_codec{};
};
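
// Illustrative sketch (assumes VideoCodec::H264 exists in nvdec_common.h and that
// CUDA is one of the device types worth probing): checking whether a codec can be
// decoded on a particular hardware device.
//
//     FFmpeg::Decoder decoder(Tegra::Host1x::NvdecCommon::VideoCodec::H264);
//     AVPixelFormat hw_pix_fmt{};
//     if (decoder.SupportsDecodingOnDevice(&hw_pix_fmt, AV_HWDEVICE_TYPE_CUDA)) {
//         // hw_pix_fmt now names the format that hardware frames would use.
//     }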

// Wraps AVBufferRef for an accelerated decoder.
class HardwareContext {
public:
    YUZU_NON_COPYABLE(HardwareContext);
    YUZU_NON_MOVEABLE(HardwareContext);

    static std::vector<AVHWDeviceType> GetSupportedDeviceTypes();

    explicit HardwareContext() = default;
    ~HardwareContext();

    bool InitializeForDecoder(DecoderContext& decoder_context, const Decoder& decoder);

    AVBufferRef* GetBufferRef() const {
        return m_gpu_decoder;
    }

private:
    bool InitializeWithType(AVHWDeviceType type);

    AVBufferRef* m_gpu_decoder{};
};
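
// Illustrative sketch (assumed call order; the .cpp may differ): trying to attach
// a hardware device to an existing decoder context and falling back to software
// decoding when no device can be created.
//
//     FFmpeg::HardwareContext hardware_context;
//     if (!hardware_context.InitializeForDecoder(decoder_context, decoder)) {
//         // Continue with software decoding.
//     }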

// Wraps an AVCodecContext.
class DecoderContext {
public:
    YUZU_NON_COPYABLE(DecoderContext);
    YUZU_NON_MOVEABLE(DecoderContext);

    explicit DecoderContext(const Decoder& decoder);
    ~DecoderContext();

    void InitializeHardwareDecoder(const HardwareContext& context, AVPixelFormat hw_pix_fmt);
    bool OpenContext(const Decoder& decoder);
    bool SendPacket(const Packet& packet);
    std::unique_ptr<Frame> ReceiveFrame(bool* out_is_interlaced);

    AVCodecContext* GetCodecContext() const {
        return m_codec_context;
    }

private:
    AVCodecContext* m_codec_context{};
};
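
// Illustrative end-to-end sketch (assumed sequence, based only on the declarations
// above): open the context, submit one packet, then ask for a decoded frame and
// record whether it is interlaced. 'bitstream' is a hypothetical buffer.
//
//     FFmpeg::DecoderContext decoder_context(decoder);
//     decoder_context.OpenContext(decoder);
//
//     FFmpeg::Packet packet(bitstream);
//     if (decoder_context.SendPacket(packet)) {
//         bool is_interlaced{};
//         auto frame = decoder_context.ReceiveFrame(&is_interlaced);
//         // 'frame' may be empty if the decoder has not produced output yet.
//     }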

// Wraps an AVFilterGraph.
class DeinterlaceFilter {
public:
    YUZU_NON_COPYABLE(DeinterlaceFilter);
    YUZU_NON_MOVEABLE(DeinterlaceFilter);

    explicit DeinterlaceFilter(const Frame& frame);
    ~DeinterlaceFilter();

    bool AddSourceFrame(const Frame& frame);
    std::unique_ptr<Frame> DrainSinkFrame();

private:
    AVFilterGraph* m_filter_graph{};
    AVFilterContext* m_source_context{};
    AVFilterContext* m_sink_context{};
    bool m_initialized{};
};
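
// Illustrative sketch (assumed usage pattern): push an interlaced frame into the
// filter graph and drain whatever progressive frames it produces. 'frame_queue' is
// a hypothetical std::queue<std::unique_ptr<FFmpeg::Frame>>.
//
//     FFmpeg::DeinterlaceFilter deinterlace_filter(*frame);
//     if (deinterlace_filter.AddSourceFrame(*frame)) {
//         while (auto filtered = deinterlace_filter.DrainSinkFrame()) {
//             frame_queue.push(std::move(filtered));
//         }
//     }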

class DecodeApi {
public:
    YUZU_NON_COPYABLE(DecodeApi);
    YUZU_NON_MOVEABLE(DecodeApi);

    DecodeApi() = default;
    ~DecodeApi() = default;

    bool Initialize(Tegra::Host1x::NvdecCommon::VideoCodec codec);
    void Reset();

    bool SendPacket(std::span<const u8> packet_data, size_t configuration_size);
    void ReceiveFrames(std::queue<std::unique_ptr<Frame>>& frame_queue);

private:
    std::optional<FFmpeg::Decoder> m_decoder;
    std::optional<FFmpeg::DecoderContext> m_decoder_context;
    std::optional<FFmpeg::HardwareContext> m_hardware_context;
    std::optional<FFmpeg::DeinterlaceFilter> m_deinterlace_filter;
};
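
// Illustrative sketch (assumed caller-side usage; surrounding variable names such
// as 'packet_data' and 'configuration_size' are hypothetical): the high-level entry
// point intended to be used instead of the individual wrappers above.
//
//     FFmpeg::DecodeApi decode_api;
//     decode_api.Initialize(Tegra::Host1x::NvdecCommon::VideoCodec::H264);
//
//     std::queue<std::unique_ptr<FFmpeg::Frame>> frame_queue;
//     decode_api.SendPacket(packet_data, configuration_size);
//     decode_api.ReceiveFrames(frame_queue);
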
} // namespace FFmpeg