1 /*
2 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
12
13 #include "api/video/encoded_image.h"
14 #include "api/video/i420_buffer.h"
15 #include "api/video/video_frame_buffer.h"
16 #include "common_video/include/video_frame_buffer.h"
17 #include "common_video/libyuv/include/webrtc_libyuv.h"
18 #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
19 #include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h"
20 #include "rtc_base/logging.h"
21
22 namespace webrtc {
23
24 class MultiplexDecoderAdapter::AdapterDecodedImageCallback
25 : public webrtc::DecodedImageCallback {
26 public:
AdapterDecodedImageCallback(webrtc::MultiplexDecoderAdapter * adapter,AlphaCodecStream stream_idx)27 AdapterDecodedImageCallback(webrtc::MultiplexDecoderAdapter* adapter,
28 AlphaCodecStream stream_idx)
29 : adapter_(adapter), stream_idx_(stream_idx) {}
30
Decoded(VideoFrame & decoded_image,absl::optional<int32_t> decode_time_ms,absl::optional<uint8_t> qp)31 void Decoded(VideoFrame& decoded_image,
32 absl::optional<int32_t> decode_time_ms,
33 absl::optional<uint8_t> qp) override {
34 if (!adapter_)
35 return;
36 adapter_->Decoded(stream_idx_, &decoded_image, decode_time_ms, qp);
37 }
Decoded(VideoFrame & decoded_image)38 int32_t Decoded(VideoFrame& decoded_image) override {
39 RTC_DCHECK_NOTREACHED();
40 return WEBRTC_VIDEO_CODEC_OK;
41 }
Decoded(VideoFrame & decoded_image,int64_t decode_time_ms)42 int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override {
43 RTC_DCHECK_NOTREACHED();
44 return WEBRTC_VIDEO_CODEC_OK;
45 }
46
47 private:
48 MultiplexDecoderAdapter* adapter_;
49 const AlphaCodecStream stream_idx_;
50 };
51
// Holds one stream's decoded frame (plus decode metadata) while waiting for
// the matching frame of the other stream with the same RTP timestamp.
struct MultiplexDecoderAdapter::DecodedImageData {
  // Placeholder entry used when the multiplex image carried only the YUV
  // component: a 1x1 dummy frame with RTP timestamp 0. MergeAlphaImages()
  // treats an alpha frame with timestamp 0 as "no alpha plane".
  explicit DecodedImageData(AlphaCodecStream stream_idx)
      : stream_idx_(stream_idx),
        decoded_image_(
            VideoFrame::Builder()
                .set_video_frame_buffer(
                    I420Buffer::Create(1 /* width */, 1 /* height */))
                .set_timestamp_rtp(0)
                .set_timestamp_us(0)
                .set_rotation(kVideoRotation_0)
                .build()) {
    RTC_DCHECK_EQ(kAXXStream, stream_idx);
  }
  // Real entry: stores a copy of the decoded frame and its metadata until
  // the other stream's frame arrives.
  DecodedImageData(AlphaCodecStream stream_idx,
                   const VideoFrame& decoded_image,
                   const absl::optional<int32_t>& decode_time_ms,
                   const absl::optional<uint8_t>& qp)
      : stream_idx_(stream_idx),
        decoded_image_(decoded_image),
        decode_time_ms_(decode_time_ms),
        qp_(qp) {}

  DecodedImageData() = delete;
  DecodedImageData(const DecodedImageData&) = delete;
  DecodedImageData& operator=(const DecodedImageData&) = delete;

  const AlphaCodecStream stream_idx_;
  VideoFrame decoded_image_;
  const absl::optional<int32_t> decode_time_ms_;
  const absl::optional<uint8_t> qp_;
};
83
// Owns the augmenting-data payload unpacked from a multiplex image until it
// can be attached to the merged output frame in MergeAlphaImages().
struct MultiplexDecoderAdapter::AugmentingData {
  AugmentingData(std::unique_ptr<uint8_t[]> augmenting_data, uint16_t data_size)
      : data_(std::move(augmenting_data)), size_(data_size) {}
  AugmentingData() = delete;
  AugmentingData(const AugmentingData&) = delete;
  AugmentingData& operator=(const AugmentingData&) = delete;

  std::unique_ptr<uint8_t[]> data_;  // May be moved out when consumed.
  const uint16_t size_;
};
94
MultiplexDecoderAdapter(VideoDecoderFactory * factory,const SdpVideoFormat & associated_format,bool supports_augmenting_data)95 MultiplexDecoderAdapter::MultiplexDecoderAdapter(
96 VideoDecoderFactory* factory,
97 const SdpVideoFormat& associated_format,
98 bool supports_augmenting_data)
99 : factory_(factory),
100 associated_format_(associated_format),
101 supports_augmenting_data_(supports_augmenting_data) {}
102
~MultiplexDecoderAdapter()103 MultiplexDecoderAdapter::~MultiplexDecoderAdapter() {
104 Release();
105 }
106
Configure(const Settings & settings)107 bool MultiplexDecoderAdapter::Configure(const Settings& settings) {
108 RTC_DCHECK_EQ(settings.codec_type(), kVideoCodecMultiplex);
109 Settings associated_settings = settings;
110 associated_settings.set_codec_type(
111 PayloadStringToCodecType(associated_format_.name));
112 for (size_t i = 0; i < kAlphaCodecStreams; ++i) {
113 std::unique_ptr<VideoDecoder> decoder =
114 factory_->CreateVideoDecoder(associated_format_);
115 if (!decoder->Configure(associated_settings)) {
116 return false;
117 }
118 adapter_callbacks_.emplace_back(
119 new MultiplexDecoderAdapter::AdapterDecodedImageCallback(
120 this, static_cast<AlphaCodecStream>(i)));
121 decoder->RegisterDecodeCompleteCallback(adapter_callbacks_.back().get());
122 decoders_.emplace_back(std::move(decoder));
123 }
124 return true;
125 }
126
Decode(const EncodedImage & input_image,bool missing_frames,int64_t render_time_ms)127 int32_t MultiplexDecoderAdapter::Decode(const EncodedImage& input_image,
128 bool missing_frames,
129 int64_t render_time_ms) {
130 MultiplexImage image = MultiplexEncodedImagePacker::Unpack(input_image);
131
132 if (supports_augmenting_data_) {
133 RTC_DCHECK(decoded_augmenting_data_.find(input_image.Timestamp()) ==
134 decoded_augmenting_data_.end());
135 decoded_augmenting_data_.emplace(
136 std::piecewise_construct,
137 std::forward_as_tuple(input_image.Timestamp()),
138 std::forward_as_tuple(std::move(image.augmenting_data),
139 image.augmenting_data_size));
140 }
141
142 if (image.component_count == 1) {
143 RTC_DCHECK(decoded_data_.find(input_image.Timestamp()) ==
144 decoded_data_.end());
145 decoded_data_.emplace(std::piecewise_construct,
146 std::forward_as_tuple(input_image.Timestamp()),
147 std::forward_as_tuple(kAXXStream));
148 }
149 int32_t rv = 0;
150 for (size_t i = 0; i < image.image_components.size(); i++) {
151 rv = decoders_[image.image_components[i].component_index]->Decode(
152 image.image_components[i].encoded_image, missing_frames,
153 render_time_ms);
154 if (rv != WEBRTC_VIDEO_CODEC_OK)
155 return rv;
156 }
157 return rv;
158 }
159
RegisterDecodeCompleteCallback(DecodedImageCallback * callback)160 int32_t MultiplexDecoderAdapter::RegisterDecodeCompleteCallback(
161 DecodedImageCallback* callback) {
162 decoded_complete_callback_ = callback;
163 return WEBRTC_VIDEO_CODEC_OK;
164 }
165
Release()166 int32_t MultiplexDecoderAdapter::Release() {
167 for (auto& decoder : decoders_) {
168 const int32_t rv = decoder->Release();
169 if (rv)
170 return rv;
171 }
172 decoders_.clear();
173 adapter_callbacks_.clear();
174 return WEBRTC_VIDEO_CODEC_OK;
175 }
176
Decoded(AlphaCodecStream stream_idx,VideoFrame * decoded_image,absl::optional<int32_t> decode_time_ms,absl::optional<uint8_t> qp)177 void MultiplexDecoderAdapter::Decoded(AlphaCodecStream stream_idx,
178 VideoFrame* decoded_image,
179 absl::optional<int32_t> decode_time_ms,
180 absl::optional<uint8_t> qp) {
181 const auto& other_decoded_data_it =
182 decoded_data_.find(decoded_image->timestamp());
183 const auto& augmenting_data_it =
184 decoded_augmenting_data_.find(decoded_image->timestamp());
185 const bool has_augmenting_data =
186 augmenting_data_it != decoded_augmenting_data_.end();
187 if (other_decoded_data_it != decoded_data_.end()) {
188 uint16_t augmenting_data_size =
189 has_augmenting_data ? augmenting_data_it->second.size_ : 0;
190 std::unique_ptr<uint8_t[]> augmenting_data =
191 has_augmenting_data ? std::move(augmenting_data_it->second.data_)
192 : nullptr;
193 auto& other_image_data = other_decoded_data_it->second;
194 if (stream_idx == kYUVStream) {
195 RTC_DCHECK_EQ(kAXXStream, other_image_data.stream_idx_);
196 MergeAlphaImages(decoded_image, decode_time_ms, qp,
197 &other_image_data.decoded_image_,
198 other_image_data.decode_time_ms_, other_image_data.qp_,
199 std::move(augmenting_data), augmenting_data_size);
200 } else {
201 RTC_DCHECK_EQ(kYUVStream, other_image_data.stream_idx_);
202 RTC_DCHECK_EQ(kAXXStream, stream_idx);
203 MergeAlphaImages(&other_image_data.decoded_image_,
204 other_image_data.decode_time_ms_, other_image_data.qp_,
205 decoded_image, decode_time_ms, qp,
206 std::move(augmenting_data), augmenting_data_size);
207 }
208 decoded_data_.erase(decoded_data_.begin(), other_decoded_data_it);
209 if (has_augmenting_data) {
210 decoded_augmenting_data_.erase(decoded_augmenting_data_.begin(),
211 augmenting_data_it);
212 }
213 return;
214 }
215 RTC_DCHECK(decoded_data_.find(decoded_image->timestamp()) ==
216 decoded_data_.end());
217 decoded_data_.emplace(
218 std::piecewise_construct,
219 std::forward_as_tuple(decoded_image->timestamp()),
220 std::forward_as_tuple(stream_idx, *decoded_image, decode_time_ms, qp));
221 }
222
MergeAlphaImages(VideoFrame * decoded_image,const absl::optional<int32_t> & decode_time_ms,const absl::optional<uint8_t> & qp,VideoFrame * alpha_decoded_image,const absl::optional<int32_t> & alpha_decode_time_ms,const absl::optional<uint8_t> & alpha_qp,std::unique_ptr<uint8_t[]> augmenting_data,uint16_t augmenting_data_length)223 void MultiplexDecoderAdapter::MergeAlphaImages(
224 VideoFrame* decoded_image,
225 const absl::optional<int32_t>& decode_time_ms,
226 const absl::optional<uint8_t>& qp,
227 VideoFrame* alpha_decoded_image,
228 const absl::optional<int32_t>& alpha_decode_time_ms,
229 const absl::optional<uint8_t>& alpha_qp,
230 std::unique_ptr<uint8_t[]> augmenting_data,
231 uint16_t augmenting_data_length) {
232 rtc::scoped_refptr<VideoFrameBuffer> merged_buffer;
233 if (!alpha_decoded_image->timestamp()) {
234 merged_buffer = decoded_image->video_frame_buffer();
235 } else {
236 rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
237 decoded_image->video_frame_buffer()->ToI420();
238 rtc::scoped_refptr<webrtc::I420BufferInterface> alpha_buffer =
239 alpha_decoded_image->video_frame_buffer()->ToI420();
240 RTC_DCHECK_EQ(yuv_buffer->width(), alpha_buffer->width());
241 RTC_DCHECK_EQ(yuv_buffer->height(), alpha_buffer->height());
242 merged_buffer = WrapI420ABuffer(
243 yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
244 yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
245 yuv_buffer->DataV(), yuv_buffer->StrideV(), alpha_buffer->DataY(),
246 alpha_buffer->StrideY(),
247 // To keep references alive.
248 [yuv_buffer, alpha_buffer] {});
249 }
250 if (supports_augmenting_data_) {
251 merged_buffer = rtc::make_ref_counted<AugmentedVideoFrameBuffer>(
252 merged_buffer, std::move(augmenting_data), augmenting_data_length);
253 }
254
255 VideoFrame merged_image = VideoFrame::Builder()
256 .set_video_frame_buffer(merged_buffer)
257 .set_timestamp_rtp(decoded_image->timestamp())
258 .set_timestamp_us(0)
259 .set_rotation(decoded_image->rotation())
260 .set_id(decoded_image->id())
261 .set_packet_infos(decoded_image->packet_infos())
262 .build();
263 decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
264 }
265
266 } // namespace webrtc
267