1 /*
2 * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
12
13 #include <algorithm>
14 #include <map>
15 #include <memory>
16 #include <set>
17 #include <string>
18 #include <utility>
19 #include <vector>
20
21 #include "api/array_view.h"
22 #include "api/numerics/samples_stats_counter.h"
23 #include "api/test/metrics/metric.h"
24 #include "api/units/time_delta.h"
25 #include "api/units/timestamp.h"
26 #include "api/video/video_frame.h"
27 #include "rtc_base/checks.h"
28 #include "rtc_base/logging.h"
29 #include "rtc_base/strings/string_builder.h"
30 #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frame_in_flight.h"
31 #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_frames_comparator.h"
32 #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_internal_shared_objects.h"
33 #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
34 #include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_stream_state.h"
35 #include "test/pc/e2e/metric_metadata_keys.h"
36
37 namespace webrtc {
38 namespace {
39
40 using ::webrtc::test::ImprovementDirection;
41 using ::webrtc::test::Unit;
42 using ::webrtc::webrtc_pc_e2e::MetricMetadataKey;
43
44 constexpr int kBitsInByte = 8;
45 constexpr absl::string_view kSkipRenderedFrameReasonProcessed = "processed";
46 constexpr absl::string_view kSkipRenderedFrameReasonRendered = "rendered";
47 constexpr absl::string_view kSkipRenderedFrameReasonDropped =
48 "considered dropped";
49
50 void LogFrameCounters(const std::string& name, const FrameCounters& counters) {
51 RTC_LOG(LS_INFO) << "[" << name
52 << "] Captured : " << counters.captured;
53 RTC_LOG(LS_INFO) << "[" << name
54 << "] Pre encoded : " << counters.pre_encoded;
55 RTC_LOG(LS_INFO) << "[" << name
56 << "] Encoded : " << counters.encoded;
57 RTC_LOG(LS_INFO) << "[" << name
58 << "] Received : " << counters.received;
59 RTC_LOG(LS_INFO) << "[" << name
60 << "] Decoded : " << counters.decoded;
61 RTC_LOG(LS_INFO) << "[" << name
62 << "] Rendered : " << counters.rendered;
63 RTC_LOG(LS_INFO) << "[" << name
64 << "] Dropped : " << counters.dropped;
65 RTC_LOG(LS_INFO) << "[" << name
66 << "] Failed to decode : " << counters.failed_to_decode;
67 }
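// Illustrative example of the log output produced above (stream name and
// counts are hypothetical):
//   [alice-video] Captured : 300
//   [alice-video] Rendered : 280
//   [alice-video] Dropped : 20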
68
69 void LogStreamInternalStats(const std::string& name,
70 const StreamStats& stats,
71 Timestamp start_time) {
72 for (const auto& entry : stats.dropped_by_phase) {
73 RTC_LOG(LS_INFO) << "[" << name << "] Dropped at " << ToString(entry.first)
74 << ": " << entry.second;
75 }
76 Timestamp first_encoded_frame_time = Timestamp::PlusInfinity();
77 for (const StreamCodecInfo& encoder : stats.encoders) {
78 RTC_DCHECK(encoder.switched_on_at.IsFinite());
79 RTC_DCHECK(encoder.switched_from_at.IsFinite());
80 if (first_encoded_frame_time.IsInfinite()) {
81 first_encoded_frame_time = encoder.switched_on_at;
82 }
83 RTC_LOG(LS_INFO)
84 << "[" << name << "] Used encoder: \"" << encoder.codec_name
85 << "\" used from (frame_id=" << encoder.first_frame_id
86 << "; from_stream_start="
87 << (encoder.switched_on_at - stats.stream_started_time).ms()
88 << "ms, from_call_start=" << (encoder.switched_on_at - start_time).ms()
89 << "ms) until (frame_id=" << encoder.last_frame_id
90 << "; from_stream_start="
91 << (encoder.switched_from_at - stats.stream_started_time).ms()
92 << "ms, from_call_start="
93 << (encoder.switched_from_at - start_time).ms() << "ms)";
94 }
95 for (const StreamCodecInfo& decoder : stats.decoders) {
96 RTC_DCHECK(decoder.switched_on_at.IsFinite());
97 RTC_DCHECK(decoder.switched_from_at.IsFinite());
98 RTC_LOG(LS_INFO)
99 << "[" << name << "] Used decoder: \"" << decoder.codec_name
100 << "\" used from (frame_id=" << decoder.first_frame_id
101 << "; from_stream_start="
102 << (decoder.switched_on_at - stats.stream_started_time).ms()
103 << "ms, from_call_start=" << (decoder.switched_on_at - start_time).ms()
104 << "ms) until (frame_id=" << decoder.last_frame_id
105 << "; from_stream_start="
106 << (decoder.switched_from_at - stats.stream_started_time).ms()
107 << "ms, from_call_start="
108 << (decoder.switched_from_at - start_time).ms() << "ms)";
109 }
110 }
111
112 template <typename T>
113 absl::optional<T> MaybeGetValue(const std::map<size_t, T>& map, size_t key) {
114 auto it = map.find(key);
115 if (it == map.end()) {
116 return absl::nullopt;
117 }
118 return it->second;
119 }
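// Usage sketch for the helper above (hypothetical values):
//   std::map<size_t, int> m = {{1, 10}};
//   MaybeGetValue(m, 1);  // returns absl::optional<int>(10)
//   MaybeGetValue(m, 2);  // returns absl::nullopt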
120
121 SamplesStatsCounter::StatsSample StatsSample(double value,
122 Timestamp sampling_time) {
123 return SamplesStatsCounter::StatsSample{value, sampling_time};
124 }
125
126 } // namespace
127
128 DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
129 webrtc::Clock* clock,
130 test::MetricsLogger* metrics_logger,
131 DefaultVideoQualityAnalyzerOptions options)
132 : options_(options),
133 clock_(clock),
134 metrics_logger_(metrics_logger),
135 frames_comparator_(clock, cpu_measurer_, options) {
136 RTC_CHECK(metrics_logger_);
137 }
138
139 DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() {
140 Stop();
141 }
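// Typical lifecycle of the analyzer, as a rough sketch (peer names and the
// thread count are hypothetical; in practice the analyzer is driven by the
// PeerConnection E2E test framework):
//   DefaultVideoQualityAnalyzer analyzer(clock, metrics_logger, options);
//   analyzer.Start("my_test", std::vector<std::string>{"alice", "bob"},
//                  /*max_threads_count=*/4);
//   // ... OnFrameCaptured / OnFrameEncoded / ... / OnFrameRendered callbacks
//   // are invoked while the call is running ...
//   analyzer.Stop();  // Also invoked from the destructor if not called.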
142
143 void DefaultVideoQualityAnalyzer::Start(
144 std::string test_case_name,
145 rtc::ArrayView<const std::string> peer_names,
146 int max_threads_count) {
147 test_label_ = std::move(test_case_name);
148 frames_comparator_.Start(max_threads_count);
149 {
150 MutexLock lock(&mutex_);
151 peers_ = std::make_unique<NamesCollection>(peer_names);
152 RTC_CHECK(start_time_.IsMinusInfinity());
153
154 RTC_CHECK_EQ(state_, State::kNew)
155 << "DefaultVideoQualityAnalyzer is already started";
156 state_ = State::kActive;
157 start_time_ = Now();
158 }
159 }
160
161 uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
162 absl::string_view peer_name,
163 const std::string& stream_label,
164 const webrtc::VideoFrame& frame) {
165 // The frame id is assigned under `mutex_` below.
166 Timestamp captured_time = Now();
167 Timestamp start_time = Timestamp::MinusInfinity();
168 size_t peer_index = -1;
169 size_t peers_count = -1;
170 size_t stream_index;
171 uint16_t frame_id = VideoFrame::kNotSetId;
172 {
173 MutexLock lock(&mutex_);
174 frame_id = GetNextFrameId();
175 RTC_CHECK_EQ(state_, State::kActive)
176 << "DefaultVideoQualityAnalyzer has to be started before use";
177 // Create local copies of `start_time_`, the peer's index and the total peer
178 // count so they can be used without holding `mutex_` while accessing
179 // `frames_comparator_`.
180 start_time = start_time_;
181 peer_index = peers_->index(peer_name);
182 peers_count = peers_->size();
183 stream_index = streams_.AddIfAbsent(stream_label);
184 }
185 // Ensure that stats for this stream exist.
186 frames_comparator_.EnsureStatsForStream(stream_index, peer_index, peers_count,
187 captured_time, start_time);
188 {
189 MutexLock lock(&mutex_);
190 stream_to_sender_[stream_index] = peer_index;
191 frame_counters_.captured++;
192 for (size_t i : peers_->GetAllIndexes()) {
193 if (i != peer_index || options_.enable_receive_own_stream) {
194 InternalStatsKey key(stream_index, peer_index, i);
195 stream_frame_counters_[key].captured++;
196 }
197 }
198
199 std::set<size_t> frame_receivers_indexes = peers_->GetPresentIndexes();
200 if (!options_.enable_receive_own_stream) {
201 frame_receivers_indexes.erase(peer_index);
202 }
203
204 auto state_it = stream_states_.find(stream_index);
205 if (state_it == stream_states_.end()) {
206 stream_states_.emplace(
207 stream_index,
208 StreamState(peer_index, frame_receivers_indexes, captured_time));
209 }
210 StreamState* state = &stream_states_.at(stream_index);
211 state->PushBack(frame_id);
212 // Update frames in flight info.
213 auto it = captured_frames_in_flight_.find(frame_id);
214 if (it != captured_frames_in_flight_.end()) {
215 // If we overflowed uint16_t and hit a previous frame id while that frame is
216 // still in flight, it means this stream hasn't been rendered for a long
217 // time, so the existing frame has to be processed as dropped.
218 for (size_t i : peers_->GetPresentIndexes()) {
219 if (i == peer_index && !options_.enable_receive_own_stream) {
220 continue;
221 }
222
223 uint16_t oldest_frame_id = state->PopFront(i);
224 RTC_DCHECK_EQ(frame_id, oldest_frame_id);
225 frame_counters_.dropped++;
226 InternalStatsKey key(stream_index, peer_index, i);
227 stream_frame_counters_.at(key).dropped++;
228
229 analyzer_stats_.frames_in_flight_left_count.AddSample(
230 StatsSample(captured_frames_in_flight_.size(), Now()));
231 frames_comparator_.AddComparison(
232 InternalStatsKey(stream_index, peer_index, i),
233 /*captured=*/absl::nullopt,
234 /*rendered=*/absl::nullopt, FrameComparisonType::kDroppedFrame,
235 it->second.GetStatsForPeer(i));
236 }
237
238 captured_frames_in_flight_.erase(it);
239 }
240 captured_frames_in_flight_.emplace(
241 frame_id, FrameInFlight(stream_index, frame, captured_time,
242 std::move(frame_receivers_indexes)));
243 // Set frame id on local copy of the frame
244 captured_frames_in_flight_.at(frame_id).SetFrameId(frame_id);
245
246 // Update history stream<->frame mapping
247 for (auto it = stream_to_frame_id_history_.begin();
248 it != stream_to_frame_id_history_.end(); ++it) {
249 it->second.erase(frame_id);
250 }
251 stream_to_frame_id_history_[stream_index].insert(frame_id);
252 stream_to_frame_id_full_history_[stream_index].push_back(frame_id);
253
254 // If the state has too many frames in flight => remove the oldest queued
255 // frame to avoid using too much memory.
256 if (state->GetAliveFramesCount() >
257 options_.max_frames_in_flight_per_stream_count) {
258 uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead();
259 auto it = captured_frames_in_flight_.find(frame_id_to_remove);
260 RTC_CHECK(it != captured_frames_in_flight_.end())
261 << "Frame with ID " << frame_id_to_remove
262 << " is expected to be in flight, but hasn't been found in "
263 << "|captured_frames_in_flight_|";
264 bool is_removed = it->second.RemoveFrame();
265 RTC_DCHECK(is_removed)
266 << "Invalid stream state: alive frame is removed already";
267 }
268 if (options_.report_infra_metrics) {
269 analyzer_stats_.on_frame_captured_processing_time_ms.AddSample(
270 (Now() - captured_time).ms<double>());
271 }
272 }
273 return frame_id;
274 }
275
276 void DefaultVideoQualityAnalyzer::OnFramePreEncode(
277 absl::string_view peer_name,
278 const webrtc::VideoFrame& frame) {
279 Timestamp processing_started = Now();
280 MutexLock lock(&mutex_);
281 RTC_CHECK_EQ(state_, State::kActive)
282 << "DefaultVideoQualityAnalyzer has to be started before use";
283
284 auto it = captured_frames_in_flight_.find(frame.id());
285 RTC_CHECK(it != captured_frames_in_flight_.end())
286 << "Frame id=" << frame.id() << " not found";
287 FrameInFlight& frame_in_flight = it->second;
288 frame_counters_.pre_encoded++;
289 size_t peer_index = peers_->index(peer_name);
290 for (size_t i : peers_->GetAllIndexes()) {
291 if (i != peer_index || options_.enable_receive_own_stream) {
292 InternalStatsKey key(frame_in_flight.stream(), peer_index, i);
293 stream_frame_counters_.at(key).pre_encoded++;
294 }
295 }
296 frame_in_flight.SetPreEncodeTime(Now());
297
298 if (options_.report_infra_metrics) {
299 analyzer_stats_.on_frame_pre_encode_processing_time_ms.AddSample(
300 (Now() - processing_started).ms<double>());
301 }
302 }
303
304 void DefaultVideoQualityAnalyzer::OnFrameEncoded(
305 absl::string_view peer_name,
306 uint16_t frame_id,
307 const webrtc::EncodedImage& encoded_image,
308 const EncoderStats& stats,
309 bool discarded) {
310 if (discarded)
311 return;
312
313 Timestamp processing_started = Now();
314 MutexLock lock(&mutex_);
315 RTC_CHECK_EQ(state_, State::kActive)
316 << "DefaultVideoQualityAnalyzer has to be started before use";
317
318 auto it = captured_frames_in_flight_.find(frame_id);
319 if (it == captured_frames_in_flight_.end()) {
320 RTC_LOG(LS_WARNING)
321 << "The encoding of video frame with id [" << frame_id << "] for peer ["
322 << peer_name << "] finished after all receivers rendered this frame or "
323 << "were removed. It can be OK for simulcast/SVC if higher quality "
324 << "stream is not required or the last receiver was unregistered "
325 << "between encoding of different layers, but it may indicate an ERROR "
326 << "for singlecast or if it happens often.";
327 return;
328 }
329 FrameInFlight& frame_in_flight = it->second;
330 // For SVC we can receive multiple encoded images for one frame, so to cover
331 // all cases we have to pick the last encode time.
332 if (!frame_in_flight.HasEncodedTime()) {
333 // Increase counters only the first time we see this frame.
334 frame_counters_.encoded++;
335 size_t peer_index = peers_->index(peer_name);
336 for (size_t i : peers_->GetAllIndexes()) {
337 if (i != peer_index || options_.enable_receive_own_stream) {
338 InternalStatsKey key(frame_in_flight.stream(), peer_index, i);
339 stream_frame_counters_.at(key).encoded++;
340 }
341 }
342 }
343 Timestamp now = Now();
344 StreamCodecInfo used_encoder;
345 used_encoder.codec_name = stats.encoder_name;
346 used_encoder.first_frame_id = frame_id;
347 used_encoder.last_frame_id = frame_id;
348 used_encoder.switched_on_at = now;
349 used_encoder.switched_from_at = now;
350 frame_in_flight.OnFrameEncoded(
351 now, encoded_image._frameType, DataSize::Bytes(encoded_image.size()),
352 stats.target_encode_bitrate, stats.qp, used_encoder);
353
354 if (options_.report_infra_metrics) {
355 analyzer_stats_.on_frame_encoded_processing_time_ms.AddSample(
356 (Now() - processing_started).ms<double>());
357 }
358 }
359
360 void DefaultVideoQualityAnalyzer::OnFrameDropped(
361 absl::string_view peer_name,
362 webrtc::EncodedImageCallback::DropReason reason) {
363 // Do nothing here, because this drop will be observed on the renderer side.
364 }
365
366 void DefaultVideoQualityAnalyzer::OnFramePreDecode(
367 absl::string_view peer_name,
368 uint16_t frame_id,
369 const webrtc::EncodedImage& input_image) {
370 Timestamp processing_started = Now();
371 MutexLock lock(&mutex_);
372 RTC_CHECK_EQ(state_, State::kActive)
373 << "DefaultVideoQualityAnalyzer has to be started before use";
374
375 size_t peer_index = peers_->index(peer_name);
376
377 if (frame_id == VideoFrame::kNotSetId) {
378 frame_counters_.received++;
379 unknown_sender_frame_counters_[std::string(peer_name)].received++;
380 return;
381 }
382
383 auto it = captured_frames_in_flight_.find(frame_id);
384 if (it == captured_frames_in_flight_.end() ||
385 it->second.HasReceivedTime(peer_index)) {
386 // It means this frame was pre-decoded before, so we can skip it. It may
387 // happen when we have multiple simulcast streams in one track and receive
388 // the same picture from two different streams, because an SFU can't reliably
389 // correlate two simulcast streams and may start relaying the second stream
390 // from the same frame it has just relayed for the first stream.
391 return;
392 }
393
394 frame_counters_.received++;
395 InternalStatsKey key(it->second.stream(),
396 stream_to_sender_.at(it->second.stream()), peer_index);
397 stream_frame_counters_.at(key).received++;
398 // Determine the time of the last received packet of this video frame.
399 RTC_DCHECK(!input_image.PacketInfos().empty());
400 Timestamp last_receive_time =
401 std::max_element(input_image.PacketInfos().cbegin(),
402 input_image.PacketInfos().cend(),
403 [](const RtpPacketInfo& a, const RtpPacketInfo& b) {
404 return a.receive_time() < b.receive_time();
405 })
406 ->receive_time();
407 it->second.OnFramePreDecode(peer_index,
408 /*received_time=*/last_receive_time,
409 /*decode_start_time=*/Now(),
410 input_image._frameType,
411 DataSize::Bytes(input_image.size()));
412
413 if (options_.report_infra_metrics) {
414 analyzer_stats_.on_frame_pre_decode_processing_time_ms.AddSample(
415 (Now() - processing_started).ms<double>());
416 }
417 }
418
419 void DefaultVideoQualityAnalyzer::OnFrameDecoded(
420 absl::string_view peer_name,
421 const webrtc::VideoFrame& frame,
422 const DecoderStats& stats) {
423 Timestamp processing_started = Now();
424 MutexLock lock(&mutex_);
425 RTC_CHECK_EQ(state_, State::kActive)
426 << "DefaultVideoQualityAnalyzer has to be started before use";
427
428 size_t peer_index = peers_->index(peer_name);
429
430 if (frame.id() == VideoFrame::kNotSetId) {
431 frame_counters_.decoded++;
432 unknown_sender_frame_counters_[std::string(peer_name)].decoded++;
433 return;
434 }
435
436 auto it = captured_frames_in_flight_.find(frame.id());
437 if (it == captured_frames_in_flight_.end() ||
438 it->second.HasDecodeEndTime(peer_index)) {
439 // It means this frame was decoded before, so we can skip it. It may happen
440 // when we have multiple simulcast streams in one track and receive
441 // the same frame from two different streams, because an SFU can't reliably
442 // correlate two simulcast streams and may start relaying the second stream
443 // from the same frame it has just relayed for the first stream.
444 return;
445 }
446 frame_counters_.decoded++;
447 InternalStatsKey key(it->second.stream(),
448 stream_to_sender_.at(it->second.stream()), peer_index);
449 stream_frame_counters_.at(key).decoded++;
450 Timestamp now = Now();
451 StreamCodecInfo used_decoder;
452 used_decoder.codec_name = stats.decoder_name;
453 used_decoder.first_frame_id = frame.id();
454 used_decoder.last_frame_id = frame.id();
455 used_decoder.switched_on_at = now;
456 used_decoder.switched_from_at = now;
457 it->second.OnFrameDecoded(peer_index, now, frame.width(), frame.height(),
458 used_decoder);
459
460 if (options_.report_infra_metrics) {
461 analyzer_stats_.on_frame_decoded_processing_time_ms.AddSample(
462 (Now() - processing_started).ms<double>());
463 }
464 }
465
466 void DefaultVideoQualityAnalyzer::OnFrameRendered(
467 absl::string_view peer_name,
468 const webrtc::VideoFrame& frame) {
469 Timestamp processing_started = Now();
470 MutexLock lock(&mutex_);
471 RTC_CHECK_EQ(state_, State::kActive)
472 << "DefaultVideoQualityAnalyzer has to be started before use";
473
474 size_t peer_index = peers_->index(peer_name);
475
476 if (frame.id() == VideoFrame::kNotSetId) {
477 frame_counters_.rendered++;
478 unknown_sender_frame_counters_[std::string(peer_name)].rendered++;
479 return;
480 }
481
482 auto frame_it = captured_frames_in_flight_.find(frame.id());
483 if (frame_it == captured_frames_in_flight_.end() ||
484 frame_it->second.HasRenderedTime(peer_index) ||
485 frame_it->second.IsDropped(peer_index)) {
486 // It means this frame was rendered or dropped before, so we can skip it.
487 // It may happen when we have multiple simulcast streams in one track and
488 // receive the same frame from two different streams, because an SFU can't
489 // reliably correlate two simulcast streams and may start relaying the second
490 // stream from the same frame it has just relayed for the first
491 // stream.
492 absl::string_view reason = kSkipRenderedFrameReasonProcessed;
493 if (frame_it != captured_frames_in_flight_.end()) {
494 if (frame_it->second.HasRenderedTime(peer_index)) {
495 reason = kSkipRenderedFrameReasonRendered;
496 } else if (frame_it->second.IsDropped(peer_index)) {
497 reason = kSkipRenderedFrameReasonDropped;
498 }
499 }
500 RTC_LOG(LS_WARNING)
501 << "Peer " << peer_name
502 << "; Received frame out of order: received frame with id "
503 << frame.id() << " which was " << reason << " before";
504 return;
505 }
506
507 // Find corresponding captured frame.
508 FrameInFlight* frame_in_flight = &frame_it->second;
509 absl::optional<VideoFrame> captured_frame = frame_in_flight->frame();
510
511 const size_t stream_index = frame_in_flight->stream();
512 StreamState* state = &stream_states_.at(stream_index);
513 const InternalStatsKey stats_key(stream_index, state->sender(), peer_index);
514
515 // Update frames counters.
516 frame_counters_.rendered++;
517 stream_frame_counters_.at(stats_key).rendered++;
518
519 // Update current frame stats.
520 frame_in_flight->OnFrameRendered(peer_index, Now());
521
522 // After receiving this frame we need to check whether any frames were
523 // dropped between this one and the last one that was rendered for this video
524 // stream.
525 int dropped_count = 0;
526 while (!state->IsEmpty(peer_index) &&
527 state->Front(peer_index) != frame.id()) {
528 dropped_count++;
529 uint16_t dropped_frame_id = state->PopFront(peer_index);
530 // Frame with id `dropped_frame_id` was dropped. We need to:
531 // 1. Update the global and per-stream frame counters.
532 // 2. Extract the corresponding frame from `captured_frames_in_flight_`.
533 // 3. Send the extracted frame for comparison as dropped.
534 // 4. Clean up the dropped frame.
535 frame_counters_.dropped++;
536 stream_frame_counters_.at(stats_key).dropped++;
537
538 auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
539 RTC_DCHECK(dropped_frame_it != captured_frames_in_flight_.end());
540 dropped_frame_it->second.MarkDropped(peer_index);
541
542 analyzer_stats_.frames_in_flight_left_count.AddSample(
543 StatsSample(captured_frames_in_flight_.size(), Now()));
544 frames_comparator_.AddComparison(
545 stats_key, /*captured=*/absl::nullopt, /*rendered=*/absl::nullopt,
546 FrameComparisonType::kDroppedFrame,
547 dropped_frame_it->second.GetStatsForPeer(peer_index));
548
549 if (dropped_frame_it->second.HaveAllPeersReceived()) {
550 captured_frames_in_flight_.erase(dropped_frame_it);
551 }
552 }
553 RTC_DCHECK(!state->IsEmpty(peer_index));
554 state->PopFront(peer_index);
555
556 if (state->last_rendered_frame_time(peer_index)) {
557 frame_in_flight->SetPrevFrameRenderedTime(
558 peer_index, state->last_rendered_frame_time(peer_index).value());
559 }
560 state->SetLastRenderedFrameTime(peer_index,
561 frame_in_flight->rendered_time(peer_index));
562 analyzer_stats_.frames_in_flight_left_count.AddSample(
563 StatsSample(captured_frames_in_flight_.size(), Now()));
564 frames_comparator_.AddComparison(
565 stats_key, dropped_count, captured_frame, /*rendered=*/frame,
566 FrameComparisonType::kRegular,
567 frame_in_flight->GetStatsForPeer(peer_index));
568
569 if (frame_it->second.HaveAllPeersReceived()) {
570 captured_frames_in_flight_.erase(frame_it);
571 }
572
573 if (options_.report_infra_metrics) {
574 analyzer_stats_.on_frame_rendered_processing_time_ms.AddSample(
575 (Now() - processing_started).ms<double>());
576 }
577 }
578
579 void DefaultVideoQualityAnalyzer::OnEncoderError(
580 absl::string_view peer_name,
581 const webrtc::VideoFrame& frame,
582 int32_t error_code) {
583 RTC_LOG(LS_ERROR) << "Encoder error for frame.id=" << frame.id()
584 << ", code=" << error_code;
585 }
586
587 void DefaultVideoQualityAnalyzer::OnDecoderError(absl::string_view peer_name,
588 uint16_t frame_id,
589 int32_t error_code,
590 const DecoderStats& stats) {
591 RTC_LOG(LS_ERROR) << "Decoder error for frame_id=" << frame_id
592 << ", code=" << error_code;
593
594 Timestamp processing_started = Now();
595 MutexLock lock(&mutex_);
596 RTC_CHECK_EQ(state_, State::kActive)
597 << "DefaultVideoQualityAnalyzer has to be started before use";
598
599 size_t peer_index = peers_->index(peer_name);
600
601 if (frame_id == VideoFrame::kNotSetId) {
602 frame_counters_.failed_to_decode++;
603 unknown_sender_frame_counters_[std::string(peer_name)].failed_to_decode++;
604 return;
605 }
606
607 auto it = captured_frames_in_flight_.find(frame_id);
608 if (it == captured_frames_in_flight_.end() ||
609 it->second.HasDecodeEndTime(peer_index)) {
610 // It means this frame was decoded before, so we can skip it. It may happen
611 // when we have multiple simulcast streams in one track and receive
612 // the same frame from two different streams, because an SFU can't reliably
613 // correlate two simulcast streams and may start relaying the second stream
614 // from the same frame it has just relayed for the first stream.
615 return;
616 }
617 frame_counters_.failed_to_decode++;
618 InternalStatsKey key(it->second.stream(),
619 stream_to_sender_.at(it->second.stream()), peer_index);
620 stream_frame_counters_.at(key).failed_to_decode++;
621 Timestamp now = Now();
622 StreamCodecInfo used_decoder;
623 used_decoder.codec_name = stats.decoder_name;
624 used_decoder.first_frame_id = frame_id;
625 used_decoder.last_frame_id = frame_id;
626 used_decoder.switched_on_at = now;
627 used_decoder.switched_from_at = now;
628 it->second.OnDecoderError(peer_index, used_decoder);
629
630 if (options_.report_infra_metrics) {
631 analyzer_stats_.on_decoder_error_processing_time_ms.AddSample(
632 (Now() - processing_started).ms<double>());
633 }
634 }
635
636 void DefaultVideoQualityAnalyzer::RegisterParticipantInCall(
637 absl::string_view peer_name) {
638 MutexLock lock(&mutex_);
639 RTC_CHECK(!peers_->HasName(peer_name));
640 size_t new_peer_index = peers_->AddIfAbsent(peer_name);
641
642 // Ensure that stats for receiving streams (frames from other peers to this
643 // one) exist, since frames already in flight will be delivered to the new
644 // peer as well. Sending stats (from this peer to others) will be added by
645 // DefaultVideoQualityAnalyzer::OnFrameCaptured.
646 std::vector<std::pair<InternalStatsKey, Timestamp>> stream_started_time;
647 for (auto [stream_index, sender_peer_index] : stream_to_sender_) {
648 InternalStatsKey key(stream_index, sender_peer_index, new_peer_index);
649
650 // To initialize `FrameCounters` for the stream we should pick frame
651 // counters with the same stream index and the same sender's peer index
652 // and any receiver's peer index, and copy its sender side
653 // counters.
654 FrameCounters counters;
655 for (size_t i : peers_->GetPresentIndexes()) {
656 InternalStatsKey prototype_key(stream_index, sender_peer_index, i);
657 auto it = stream_frame_counters_.find(prototype_key);
658 if (it != stream_frame_counters_.end()) {
659 counters.captured = it->second.captured;
660 counters.pre_encoded = it->second.pre_encoded;
661 counters.encoded = it->second.encoded;
662 break;
663 }
664 }
665 // If we had only one peer before this method was invoked, `counters` will
666 // be empty. In such a case empty `counters` are ok.
667 stream_frame_counters_.insert({key, std::move(counters)});
668
669 stream_started_time.push_back(
670 {key, stream_states_.at(stream_index).stream_started_time()});
671 }
672 frames_comparator_.RegisterParticipantInCall(stream_started_time,
673 start_time_);
674 // Ensure that frame states are handled correctly
675 // (e.g. dropped frames tracking).
676 for (auto& [stream_index, stream_state] : stream_states_) {
677 stream_state.AddPeer(new_peer_index);
678 }
679 // Register new peer for every frame in flight.
680 // It is guaranteed that no garbage FrameInFlight objects will stay in memory
681 // because of adding a new peer. Even if the new peer doesn't receive a frame,
682 // the frame will be removed by OnFrameRendered once the next frame arrives
683 // for the new peer. This matters because FrameInFlight is a large object.
684 for (auto& [frame_id, frame_in_flight] : captured_frames_in_flight_) {
685 frame_in_flight.AddExpectedReceiver(new_peer_index);
686 }
687 }
688
689 void DefaultVideoQualityAnalyzer::UnregisterParticipantInCall(
690 absl::string_view peer_name) {
691 MutexLock lock(&mutex_);
692 RTC_CHECK(peers_->HasName(peer_name));
693 absl::optional<size_t> peer_index = peers_->RemoveIfPresent(peer_name);
694 RTC_CHECK(peer_index.has_value());
695
696 for (auto& [stream_index, stream_state] : stream_states_) {
697 if (!options_.enable_receive_own_stream &&
698 peer_index == stream_state.sender()) {
699 continue;
700 }
701
702 AddExistingFramesInFlightForStreamToComparator(stream_index, stream_state,
703 *peer_index);
704
705 stream_state.RemovePeer(*peer_index);
706 }
707
708 // Remove the peer from every frame in flight. If we removed the last expected
709 // receiver for a frame, then we should remove this frame if it was
710 // already encoded. If the frame wasn't encoded, it will still be used by the
711 // sender side pipeline, so we can't delete it yet.
712 for (auto it = captured_frames_in_flight_.begin();
713 it != captured_frames_in_flight_.end();) {
714 FrameInFlight& frame_in_flight = it->second;
715 frame_in_flight.RemoveExpectedReceiver(*peer_index);
716 // If the frame was fully sent and all receivers received it, then erase it.
717 // It may happen that when we remove a FrameInFlight only some simulcast/SVC
718 // layers were encoded and the frame has an encoded time, but more layers may
719 // be encoded after the removal. In such a case it's still safe to remove the
720 // frame, because the OnFrameEncoded method correctly handles the case when
721 // there is no FrameInFlight for the received encoded image.
722 if (frame_in_flight.HasEncodedTime() &&
723 frame_in_flight.HaveAllPeersReceived()) {
724 it = captured_frames_in_flight_.erase(it);
725 } else {
726 it++;
727 }
728 }
729 }
730
731 void DefaultVideoQualityAnalyzer::Stop() {
732 std::map<InternalStatsKey, Timestamp> last_rendered_frame_times;
733 {
734 MutexLock lock(&mutex_);
735 if (state_ == State::kStopped) {
736 return;
737 }
738 RTC_CHECK_EQ(state_, State::kActive)
739 << "DefaultVideoQualityAnalyzer has to be started before use";
740
741 state_ = State::kStopped;
742
743 // Add the number of frames in flight to the analyzer stats before all of the
744 // remaining frames in flight are sent to the `frames_comparator_`.
745 analyzer_stats_.frames_in_flight_left_count.AddSample(
746 StatsSample(captured_frames_in_flight_.size(), Now()));
747
748 for (auto& state_entry : stream_states_) {
749 const size_t stream_index = state_entry.first;
750 StreamState& stream_state = state_entry.second;
751
752 // Populate the `last_rendered_frame_times` map for all peers that were seen
753 // in the call, not only for the currently present ones.
754 for (size_t peer_index : peers_->GetAllIndexes()) {
755 if (peer_index == stream_state.sender() &&
756 !options_.enable_receive_own_stream) {
757 continue;
758 }
759
760 InternalStatsKey stats_key(stream_index, stream_state.sender(),
761 peer_index);
762
763 // If there are no freezes in the call, we have to report
764 // time_between_freezes_ms as the call duration, and in that case
765 // `stream_last_freeze_end_time` for this stream will be `start_time_`.
766 // If there was a freeze, we need to add the time from the last rendered
767 // frame to the last freeze end as the time between freezes.
768 if (stream_state.last_rendered_frame_time(peer_index)) {
769 last_rendered_frame_times.emplace(
770 stats_key,
771 stream_state.last_rendered_frame_time(peer_index).value());
772 }
773 }
774
775 // Push the remaining frames in flight for analysis for the peers that are
776 // still in the call.
777 for (size_t peer_index : peers_->GetPresentIndexes()) {
778 if (peer_index == stream_state.sender() &&
779 !options_.enable_receive_own_stream) {
780 continue;
781 }
782
783 AddExistingFramesInFlightForStreamToComparator(
784 stream_index, stream_state, peer_index);
785 }
786 }
787 }
788 frames_comparator_.Stop(last_rendered_frame_times);
789
790 // Perform the final metrics update. At this point the analyzer is stopped
791 // and no one holds any locks.
792 {
793 MutexLock lock(&mutex_);
794 FramesComparatorStats frames_comparator_stats =
795 frames_comparator_.frames_comparator_stats();
796 analyzer_stats_.comparisons_queue_size =
797 std::move(frames_comparator_stats.comparisons_queue_size);
798 analyzer_stats_.comparisons_done = frames_comparator_stats.comparisons_done;
799 analyzer_stats_.cpu_overloaded_comparisons_done =
800 frames_comparator_stats.cpu_overloaded_comparisons_done;
801 analyzer_stats_.memory_overloaded_comparisons_done =
802 frames_comparator_stats.memory_overloaded_comparisons_done;
803 }
804 ReportResults();
805 }
806
807 std::string DefaultVideoQualityAnalyzer::GetStreamLabel(uint16_t frame_id) {
808 MutexLock lock1(&mutex_);
809 auto it = captured_frames_in_flight_.find(frame_id);
810 if (it != captured_frames_in_flight_.end()) {
811 return streams_.name(it->second.stream());
812 }
813 for (auto hist_it = stream_to_frame_id_history_.begin();
814 hist_it != stream_to_frame_id_history_.end(); ++hist_it) {
815 auto hist_set_it = hist_it->second.find(frame_id);
816 if (hist_set_it != hist_it->second.end()) {
817 return streams_.name(hist_it->first);
818 }
819 }
820 RTC_CHECK(false) << "Unknown frame_id=" << frame_id;
821 }
822
823 std::set<StatsKey> DefaultVideoQualityAnalyzer::GetKnownVideoStreams() const {
824 MutexLock lock(&mutex_);
825 std::set<StatsKey> out;
826 for (auto& item : frames_comparator_.stream_stats()) {
827 RTC_LOG(LS_INFO) << item.first.ToString() << " ==> "
828 << ToStatsKey(item.first).ToString();
829 out.insert(ToStatsKey(item.first));
830 }
831 return out;
832 }
833
834 VideoStreamsInfo DefaultVideoQualityAnalyzer::GetKnownStreams() const {
835 MutexLock lock(&mutex_);
836 std::map<std::string, std::string> stream_to_sender;
837 std::map<std::string, std::set<std::string>> sender_to_streams;
838 std::map<std::string, std::set<std::string>> stream_to_receivers;
839
840 for (auto& item : frames_comparator_.stream_stats()) {
841 const std::string& stream_label = streams_.name(item.first.stream);
842 const std::string& sender = peers_->name(item.first.sender);
843 const std::string& receiver = peers_->name(item.first.receiver);
844 RTC_LOG(LS_INFO) << item.first.ToString() << " ==> "
845 << "stream=" << stream_label << "; sender=" << sender
846 << "; receiver=" << receiver;
847 stream_to_sender.emplace(stream_label, sender);
848 auto streams_it = sender_to_streams.find(sender);
849 if (streams_it != sender_to_streams.end()) {
850 streams_it->second.emplace(stream_label);
851 } else {
852 sender_to_streams.emplace(sender, std::set<std::string>{stream_label});
853 }
854 auto receivers_it = stream_to_receivers.find(stream_label);
855 if (receivers_it != stream_to_receivers.end()) {
856 receivers_it->second.emplace(receiver);
857 } else {
858 stream_to_receivers.emplace(stream_label,
859 std::set<std::string>{receiver});
860 }
861 }
862
863 return VideoStreamsInfo(std::move(stream_to_sender),
864 std::move(sender_to_streams),
865 std::move(stream_to_receivers));
866 }
867
868 FrameCounters DefaultVideoQualityAnalyzer::GetGlobalCounters() const {
869 MutexLock lock(&mutex_);
870 return frame_counters_;
871 }
872
873 std::map<std::string, FrameCounters>
874 DefaultVideoQualityAnalyzer::GetUnknownSenderFrameCounters() const {
875 MutexLock lock(&mutex_);
876 return unknown_sender_frame_counters_;
877 }
878
879 std::map<StatsKey, FrameCounters>
880 DefaultVideoQualityAnalyzer::GetPerStreamCounters() const {
881 MutexLock lock(&mutex_);
882 std::map<StatsKey, FrameCounters> out;
883 for (auto& item : stream_frame_counters_) {
884 out.emplace(ToStatsKey(item.first), item.second);
885 }
886 return out;
887 }
888
889 std::map<StatsKey, StreamStats> DefaultVideoQualityAnalyzer::GetStats() const {
890 MutexLock lock1(&mutex_);
891 std::map<StatsKey, StreamStats> out;
892 for (auto& item : frames_comparator_.stream_stats()) {
893 out.emplace(ToStatsKey(item.first), item.second);
894 }
895 return out;
896 }
897
898 AnalyzerStats DefaultVideoQualityAnalyzer::GetAnalyzerStats() const {
899 MutexLock lock(&mutex_);
900 return analyzer_stats_;
901 }
902
903 uint16_t DefaultVideoQualityAnalyzer::GetNextFrameId() {
904 uint16_t frame_id = next_frame_id_++;
905 if (next_frame_id_ == VideoFrame::kNotSetId) {
906 next_frame_id_ = 1;
907 }
908 return frame_id;
909 }
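// Wrap-around sketch for the id generator above (assuming
// VideoFrame::kNotSetId == 0): ids are handed out sequentially as
// 1, 2, ..., 65535 and, when `next_frame_id_` overflows back to kNotSetId,
// it is reset to 1, so kNotSetId is never reused for a real frame.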
910
911 void DefaultVideoQualityAnalyzer::
912 AddExistingFramesInFlightForStreamToComparator(size_t stream_index,
913 StreamState& stream_state,
914 size_t peer_index) {
915 InternalStatsKey stats_key(stream_index, stream_state.sender(), peer_index);
916
917 // Add frames in flight for this stream into frames comparator.
918 // Frames in flight were not rendered, so they won't affect stream's
919 // last rendered frame time.
920 while (!stream_state.IsEmpty(peer_index)) {
921 uint16_t frame_id = stream_state.PopFront(peer_index);
922 auto it = captured_frames_in_flight_.find(frame_id);
923 RTC_DCHECK(it != captured_frames_in_flight_.end());
924 FrameInFlight& frame = it->second;
925
926 frames_comparator_.AddComparison(stats_key, /*captured=*/absl::nullopt,
927 /*rendered=*/absl::nullopt,
928 FrameComparisonType::kFrameInFlight,
929 frame.GetStatsForPeer(peer_index));
930 }
931 }
932
933 void DefaultVideoQualityAnalyzer::ReportResults() {
934 MutexLock lock(&mutex_);
935 for (auto& item : frames_comparator_.stream_stats()) {
936 ReportResults(item.first, item.second,
937 stream_frame_counters_.at(item.first));
938 }
939 metrics_logger_->LogSingleValueMetric("cpu_usage_%", test_label_,
940 GetCpuUsagePercent(), Unit::kUnitless,
941 ImprovementDirection::kSmallerIsBetter);
942 LogFrameCounters("Global", frame_counters_);
943 if (!unknown_sender_frame_counters_.empty()) {
944 RTC_LOG(LS_INFO) << "Received frame counters with unknown frame id:";
945 for (const auto& [peer_name, frame_counters] :
946 unknown_sender_frame_counters_) {
947 LogFrameCounters(peer_name, frame_counters);
948 }
949 }
950 RTC_LOG(LS_INFO) << "Received frame counters per stream:";
951 for (const auto& [stats_key, stream_stats] :
952 frames_comparator_.stream_stats()) {
953 LogFrameCounters(ToStatsKey(stats_key).ToString(),
954 stream_frame_counters_.at(stats_key));
955 LogStreamInternalStats(ToStatsKey(stats_key).ToString(), stream_stats,
956 start_time_);
957 }
958 if (!analyzer_stats_.comparisons_queue_size.IsEmpty()) {
959 RTC_LOG(LS_INFO) << "comparisons_queue_size min="
960 << analyzer_stats_.comparisons_queue_size.GetMin()
961 << "; max="
962 << analyzer_stats_.comparisons_queue_size.GetMax()
963 << "; 99%="
964 << analyzer_stats_.comparisons_queue_size.GetPercentile(
965 0.99);
966 }
967 RTC_LOG(LS_INFO) << "comparisons_done=" << analyzer_stats_.comparisons_done;
968 RTC_LOG(LS_INFO) << "cpu_overloaded_comparisons_done="
969 << analyzer_stats_.cpu_overloaded_comparisons_done;
970 RTC_LOG(LS_INFO) << "memory_overloaded_comparisons_done="
971 << analyzer_stats_.memory_overloaded_comparisons_done;
972 if (options_.report_infra_metrics) {
973 metrics_logger_->LogMetric("comparisons_queue_size", test_label_,
974 analyzer_stats_.comparisons_queue_size,
975 Unit::kCount,
976 ImprovementDirection::kSmallerIsBetter);
977 metrics_logger_->LogMetric("frames_in_flight_left_count", test_label_,
978 analyzer_stats_.frames_in_flight_left_count,
979 Unit::kCount,
980 ImprovementDirection::kSmallerIsBetter);
981 metrics_logger_->LogSingleValueMetric(
982 "comparisons_done", test_label_, analyzer_stats_.comparisons_done,
983 Unit::kCount, ImprovementDirection::kNeitherIsBetter);
984 metrics_logger_->LogSingleValueMetric(
985 "cpu_overloaded_comparisons_done", test_label_,
986 analyzer_stats_.cpu_overloaded_comparisons_done, Unit::kCount,
987 ImprovementDirection::kNeitherIsBetter);
988 metrics_logger_->LogSingleValueMetric(
989 "memory_overloaded_comparisons_done", test_label_,
990 analyzer_stats_.memory_overloaded_comparisons_done, Unit::kCount,
991 ImprovementDirection::kNeitherIsBetter);
992 metrics_logger_->LogSingleValueMetric(
993 "test_duration", test_label_, (Now() - start_time_).ms(),
994 Unit::kMilliseconds, ImprovementDirection::kNeitherIsBetter);
995
996 metrics_logger_->LogMetric(
997 "on_frame_captured_processing_time_ms", test_label_,
998 analyzer_stats_.on_frame_captured_processing_time_ms,
999 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
1000 metrics_logger_->LogMetric(
1001 "on_frame_pre_encode_processing_time_ms", test_label_,
1002 analyzer_stats_.on_frame_pre_encode_processing_time_ms,
1003 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
1004 metrics_logger_->LogMetric(
1005 "on_frame_encoded_processing_time_ms", test_label_,
1006 analyzer_stats_.on_frame_encoded_processing_time_ms,
1007 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
1008 metrics_logger_->LogMetric(
1009 "on_frame_pre_decode_processing_time_ms", test_label_,
1010 analyzer_stats_.on_frame_pre_decode_processing_time_ms,
1011 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
1012 metrics_logger_->LogMetric(
1013 "on_frame_decoded_processing_time_ms", test_label_,
1014 analyzer_stats_.on_frame_decoded_processing_time_ms,
1015 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
1016 metrics_logger_->LogMetric(
1017 "on_frame_rendered_processing_time_ms", test_label_,
1018 analyzer_stats_.on_frame_rendered_processing_time_ms,
1019 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
1020 metrics_logger_->LogMetric(
1021 "on_decoder_error_processing_time_ms", test_label_,
1022 analyzer_stats_.on_decoder_error_processing_time_ms,
1023 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter);
1024 }
1025 }
1026
1027 void DefaultVideoQualityAnalyzer::ReportResults(
1028 const InternalStatsKey& key,
1029 const StreamStats& stats,
1030 const FrameCounters& frame_counters) {
1031 TimeDelta test_duration = Now() - start_time_;
1032 std::string test_case_name = GetTestCaseName(ToMetricName(key));
1033 std::map<std::string, std::string> metric_metadata{
1034 {MetricMetadataKey::kPeerMetadataKey, peers_->name(key.sender)},
1035 {MetricMetadataKey::kVideoStreamMetadataKey, streams_.name(key.stream)},
1036 {MetricMetadataKey::kSenderMetadataKey, peers_->name(key.sender)},
1037 {MetricMetadataKey::kReceiverMetadataKey, peers_->name(key.receiver)}};
1038
1039 double sum_squared_interframe_delays_secs = 0;
1040 Timestamp video_start_time = Timestamp::PlusInfinity();
1041 Timestamp video_end_time = Timestamp::MinusInfinity();
1042 for (const SamplesStatsCounter::StatsSample& sample :
1043 stats.time_between_rendered_frames_ms.GetTimedSamples()) {
1044 double interframe_delay_ms = sample.value;
1045 const double interframe_delays_secs = interframe_delay_ms / 1000.0;
1046 // The sum of squared inter-frame intervals is used to calculate the harmonic
1047 // frame rate metric. The metric aims to reflect the overall experience of
1048 // video playback smoothness and includes both freezes and pauses.
1049 sum_squared_interframe_delays_secs +=
1050 interframe_delays_secs * interframe_delays_secs;
1051 if (sample.time < video_start_time) {
1052 video_start_time = sample.time;
1053 }
1054 if (sample.time > video_end_time) {
1055 video_end_time = sample.time;
1056 }
1057 }
1058 double harmonic_framerate_fps = 0;
1059 TimeDelta video_duration = video_end_time - video_start_time;
1060 if (sum_squared_interframe_delays_secs > 0.0 && video_duration.IsFinite()) {
1061 harmonic_framerate_fps =
1062 video_duration.seconds<double>() / sum_squared_interframe_delays_secs;
1063 }
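// Worked example for the computation above (illustrative numbers, assuming
// the sampled video duration is ~1 s): with rendered-frame intervals of
// 100 ms, 100 ms, 100 ms and 700 ms the sum of squared intervals is
// 0.1^2 * 3 + 0.7^2 = 0.52 s^2, so the harmonic framerate is about
// 1.0 / 0.52 ~= 1.9 fps, while the plain average framerate is ~4 fps.
// A single long freeze therefore pulls the metric down sharply.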
1064
1065 metrics_logger_->LogMetric(
1066 "psnr_dB", test_case_name, stats.psnr, Unit::kUnitless,
1067 ImprovementDirection::kBiggerIsBetter, metric_metadata);
1068 metrics_logger_->LogMetric(
1069 "ssim", test_case_name, stats.ssim, Unit::kUnitless,
1070 ImprovementDirection::kBiggerIsBetter, metric_metadata);
1071 metrics_logger_->LogMetric("transport_time", test_case_name,
1072 stats.transport_time_ms, Unit::kMilliseconds,
1073 ImprovementDirection::kSmallerIsBetter,
1074 metric_metadata);
1075 metrics_logger_->LogMetric(
1076 "total_delay_incl_transport", test_case_name,
1077 stats.total_delay_incl_transport_ms, Unit::kMilliseconds,
1078 ImprovementDirection::kSmallerIsBetter, metric_metadata);
1079 metrics_logger_->LogMetric(
1080 "time_between_rendered_frames", test_case_name,
1081 stats.time_between_rendered_frames_ms, Unit::kMilliseconds,
1082 ImprovementDirection::kSmallerIsBetter, metric_metadata);
1083 metrics_logger_->LogSingleValueMetric(
1084 "harmonic_framerate", test_case_name, harmonic_framerate_fps,
1085 Unit::kHertz, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1086 metrics_logger_->LogSingleValueMetric(
1087 "encode_frame_rate", test_case_name,
1088 stats.encode_frame_rate.IsEmpty()
1089 ? 0
1090 : stats.encode_frame_rate.GetEventsPerSecond(),
1091 Unit::kHertz, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1092 metrics_logger_->LogMetric(
1093 "encode_time", test_case_name, stats.encode_time_ms, Unit::kMilliseconds,
1094 ImprovementDirection::kSmallerIsBetter, metric_metadata);
1095 metrics_logger_->LogMetric("time_between_freezes", test_case_name,
1096 stats.time_between_freezes_ms, Unit::kMilliseconds,
1097 ImprovementDirection::kBiggerIsBetter,
1098 metric_metadata);
1099 metrics_logger_->LogMetric("freeze_time_ms", test_case_name,
1100 stats.freeze_time_ms, Unit::kMilliseconds,
1101 ImprovementDirection::kSmallerIsBetter,
1102 metric_metadata);
1103 metrics_logger_->LogMetric(
1104 "pixels_per_frame", test_case_name, stats.resolution_of_decoded_frame,
1105 Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1106 metrics_logger_->LogSingleValueMetric(
1107 "min_psnr_dB", test_case_name,
1108 stats.psnr.IsEmpty() ? 0 : stats.psnr.GetMin(), Unit::kUnitless,
1109 ImprovementDirection::kBiggerIsBetter, metric_metadata);
1110 metrics_logger_->LogMetric(
1111 "decode_time", test_case_name, stats.decode_time_ms, Unit::kMilliseconds,
1112 ImprovementDirection::kSmallerIsBetter, metric_metadata);
1113 metrics_logger_->LogMetric(
1114 "receive_to_render_time", test_case_name, stats.receive_to_render_time_ms,
1115 Unit::kMilliseconds, ImprovementDirection::kSmallerIsBetter,
1116 metric_metadata);
1117 metrics_logger_->LogSingleValueMetric(
1118 "dropped_frames", test_case_name, frame_counters.dropped, Unit::kCount,
1119 ImprovementDirection::kSmallerIsBetter, metric_metadata);
1120 metrics_logger_->LogSingleValueMetric(
1121 "frames_in_flight", test_case_name,
1122 frame_counters.captured - frame_counters.rendered -
1123 frame_counters.dropped,
1124 Unit::kCount, ImprovementDirection::kSmallerIsBetter, metric_metadata);
1125 metrics_logger_->LogSingleValueMetric(
1126 "rendered_frames", test_case_name, frame_counters.rendered, Unit::kCount,
1127 ImprovementDirection::kBiggerIsBetter, metric_metadata);
1128 metrics_logger_->LogMetric(
1129 "max_skipped", test_case_name, stats.skipped_between_rendered,
1130 Unit::kCount, ImprovementDirection::kSmallerIsBetter, metric_metadata);
1131 metrics_logger_->LogMetric(
1132 "target_encode_bitrate", test_case_name,
1133 stats.target_encode_bitrate / 1000, Unit::kKilobitsPerSecond,
1134 ImprovementDirection::kNeitherIsBetter, metric_metadata);
1135 metrics_logger_->LogMetric("qp", test_case_name, stats.qp, Unit::kUnitless,
1136 ImprovementDirection::kSmallerIsBetter,
1137 metric_metadata);
1138 metrics_logger_->LogSingleValueMetric(
1139 "actual_encode_bitrate", test_case_name,
1140 static_cast<double>(stats.total_encoded_images_payload) /
1141 test_duration.seconds<double>() * kBitsInByte / 1000,
1142 Unit::kKilobitsPerSecond, ImprovementDirection::kNeitherIsBetter,
1143 metric_metadata);
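// Example of the bitrate arithmetic above (illustrative numbers): a stream
// that produced 2'000'000 bytes of encoded payload over a 10 s test reports
// 2'000'000 / 10 * 8 / 1000 = 1600 kbps.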
1144
1145 if (options_.report_detailed_frame_stats) {
1146 metrics_logger_->LogSingleValueMetric(
1147 "capture_frame_rate", test_case_name,
1148 stats.capture_frame_rate.IsEmpty()
1149 ? 0
1150 : stats.capture_frame_rate.GetEventsPerSecond(),
1151 Unit::kHertz, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1152 metrics_logger_->LogSingleValueMetric(
1153 "num_encoded_frames", test_case_name, frame_counters.encoded,
1154 Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1155 metrics_logger_->LogSingleValueMetric(
1156 "num_decoded_frames", test_case_name, frame_counters.decoded,
1157 Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1158 metrics_logger_->LogSingleValueMetric(
1159 "num_send_key_frames", test_case_name, stats.num_send_key_frames,
1160 Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1161 metrics_logger_->LogSingleValueMetric(
1162 "num_recv_key_frames", test_case_name, stats.num_recv_key_frames,
1163 Unit::kCount, ImprovementDirection::kBiggerIsBetter, metric_metadata);
1164
1165 metrics_logger_->LogMetric("recv_key_frame_size_bytes", test_case_name,
1166 stats.recv_key_frame_size_bytes, Unit::kCount,
1167 ImprovementDirection::kBiggerIsBetter,
1168 metric_metadata);
1169 metrics_logger_->LogMetric("recv_delta_frame_size_bytes", test_case_name,
1170 stats.recv_delta_frame_size_bytes, Unit::kCount,
1171 ImprovementDirection::kBiggerIsBetter,
1172 metric_metadata);
1173 }
1174 }
1175
1176 std::string DefaultVideoQualityAnalyzer::GetTestCaseName(
1177 const std::string& stream_label) const {
1178 return test_label_ + "/" + stream_label;
1179 }
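// For example (hypothetical labels): with `test_label_` equal to "my_test"
// and a stream metric name "alice-video_alice_bob", the resulting test case
// name is "my_test/alice-video_alice_bob".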
1180
1181 Timestamp DefaultVideoQualityAnalyzer::Now() {
1182 return clock_->CurrentTime();
1183 }
1184
1185 StatsKey DefaultVideoQualityAnalyzer::ToStatsKey(
1186 const InternalStatsKey& key) const {
1187 return StatsKey(streams_.name(key.stream), peers_->name(key.receiver));
1188 }
1189
1190 std::string DefaultVideoQualityAnalyzer::ToMetricName(
1191 const InternalStatsKey& key) const {
1192 const std::string& stream_label = streams_.name(key.stream);
1193 if (peers_->GetKnownSize() <= 2 && key.sender != key.receiver) {
1194 // TODO(titovartem): remove this special case.
1195 return stream_label;
1196 }
1197 rtc::StringBuilder out;
1198 out << stream_label << "_" << peers_->name(key.sender) << "_"
1199 << peers_->name(key.receiver);
1200 return out.str();
1201 }
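// Illustrative naming (hypothetical peer and stream names): in a call with at
// most two known peers the metric name for stream "alice-video" is just
// "alice-video"; with more peers (or when sender == receiver) it becomes
// "alice-video_alice_bob" for the alice -> bob direction.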
1202
1203 double DefaultVideoQualityAnalyzer::GetCpuUsagePercent() {
1204 return cpu_measurer_.GetCpuUsagePercent();
1205 }
1206
1207 std::map<std::string, std::vector<uint16_t>>
1208 DefaultVideoQualityAnalyzer::GetStreamFrames() const {
1209 MutexLock lock(&mutex_);
1210 std::map<std::string, std::vector<uint16_t>> out;
1211 for (const auto& entry_it : stream_to_frame_id_full_history_) {
1212 out.insert({streams_.name(entry_it.first), entry_it.second});
1213 }
1214 return out;
1215 }
1216
1217 } // namespace webrtc
1218