/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "sdk/android/src/jni/audio_device/audio_track_jni.h"

#include <utility>

#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"

namespace webrtc {

namespace jni {

ScopedJavaLocalRef<jobject> AudioTrackJni::CreateJavaWebRtcAudioTrack(
    JNIEnv* env,
    const JavaRef<jobject>& j_context,
    const JavaRef<jobject>& j_audio_manager) {
  return Java_WebRtcAudioTrack_Constructor(env, j_context, j_audio_manager);
}

AudioTrackJni::AudioTrackJni(JNIEnv* env,
                             const AudioParameters& audio_parameters,
                             const JavaRef<jobject>& j_webrtc_audio_track)
    : j_audio_track_(env, j_webrtc_audio_track),
      audio_parameters_(audio_parameters),
      direct_buffer_address_(nullptr),
      direct_buffer_capacity_in_bytes_(0),
      frames_per_buffer_(0),
      initialized_(false),
      playing_(false),
      audio_device_buffer_(nullptr) {
  RTC_LOG(LS_INFO) << "ctor";
  RTC_DCHECK(audio_parameters_.is_valid());
  Java_WebRtcAudioTrack_setNativeAudioTrack(env, j_audio_track_,
                                            jni::jlongFromPointer(this));
  // Detach from this thread since construction is allowed to happen on a
  // different thread.
  thread_checker_.Detach();
  thread_checker_java_.Detach();
}

AudioTrackJni::~AudioTrackJni() {
  RTC_LOG(LS_INFO) << "dtor";
  RTC_DCHECK(thread_checker_.IsCurrent());
  Terminate();
}

int32_t AudioTrackJni::Init() {
  RTC_LOG(LS_INFO) << "Init";
  env_ = AttachCurrentThreadIfNeeded();
  RTC_DCHECK(thread_checker_.IsCurrent());
  return 0;
}

int32_t AudioTrackJni::Terminate() {
  RTC_LOG(LS_INFO) << "Terminate";
  RTC_DCHECK(thread_checker_.IsCurrent());
  StopPlayout();
  thread_checker_.Detach();
  return 0;
}

int32_t AudioTrackJni::InitPlayout() {
  RTC_LOG(LS_INFO) << "InitPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  if (initialized_) {
    // Already initialized.
    return 0;
  }
  RTC_DCHECK(!playing_);
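  // The "WebRTC-AudioDevicePlayoutBufferSizeFactor" field trial value is
  // parsed as a scale factor and forwarded to the Java initPlayout() call
  // below. strtod() returns 0.0 when the trial is unset or unparsable, so the
  // factor falls back to 1.0 in that case.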
  double buffer_size_factor =
      strtod(webrtc::field_trial::FindFullName(
                 "WebRTC-AudioDevicePlayoutBufferSizeFactor")
                 .c_str(),
             nullptr);
  if (buffer_size_factor == 0)
    buffer_size_factor = 1.0;
  int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
      env_, j_audio_track_, audio_parameters_.sample_rate(),
      static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
  if (requested_buffer_size_bytes < 0) {
    RTC_LOG(LS_ERROR) << "InitPlayout failed";
    return -1;
  }
  // Update UMA histograms for both the requested and actual buffer size.
  // To avoid division by zero, we assume the sample rate is 48k if an invalid
  // value is found.
  const int sample_rate = audio_parameters_.sample_rate() <= 0
                              ? 48000
                              : audio_parameters_.sample_rate();
  // This calculation assumes that audio is mono.
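  // Example: with 16-bit mono PCM there are 2 bytes per frame, so a request
  // of 7680 bytes at 48 kHz corresponds to 7680 * 1000 / (2 * 48000) = 80 ms.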
  const int requested_buffer_size_ms =
      (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
  RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
                       requested_buffer_size_ms, 0, 1000, 100);
  int actual_buffer_size_frames =
      Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
  if (actual_buffer_size_frames >= 0) {
    const int actual_buffer_size_ms =
        actual_buffer_size_frames * 1000 / sample_rate;
    RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
                         actual_buffer_size_ms, 0, 1000, 100);
  }

  initialized_ = true;
  return 0;
}

bool AudioTrackJni::PlayoutIsInitialized() const {
  return initialized_;
}

int32_t AudioTrackJni::StartPlayout() {
  RTC_LOG(LS_INFO) << "StartPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  if (playing_) {
    // Already playing.
    return 0;
  }
  if (!initialized_) {
    RTC_DLOG(LS_WARNING)
        << "Playout can not start since InitPlayout must succeed first";
    return 0;
  }
  if (!Java_WebRtcAudioTrack_startPlayout(env_, j_audio_track_)) {
    RTC_LOG(LS_ERROR) << "StartPlayout failed";
    return -1;
  }
  playing_ = true;
  return 0;
}

int32_t AudioTrackJni::StopPlayout() {
  RTC_LOG(LS_INFO) << "StopPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  if (!initialized_ || !playing_) {
    return 0;
  }
  // Log the difference in initial and current buffer level.
  const int current_buffer_size_frames =
      Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
  const int initial_buffer_size_frames =
      Java_WebRtcAudioTrack_getInitialBufferSizeInFrames(env_, j_audio_track_);
  const int sample_rate_hz = audio_parameters_.sample_rate();
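  // A positive value recorded below means the effective buffer grew while
  // playing; a negative value means it shrank relative to the initial size.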
  RTC_HISTOGRAM_COUNTS(
      "WebRTC.Audio.AndroidNativeAudioBufferSizeDifferenceFromInitialMs",
      (current_buffer_size_frames - initial_buffer_size_frames) * 1000 /
          sample_rate_hz,
      -500, 100, 100);

  if (!Java_WebRtcAudioTrack_stopPlayout(env_, j_audio_track_)) {
    RTC_LOG(LS_ERROR) << "StopPlayout failed";
    return -1;
  }
  // If we don't detach here, we will hit a RTC_DCHECK next time StartPlayout()
  // is called since it will create a new Java thread.
  thread_checker_java_.Detach();
  initialized_ = false;
  playing_ = false;
  direct_buffer_address_ = nullptr;
  return 0;
}

bool AudioTrackJni::Playing() const {
  return playing_;
}

bool AudioTrackJni::SpeakerVolumeIsAvailable() {
  return true;
}

int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
  RTC_LOG(LS_INFO) << "SetSpeakerVolume(" << volume << ")";
  RTC_DCHECK(thread_checker_.IsCurrent());
  return Java_WebRtcAudioTrack_setStreamVolume(env_, j_audio_track_,
                                               static_cast<int>(volume))
             ? 0
             : -1;
}

absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
}

absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  return 0;
}

absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  const uint32_t volume =
      Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);
  RTC_LOG(LS_INFO) << "SpeakerVolume: " << volume;
  return volume;
}

int AudioTrackJni::GetPlayoutUnderrunCount() {
  return Java_WebRtcAudioTrack_GetPlayoutUnderrunCount(env_, j_audio_track_);
}

// TODO(henrika): possibly add stereo support.
void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  RTC_LOG(LS_INFO) << "AttachAudioBuffer";
  RTC_DCHECK(thread_checker_.IsCurrent());
  audio_device_buffer_ = audioBuffer;
  const int sample_rate_hz = audio_parameters_.sample_rate();
  RTC_LOG(LS_INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")";
  audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
  const size_t channels = audio_parameters_.channels();
  RTC_LOG(LS_INFO) << "SetPlayoutChannels(" << channels << ")";
  audio_device_buffer_->SetPlayoutChannels(channels);
}

void AudioTrackJni::CacheDirectBufferAddress(
    JNIEnv* env,
    const JavaParamRef<jobject>& byte_buffer) {
  RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress";
  RTC_DCHECK(thread_checker_.IsCurrent());
  RTC_DCHECK(!direct_buffer_address_);
  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
  jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
  RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity;
  direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
  const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
  frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame;
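  // For example, assuming the Java layer allocates a 10 ms buffer, mono audio
  // at 48 kHz gives 960 bytes / 2 bytes per frame = 480 frames per buffer.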
  RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_;
}

// This method is called on a high-priority thread from Java. The name of
// the thread is 'AudioRecordTrack'.
void AudioTrackJni::GetPlayoutData(JNIEnv* env,
                                   size_t length) {
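  // thread_checker_java_ is detached in the constructor and in StopPlayout();
  // the first IsCurrent() call below re-binds it to the Java playout thread.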
  RTC_DCHECK(thread_checker_java_.IsCurrent());
  const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
  RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);
  if (!audio_device_buffer_) {
    RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
    return;
  }
  // Pull decoded data (in 16-bit PCM format) from jitter buffer.
  int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
  if (samples <= 0) {
    RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed";
    return;
  }
  RTC_DCHECK_EQ(samples, frames_per_buffer_);
  // Copy decoded data into common byte buffer to ensure that it can be
  // written to the Java based audio track.
  samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
  RTC_DCHECK_EQ(length, bytes_per_frame * samples);
}

}  // namespace jni

}  // namespace webrtc