/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/audio_device/android/audio_track_jni.h"

#include <utility>

#include "modules/audio_device/android/audio_manager.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"

namespace webrtc {

// AudioTrackJni::JavaAudioTrack implementation.
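// The signature strings passed to GetMethodId() below use standard JNI type
// descriptors: "I" = jint, "D" = jdouble, "Z" = jboolean and "V" = void, so
// "(IID)I", for example, denotes a method taking (int, int, double) and
// returning int.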
AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
    NativeRegistration* native_reg,
    std::unique_ptr<GlobalRef> audio_track)
    : audio_track_(std::move(audio_track)),
      init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")),
      start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
      stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
      set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
      get_stream_max_volume_(
          native_reg->GetMethodId("getStreamMaxVolume", "()I")),
      get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")),
      get_buffer_size_in_frames_(
          native_reg->GetMethodId("getBufferSizeInFrames", "()I")) {}

AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}

bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
  double buffer_size_factor =
      strtod(webrtc::field_trial::FindFullName(
                 "WebRTC-AudioDevicePlayoutBufferSizeFactor")
                 .c_str(),
             nullptr);
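  // strtod() returns 0.0 if the field trial string is empty or not a valid
  // number, in which case the default factor of 1.0 is applied below.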
  if (buffer_size_factor == 0)
    buffer_size_factor = 1.0;
  int requested_buffer_size_bytes = audio_track_->CallIntMethod(
      init_playout_, sample_rate, channels, buffer_size_factor);
  // Update UMA histograms for both the requested and actual buffer size.
  if (requested_buffer_size_bytes >= 0) {
    // To avoid division by zero, we assume the sample rate is 48k if an invalid
    // value is found.
    sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
    // This calculation assumes that audio is mono.
    const int requested_buffer_size_ms =
        (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
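    // Example: a requested buffer size of 1920 bytes at 48000 Hz mono 16-bit
    // PCM corresponds to (1920 * 1000) / (2 * 48000) = 20 ms.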
    RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
                         requested_buffer_size_ms, 0, 1000, 100);
    int actual_buffer_size_frames =
        audio_track_->CallIntMethod(get_buffer_size_in_frames_);
    if (actual_buffer_size_frames >= 0) {
      const int actual_buffer_size_ms =
          actual_buffer_size_frames * 1000 / sample_rate;
      RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
                           actual_buffer_size_ms, 0, 1000, 100);
    }
    return true;
  }
  return false;
}

bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
  return audio_track_->CallBooleanMethod(start_playout_);
}

bool AudioTrackJni::JavaAudioTrack::StopPlayout() {
  return audio_track_->CallBooleanMethod(stop_playout_);
}

bool AudioTrackJni::JavaAudioTrack::SetStreamVolume(int volume) {
  return audio_track_->CallBooleanMethod(set_stream_volume_, volume);
}

int AudioTrackJni::JavaAudioTrack::GetStreamMaxVolume() {
  return audio_track_->CallIntMethod(get_stream_max_volume_);
}

int AudioTrackJni::JavaAudioTrack::GetStreamVolume() {
  return audio_track_->CallIntMethod(get_stream_volume_);
}

// TODO(henrika): possibly extend usage of AudioManager and add it as a member.
AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
    : j_environment_(JVM::GetInstance()->environment()),
      audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
      direct_buffer_address_(nullptr),
      direct_buffer_capacity_in_bytes_(0),
      frames_per_buffer_(0),
      initialized_(false),
      playing_(false),
      audio_device_buffer_(nullptr) {
  RTC_LOG(LS_INFO) << "ctor";
  RTC_DCHECK(audio_parameters_.is_valid());
  RTC_CHECK(j_environment_);
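  // The table below maps the native methods declared in the Java class
  // WebRtcAudioTrack to the static C++ callbacks in this file; the jlong in
  // each JNI signature carries the native AudioTrackJni pointer back to C++.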
  JNINativeMethod native_methods[] = {
      {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V",
       reinterpret_cast<void*>(
           &webrtc::AudioTrackJni::CacheDirectBufferAddress)},
      {"nativeGetPlayoutData", "(IJ)V",
       reinterpret_cast<void*>(&webrtc::AudioTrackJni::GetPlayoutData)}};
  j_native_registration_ = j_environment_->RegisterNatives(
      "org/webrtc/voiceengine/WebRtcAudioTrack", native_methods,
      arraysize(native_methods));
  j_audio_track_.reset(
      new JavaAudioTrack(j_native_registration_.get(),
                         j_native_registration_->NewObject(
                             "<init>", "(J)V", PointerTojlong(this))));
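  // The Java constructor ("(J)V") receives this object as a jlong so that the
  // registered native callbacks can be routed back to this C++ instance.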
  // Detach from this thread since we want to use the checker to verify calls
  // from the Java based audio thread.
  thread_checker_java_.Detach();
}

AudioTrackJni::~AudioTrackJni() {
  RTC_LOG(LS_INFO) << "dtor";
  RTC_DCHECK(thread_checker_.IsCurrent());
  Terminate();
}

int32_t AudioTrackJni::Init() {
  RTC_LOG(LS_INFO) << "Init";
  RTC_DCHECK(thread_checker_.IsCurrent());
  return 0;
}

int32_t AudioTrackJni::Terminate() {
  RTC_LOG(LS_INFO) << "Terminate";
  RTC_DCHECK(thread_checker_.IsCurrent());
  StopPlayout();
  return 0;
}

int32_t AudioTrackJni::InitPlayout() {
  RTC_LOG(LS_INFO) << "InitPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  RTC_DCHECK(!initialized_);
  RTC_DCHECK(!playing_);
  if (!j_audio_track_->InitPlayout(audio_parameters_.sample_rate(),
                                   audio_parameters_.channels())) {
    RTC_LOG(LS_ERROR) << "InitPlayout failed";
    return -1;
  }
  initialized_ = true;
  return 0;
}

int32_t AudioTrackJni::StartPlayout() {
  RTC_LOG(LS_INFO) << "StartPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  RTC_DCHECK(!playing_);
  if (!initialized_) {
    RTC_DLOG(LS_WARNING)
        << "Playout can not start since InitPlayout must succeed first";
    return 0;
  }
  if (!j_audio_track_->StartPlayout()) {
    RTC_LOG(LS_ERROR) << "StartPlayout failed";
    return -1;
  }
  playing_ = true;
  return 0;
}

int32_t AudioTrackJni::StopPlayout() {
  RTC_LOG(LS_INFO) << "StopPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  if (!initialized_ || !playing_) {
    return 0;
  }
  if (!j_audio_track_->StopPlayout()) {
    RTC_LOG(LS_ERROR) << "StopPlayout failed";
    return -1;
  }
  // If we don't detach here, we will hit an RTC_DCHECK in OnGetPlayoutData()
  // next time StartPlayout() is called since it will create a new Java
  // thread.
  thread_checker_java_.Detach();
  initialized_ = false;
  playing_ = false;
  direct_buffer_address_ = nullptr;
  return 0;
}

int AudioTrackJni::SpeakerVolumeIsAvailable(bool& available) {
  available = true;
  return 0;
}

int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
  RTC_LOG(LS_INFO) << "SetSpeakerVolume(" << volume << ")";
  RTC_DCHECK(thread_checker_.IsCurrent());
  return j_audio_track_->SetStreamVolume(volume) ? 0 : -1;
}

int AudioTrackJni::MaxSpeakerVolume(uint32_t& max_volume) const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  max_volume = j_audio_track_->GetStreamMaxVolume();
  return 0;
}

int AudioTrackJni::MinSpeakerVolume(uint32_t& min_volume) const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  min_volume = 0;
  return 0;
}

int AudioTrackJni::SpeakerVolume(uint32_t& volume) const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  volume = j_audio_track_->GetStreamVolume();
  RTC_LOG(LS_INFO) << "SpeakerVolume: " << volume;
  return 0;
}

// TODO(henrika): possibly add stereo support.
void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  RTC_LOG(LS_INFO) << "AttachAudioBuffer";
  RTC_DCHECK(thread_checker_.IsCurrent());
  audio_device_buffer_ = audioBuffer;
  const int sample_rate_hz = audio_parameters_.sample_rate();
  RTC_LOG(LS_INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")";
  audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
  const size_t channels = audio_parameters_.channels();
  RTC_LOG(LS_INFO) << "SetPlayoutChannels(" << channels << ")";
  audio_device_buffer_->SetPlayoutChannels(channels);
}

JNI_FUNCTION_ALIGN
void JNICALL AudioTrackJni::CacheDirectBufferAddress(JNIEnv* env,
                                                     jobject obj,
                                                     jobject byte_buffer,
                                                     jlong nativeAudioTrack) {
  webrtc::AudioTrackJni* this_object =
      reinterpret_cast<webrtc::AudioTrackJni*>(nativeAudioTrack);
  this_object->OnCacheDirectBufferAddress(env, byte_buffer);
}

void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env,
                                               jobject byte_buffer) {
  RTC_LOG(LS_INFO) << "OnCacheDirectBufferAddress";
  RTC_DCHECK(thread_checker_.IsCurrent());
  RTC_DCHECK(!direct_buffer_address_);
  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
  jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
  RTC_LOG(LS_INFO) << "direct buffer capacity: " << capacity;
  direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
  const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
  frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame;
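  // Example: a 1920-byte direct buffer with mono 16-bit samples holds
  // 1920 / 2 = 960 frames, i.e. 20 ms of audio at 48000 Hz.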
  RTC_LOG(LS_INFO) << "frames_per_buffer: " << frames_per_buffer_;
}

JNI_FUNCTION_ALIGN
void JNICALL AudioTrackJni::GetPlayoutData(JNIEnv* env,
                                           jobject obj,
                                           jint length,
                                           jlong nativeAudioTrack) {
  webrtc::AudioTrackJni* this_object =
      reinterpret_cast<webrtc::AudioTrackJni*>(nativeAudioTrack);
  this_object->OnGetPlayoutData(static_cast<size_t>(length));
}

// This method is called on a high-priority thread from Java. The name of
// the thread is 'AudioRecordTrack'.
void AudioTrackJni::OnGetPlayoutData(size_t length) {
  RTC_DCHECK(thread_checker_java_.IsCurrent());
  const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
  RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);
  if (!audio_device_buffer_) {
    RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
    return;
  }
  // Pull decoded data (in 16-bit PCM format) from jitter buffer.
  int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
  if (samples <= 0) {
    RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed";
    return;
  }
  RTC_DCHECK_EQ(samples, frames_per_buffer_);
  // Copy decoded data into common byte buffer to ensure that it can be
  // written to the Java based audio track.
  samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
  RTC_DCHECK_EQ(length, bytes_per_frame * samples);
}

}  // namespace webrtc