/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_

#include <jni.h>

#include <memory>

#include "absl/types/optional.h"
#include "api/sequence_checker.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/audio_device/audio_device_module.h"

namespace webrtc {

namespace jni {

// Implements 16-bit mono PCM audio output support for Android using the Java
// AudioTrack interface. Most of the work is done by its Java counterpart in
// WebRtcAudioTrack.java. This class is created and lives on a thread in
// C++-land, but decoded audio buffers are requested on a high-priority
// thread managed by the Java class.
//
// An instance can be created on any thread, but must then be used on one and
// the same thread. All public methods must also be called on that same
// thread. A thread checker will RTC_DCHECK if any method is called on an
// invalid thread.
//
// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if
// needed. Additional thread checking guarantees that no other (possibly
// non-attached) thread is used.
class AudioTrackJni : public AudioOutput {
 public:
  static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioTrack(
      JNIEnv* env,
      const JavaRef<jobject>& j_context,
      const JavaRef<jobject>& j_audio_manager);

  AudioTrackJni(JNIEnv* env,
                const AudioParameters& audio_parameters,
                const JavaRef<jobject>& j_webrtc_audio_track);
  ~AudioTrackJni() override;

  int32_t Init() override;
  int32_t Terminate() override;

  int32_t InitPlayout() override;
  bool PlayoutIsInitialized() const override;

  int32_t StartPlayout() override;
  int32_t StopPlayout() override;
  bool Playing() const override;

  bool SpeakerVolumeIsAvailable() override;
  int SetSpeakerVolume(uint32_t volume) override;
  absl::optional<uint32_t> SpeakerVolume() const override;
  absl::optional<uint32_t> MaxSpeakerVolume() const override;
  absl::optional<uint32_t> MinSpeakerVolume() const override;
  int GetPlayoutUnderrunCount() override;

  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

  // Called from the Java side so we can cache the address of the Java-managed
  // `byte_buffer` in `direct_buffer_address_`. The size of the buffer
  // is also stored in `direct_buffer_capacity_in_bytes_`.
  // Called on the same thread as the creating thread.
  void CacheDirectBufferAddress(JNIEnv* env,
                                const JavaParamRef<jobject>& byte_buffer);
  // Called periodically by the Java-based WebRtcAudioTrack object when
  // playout has started. Each call indicates that `length` new bytes should
  // be written to the memory area `direct_buffer_address_` for playout.
  // This method is called on a high-priority thread from Java. The name of
  // the thread is 'AudioTrackThread'.
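  // The actual data transfer is implemented in the matching .cc file, not in
  // this header. A minimal sketch of what a single call is assumed to do,
  // using the standard AudioDeviceBuffer API, is:
  //
  //   audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
  //   audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
  //
  // i.e. request `frames_per_buffer_` frames of decoded audio and copy them
  // into the direct byte buffer shared with WebRtcAudioTrack.java.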
  void GetPlayoutData(JNIEnv* env, size_t length);

 private:
  // Stores thread ID in constructor.
  SequenceChecker thread_checker_;

  // Stores thread ID in the first call to GetPlayoutData() from the
  // high-priority thread in Java. Detached during construction of this object.
  SequenceChecker thread_checker_java_;

  // Wraps the Java specific parts of the AudioTrackJni class.
  JNIEnv* env_ = nullptr;
  ScopedJavaGlobalRef<jobject> j_audio_track_;

  // Contains audio parameters provided to this class at construction by the
  // AudioManager.
  const AudioParameters audio_parameters_;

  // Cached copy of address to direct audio buffer owned by `j_audio_track_`.
  void* direct_buffer_address_;

  // Number of bytes in the direct audio buffer owned by `j_audio_track_`.
  size_t direct_buffer_capacity_in_bytes_;

  // Number of audio frames per audio buffer. Each audio frame corresponds to
  // one sample of PCM mono data at 16 bits per sample. Hence, each audio
  // frame contains 2 bytes (given that the Java layer only supports mono).
  // Example: 480 for 48000 Hz or 441 for 44100 Hz.
  size_t frames_per_buffer_;

  bool initialized_;

  bool playing_;

  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
  // AudioDeviceModuleImpl class and created in AudioDeviceModule::Create().
  // The AudioDeviceBuffer is a member of the AudioDeviceModuleImpl instance
  // and therefore outlives this object.
  AudioDeviceBuffer* audio_device_buffer_;
};

}  // namespace jni

}  // namespace webrtc

#endif  // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
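
// Usage sketch (illustrative only, not part of the header above). Assuming an
// attached JNIEnv*, Java references for the Android Context and AudioManager,
// an AudioParameters instance, and an AudioDeviceBuffer owned elsewhere, the
// class might be wired up roughly like this:
//
//   ScopedJavaLocalRef<jobject> j_track =
//       AudioTrackJni::CreateJavaWebRtcAudioTrack(env, j_context,
//                                                 j_audio_manager);
//   auto output = std::make_unique<AudioTrackJni>(env, params, j_track);
//   output->Init();
//   output->AttachAudioBuffer(audio_device_buffer);
//   output->InitPlayout();
//   output->StartPlayout();
//   // ... playout runs on the Java 'AudioTrackThread' ...
//   output->StopPlayout();
//   output->Terminate();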