// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package pandora;

import "pandora/host.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/wrappers.proto";

option java_outer_classname = "A2DPProto";

// Service to trigger A2DP (Advanced Audio Distribution Profile) procedures.
//
// Requirements for the implementer:
// - Streams must not be automatically opened, even if discovered.
// - The `Host` service must be implemented.
//
// References:
// - [A2DP] Bluetooth SIG, Specification of the Bluetooth System,
//   Advanced Audio Distribution, Version 1.3 or Later
// - [AVDTP] Bluetooth SIG, Specification of the Bluetooth System,
//   Audio/Video Distribution Transport Protocol, Version 1.3 or Later
service A2DP {
  // Open a stream from a local **Source** endpoint to a remote **Sink**
  // endpoint.
  //
  // The returned source should be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // A cancellation of this call must result in aborting the current
  // AVDTP procedure (see [AVDTP] 9.9).
  rpc OpenSource(OpenSourceRequest) returns (OpenSourceResponse);

  // Open a stream from a local **Sink** endpoint to a remote **Source**
  // endpoint.
  //
  // The returned sink must be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // A cancellation of this call must result in aborting the current
  // AVDTP procedure (see [AVDTP] 9.9).
  rpc OpenSink(OpenSinkRequest) returns (OpenSinkResponse);

  // Wait for a stream from a local **Source** endpoint to
  // a remote **Sink** endpoint to open.
  //
  // The returned source should be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // If the peer has opened a source prior to this call, the server will
  // return it. The server must return the same source only once.
  rpc WaitSource(WaitSourceRequest) returns (WaitSourceResponse);

  // Wait for a stream from a local **Sink** endpoint to
  // a remote **Source** endpoint to open.
  //
  // The returned sink should be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // If the peer has opened a sink prior to this call, the server will
  // return it. The server must return the same sink only once.
  rpc WaitSink(WaitSinkRequest) returns (WaitSinkResponse);

  // Return whether the stream is suspended.
  rpc IsSuspended(IsSuspendedRequest) returns (google.protobuf.BoolValue);

  // Start an opened stream.
  //
  // The rpc must block until the stream has reached the
  // AVDTP_STREAMING state (see [AVDTP] 9.1).
  rpc Start(StartRequest) returns (StartResponse);

  // Suspend a streaming stream.
  //
  // The rpc must block until the stream has reached the AVDTP_OPEN
  // state (see [AVDTP] 9.1).
  rpc Suspend(SuspendRequest) returns (SuspendResponse);

  // Close a stream; the source or sink tokens must not be reused afterwards.
  rpc Close(CloseRequest) returns (CloseResponse);

  // Get the `AudioEncoding` value of a stream.
  rpc GetAudioEncoding(GetAudioEncodingRequest)
      returns (GetAudioEncodingResponse);

  // Playback audio by a `Source`.
  rpc PlaybackAudio(stream PlaybackAudioRequest)
      returns (PlaybackAudioResponse);

  // Capture audio from a `Sink`.
  rpc CaptureAudio(CaptureAudioRequest) returns (stream CaptureAudioResponse);

  // Get codec configuration.
  rpc GetConfiguration(GetConfigurationRequest)
      returns (GetConfigurationResponse);

  // Set codec configuration.
  rpc SetConfiguration(SetConfigurationRequest)
      returns (SetConfigurationResponse);
}
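
// Illustrative call flow (informative only, not part of the API contract),
// assuming the local device exposes a Source endpoint and that a `Connection`
// has already been obtained through the `Host` service:
//
//   1. OpenSource(connection)       -> stream reaches AVDTP_OPEN
//   2. Start(source)                -> stream reaches AVDTP_STREAMING
//   3. PlaybackAudio(source, data)  -> client-streamed audio payload
//   4. Suspend(source)              -> stream returns to AVDTP_OPEN
//   5. Close(source)                -> the `source` token must not be reused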
// Audio encoding formats.
enum AudioEncoding {
  // Interleaved stereo frames with 16-bit signed little-endian linear PCM
  // samples at a 44100 Hz sample rate.
  PCM_S16_LE_44K1_STEREO = 0;
  // Interleaved stereo frames with 16-bit signed little-endian linear PCM
  // samples at a 48000 Hz sample rate.
  PCM_S16_LE_48K_STEREO = 1;
}

// Channel mode.
enum ChannelMode {
  UNKNOWN = 0;
  MONO = 1;
  STEREO = 2;
  DUALMONO = 3;
}

// A token representing a Source stream (see [A2DP] 2.2).
// It is acquired via an `OpenSource` call on the `A2DP` service.
message Source {
  // Opaque value filled by the gRPC server, must not
  // be modified nor crafted.
  bytes cookie = 1;
}

// A token representing a Sink stream (see [A2DP] 2.2).
// It is acquired via an `OpenSink` call on the `A2DP` service.
message Sink {
  // Opaque value filled by the gRPC server, must not
  // be modified nor crafted.
  bytes cookie = 1;
}

// Vendor codec.
message Vendor {
  // 16 bits - Vendor identifier, assigned by the Bluetooth SIG
  // [Assigned Numbers - 7.1].
  uint32 id = 1;
  // 16 bits - Assigned by the vendor.
  uint32 codecId = 2;
}

// Codec identifier defined for A2DP.
message CodecId {
  oneof type {
    google.protobuf.Empty sbc = 1;
    google.protobuf.Empty mpeg_aac = 2;
    Vendor vendor = 3;
  }
}

message CodecParameters {
  // Channel mode: Mono, Dual-Mono or Stereo.
  ChannelMode channel_mode = 1;
  // Sampling frequency in Hz.
  uint32 sampling_frequency_hz = 2;
  // Fixed-point resolution in bits per sample.
  uint32 bit_depth = 3;
  // Bitrate limits on a frame basis, defined in bits per second.
  // The value 0 in either field means "undefined" or "don't care".
  uint32 min_bitrate = 4;
  uint32 max_bitrate = 5;
  // Low-latency configuration. The interpretation is vendor specific.
  bool low_latency = 6;
  // Lossless effort indication. The `false` value can be used as "don't care".
  bool lossless = 7;
  // Vendor specific parameters.
  bytes vendor_specific_parameters = 8;
}

message Configuration {
  // Codec identifier.
  CodecId id = 1;
  // Codec parameters.
  CodecParameters parameters = 2;
}
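
// Illustrative example (informative only): a `Configuration` selecting the
// SBC codec at 44.1 kHz, 16-bit stereo, expressed in protobuf text format.
// The parameter values are placeholders, not recommended settings.
//
//   id { sbc {} }
//   parameters {
//     channel_mode: STEREO
//     sampling_frequency_hz: 44100
//     bit_depth: 16
//   }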
// Request for the `OpenSource` method.
message OpenSourceRequest {
  // The connection that will open the stream.
  Connection connection = 1;
}

// Response for the `OpenSource` method.
message OpenSourceResponse {
  // Result of the `OpenSource` call.
  oneof result {
    // Opened stream.
    Source source = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `OpenSink` method.
message OpenSinkRequest {
  // The connection that will open the stream.
  Connection connection = 1;
}

// Response for the `OpenSink` method.
message OpenSinkResponse {
  // Result of the `OpenSink` call.
  oneof result {
    // Opened stream.
    Sink sink = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `WaitSource` method.
message WaitSourceRequest {
  // The connection that is awaiting the stream.
  Connection connection = 1;
}

// Response for the `WaitSource` method.
message WaitSourceResponse {
  // Result of the `WaitSource` call.
  oneof result {
    // Awaited stream.
    Source source = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `WaitSink` method.
message WaitSinkRequest {
  // The connection that is awaiting the stream.
  Connection connection = 1;
}

// Response for the `WaitSink` method.
message WaitSinkResponse {
  // Result of the `WaitSink` call.
  oneof result {
    // Awaited stream.
    Sink sink = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `IsSuspended` method.
message IsSuspendedRequest {
  // The stream to check for suspension, either a Sink or a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Request for the `Start` method.
message StartRequest {
  // Target of the start, either a Sink or a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `Start` method.
message StartResponse {
  // Result of the `Start` call.
  oneof result {
    // Stream successfully started.
    google.protobuf.Empty started = 1;
    // Stream is already in the AVDTP_STREAMING state.
    google.protobuf.Empty already_started = 2;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 3;
  }
}

// Request for the `Suspend` method.
message SuspendRequest {
  // Target of the suspend, either a Sink or a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `Suspend` method.
message SuspendResponse {
  // Result of the `Suspend` call.
  oneof result {
    // Stream successfully suspended.
    google.protobuf.Empty suspended = 1;
    // Stream is already in the AVDTP_OPEN state.
    google.protobuf.Empty already_suspended = 2;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 3;
  }
}

// Request for the `Close` method.
message CloseRequest {
  // Target of the close, either a Sink or a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `Close` method.
message CloseResponse {}

// Request for the `GetAudioEncoding` method.
message GetAudioEncodingRequest {
  // The stream from which to read the `AudioEncoding`, either a Sink or
  // a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `GetAudioEncoding` method.
message GetAudioEncodingResponse {
  // Audio encoding of the stream.
  AudioEncoding encoding = 1;
}

// Request for the `PlaybackAudio` method.
message PlaybackAudioRequest {
  // Source that will playback audio.
  Source source = 1;
  // Audio data to playback.
  // The audio data must be encoded in the specified `AudioEncoding` value
  // obtained in response to a `GetAudioEncoding` method call.
  bytes data = 2;
}

// Response for the `PlaybackAudio` method.
message PlaybackAudioResponse {}

// Request for the `CaptureAudio` method.
message CaptureAudioRequest {
  // Sink that will capture audio.
  Sink sink = 1;
}

// Response for the `CaptureAudio` method.
message CaptureAudioResponse {
  // Captured audio data.
  // The audio data is encoded in the specified `AudioEncoding` value
  // obtained in response to a `GetAudioEncoding` method call.
  bytes data = 1;
}
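
// Informative note on payload sizing: with `PCM_S16_LE_44K1_STEREO`, one
// second of audio carried in `PlaybackAudioRequest.data` or
// `CaptureAudioResponse.data` amounts to 44100 samples x 2 channels x 2 bytes
// = 176400 bytes; with `PCM_S16_LE_48K_STEREO` it amounts to 192000 bytes.
// How the streamed `data` payloads are chunked is not specified here.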
// Request for the `GetConfiguration` method.
message GetConfigurationRequest {
  // The connection to get codec configuration from.
  Connection connection = 1;
}

// Response for the `GetConfiguration` method.
message GetConfigurationResponse {
  // Codec configuration.
  Configuration configuration = 1;
}

// Request for the `SetConfiguration` method.
message SetConfigurationRequest {
  // The connection on which to set the codec configuration.
  Connection connection = 1;
  // New codec configuration.
  Configuration configuration = 2;
}

// Response for the `SetConfiguration` method.
message SetConfigurationResponse {
  // Set configuration result.
  bool success = 1;
}
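
// Illustrative example (informative only): a `SetConfigurationRequest`
// selecting a vendor codec, in protobuf text format. The vendor and codec
// identifiers below are placeholders rather than assigned values, and the
// `connection` field is omitted because its content is obtained from the
// `Host` service.
//
//   configuration {
//     id { vendor { id: 65535 codecId: 1 } }
//     parameters {
//       channel_mode: STEREO
//       sampling_frequency_hz: 48000
//       bit_depth: 24
//       low_latency: true
//     }
//   }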