/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "AAudioService"
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <atomic>
#include <stdint.h>

#include <utils/String16.h>
#include <media/nbaio/AudioStreamOutSink.h>
#include <media/MmapStreamInterface.h>

#include "AAudioServiceStreamBase.h"
#include "AAudioServiceStreamMMAP.h"
#include "binding/AudioEndpointParcelable.h"
#include "SharedMemoryProxy.h"
#include "utility/AAudioUtilities.h"

using namespace android;
using namespace aaudio;

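// Defaults applied in open() below when the client leaves a value unspecified:
// a minimum MMAP buffer capacity of 4 * 512 = 2048 frames and a 48000 Hz sample rate.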
#define AAUDIO_BUFFER_CAPACITY_MIN    (4 * 512)
#define AAUDIO_SAMPLE_RATE_DEFAULT    48000

/**
 * Stream that uses an MMAP buffer.
 */

AAudioServiceStreamMMAP::AAudioServiceStreamMMAP()
        : AAudioServiceStreamBase()
        , mMmapStreamCallback(new MyMmapStreamCallback(*this))
        , mPreviousFrameCounter(0)
        , mMmapStream(nullptr) {
}

AAudioServiceStreamMMAP::~AAudioServiceStreamMMAP() {
    close();
}

aaudio_result_t AAudioServiceStreamMMAP::close() {
    ALOGD("AAudioServiceStreamMMAP::close() called, %p", mMmapStream.get());
    mMmapStream.clear(); // TODO review. Is that all we have to do?
    return AAudioServiceStreamBase::close();
}

// Open stream on HAL and pass information about the shared memory buffer back to the client.
aaudio_result_t AAudioServiceStreamMMAP::open(const aaudio::AAudioStreamRequest &request,
                                              aaudio::AAudioStreamConfiguration &configurationOutput) {
    const audio_attributes_t attributes = {
        .content_type = AUDIO_CONTENT_TYPE_MUSIC,
        .usage = AUDIO_USAGE_MEDIA,
        .source = AUDIO_SOURCE_DEFAULT,
        .flags = AUDIO_FLAG_LOW_LATENCY,
        .tags = ""
    };
    audio_config_base_t config;

    aaudio_result_t result = AAudioServiceStreamBase::open(request, configurationOutput);
    if (result != AAUDIO_OK) {
        ALOGE("AAudioServiceStreamBase open returned %d", result);
        return result;
    }

    const AAudioStreamConfiguration &configurationInput = request.getConstantConfiguration();
    audio_port_handle_t deviceId = configurationInput.getDeviceId();

    ALOGI("open request dump()");
    request.dump();

    mMmapClient.clientUid = request.getUserId();
    mMmapClient.clientPid = request.getProcessId();
    aaudio_direction_t direction = request.getDirection();

    // Fill in config
    aaudio_audio_format_t aaudioFormat = configurationInput.getAudioFormat();
    if (aaudioFormat == AAUDIO_UNSPECIFIED || aaudioFormat == AAUDIO_FORMAT_PCM_FLOAT) {
        ALOGI("open forcing use of AAUDIO_FORMAT_PCM_I16");
        aaudioFormat = AAUDIO_FORMAT_PCM_I16;
    }
    config.format = AAudioConvert_aaudioToAndroidDataFormat(aaudioFormat);

    int32_t aaudioSampleRate = configurationInput.getSampleRate();
    if (aaudioSampleRate == AAUDIO_UNSPECIFIED) {
        aaudioSampleRate = AAUDIO_SAMPLE_RATE_DEFAULT;
    }
    config.sample_rate = aaudioSampleRate;

    int32_t aaudioSamplesPerFrame = configurationInput.getSamplesPerFrame();

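    // Convert the requested channel count to an Android channel mask.
    // An unspecified count defaults to stereo; otherwise the count is mapped to a
    // positional mask, e.g. 2 channels on an output stream -> AUDIO_CHANNEL_OUT_STEREO.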
    if (direction == AAUDIO_DIRECTION_OUTPUT) {
        config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
                              ? AUDIO_CHANNEL_OUT_STEREO
                              : audio_channel_out_mask_from_count(aaudioSamplesPerFrame);
    } else if (direction == AAUDIO_DIRECTION_INPUT) {
        config.channel_mask = (aaudioSamplesPerFrame == AAUDIO_UNSPECIFIED)
                              ? AUDIO_CHANNEL_IN_STEREO
                              : audio_channel_in_mask_from_count(aaudioSamplesPerFrame);
    } else {
        ALOGE("openMmapStream - invalid direction = %d", direction);
        return AAUDIO_ERROR_ILLEGAL_ARGUMENT;
    }

    mMmapClient.packageName.setTo(String16("aaudio_service")); // FIXME what should we do here?

    MmapStreamInterface::stream_direction_t streamDirection = (direction == AAUDIO_DIRECTION_OUTPUT)
            ? MmapStreamInterface::DIRECTION_OUTPUT : MmapStreamInterface::DIRECTION_INPUT;

    // Open HAL stream.
    status_t status = MmapStreamInterface::openMmapStream(streamDirection,
                                                          &attributes,
                                                          &config,
                                                          mMmapClient,
                                                          &deviceId,
                                                          mMmapStreamCallback,
                                                          mMmapStream);
    if (status != OK) {
        ALOGE("openMmapStream returned status %d", status);
        return AAUDIO_ERROR_UNAVAILABLE;
    }

    // Create MMAP/NOIRQ buffer.
    int32_t minSizeFrames = configurationInput.getBufferCapacity();
    if (minSizeFrames == 0) { // zero will get rejected
        minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
    }
    status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
    if (status != OK) {
        ALOGE("%s: createMmapBuffer() returned status %d, return AAUDIO_ERROR_UNAVAILABLE",
              __FILE__, status);
        return AAUDIO_ERROR_UNAVAILABLE;
    } else {
        ALOGD("createMmapBuffer status %d shared_address = %p buffer_size %d burst_size %d",
              status, mMmapBufferinfo.shared_memory_address,
              mMmapBufferinfo.buffer_size_frames,
              mMmapBufferinfo.burst_size_frames);
    }

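    // mMmapBufferinfo now holds the shared memory file descriptor, the total buffer
    // capacity in frames, and the HAL burst size; these are copied into member fields
    // below and later exposed to the client via getDownDataDescription().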
    // Get information about the stream and pass it back to the caller.
    mSamplesPerFrame = (direction == AAUDIO_DIRECTION_OUTPUT)
            ? audio_channel_count_from_out_mask(config.channel_mask)
            : audio_channel_count_from_in_mask(config.channel_mask);

    mAudioDataFileDescriptor = mMmapBufferinfo.shared_memory_fd;
    mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
    mCapacityInFrames = mMmapBufferinfo.buffer_size_frames;
    mAudioFormat = AAudioConvert_androidToAAudioDataFormat(config.format);
    mSampleRate = config.sample_rate;

    // Fill in AAudioStreamConfiguration
    configurationOutput.setSampleRate(mSampleRate);
    configurationOutput.setSamplesPerFrame(mSamplesPerFrame);
    configurationOutput.setAudioFormat(mAudioFormat);
    configurationOutput.setDeviceId(deviceId);

    return AAUDIO_OK;
}

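/*
 * Rough lifecycle sketch showing how the methods in this file fit together
 * (the actual sequencing is driven elsewhere in the AAudio service):
 *
 *     AAudioServiceStreamMMAP stream;
 *     stream.open(request, configurationOutput);   // open HAL stream, create MMAP buffer
 *     stream.getDownDataDescription(parcelable);   // describe the shared memory to the client
 *     stream.start();                              // start the flow of data
 *     stream.pause();                              // stop; start() may be called again
 *     stream.close();                              // release the HAL stream
 */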

/**
 * Start the flow of data.
 */
aaudio_result_t AAudioServiceStreamMMAP::start() {
    if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
    // MmapStreamInterface::start() returns a status_t, so convert it to an aaudio_result_t.
    status_t status = mMmapStream->start(mMmapClient, &mPortHandle);
    aaudio_result_t result = AAudioConvert_androidToAAudioResult(status);
    if (result != AAUDIO_OK) {
        ALOGE("AAudioServiceStreamMMAP::start() mMmapStream->start() returned %d", result);
        processError();
    } else {
        result = AAudioServiceStreamBase::start();
    }
    return result;
}

/**
 * Stop the flow of data so that start() can resume without loss of data.
 */
aaudio_result_t AAudioServiceStreamMMAP::pause() {
    if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;

    aaudio_result_t result1 = AAudioServiceStreamBase::pause();
    // MmapStreamInterface::stop() returns a status_t, so convert it to an aaudio_result_t.
    aaudio_result_t result2 = AAudioConvert_androidToAAudioResult(mMmapStream->stop(mPortHandle));
    mFramesRead.reset32();
    return (result1 != AAUDIO_OK) ? result1 : result2;
}

/**
 * Discard any data held by the underlying HAL or Service.
 */
aaudio_result_t AAudioServiceStreamMMAP::flush() {
    if (mMmapStream == nullptr) return AAUDIO_ERROR_NULL;
    // TODO how do we flush an MMAP/NOIRQ buffer? sync pointers?
    ALOGD("AAudioServiceStreamMMAP::flush() send AAUDIO_SERVICE_EVENT_FLUSHED");
    sendServiceEvent(AAUDIO_SERVICE_EVENT_FLUSHED);
    mState = AAUDIO_STREAM_STATE_FLUSHED;
    return AAUDIO_OK;
}

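/**
 * Get the free-running position directly from the HAL.
 * The HAL reports a 32-bit frame counter, so mFramesRead.update32() is used to
 * extend it into the 64-bit position returned to the caller; the timestamp is
 * passed through unchanged.
 */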
aaudio_result_t AAudioServiceStreamMMAP::getFreeRunningPosition(int64_t *positionFrames,
                                                                int64_t *timeNanos) {
    struct audio_mmap_position position;
    if (mMmapStream == nullptr) {
        processError();
        return AAUDIO_ERROR_NULL;
    }
    status_t status = mMmapStream->getMmapPosition(&position);
    if (status != OK) {
        ALOGE("getFreeRunningPosition(): getMmapPosition() returned %d", status);
        processError();
        return AAudioConvert_androidToAAudioResult(status);
    } else {
        mFramesRead.update32(position.position_frames);
        *positionFrames = mFramesRead.get();
        *timeNanos = position.time_nanoseconds;
    }
    return AAUDIO_OK;
}

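// The handlers below are invoked via the MmapStreamCallback (the MyMmapStreamCallback
// created in the constructor) that was registered with openMmapStream() in open().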
void AAudioServiceStreamMMAP::onTearDown() {
    ALOGD("AAudioServiceStreamMMAP::onTearDown() called - TODO");
}

void AAudioServiceStreamMMAP::onVolumeChanged(audio_channel_mask_t channels,
                                              android::Vector<float> values) {
    // TODO do we really need a different volume for each channel?
    float volume = values[0];
    ALOGD("AAudioServiceStreamMMAP::onVolumeChanged() volume[0] = %f", volume);
    sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
}

void AAudioServiceStreamMMAP::onRoutingChanged(audio_port_handle_t deviceId) {
    ALOGD("AAudioServiceStreamMMAP::onRoutingChanged() called with %d, old = %d",
          deviceId, mPortHandle);
    if (mPortHandle > 0 && mPortHandle != deviceId) {
        sendServiceEvent(AAUDIO_SERVICE_EVENT_DISCONNECTED);
    }
    mPortHandle = deviceId;
}

/**
 * Get an immutable description of the data queue from the HAL.
 */
aaudio_result_t AAudioServiceStreamMMAP::getDownDataDescription(AudioEndpointParcelable &parcelable)
{
    // Gather information on the data queue based on HAL info.
    int32_t bytesPerFrame = calculateBytesPerFrame();
    int32_t capacityInBytes = mCapacityInFrames * bytesPerFrame;
    int fdIndex = parcelable.addFileDescriptor(mAudioDataFileDescriptor, capacityInBytes);
    parcelable.mDownDataQueueParcelable.setupMemory(fdIndex, 0, capacityInBytes);
    parcelable.mDownDataQueueParcelable.setBytesPerFrame(bytesPerFrame);
    parcelable.mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
    parcelable.mDownDataQueueParcelable.setCapacityInFrames(mCapacityInFrames);
    return AAUDIO_OK;
}