/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vector>

#define LOG_TAG "AHAL_ModulePrimary"
#include <Utils.h>
#include <android-base/logging.h>
#include <cutils/ashmem.h>  // ashmem_create_region(), used by createMmapBuffer() below

#include "core-impl/ModulePrimary.h"
#include "core-impl/StreamMmapStub.h"
#include "core-impl/StreamOffloadStub.h"
#include "core-impl/StreamPrimary.h"
#include "core-impl/Telephony.h"

using aidl::android::hardware::audio::common::areAllBitPositionFlagsSet;
using aidl::android::hardware::audio::common::hasMmapFlag;
using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::audio::core::StreamDescriptor;
using aidl::android::media::audio::common::AudioInputFlags;
using aidl::android::media::audio::common::AudioIoFlags;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::AudioOutputFlags;
using aidl::android::media::audio::common::AudioPort;
using aidl::android::media::audio::common::AudioPortConfig;
using aidl::android::media::audio::common::AudioPortExt;
using aidl::android::media::audio::common::MicrophoneInfo;

namespace aidl::android::hardware::audio::core {

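// Creates the Telephony child interface on first use and returns the cached instance afterwards.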
ndk::ScopedAStatus ModulePrimary::getTelephony(std::shared_ptr<ITelephony>* _aidl_return) {
    if (!mTelephony) {
        mTelephony = ndk::SharedRefBase::make<Telephony>();
    }
    *_aidl_return = mTelephony.getInstance();
    LOG(DEBUG) << __func__
               << ": returning instance of ITelephony: " << _aidl_return->get()->asBinder().get();
    return ndk::ScopedAStatus::ok();
}

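// For non-PCM formats whose encoding the offload stream stub supports, report a fixed buffer of
// half a second worth of frames; all other formats use the generic calculation in 'Module'.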
ndk::ScopedAStatus ModulePrimary::calculateBufferSizeFrames(
        const ::aidl::android::media::audio::common::AudioFormatDescription& format,
        int32_t latencyMs, int32_t sampleRateHz, int32_t* bufferSizeFrames) {
    if (format.type != ::aidl::android::media::audio::common::AudioFormatType::PCM &&
        StreamOffloadStub::getSupportedEncodings().count(format.encoding)) {
        *bufferSizeFrames = sampleRateHz / 2;  // 1/2 of a second.
        return ndk::ScopedAStatus::ok();
    }
    return Module::calculateBufferSizeFrames(format, latencyMs, sampleRateHz, bufferSizeFrames);
}

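// MMAP capture is not supported on the virtual device, so MMAP contexts get a stub stream;
// regular capture uses 'StreamInPrimary'.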
ndk::ScopedAStatus ModulePrimary::createInputStream(StreamContext&& context,
                                                    const SinkMetadata& sinkMetadata,
                                                    const std::vector<MicrophoneInfo>& microphones,
                                                    std::shared_ptr<StreamIn>* result) {
    if (context.isMmap()) {
        // "Stub" is used because there is no support for MMAP audio I/O on CVD.
        return createStreamInstance<StreamInMmapStub>(result, std::move(context), sinkMetadata,
                                                      microphones);
    }
    return createStreamInstance<StreamInPrimary>(result, std::move(context), sinkMetadata,
                                                 microphones);
}

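// The playback stream implementation is selected from the context: MMAP contexts and compressed
// offload outputs get stub implementations, everything else uses 'StreamOutPrimary'.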
ndk::ScopedAStatus ModulePrimary::createOutputStream(
        StreamContext&& context, const SourceMetadata& sourceMetadata,
        const std::optional<AudioOffloadInfo>& offloadInfo, std::shared_ptr<StreamOut>* result) {
    if (context.isMmap()) {
        // "Stub" is used because there is no support for MMAP audio I/O on CVD.
        return createStreamInstance<StreamOutMmapStub>(result, std::move(context), sourceMetadata,
                                                       offloadInfo);
    } else if (areAllBitPositionFlagsSet(
                       context.getFlags().get<AudioIoFlags::output>(),
                       {AudioOutputFlags::COMPRESS_OFFLOAD, AudioOutputFlags::NON_BLOCKING})) {
        // "Stub" is used because there is no actual decoder. The stream just
        // extracts the clip duration from the media file header and simulates
        // playback over time.
        return createStreamInstance<StreamOutOffloadStub>(result, std::move(context),
                                                          sourceMetadata, offloadInfo);
    }
    return createStreamInstance<StreamOutPrimary>(result, std::move(context), sourceMetadata,
                                                  offloadInfo);
}

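// Since MMAP I/O is only simulated here, the returned descriptor is backed by an ashmem region
// created solely to provide a valid file descriptor; see the comment inside the function body.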
ndk::ScopedAStatus ModulePrimary::createMmapBuffer(const AudioPortConfig& portConfig,
                                                   int32_t bufferSizeFrames, int32_t frameSizeBytes,
                                                   MmapBufferDescriptor* desc) {
    const size_t bufferSizeBytes = static_cast<size_t>(bufferSizeFrames) * frameSizeBytes;
    // The actual mmap buffer for I/O is created after the stream exits standby, via
    // 'IStreamCommon.createMmapBuffer'. But we must return a valid file descriptor here because
    // 'MmapBufferDescriptor' cannot contain a "null" fd.
    const std::string regionName =
            std::string("mmap-sim-o-") +
            std::to_string(portConfig.ext.get<AudioPortExt::Tag::mix>().handle);
    int fd = ashmem_create_region(regionName.c_str(), bufferSizeBytes);
    if (fd < 0) {
        PLOG(ERROR) << __func__ << ": failed to create shared memory region of " << bufferSizeBytes
                    << " bytes";
        return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }
    desc->sharedMemory.fd = ndk::ScopedFileDescriptor(fd);
    desc->sharedMemory.size = bufferSizeBytes;
    desc->burstSizeFrames = bufferSizeFrames / 2;
    desc->flags = 0;
    LOG(DEBUG) << __func__ << ": " << desc->toString();
    return ndk::ScopedAStatus::ok();
}

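// MMAP streams report a low nominal latency; all other streams use the standard value below.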
int32_t ModulePrimary::getNominalLatencyMs(const AudioPortConfig& portConfig) {
    static constexpr int32_t kLowLatencyMs = 5;
    // 85 ms is chosen considering 4096 frames @ 48 kHz. This is the value that allows
    // the virtual Android device implementation to pass CTS. Hardware implementations
    // should have significantly lower latency.
    static constexpr int32_t kStandardLatencyMs = 85;
    return hasMmapFlag(portConfig.flags.value()) ? kLowLatencyMs : kStandardLatencyMs;
}

}  // namespace aidl::android::hardware::audio::core