Merge changes from topic "ReadDefaultFstab_requires_sepolicies"
* changes:
fs_mgr: Refactor ReadDefaultFstab() and ReadFstabFromFile()
fs_mgr: Strengthen ReadFstabFromFile() around gsi_public_metadata_file
diff --git a/libstats/pull_rust/Android.bp b/libstats/pull_rust/Android.bp
new file mode 100644
index 0000000..3660199
--- /dev/null
+++ b/libstats/pull_rust/Android.bp
@@ -0,0 +1,55 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+rust_bindgen {
+ name: "libstatspull_bindgen",
+ wrapper_src: "statslog.h",
+ crate_name: "statspull_bindgen",
+ source_stem: "bindings",
+ bindgen_flags: [
+ "--size_t-is-usize",
+ "--whitelist-function=AStatsEventList_addStatsEvent",
+ "--whitelist-function=AStatsEvent_.*",
+ "--whitelist-function=AStatsManager_.*",
+ "--whitelist-var=AStatsManager_.*",
+ ],
+ target: {
+ android: {
+ shared_libs: [
+ "libstatspull",
+ "libstatssocket",
+ ],
+ },
+ host: {
+ static_libs: [
+ "libstatspull",
+ "libstatssocket",
+ ],
+ },
+ },
+}
+
+rust_library {
+ name: "libstatspull_rust",
+ crate_name: "statspull_rust",
+ srcs: ["stats_pull.rs"],
+ rustlibs: [
+ "liblazy_static",
+ "liblog_rust",
+ "libstatslog_rust_header",
+ "libstatspull_bindgen",
+ ],
+}
diff --git a/libstats/pull_rust/stats_pull.rs b/libstats/pull_rust/stats_pull.rs
new file mode 100644
index 0000000..174125e
--- /dev/null
+++ b/libstats/pull_rust/stats_pull.rs
@@ -0,0 +1,170 @@
+// Copyright 2021, The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! A Rust interface for the StatsD pull API.
+
+use lazy_static::lazy_static;
+use statslog_rust_header::{Atoms, Stat, StatsError};
+use statspull_bindgen::*;
+use std::collections::HashMap;
+use std::convert::TryInto;
+use std::os::raw::c_void;
+use std::sync::Mutex;
+
+/// The return value of callbacks.
+pub type StatsPullResult = Vec<Box<dyn Stat>>;
+
+/// A wrapper for AStatsManager_PullAtomMetadata.
+/// It calls AStatsManager_PullAtomMetadata_release on drop.
+pub struct Metadata {
+ metadata: *mut AStatsManager_PullAtomMetadata,
+}
+
+impl Metadata {
+ /// Calls AStatsManager_PullAtomMetadata_obtain.
+ pub fn new() -> Self {
+ // Safety: We panic if the memory allocation fails.
+ let metadata = unsafe { AStatsManager_PullAtomMetadata_obtain() };
+ if metadata.is_null() {
+ panic!("Cannot obtain pull atom metadata.");
+ } else {
+ Metadata { metadata }
+ }
+ }
+
+ /// Calls AStatsManager_PullAtomMetadata_setCoolDownMillis.
+ pub fn set_cooldown_millis(&mut self, cooldown_millis: i64) {
+ // Safety: Metadata::new ensures that self.metadata is a valid object.
+ unsafe { AStatsManager_PullAtomMetadata_setCoolDownMillis(self.metadata, cooldown_millis) }
+ }
+
+ /// Calls AStatsManager_PullAtomMetadata_getCoolDownMillis.
+ pub fn get_cooldown_millis(&self) -> i64 {
+ // Safety: Metadata::new ensures that self.metadata is a valid object.
+ unsafe { AStatsManager_PullAtomMetadata_getCoolDownMillis(self.metadata) }
+ }
+
+ /// Calls AStatsManager_PullAtomMetadata_setTimeoutMillis.
+ pub fn set_timeout_millis(&mut self, timeout_millis: i64) {
+ // Safety: Metadata::new ensures that self.metadata is a valid object.
+ unsafe { AStatsManager_PullAtomMetadata_setTimeoutMillis(self.metadata, timeout_millis) }
+ }
+
+ /// Calls AStatsManager_PullAtomMetadata_getTimeoutMillis.
+ pub fn get_timeout_millis(&self) -> i64 {
+ // Safety: Metadata::new ensures that self.metadata is a valid object.
+ unsafe { AStatsManager_PullAtomMetadata_getTimeoutMillis(self.metadata) }
+ }
+
+ /// Calls AStatsManager_PullAtomMetadata_setAdditiveFields.
+ pub fn set_additive_fields(&mut self, additive_fields: &mut Vec<i32>) {
+ // Safety: Metadata::new ensures that self.metadata is a valid object.
+ unsafe {
+ AStatsManager_PullAtomMetadata_setAdditiveFields(
+ self.metadata,
+ additive_fields.as_mut_ptr(),
+ additive_fields.len().try_into().expect("Cannot convert length to i32"),
+ )
+ }
+ }
+
+ /// Calls AStatsManager_PullAtomMetadata_getAdditiveFields.
+ pub fn get_additive_fields(&self) -> Vec<i32> {
+ // Safety: Metadata::new ensures that self.metadata is a valid object.
+ // We call getNumAdditiveFields to ensure we pass getAdditiveFields a large enough array.
+ unsafe {
+ let num_fields = AStatsManager_PullAtomMetadata_getNumAdditiveFields(self.metadata)
+ .try_into()
+ .expect("Cannot convert num additive fields to usize");
+ let mut fields = vec![0; num_fields];
+ AStatsManager_PullAtomMetadata_getAdditiveFields(self.metadata, fields.as_mut_ptr());
+ fields
+ }
+ }
+}
+
+impl Drop for Metadata {
+ fn drop(&mut self) {
+ // Safety: Metadata::new ensures that self.metadata is a valid object.
+ unsafe { AStatsManager_PullAtomMetadata_release(self.metadata) }
+ }
+}
+
+impl Default for Metadata {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+lazy_static! {
+ static ref COOKIES: Mutex<HashMap<i32, fn() -> StatsPullResult>> = Mutex::new(HashMap::new());
+}
+
+// Safety: We store our callbacks in the global so they are valid.
+unsafe extern "C" fn callback_wrapper(
+ atom_tag: i32,
+ data: *mut AStatsEventList,
+ _cookie: *mut c_void,
+) -> AStatsManager_PullAtomCallbackReturn {
+ if !data.is_null() {
+ let map = COOKIES.lock().unwrap();
+ let cb = map.get(&atom_tag);
+ match cb {
+ None => log::error!("No callback found for {}", atom_tag),
+ Some(cb) => {
+ let stats = cb();
+ let result = stats
+ .iter()
+ .map(|stat| stat.add_astats_event(&mut *data))
+ .collect::<Result<Vec<()>, StatsError>>();
+ match result {
+ Ok(_) => {
+ return AStatsManager_PULL_SUCCESS as AStatsManager_PullAtomCallbackReturn
+ }
+ _ => log::error!("Error adding astats events: {:?}", result),
+ }
+ }
+ }
+ }
+ AStatsManager_PULL_SKIP as AStatsManager_PullAtomCallbackReturn
+}
+
+/// Rust wrapper for AStatsManager_setPullAtomCallback.
+pub fn set_pull_atom_callback(
+ atom: Atoms,
+ metadata: Option<&Metadata>,
+ callback: fn() -> StatsPullResult,
+) {
+ COOKIES.lock().unwrap().insert(atom as i32, callback);
+ let metadata_raw = match metadata {
+ Some(m) => m.metadata,
+ None => std::ptr::null_mut(),
+ };
+ // Safety: We pass a valid function as the callback.
+ unsafe {
+ AStatsManager_setPullAtomCallback(
+ atom as i32,
+ metadata_raw,
+ Some(callback_wrapper),
+ std::ptr::null_mut(),
+ );
+ }
+}
+
+/// Rust wrapper for AStatsManager_clearPullAtomCallback.
+pub fn clear_pull_atom_callback(atom: Atoms) {
+ COOKIES.lock().unwrap().remove(&(atom as i32));
+ // Safety: No memory allocations.
+ unsafe { AStatsManager_clearPullAtomCallback(atom as i32) }
+}
diff --git a/libstats/pull_rust/statslog.h b/libstats/pull_rust/statslog.h
new file mode 100644
index 0000000..983fb7b
--- /dev/null
+++ b/libstats/pull_rust/statslog.h
@@ -0,0 +1,3 @@
+#pragma once
+
+#include "stats_pull_atom_callback.h"
diff --git a/libutils/RefBase.cpp b/libutils/RefBase.cpp
index 8e45226..b57e287 100644
--- a/libutils/RefBase.cpp
+++ b/libutils/RefBase.cpp
@@ -443,6 +443,20 @@
refs->mBase->onFirstRef();
}
+void RefBase::incStrongRequireStrong(const void* id) const {
+ weakref_impl* const refs = mRefs;
+ refs->incWeak(id);
+
+ refs->addStrongRef(id);
+ const int32_t c = refs->mStrong.fetch_add(1, std::memory_order_relaxed);
+
+ LOG_ALWAYS_FATAL_IF(c <= 0 || c == INITIAL_STRONG_VALUE,
+ "incStrongRequireStrong() called on %p which isn't already owned", refs);
+#if PRINT_REFS
+ ALOGD("incStrong (requiring strong) of %p from %p: cnt=%d\n", this, id, c);
+#endif
+}
+
void RefBase::decStrong(const void* id) const
{
weakref_impl* const refs = mRefs;
@@ -521,6 +535,14 @@
ALOG_ASSERT(c >= 0, "incWeak called on %p after last weak ref", this);
}
+void RefBase::weakref_type::incWeakRequireWeak(const void* id)
+{
+ weakref_impl* const impl = static_cast<weakref_impl*>(this);
+ impl->addWeakRef(id);
+ const int32_t c __unused = impl->mWeak.fetch_add(1,
+ std::memory_order_relaxed);
+ LOG_ALWAYS_FATAL_IF(c <= 0, "incWeakRequireWeak called on %p which has no weak refs", this);
+}
void RefBase::weakref_type::decWeak(const void* id)
{
diff --git a/libutils/RefBase_test.cpp b/libutils/RefBase_test.cpp
index c9b4894..dcc469e 100644
--- a/libutils/RefBase_test.cpp
+++ b/libutils/RefBase_test.cpp
@@ -241,6 +241,30 @@
ASSERT_FALSE(wp1 != wp2);
}
+TEST(RefBase, AssertWeakRefExistsSuccess) {
+ // uses some other refcounting method, or none at all
+ bool isDeleted;
+ sp<Foo> foo = sp<Foo>::make(&isDeleted);
+ wp<Foo> weakFoo = foo;
+
+ EXPECT_EQ(weakFoo, wp<Foo>::fromExisting(foo.get()));
+
+ EXPECT_FALSE(isDeleted);
+ foo = nullptr;
+ EXPECT_TRUE(isDeleted);
+}
+
+TEST(RefBase, AssertWeakRefExistsDeath) {
+ // uses some other refcounting method, or none at all
+ bool isDeleted;
+ Foo* foo = new Foo(&isDeleted);
+
+ // can only get a valid wp<> object when you construct it from an sp<>
+ EXPECT_DEATH(wp<Foo>::fromExisting(foo), "");
+
+ delete foo;
+}
+
// Set up a situation in which we race with visit2AndRremove() to delete
// 2 strong references. Bar destructor checks that there are no early
// deletions and prior updates are visible to destructor.
diff --git a/libutils/StrongPointer_test.cpp b/libutils/StrongPointer_test.cpp
index d37c1de..29f6bd4 100644
--- a/libutils/StrongPointer_test.cpp
+++ b/libutils/StrongPointer_test.cpp
@@ -21,8 +21,8 @@
using namespace android;
-class SPFoo : public LightRefBase<SPFoo> {
-public:
+class SPFoo : virtual public RefBase {
+ public:
explicit SPFoo(bool* deleted_check) : mDeleted(deleted_check) {
*mDeleted = false;
}
@@ -69,3 +69,14 @@
ASSERT_NE(nullptr, foo);
ASSERT_NE(foo, nullptr);
}
+
+TEST(StrongPointer, AssertStrongRefExists) {
+ // uses some other refcounting method, or none at all
+ bool isDeleted;
+ SPFoo* foo = new SPFoo(&isDeleted);
+
+ // can only get a valid sp<> object when you construct it as an sp<> object
+ EXPECT_DEATH(sp<SPFoo>::fromExisting(foo), "");
+
+ delete foo;
+}
diff --git a/libutils/include/utils/RefBase.h b/libutils/include/utils/RefBase.h
index e7acd17..5a5bd56 100644
--- a/libutils/include/utils/RefBase.h
+++ b/libutils/include/utils/RefBase.h
@@ -140,7 +140,9 @@
// count, and accidentally passed to f(sp<T>), a strong pointer to the object
// will be temporarily constructed and destroyed, prematurely deallocating the
// object, and resulting in heap corruption. None of this would be easily
-// visible in the source.
+// visible in the source. See below on
+// ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION for a compile time
+// option which helps avoid this case.
// Extra Features:
@@ -167,6 +169,42 @@
// to THE SAME sp<> or wp<>. In effect, their thread-safety properties are
// exactly like those of T*, NOT atomic<T*>.
+// Safety option: ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION
+//
+// This flag makes the semantics for using a RefBase object with wp<> and sp<>
+// much stricter by disabling implicit conversion from raw pointers to these
+// objects. In order to use this, apply this flag in Android.bp like so:
+//
+// cflags: [
+// "-DANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION",
+// ],
+//
+// REGARDLESS of whether this flag is on, best usage of sp<> is shown below. If
+// this flag is on, no other usage is possible (directly calling RefBase methods
+// is possible, but seeing code using 'incStrong' instead of 'sp<>', for
+// instance, should already set off big alarm bells. With carefully constructed
+// data structures, it should NEVER be necessary to directly use RefBase
+// methods). Proper RefBase usage:
+//
+// class Foo : virtual public RefBase { ... };
+//
+// // always construct an sp object with sp::make
+// sp<Foo> myFoo = sp<Foo>::make(/*args*/);
+//
+// // if you need a weak pointer, it must be constructed from a strong
+// // pointer
+// wp<Foo> weakFoo = myFoo; // NOT myFoo.get()
+//
+// // If you are inside of a method of Foo and need access to a strong
+// // explicitly call this function. This documents your intention to code
+// // readers, and it will give a runtime error for what otherwise would
+// // be potential double ownership
+// .... Foo::someMethod(...) {
+// // asserts if there is a memory issue
+// sp<Foo> thiz = sp<Foo>::fromExisting(this);
+// }
+//
+
#ifndef ANDROID_REF_BASE_H
#define ANDROID_REF_BASE_H
@@ -244,6 +282,7 @@
{
public:
void incStrong(const void* id) const;
+ void incStrongRequireStrong(const void* id) const;
void decStrong(const void* id) const;
void forceIncStrong(const void* id) const;
@@ -257,6 +296,7 @@
RefBase* refBase() const;
void incWeak(const void* id);
+ void incWeakRequireWeak(const void* id);
void decWeak(const void* id);
// acquires a strong reference if there is already one.
@@ -365,10 +405,24 @@
inline wp() : m_ptr(nullptr), m_refs(nullptr) { }
+ // if nullptr, returns nullptr
+ //
+ // if a weak pointer is already available, this will retrieve it,
+ // otherwise, this will abort
+ static inline wp<T> fromExisting(T* other);
+
+ // for more information about this flag, see above
+#if defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
+ wp(std::nullptr_t) : wp() {}
+#else
wp(T* other); // NOLINT(implicit)
+#endif
wp(const wp<T>& other);
explicit wp(const sp<T>& other);
+
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename U> wp(U* other); // NOLINT(implicit)
+#endif
template<typename U> wp(const sp<U>& other); // NOLINT(implicit)
template<typename U> wp(const wp<U>& other); // NOLINT(implicit)
@@ -376,11 +430,15 @@
// Assignment
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
wp& operator = (T* other);
+#endif
wp& operator = (const wp<T>& other);
wp& operator = (const sp<T>& other);
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename U> wp& operator = (U* other);
+#endif
template<typename U> wp& operator = (const wp<U>& other);
template<typename U> wp& operator = (const sp<U>& other);
@@ -481,12 +539,26 @@
// Note that the above comparison operations go out of their way to provide an ordering consistent
// with ordinary pointer comparison; otherwise they could ignore m_ptr, and just compare m_refs.
+template <typename T>
+wp<T> wp<T>::fromExisting(T* other) {
+ if (!other) return nullptr;
+
+ auto refs = other->getWeakRefs();
+ refs->incWeakRequireWeak(other);
+
+ wp<T> ret;
+ ret.m_refs = refs;
+ return ret;
+}
+
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T>
wp<T>::wp(T* other)
: m_ptr(other)
{
m_refs = other ? m_refs = other->createWeak(this) : nullptr;
}
+#endif
template<typename T>
wp<T>::wp(const wp<T>& other)
@@ -502,12 +574,14 @@
m_refs = m_ptr ? m_ptr->createWeak(this) : nullptr;
}
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T> template<typename U>
wp<T>::wp(U* other)
: m_ptr(other)
{
m_refs = other ? other->createWeak(this) : nullptr;
}
+#endif
template<typename T> template<typename U>
wp<T>::wp(const wp<U>& other)
@@ -534,6 +608,7 @@
if (m_ptr) m_refs->decWeak(this);
}
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T>
wp<T>& wp<T>::operator = (T* other)
{
@@ -544,6 +619,7 @@
m_refs = newRefs;
return *this;
}
+#endif
template<typename T>
wp<T>& wp<T>::operator = (const wp<T>& other)
@@ -569,6 +645,7 @@
return *this;
}
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T> template<typename U>
wp<T>& wp<T>::operator = (U* other)
{
@@ -579,6 +656,7 @@
m_refs = newRefs;
return *this;
}
+#endif
template<typename T> template<typename U>
wp<T>& wp<T>::operator = (const wp<U>& other)
diff --git a/libutils/include/utils/StrongPointer.h b/libutils/include/utils/StrongPointer.h
index 11128f2..1f07052 100644
--- a/libutils/include/utils/StrongPointer.h
+++ b/libutils/include/utils/StrongPointer.h
@@ -32,16 +32,43 @@
public:
inline sp() : m_ptr(nullptr) { }
- // TODO: switch everyone to using this over new, and make RefBase operator
- // new private to that class so that we can avoid RefBase being used with
- // other memory management mechanisms.
+ // The old way of using sp<> was like this. This is bad because it relies
+ // on implicit conversion to sp<>, which we would like to remove (if an
+ // object is being managed some other way, this is double-ownership). We
+ // want to move away from this:
+ //
+ // sp<Foo> foo = new Foo(...); // DO NOT DO THIS
+ //
+ // Instead, prefer to do this:
+ //
+ // sp<Foo> foo = sp<Foo>::make(...); // DO THIS
+ //
+ // Sometimes, in order to use this, when a constructor is marked as private,
+ // you may need to add this to your class:
+ //
+ // friend class sp<Foo>;
template <typename... Args>
static inline sp<T> make(Args&&... args);
+ // if nullptr, returns nullptr
+ //
+ // if a strong pointer is already available, this will retrieve it,
+ // otherwise, this will abort
+ static inline sp<T> fromExisting(T* other);
+
+ // for more information about this macro and correct RefBase usage, see
+ // the comment at the top of utils/RefBase.h
+#if defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
+ sp(std::nullptr_t) : sp() {}
+#else
sp(T* other); // NOLINT(implicit)
+#endif
sp(const sp<T>& other);
sp(sp<T>&& other) noexcept;
+
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename U> sp(U* other); // NOLINT(implicit)
+#endif
template<typename U> sp(const sp<U>& other); // NOLINT(implicit)
template<typename U> sp(sp<U>&& other); // NOLINT(implicit)
@@ -49,13 +76,17 @@
// Assignment
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
sp& operator = (T* other);
+#endif
sp& operator = (const sp<T>& other);
sp& operator=(sp<T>&& other) noexcept;
template<typename U> sp& operator = (const sp<U>& other);
template<typename U> sp& operator = (sp<U>&& other);
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename U> sp& operator = (U* other);
+#endif
//! Special optimization for use by ProcessState (and nobody else).
void force_set(T* other);
@@ -189,6 +220,19 @@
return result;
}
+template <typename T>
+sp<T> sp<T>::fromExisting(T* other) {
+ if (other) {
+ check_not_on_stack(other);
+ other->incStrongRequireStrong(other);
+ sp<T> result;
+ result.m_ptr = other;
+ return result;
+ }
+ return nullptr;
+}
+
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T>
sp<T>::sp(T* other)
: m_ptr(other) {
@@ -197,6 +241,7 @@
other->incStrong(this);
}
}
+#endif
template<typename T>
sp<T>::sp(const sp<T>& other)
@@ -210,6 +255,7 @@
other.m_ptr = nullptr;
}
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T> template<typename U>
sp<T>::sp(U* other)
: m_ptr(other) {
@@ -218,6 +264,7 @@
(static_cast<T*>(other))->incStrong(this);
}
}
+#endif
template<typename T> template<typename U>
sp<T>::sp(const sp<U>& other)
@@ -260,6 +307,7 @@
return *this;
}
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T>
sp<T>& sp<T>::operator =(T* other) {
T* oldPtr(*const_cast<T* volatile*>(&m_ptr));
@@ -272,6 +320,7 @@
m_ptr = other;
return *this;
}
+#endif
template<typename T> template<typename U>
sp<T>& sp<T>::operator =(const sp<U>& other) {
@@ -294,6 +343,7 @@
return *this;
}
+#if !defined(ANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION)
template<typename T> template<typename U>
sp<T>& sp<T>::operator =(U* other) {
T* oldPtr(*const_cast<T* volatile*>(&m_ptr));
@@ -303,6 +353,7 @@
m_ptr = other;
return *this;
}
+#endif
template<typename T>
void sp<T>::force_set(T* other) {