From 55107c195d884ba0a6558eccdf0ffff60259c49e Mon Sep 17 00:00:00 2001
From: Dylan Katz
Date: Fri, 11 Sep 2020 10:47:00 -0700
Subject: [PATCH] Resolved UAF issue in RefBase fuzzer

Restructures this fuzzer to be far more robust and less brittle.

Fix: 163727995
Fix: 163449137
Test: libutils_fuzz_refbase clusterfuzz-testcase-minimized-libutils_fuzz_refbase-5674315436261376
Test: libutils_fuzz_refbase clusterfuzz-testcase-minimized-libutils_fuzz_refbase-5731662044069888
Test: libutils_fuzz_refbase clusterfuzz-testcase-minimized-libutils_fuzz_refbase-5081777218256896
Signed-off-by: Dylan Katz
Change-Id: I239298dc2895a06af5a126e9ca2ae452579e5cc0
---
 libutils/RefBase_fuzz.cpp | 194 +++++++++++++++++++++++++++++---------
 1 file changed, 148 insertions(+), 46 deletions(-)
 mode change 100755 => 100644 libutils/RefBase_fuzz.cpp

diff --git a/libutils/RefBase_fuzz.cpp b/libutils/RefBase_fuzz.cpp
old mode 100755
new mode 100644
index 2a92531ee..69288b354
--- a/libutils/RefBase_fuzz.cpp
+++ b/libutils/RefBase_fuzz.cpp
@@ -14,66 +14,156 @@
  * limitations under the License.
  */

-#include <functional>
+#define LOG_TAG "RefBaseFuzz"
+
 #include <thread>
 #include "fuzzer/FuzzedDataProvider.h"
+#include "utils/Log.h"
+#include "utils/RWLock.h"
 #include "utils/RefBase.h"
 #include "utils/StrongPointer.h"
+
 using android::RefBase;
+using android::RWLock;
 using android::sp;
 using android::wp;

-static constexpr int REFBASE_INITIAL_STRONG_VALUE = (1 << 28);
-static constexpr int REFBASE_MAX_COUNT = 0xfffff;
-
-static constexpr int MAX_OPERATIONS = 100;
-static constexpr int MAX_THREADS = 10;
-
-bool canDecrementStrong(RefBase* ref) {
-    // There's an assert around decrementing the strong count too much that causes an artificial
-    // crash This is just running BAD_STRONG from RefBase
-    const int32_t count = ref->getStrongCount() - 1;
-    return !(count == 0 || ((count) & (~(REFBASE_MAX_COUNT | REFBASE_INITIAL_STRONG_VALUE))) != 0);
-}
-bool canDecrementWeak(RefBase* ref) {
-    const int32_t count = ref->getWeakRefs()->getWeakCount() - 1;
-    return !((count) == 0 || ((count) & (~REFBASE_MAX_COUNT)) != 0);
-}
-
+static constexpr int kMaxOperations = 100;
+static constexpr int kMaxThreads = 10;
 struct RefBaseSubclass : public RefBase {
-    RefBaseSubclass() {}
-    virtual ~RefBaseSubclass() {}
+  public:
+    RefBaseSubclass(bool* deletedCheck, RWLock& deletedMtx)
+        : mDeleted(deletedCheck), mRwLock(deletedMtx) {
+        RWLock::AutoWLock lock(mRwLock);
+        *mDeleted = false;
+        extendObjectLifetime(OBJECT_LIFETIME_WEAK);
+    }
+
+    virtual ~RefBaseSubclass() {
+        RWLock::AutoWLock lock(mRwLock);
+        *mDeleted = true;
+    }
+
+  private:
+    bool* mDeleted;
+    android::RWLock& mRwLock;
 };

-std::vector<std::function<void(RefBaseSubclass*)>> operations = {
-        [](RefBaseSubclass* ref) -> void { ref->getStrongCount(); },
-        [](RefBaseSubclass* ref) -> void { ref->printRefs(); },
-        [](RefBaseSubclass* ref) -> void { ref->getWeakRefs()->printRefs(); },
-        [](RefBaseSubclass* ref) -> void { ref->getWeakRefs()->getWeakCount(); },
-        [](RefBaseSubclass* ref) -> void { ref->getWeakRefs()->refBase(); },
-        [](RefBaseSubclass* ref) -> void { ref->incStrong(nullptr); },
-        [](RefBaseSubclass* ref) -> void {
-            if (canDecrementStrong(ref)) {
+// A thread-specific state object for ref
+struct RefThreadState {
+    size_t strongCount = 0;
+    size_t weakCount = 0;
+};
+
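+// Globals shared by every fuzzer thread: the single RefBaseSubclass instance under
+// test, its cached weakref_type, and bookkeeping for whether it has been modified
+// or deleted.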
+RWLock gRefDeletedLock;
+bool gRefDeleted = false;
+bool gHasModifiedRefs = false;
+RefBaseSubclass* ref;
+RefBase::weakref_type* weakRefs;
+
+// These operations don't need locks, as they explicitly check the per-thread counts
+// before running; they also have the potential to write to gRefDeleted, so they
+// must not be locked.
+const std::vector<std::function<void(RefThreadState*)>> kUnlockedOperations = {
+        [](RefThreadState* refState) -> void {
+            if (refState->strongCount > 0) {
                 ref->decStrong(nullptr);
+                gHasModifiedRefs = true;
+                refState->strongCount--;
             }
         },
-        [](RefBaseSubclass* ref) -> void { ref->forceIncStrong(nullptr); },
-        [](RefBaseSubclass* ref) -> void { ref->createWeak(nullptr); },
-        [](RefBaseSubclass* ref) -> void { ref->getWeakRefs()->attemptIncStrong(nullptr); },
-        [](RefBaseSubclass* ref) -> void { ref->getWeakRefs()->attemptIncWeak(nullptr); },
-        [](RefBaseSubclass* ref) -> void {
-            if (canDecrementWeak(ref)) {
-                ref->getWeakRefs()->decWeak(nullptr);
+        [](RefThreadState* refState) -> void {
+            if (refState->weakCount > 0) {
+                weakRefs->decWeak(nullptr);
+                gHasModifiedRefs = true;
+                refState->weakCount--;
             }
         },
-        [](RefBaseSubclass* ref) -> void { ref->getWeakRefs()->incWeak(nullptr); },
-        [](RefBaseSubclass* ref) -> void { ref->getWeakRefs()->printRefs(); },
 };

-void loop(RefBaseSubclass* loopRef, const std::vector<uint8_t>& fuzzOps) {
+const std::vector<std::function<void(RefThreadState*)>> kMaybeLockedOperations = {
+        // Read-only operations
+        [](RefThreadState*) -> void { ref->getStrongCount(); },
+        [](RefThreadState*) -> void { weakRefs->getWeakCount(); },
+        [](RefThreadState*) -> void { ref->printRefs(); },
+
+        // Read/write operations
+        [](RefThreadState* refState) -> void {
+            ref->incStrong(nullptr);
+            gHasModifiedRefs = true;
+            refState->strongCount++;
+        },
+        [](RefThreadState* refState) -> void {
+            ref->forceIncStrong(nullptr);
+            gHasModifiedRefs = true;
+            refState->strongCount++;
+        },
+        [](RefThreadState* refState) -> void {
+            ref->createWeak(nullptr);
+            gHasModifiedRefs = true;
+            refState->weakCount++;
+        },
+        [](RefThreadState* refState) -> void {
+            // attemptIncStrong() increments weak internally, then attempts to
+            // promote it to strong. If that fails, it decrements weak again;
+            // if it succeeds, the weak reference is converted to a strong one.
+            // Either way there is no net change to the weak count.
+            if (weakRefs->attemptIncStrong(nullptr)) {
+                refState->strongCount++;
+                gHasModifiedRefs = true;
+            }
+        },
+        [](RefThreadState* refState) -> void {
+            if (weakRefs->attemptIncWeak(nullptr)) {
+                refState->weakCount++;
+                gHasModifiedRefs = true;
+            }
+        },
+        [](RefThreadState* refState) -> void {
+            weakRefs->incWeak(nullptr);
+            gHasModifiedRefs = true;
+            refState->weakCount++;
+        },
+};
+
+void loop(const std::vector<uint8_t>& fuzzOps) {
+    RefThreadState state;
+    uint8_t lockedOpSize = kMaybeLockedOperations.size();
+    uint8_t totalOperationTypes = lockedOpSize + kUnlockedOperations.size();
     for (auto op : fuzzOps) {
-        operations[op % operations.size()](loopRef);
+        auto opVal = op % totalOperationTypes;
+        if (opVal >= lockedOpSize) {
+            kUnlockedOperations[opVal % lockedOpSize](&state);
+        } else {
+            // We only need to lock if this thread holds no strong or weak references
+            bool shouldLock = state.strongCount == 0 && state.weakCount == 0;
+            if (shouldLock) {
+                gRefDeletedLock.readLock();
+                // If ref has deleted itself, we can no longer fuzz on this thread.
+                if (gRefDeleted) {
+                    // Unlock since we're exiting the loop here.
+                    gRefDeletedLock.unlock();
+                    return;
+                }
+            }
+            // Execute the locked operation
+            kMaybeLockedOperations[opVal](&state);
+            // Unlock if we locked.
+            if (shouldLock) {
+                gRefDeletedLock.unlock();
+            }
+        }
     }
+
+    // Instead of explicitly freeing this, we're going to remove our weak and
+    // strong references.
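+    // Because ref uses OBJECT_LIFETIME_WEAK, it is only destroyed (and gRefDeleted
+    // set) once the last weak reference anywhere has been released.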
+    for (; state.weakCount > 0; state.weakCount--) {
+        weakRefs->decWeak(nullptr);
+    }
+
+    // Clean up any strong references
+    for (; state.strongCount > 0; state.strongCount--) {
+        ref->decStrong(nullptr);
     }
 }

@@ -81,23 +171,35 @@ void spawnThreads(FuzzedDataProvider* dataProvider) {
     std::vector<std::thread> threads = std::vector<std::thread>();

     // Get the number of threads to generate
-    uint8_t count = dataProvider->ConsumeIntegralInRange<uint8_t>(1, MAX_THREADS);
-
+    uint8_t count = dataProvider->ConsumeIntegralInRange<uint8_t>(1, kMaxThreads);
     // Generate threads
     for (uint8_t i = 0; i < count; i++) {
-        RefBaseSubclass* threadRef = new RefBaseSubclass();
-        uint8_t opCount = dataProvider->ConsumeIntegralInRange<uint8_t>(1, MAX_OPERATIONS);
+        uint8_t opCount = dataProvider->ConsumeIntegralInRange<uint8_t>(1, kMaxOperations);
         std::vector<uint8_t> threadOperations = dataProvider->ConsumeBytes<uint8_t>(opCount);
-        std::thread tmp = std::thread(loop, threadRef, threadOperations);
-        threads.push_back(move(tmp));
+        std::thread tmpThread = std::thread(loop, threadOperations);
+        threads.push_back(move(tmpThread));
     }

     for (auto& th : threads) {
         th.join();
     }
 }
+
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    gHasModifiedRefs = false;
+    ref = new RefBaseSubclass(&gRefDeleted, gRefDeletedLock);
+    weakRefs = ref->getWeakRefs();
+    // Since we are modifying flags, (flags & OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_WEAK
+    // is true. The destructor for RefBase should clean up weakrefs because of this.
     FuzzedDataProvider dataProvider(data, size);
     spawnThreads(&dataProvider);
+    LOG_ALWAYS_FATAL_IF(!gHasModifiedRefs && gRefDeleted, "ref(%p) was prematurely deleted!", ref);
+    // We need to explicitly delete this object
+    // if no refs have been added or released.
+    if (!gHasModifiedRefs && !gRefDeleted) {
+        delete ref;
+    }
+    LOG_ALWAYS_FATAL_IF(gHasModifiedRefs && !gRefDeleted,
+                        "ref(%p) should be deleted, is it leaking?", ref);
     return 0;
 }
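
For reference, the synchronization scheme the patch relies on can be reduced to the following standalone sketch. It is not libutils code: std::shared_mutex stands in for android::RWLock, Obj for RefBaseSubclass, reader() for the "maybe locked" operations, and releaser() for the unlocked decStrong()/decWeak() path; all of these names are illustrative only.

#include <mutex>
#include <shared_mutex>
#include <thread>
#include <vector>

std::shared_mutex gDeletedLock;
bool gDeleted = false;

struct Obj {
    // Mirrors ~RefBaseSubclass(): flip the shared flag under the write lock so
    // no reader still holding the read lock can observe a freed object.
    ~Obj() {
        std::unique_lock<std::shared_mutex> lock(gDeletedLock);
        gDeleted = true;
    }
    int value = 42;
};

Obj* gObj = nullptr;

// Mirrors the "maybe locked" operations: take the read lock and re-check the
// deletion flag before every access.
void reader() {
    for (int i = 0; i < 10000; i++) {
        std::shared_lock<std::shared_mutex> lock(gDeletedLock);
        if (gDeleted) {
            return;
        }
        (void)gObj->value;
    }
}

// Mirrors the unlocked decStrong()/decWeak() path: it may destroy the object,
// so it must not hold the lock (the destructor acquires the write lock itself).
void releaser() {
    delete gObj;
}

int main() {
    gObj = new Obj();
    std::vector<std::thread> threads;
    for (int i = 0; i < 4; i++) {
        threads.emplace_back(reader);
    }
    threads.emplace_back(releaser);
    for (auto& th : threads) {
        th.join();
    }
    return 0;
}

The key design point is that any operation that might free the object (releaser() here, the unlocked decrement operations in the fuzzer) must run without the read lock held; otherwise the destructor's attempt to take the write lock would deadlock against the very thread that triggered the deletion.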