/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
2020-09-11 19:47:00 +02:00
|
|
|
#define LOG_TAG "RefBaseFuzz"

#include <cstdint>
#include <functional>
#include <thread>
#include <utility>
#include <vector>

#include "fuzzer/FuzzedDataProvider.h"
#include "utils/Log.h"
#include "utils/RWLock.h"
#include "utils/RefBase.h"
#include "utils/StrongPointer.h"

using android::RefBase;
using android::RWLock;
using android::sp;
using android::wp;
|
2020-09-11 19:47:00 +02:00
|
|
|
// Upper bound on the number of fuzzed ref-count operations a single thread runs.
static constexpr int kMaxOperations = 100;
// Upper bound on the number of fuzzing threads spawned for one fuzzer input.
static constexpr int kMaxThreads = 10;
|
|
|
struct RefBaseSubclass : public RefBase {
|
|
|
|
public:
|
|
|
|
RefBaseSubclass(bool* deletedCheck, RWLock& deletedMtx)
|
|
|
|
: mDeleted(deletedCheck), mRwLock(deletedMtx) {
|
|
|
|
RWLock::AutoWLock lock(mRwLock);
|
|
|
|
*mDeleted = false;
|
|
|
|
extendObjectLifetime(OBJECT_LIFETIME_WEAK);
|
|
|
|
}
|
2020-07-02 20:51:44 +02:00
|
|
|
|
2020-09-11 19:47:00 +02:00
|
|
|
virtual ~RefBaseSubclass() {
|
|
|
|
RWLock::AutoWLock lock(mRwLock);
|
|
|
|
*mDeleted = true;
|
|
|
|
}
|
2020-07-02 20:51:44 +02:00
|
|
|
|
2020-09-11 19:47:00 +02:00
|
|
|
private:
|
|
|
|
bool* mDeleted;
|
|
|
|
android::RWLock& mRwLock;
|
|
|
|
};
|
2020-07-02 20:51:44 +02:00
|
|
|
|
2020-09-11 19:47:00 +02:00
|
|
|
// A thread-specific state object for ref: tracks how many strong and weak
// references this particular thread currently holds, so the thread can later
// release exactly what it acquired.
struct RefThreadState {
    // Strong references taken by this thread and not yet released.
    size_t strongCount = 0;
    // Weak references taken by this thread and not yet released.
    size_t weakCount = 0;
};
|
|
|
|
|
2020-09-11 19:47:00 +02:00
|
|
|
// Guards gRefDeleted: written (under write lock) by RefBaseSubclass's
// constructor/destructor, read by fuzz threads that hold no references.
RWLock gRefDeletedLock;
// Set to true by ~RefBaseSubclass when the shared object deletes itself.
bool gRefDeleted = false;
// True once any thread has modified the object's reference counts.
bool gHasModifiedRefs = false;
// The shared object under test. Raw pointer on purpose: RefBase objects manage
// their own lifetime through the fuzzed ref-count operations.
RefBaseSubclass* ref;
// Cached weak-reference record (weakref_type) of `ref`.
RefBase::weakref_type* weakRefs;
|
|
|
|
// These operations don't need locks as they explicitly check per-thread counts before running
|
|
|
|
// they also have the potential to write to gRefDeleted, so must not be locked.
|
|
|
|
const std::vector<std::function<void(RefThreadState*)>> kUnlockedOperations = {
|
|
|
|
[](RefThreadState* refState) -> void {
|
|
|
|
if (refState->strongCount > 0) {
|
2020-07-02 20:51:44 +02:00
|
|
|
ref->decStrong(nullptr);
|
2020-09-11 19:47:00 +02:00
|
|
|
gHasModifiedRefs = true;
|
|
|
|
refState->strongCount--;
|
|
|
|
}
|
|
|
|
},
|
|
|
|
[](RefThreadState* refState) -> void {
|
|
|
|
if (refState->weakCount > 0) {
|
|
|
|
weakRefs->decWeak(nullptr);
|
|
|
|
gHasModifiedRefs = true;
|
|
|
|
refState->weakCount--;
|
|
|
|
}
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
// Operations that may run under gRefDeletedLock (the caller locks only when
// this thread holds no references of its own — see loop()).
const std::vector<std::function<void(RefThreadState*)>> kMaybeLockedOperations = {
        // Read-only operations.
        [](RefThreadState*) -> void { ref->getStrongCount(); },
        [](RefThreadState*) -> void { weakRefs->getWeakCount(); },
        [](RefThreadState*) -> void { ref->printRefs(); },

        // Read/write operations.
        [](RefThreadState* refState) -> void {
            ref->incStrong(nullptr);
            gHasModifiedRefs = true;
            refState->strongCount++;
        },
        [](RefThreadState* refState) -> void {
            ref->forceIncStrong(nullptr);
            gHasModifiedRefs = true;
            refState->strongCount++;
        },
        [](RefThreadState* refState) -> void {
            ref->createWeak(nullptr);
            gHasModifiedRefs = true;
            refState->weakCount++;
        },
        [](RefThreadState* refState) -> void {
            // attemptIncStrong increments the weak count internally, then
            // attempts to promote it to a strong reference. On failure it
            // decrements the weak count again; on success the weak is
            // converted into a strong. Either way, the net weak-reference
            // change is zero.
            if (!weakRefs->attemptIncStrong(nullptr)) {
                return;
            }
            refState->strongCount++;
            gHasModifiedRefs = true;
        },
        [](RefThreadState* refState) -> void {
            if (!weakRefs->attemptIncWeak(nullptr)) {
                return;
            }
            refState->weakCount++;
            gHasModifiedRefs = true;
        },
        [](RefThreadState* refState) -> void {
            weakRefs->incWeak(nullptr);
            gHasModifiedRefs = true;
            refState->weakCount++;
        },
};
|
|
|
|
|
2020-09-11 19:47:00 +02:00
|
|
|
void loop(const std::vector<uint8_t>& fuzzOps) {
|
|
|
|
RefThreadState state;
|
|
|
|
uint8_t lockedOpSize = kMaybeLockedOperations.size();
|
|
|
|
uint8_t totalOperationTypes = lockedOpSize + kUnlockedOperations.size();
|
2020-07-02 20:51:44 +02:00
|
|
|
for (auto op : fuzzOps) {
|
2020-09-11 19:47:00 +02:00
|
|
|
auto opVal = op % totalOperationTypes;
|
|
|
|
if (opVal >= lockedOpSize) {
|
|
|
|
kUnlockedOperations[opVal % lockedOpSize](&state);
|
|
|
|
} else {
|
|
|
|
// We only need to lock if we have no strong or weak count
|
|
|
|
bool shouldLock = state.strongCount == 0 && state.weakCount == 0;
|
|
|
|
if (shouldLock) {
|
|
|
|
gRefDeletedLock.readLock();
|
|
|
|
// If ref has deleted itself, we can no longer fuzz on this thread.
|
|
|
|
if (gRefDeleted) {
|
|
|
|
// Unlock since we're exiting the loop here.
|
|
|
|
gRefDeletedLock.unlock();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Execute the locked operation
|
|
|
|
kMaybeLockedOperations[opVal](&state);
|
|
|
|
// Unlock if we locked.
|
|
|
|
if (shouldLock) {
|
|
|
|
gRefDeletedLock.unlock();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Instead of explicitly freeing this, we're going to remove our weak and
|
|
|
|
// strong references.
|
|
|
|
for (; state.weakCount > 0; state.weakCount--) {
|
|
|
|
weakRefs->decWeak(nullptr);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Clean up any strong references
|
|
|
|
for (; state.strongCount > 0; state.strongCount--) {
|
|
|
|
ref->decStrong(nullptr);
|
2020-07-02 20:51:44 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Spawns between 1 and kMaxThreads worker threads, each running loop() over a
// fuzzed byte sequence of 1..kMaxOperations operations, and joins them all
// before returning.
void spawnThreads(FuzzedDataProvider* dataProvider) {
    std::vector<std::thread> threads;

    // Get the number of threads to generate
    const uint8_t count = dataProvider->ConsumeIntegralInRange<uint8_t>(1, kMaxThreads);
    threads.reserve(count);

    // Generate threads
    for (uint8_t i = 0; i < count; i++) {
        const uint8_t opCount = dataProvider->ConsumeIntegralInRange<uint8_t>(1, kMaxOperations);
        std::vector<uint8_t> threadOperations = dataProvider->ConsumeBytes<uint8_t>(opCount);
        // Qualify std::move explicitly (the original's bare `move` resolved
        // only through ADL) and construct the thread in place, handing it the
        // operation buffer without an extra copy.
        threads.emplace_back(loop, std::move(threadOperations));
    }

    for (auto& th : threads) {
        th.join();
    }
}
|
2020-09-11 19:47:00 +02:00
|
|
|
|
2020-07-02 20:51:44 +02:00
|
|
|
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
|
2020-09-11 19:47:00 +02:00
|
|
|
gHasModifiedRefs = false;
|
|
|
|
ref = new RefBaseSubclass(&gRefDeleted, gRefDeletedLock);
|
|
|
|
weakRefs = ref->getWeakRefs();
|
|
|
|
// Since we are modifying flags, (flags & OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_WEAK
|
|
|
|
// is true. The destructor for RefBase should clean up weakrefs because of this.
|
2020-07-02 20:51:44 +02:00
|
|
|
FuzzedDataProvider dataProvider(data, size);
|
|
|
|
spawnThreads(&dataProvider);
|
2020-09-11 19:47:00 +02:00
|
|
|
LOG_ALWAYS_FATAL_IF(!gHasModifiedRefs && gRefDeleted, "ref(%p) was prematurely deleted!", ref);
|
|
|
|
// We need to explicitly delete this object
|
|
|
|
// if no refs have been added or deleted.
|
|
|
|
if (!gHasModifiedRefs && !gRefDeleted) {
|
|
|
|
delete ref;
|
|
|
|
}
|
|
|
|
LOG_ALWAYS_FATAL_IF(gHasModifiedRefs && !gRefDeleted,
|
|
|
|
"ref(%p) should be deleted, is it leaking?", ref);
|
2020-07-02 20:51:44 +02:00
|
|
|
return 0;
|
|
|
|
}
|