Merge changes from topic "b218588089" into udc-dev am: f08845750b

Original change: https://googleplex-android-review.googlesource.com/c/platform/hardware/interfaces/+/21585493

Change-Id: I453f1a3c817b38d547383454ddf9990ad10e0052
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
This commit is contained in:
TreeHugger Robot 2023-03-13 15:48:44 +00:00 committed by Automerger Merge Worker
commit e55f7b7e3e
21 changed files with 4307 additions and 206 deletions

View file

@@ -24,13 +24,56 @@ package {
cc_binary { cc_binary {
name: "android.hardware.automotive.evs-aidl-default-service", name: "android.hardware.automotive.evs-aidl-default-service",
defaults: ["EvsHalDefaults"], defaults: ["EvsHalDefaults"],
local_include_dirs: ["include"], vintf_fragments: ["manifest_evs-default-service.xml"],
vintf_fragments: ["evs-default-service.xml"],
init_rc: ["evs-default-service.rc"], init_rc: ["evs-default-service.rc"],
vendor: true, vendor: true,
relative_install_path: "hw", relative_install_path: "hw",
srcs: ["src/*.cpp"], cflags: [
shared_libs: [ "-DGL_GLEXT_PROTOTYPES",
"libbinder_ndk", "-DEGL_EGLEXT_PROTOTYPES",
"-Wall",
"-Wextra",
"-Werror",
"-Wthread-safety",
], ],
srcs: [
":libgui_frame_event_aidl",
"src/*.cpp"
],
shared_libs: [
"android.hardware.graphics.bufferqueue@1.0",
"android.hardware.graphics.bufferqueue@2.0",
"android.hidl.token@1.0-utils",
"libEGL",
"libGLESv2",
"libbase",
"libbinder_ndk",
"libbufferqueueconverter",
"libcamera_metadata",
"libhardware_legacy",
"libhidlbase",
"liblog",
"libnativewindow",
"libtinyxml2",
"libui",
"libutils",
"libyuv",
],
static_libs: [
"android.frameworks.automotive.display-V1-ndk",
"android.hardware.automotive.evs-V1-ndk",
"android.hardware.common-V2-ndk",
"libaidlcommonsupport",
"libcutils",
],
local_include_dirs: ["include"],
include_dirs: ["frameworks/native/include/"],
required: ["evs_mock_hal_configuration.xml"],
}
prebuilt_etc {
name: "evs_mock_hal_configuration.xml",
soc_specific: true,
src: "resources/evs_mock_configuration.xml",
sub_dir: "automotive/evs",
} }

View file

@@ -1,5 +1,8 @@
service vendor.evs-hal-default /vendor/bin/hw/android.hardware.automotive.evs-aidl-default-service service vendor.evs-hal-default /vendor/bin/hw/android.hardware.automotive.evs-aidl-default-service
class early_hal class early_hal
user automotive_evs priority -20
group automotive_evs user graphics
group automotive_evs camera
onrestart restart cardisplayproxyd
onrestart restart evsmanagerd
disabled disabled

View file

@@ -1,11 +0,0 @@
<manifest version="1.0" type="device">
<hal format="aidl">
<name>android.hardware.automotive.evs</name>
<transport>hwbinder</transport>
<version>1</version>
<interface>
<name>IEvsEnumerator</name>
<instance>hw/0</instance>
</interface>
</hal>
</manifest>

View file

@@ -0,0 +1,384 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "ConfigManagerUtil.h"
#include <aidl/android/hardware/automotive/evs/CameraParam.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <android-base/logging.h>
#include <system/camera_metadata.h>
#include <tinyxml2.h>
#include <string>
#include <string_view>
#include <unordered_map>
#include <unordered_set>
#include <vector>
/*
 * Describes a single camera/display stream configuration.
 *
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 *
 * Modernized from a C-style `typedef struct {...}` to a named struct so the
 * type has a proper linkage name in C++; all existing uses of the
 * `StreamConfiguration` name remain valid.
 */
struct StreamConfiguration {
    int id;      // Stream identifier
    int width;   // Frame width in pixels
    int height;  // Frame height in pixels
    ::aidl::android::hardware::graphics::common::PixelFormat format;  // Pixel format of the stream
    int type;       // Stream type — presumably a camera_metadata stream-configuration
                    // direction value; TODO confirm against the parser
    int framerate;  // Frame rate — presumably frames per second; TODO confirm
};
/*
 * Loads and owns the EVS HAL configuration data.
 *
 * The configuration is parsed from an XML file (or restored from a cached
 * binary form) and describes the system, available camera devices, camera
 * groups, and displays.  All public accessors block on mConfigCond until the
 * configuration data has been completely loaded (mIsReady).
 */
class ConfigManager final {
  public:
    /* Creates and initializes a ConfigManager instance. */
    static std::unique_ptr<ConfigManager> Create();

    /* This class is not copyable. */
    ConfigManager(const ConfigManager&) = delete;
    ConfigManager& operator=(const ConfigManager&) = delete;

    /* Camera device's capabilities and metadata */
    class CameraInfo {
      public:
        CameraInfo() : characteristics(nullptr) {}

        virtual ~CameraInfo();

        /* Allocate memory for camera_metadata_t */
        bool allocate(size_t entry_cap, size_t data_cap) {
            // Refuse to clobber an existing allocation; the caller must not
            // allocate twice.
            if (characteristics != nullptr) {
                LOG(ERROR) << "Camera metadata is already allocated";
                return false;
            }

            characteristics = allocate_camera_metadata(entry_cap, data_cap);
            return characteristics != nullptr;
        }

        /*
         * List of supported controls that the primary client can program.
         * Parameters are stored with their valid range
         * (presumably <min, max, step>; confirm against the parser).
         */
        std::unordered_map<::aidl::android::hardware::automotive::evs::CameraParam,
                           std::tuple<int32_t, int32_t, int32_t>>
                controls;

        /*
         * List of supported output stream configurations.
         */
        std::unordered_map<int32_t, StreamConfiguration> streamConfigurations;

        /*
         * Internal storage for camera metadata. Each entry holds a pointer to
         * data and number of elements
         */
        std::unordered_map<camera_metadata_tag_t, std::pair<void*, size_t>> cameraMetadata;

        /* Camera module characteristics */
        camera_metadata_t* characteristics;
    };

    /* Capabilities and metadata of a logical group of camera devices. */
    class CameraGroupInfo : public CameraInfo {
      public:
        CameraGroupInfo() {}

        /* ID of member camera devices */
        std::unordered_set<std::string> devices;

        /* The capture operation of member camera devices are synchronized */
        int32_t synchronized = 0;
    };

    /* System-wide static information. */
    class SystemInfo {
      public:
        /* number of available cameras */
        int32_t numCameras = 0;
    };

    /* Static information about a display device. */
    class DisplayInfo {
      public:
        /*
         * List of supported input stream configurations.
         */
        std::unordered_map<int32_t, StreamConfiguration> streamConfigurations;
    };

    /*
     * Return system information
     *
     * Blocks until the configuration data is ready.
     *
     * @return SystemInfo
     *         Constant reference of SystemInfo.
     */
    const SystemInfo& getSystemInfo() {
        std::unique_lock<std::mutex> lock(mConfigLock);
        mConfigCond.wait(lock, [this] { return mIsReady; });
        return mSystemInfo;
    }

    /*
     * Return a list of camera identifiers
     *
     * This function assumes that it is not being called frequently.
     * Blocks until the configuration data is ready.
     *
     * @return std::vector<std::string>
     *         A vector that contains unique camera device identifiers.
     */
    std::vector<std::string> getCameraIdList() {
        std::unique_lock<std::mutex> lock(mConfigLock);
        mConfigCond.wait(lock, [this] { return mIsReady; });

        std::vector<std::string> aList;
        aList.reserve(mCameraInfo.size());
        for (auto&& v : mCameraInfo) {
            aList.push_back(v.first);
        }

        return aList;
    }

    /*
     * Return a list of camera group identifiers
     *
     * This function assumes that it is not being called frequently.
     * Blocks until the configuration data is ready.
     *
     * @return std::vector<std::string>
     *         A vector that contains unique camera device identifiers.
     */
    std::vector<std::string> getCameraGroupIdList() {
        std::unique_lock<std::mutex> lock(mConfigLock);
        mConfigCond.wait(lock, [this] { return mIsReady; });

        std::vector<std::string> aList;
        aList.reserve(mCameraGroups.size());
        for (auto&& v : mCameraGroups) {
            aList.push_back(v.first);
        }

        return aList;
    }

    /*
     * Return a pointer to the camera group
     *
     * Blocks until the configuration data is ready.
     *
     * NOTE: operator[] default-constructs a null entry when the given id is
     * unknown, so callers must check the returned pointer before use.
     *
     * @return CameraGroup
     *         A pointer to a camera group identified by a given id.
     */
    std::unique_ptr<CameraGroupInfo>& getCameraGroupInfo(const std::string& gid) {
        std::unique_lock<std::mutex> lock(mConfigLock);
        mConfigCond.wait(lock, [this] { return mIsReady; });

        return mCameraGroups[gid];
    }

    /*
     * Return a camera metadata
     *
     * Blocks until the configuration data is ready.
     *
     * NOTE: operator[] default-constructs a null entry when the given id is
     * unknown, so callers must check the returned pointer before use.
     *
     * @param  cameraId
     *         Unique camera node identifier in string
     *
     * @return unique_ptr<CameraInfo>
     *         A pointer to CameraInfo that is associated with a given camera
     *         ID. This returns a null pointer if this does not recognize a
     *         given camera identifier.
     */
    std::unique_ptr<CameraInfo>& getCameraInfo(const std::string& cameraId) noexcept {
        std::unique_lock<std::mutex> lock(mConfigLock);
        mConfigCond.wait(lock, [this] { return mIsReady; });

        return mCameraInfo[cameraId];
    }

    /*
     * Tell whether the configuration data is ready to be used
     *
     * @return bool
     *         True if configuration data is ready to be consumed.
     */
    bool isReady() const { return mIsReady; }

  private:
    /* Constructors */
    ConfigManager() : mBinaryFilePath("") {}

    /* Paths to the default and the override configuration files */
    static std::string_view sConfigDefaultPath;
    static std::string_view sConfigOverridePath;

    /* System configuration */
    SystemInfo mSystemInfo;

    /* Internal data structure for camera device information */
    std::unordered_map<std::string, std::unique_ptr<CameraInfo>> mCameraInfo;

    /* Internal data structure for camera device information */
    std::unordered_map<std::string, std::unique_ptr<DisplayInfo>> mDisplayInfo;

    /* Camera groups are stored in <group id, CameraGroup> hash map */
    std::unordered_map<std::string, std::unique_ptr<CameraGroupInfo>> mCameraGroups;

    /*
     * Camera positions are stored in <position, camera id set> hash map.
     * The position must be one of front, rear, left, and right.
     */
    std::unordered_map<std::string, std::unordered_set<std::string>> mCameraPosition;

    /* Configuration data lock */
    mutable std::mutex mConfigLock;

    /*
     * This condition is signalled when it completes a configuration data
     * preparation.
     */
    std::condition_variable mConfigCond;

    /* A path to a binary configuration file */
    const char* mBinaryFilePath;

    /* Configuration data readiness */
    bool mIsReady = false;

    /*
     * Parse a given EVS configuration file and store the information
     * internally.
     *
     * @return bool
     *         True if it completes parsing a file successfully.
     */
    bool readConfigDataFromXML() noexcept;

    /*
     * read the information of the vehicle
     *
     * @param  aSysElem
     *         A pointer to "system" XML element.
     */
    void readSystemInfo(const tinyxml2::XMLElement* const aSysElem);

    /*
     * read the information of camera devices
     *
     * @param  aCameraElem
     *         A pointer to "camera" XML element that may contain multiple
     *         "device" elements.
     */
    void readCameraInfo(const tinyxml2::XMLElement* const aCameraElem);

    /*
     * read display device information
     *
     * @param  aDisplayElem
     *         A pointer to "display" XML element that may contain multiple
     *         "device" elements.
     */
    void readDisplayInfo(const tinyxml2::XMLElement* const aDisplayElem);

    /*
     * read camera device information
     *
     * @param  aCamera
     *         A pointer to CameraInfo that will be completed by this
     *         method.
     *         aDeviceElem
     *         A pointer to "device" XML element that contains camera module
     *         capability info and its characteristics.
     *
     * @return bool
     *         Return false upon any failure in reading and processing camera
     *         device information.
     */
    bool readCameraDeviceInfo(CameraInfo* aCamera, const tinyxml2::XMLElement* aDeviceElem);

    /*
     * read camera metadata
     *
     * @param  aCapElem
     *         A pointer to "cap" XML element.
     * @param  aCamera
     *         A pointer to CameraInfo that is being filled by this method.
     * @param  dataSize
     *         Required size of memory to store camera metadata found in this
     *         method. This is calculated in this method and returned to the
     *         caller for camera_metadata allocation.
     *
     * @return size_t
     *         Number of camera metadata entries
     */
    size_t readCameraCapabilities(const tinyxml2::XMLElement* const aCapElem, CameraInfo* aCamera,
                                  size_t& dataSize);

    /*
     * read camera metadata
     *
     * @param  aParamElem
     *         A pointer to "characteristics" XML element.
     * @param  aCamera
     *         A pointer to CameraInfo that is being filled by this method.
     * @param  dataSize
     *         Required size of memory to store camera metadata found in this
     *         method.
     *
     * @return size_t
     *         Number of camera metadata entries
     */
    size_t readCameraMetadata(const tinyxml2::XMLElement* const aParamElem, CameraInfo* aCamera,
                              size_t& dataSize);

    /*
     * construct camera_metadata_t from camera capabilities and metadata
     *
     * @param  aCamera
     *         A pointer to CameraInfo that is being filled by this method.
     * @param  totalEntries
     *         Number of camera metadata entries to be added.
     * @param  totalDataSize
     *         Sum of sizes of camera metadata entries to be added.
     *
     * @return bool
     *         False if either it fails to allocate memory for camera metadata
     *         or its size is not large enough to add all found camera metadata
     *         entries.
     */
    bool constructCameraMetadata(CameraInfo* aCamera, const size_t totalEntries,
                                 const size_t totalDataSize);

    /*
     * Read configuration data from the binary file
     *
     * @return bool
     *         True if it succeeds to read configuration data from a binary
     *         file.
     */
    bool readConfigDataFromBinary();

    /*
     * Store configuration data to the file
     *
     * @return bool
     *         True if it succeeds to serialize mCameraInfo to the file.
     */
    bool writeConfigDataToBinary();

    /*
     * debugging method to print out all XML elements and their attributes in
     * logcat message.
     *
     * @param  aNode
     *         A pointer to the root XML element to navigate.
     * @param  prefix
     *         A prefix to XML string.
     */
    void printElementNames(const tinyxml2::XMLElement* aNode, const std::string& prefix = "") const;
};

View file

@@ -0,0 +1,62 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <aidl/android/hardware/automotive/evs/CameraParam.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <android-base/macros.h>
#include <system/camera_metadata.h>
#include <string>
#include <utility>
/**
 * Stateless helper routines used while parsing the EVS configuration.
 *
 * All members are static conversion/formatting utilities; the class cannot
 * be instantiated (see DISALLOW_IMPLICIT_CONSTRUCTORS below).  The
 * implementations live in the corresponding .cpp file (not visible here).
 */
class ConfigManagerUtil final {
  public:
    /**
     * Convert a given string into V4L2_CID_*
     */
    static bool convertToEvsCameraParam(
            const std::string& id,
            ::aidl::android::hardware::automotive::evs::CameraParam& camParam);

    /**
     * Convert a given string into android.hardware.graphics.common.PixelFormat
     */
    static bool convertToPixelFormat(const std::string& in,
                                     ::aidl::android::hardware::graphics::common::PixelFormat& out);

    /**
     * Convert a given string into corresponding camera metadata data tag defined in
     * system/media/camera/include/system/camera_metadata_tags.h
     */
    static bool convertToMetadataTag(const char* name, camera_metadata_tag& aTag);

    /**
     * Convert a given string into a floating value array
     *
     * @param  sz        Size specifier string — exact format defined by the
     *                   implementation (not visible here).
     * @param  vals      Delimited list of values to parse.
     * @param  count     Out-parameter receiving the number of parsed values.
     * @param  delimiter Character separating values; ',' by default.
     * @return Newly allocated float array — presumably owned by the caller;
     *         confirm deallocation policy against the implementation.
     */
    static float* convertFloatArray(const char* sz, const char* vals, size_t& count,
                                    const char delimiter = ',');

    /**
     * Trim a string
     *
     * @param  src Input string.
     * @param  ws  Set of characters treated as whitespace to strip.
     */
    static std::string trimString(const std::string& src, const std::string& ws = " \n\r\t\f\v");

    /**
     * Convert a given string to corresponding camera capabilities
     */
    static bool convertToCameraCapability(
            const char* name, camera_metadata_enum_android_request_available_capabilities_t& cap);

    // Utility class: no instances allowed.
    DISALLOW_IMPLICIT_CONSTRUCTORS(ConfigManagerUtil);
};

View file

@@ -1,66 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef android_hardware_automotive_evs_aidl_impl_evshal_include_DefaultEvsHal_H_
#define android_hardware_automotive_evs_aidl_impl_evshal_include_DefaultEvsHal_H_
#include <aidl/android/hardware/automotive/evs/BnEvsEnumerator.h>
namespace aidl::android::hardware::automotive::evs::implementation {
/*
 * Default implementation of the EVS AIDL IEvsEnumerator interface.
 *
 * This header only declares the interface overrides; their behavior is
 * defined in the corresponding .cpp file (not visible here).
 */
class DefaultEvsEnumerator final
    : public ::aidl::android::hardware::automotive::evs::BnEvsEnumerator {
    // Reports whether this enumerator is backed by real hardware.
    ::ndk::ScopedAStatus isHardware(bool* flag) override;

    // Opens a camera identified by cameraId with the requested stream config.
    ::ndk::ScopedAStatus openCamera(
            const std::string& cameraId,
            const ::aidl::android::hardware::automotive::evs::Stream& streamConfig,
            std::shared_ptr<::aidl::android::hardware::automotive::evs::IEvsCamera>* obj) override;
    ::ndk::ScopedAStatus closeCamera(
            const std::shared_ptr<::aidl::android::hardware::automotive::evs::IEvsCamera>& obj)
            override;

    // Camera discovery.
    ::ndk::ScopedAStatus getCameraList(
            std::vector<::aidl::android::hardware::automotive::evs::CameraDesc>* list) override;
    ::ndk::ScopedAStatus getStreamList(
            const ::aidl::android::hardware::automotive::evs::CameraDesc& desc,
            std::vector<::aidl::android::hardware::automotive::evs::Stream>* _aidl_return) override;

    // Display management.
    ::ndk::ScopedAStatus openDisplay(
            int32_t displayId,
            std::shared_ptr<::aidl::android::hardware::automotive::evs::IEvsDisplay>* obj) override;
    ::ndk::ScopedAStatus closeDisplay(
            const std::shared_ptr<::aidl::android::hardware::automotive::evs::IEvsDisplay>& obj)
            override;
    ::ndk::ScopedAStatus getDisplayIdList(std::vector<uint8_t>* list) override;
    ::ndk::ScopedAStatus getDisplayState(
            ::aidl::android::hardware::automotive::evs::DisplayState* state) override;

    // Device-status notification registration.
    ::ndk::ScopedAStatus registerStatusCallback(
            const std::shared_ptr<
                    ::aidl::android::hardware::automotive::evs::IEvsEnumeratorStatusCallback>&
                    callback) override;

    // Ultrasonics array management.
    ::ndk::ScopedAStatus openUltrasonicsArray(
            const std::string& id,
            std::shared_ptr<::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray>* obj)
            override;
    ::ndk::ScopedAStatus closeUltrasonicsArray(
            const std::shared_ptr<::aidl::android::hardware::automotive::evs::IEvsUltrasonicsArray>&
                    arr) override;
    ::ndk::ScopedAStatus getUltrasonicsArrayList(
            std::vector<::aidl::android::hardware::automotive::evs::UltrasonicsArrayDesc>* list)
            override;
};
} // namespace aidl::android::hardware::automotive::evs::implementation
#endif // android_hardware_automotive_evs_aidl_impl_evshal_include_DefaultEvsHal_H_

View file

@@ -0,0 +1,134 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "ConfigManager.h"
#include "EvsGlDisplay.h"
#include "EvsMockCamera.h"
#include <aidl/android/frameworks/automotive/display/ICarDisplayProxy.h>
#include <aidl/android/hardware/automotive/evs/BnEvsEnumerator.h>
#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
#include <aidl/android/hardware/automotive/evs/DeviceStatusType.h>
#include <aidl/android/hardware/automotive/evs/IEvsCamera.h>
#include <aidl/android/hardware/automotive/evs/IEvsEnumeratorStatusCallback.h>
#include <aidl/android/hardware/automotive/evs/Stream.h>
#include <utils/Thread.h>
#include <atomic>
#include <mutex>
#include <optional>
#include <thread>
#include <unordered_map>
namespace aidl::android::hardware::automotive::evs::implementation {
/*
 * Mock/default EVS enumerator.
 *
 * Implements IEvsEnumerator on top of EvsMockCamera instances and
 * GL-rendered displays (EvsGlDisplay).  Camera/display inventory is kept in
 * static members shared by all instances (see the NOTE below).
 */
class EvsEnumerator final : public ::aidl::android::hardware::automotive::evs::BnEvsEnumerator {
  public:
    // Methods from ::aidl::android::hardware::automotive::evs::IEvsEnumerator
    ndk::ScopedAStatus isHardware(bool* flag) override;
    ndk::ScopedAStatus openCamera(const std::string& cameraId, const evs::Stream& streamConfig,
                                  std::shared_ptr<evs::IEvsCamera>* obj) override;
    ndk::ScopedAStatus closeCamera(const std::shared_ptr<evs::IEvsCamera>& obj) override;
    ndk::ScopedAStatus getCameraList(std::vector<evs::CameraDesc>* _aidl_return) override;
    ndk::ScopedAStatus getStreamList(const evs::CameraDesc& desc,
                                     std::vector<evs::Stream>* _aidl_return) override;
    ndk::ScopedAStatus openDisplay(int32_t displayId,
                                   std::shared_ptr<evs::IEvsDisplay>* obj) override;
    ndk::ScopedAStatus closeDisplay(const std::shared_ptr<evs::IEvsDisplay>& obj) override;
    ndk::ScopedAStatus getDisplayIdList(std::vector<uint8_t>* list) override;
    ndk::ScopedAStatus getDisplayState(evs::DisplayState* state) override;
    ndk::ScopedAStatus registerStatusCallback(
            const std::shared_ptr<evs::IEvsEnumeratorStatusCallback>& callback) override;
    ndk::ScopedAStatus openUltrasonicsArray(
            const std::string& id, std::shared_ptr<evs::IEvsUltrasonicsArray>* obj) override;
    ndk::ScopedAStatus closeUltrasonicsArray(
            const std::shared_ptr<evs::IEvsUltrasonicsArray>& obj) override;
    ndk::ScopedAStatus getUltrasonicsArrayList(
            std::vector<evs::UltrasonicsArrayDesc>* list) override;

    // Implementation details
    // Constructs the enumerator around the car display proxy service used to
    // back EVS displays.
    EvsEnumerator(const std::shared_ptr<
                          ::aidl::android::frameworks::automotive::display::ICarDisplayProxy>&
                          proxyService);

    // Forwards a device availability change to the registered status callback.
    void notifyDeviceStatusChange(const std::string_view& deviceName, evs::DeviceStatusType type);

  private:
    // Bookkeeping for a known camera: its descriptor plus a weak reference to
    // the currently active instance, if any.
    struct CameraRecord {
        evs::CameraDesc desc;
        std::weak_ptr<EvsMockCamera> activeInstance;

        CameraRecord(const char* cameraId) : desc() { desc.id = cameraId; }
    };

    // Thread-safe registry of currently open displays.
    class ActiveDisplays {
      public:
        struct DisplayInfo {
            int32_t id{-1};
            std::weak_ptr<EvsGlDisplay> displayWeak;
            // Raw address of the internal display object, used as a reverse
            // lookup key (see mDisplayToId).
            uintptr_t internalDisplayRawAddr;
        };

        std::optional<DisplayInfo> popDisplay(int32_t id);
        std::optional<DisplayInfo> popDisplay(const std::shared_ptr<IEvsDisplay>& display);
        std::unordered_map<int32_t, DisplayInfo> getAllDisplays();
        bool tryInsert(int32_t id, const std::shared_ptr<EvsGlDisplay>& display);

      private:
        std::mutex mMutex;
        std::unordered_map<int32_t, DisplayInfo> mIdToDisplay GUARDED_BY(mMutex);
        std::unordered_map<uintptr_t, int32_t> mDisplayToId GUARDED_BY(mMutex);
    };

    bool checkPermission();
    void closeCamera_impl(const std::shared_ptr<evs::IEvsCamera>& pCamera,
                          const std::string& cameraId);

    static bool qualifyCaptureDevice(const char* deviceName);
    static CameraRecord* findCameraById(const std::string& cameraId);
    static void enumerateCameras();
    static bool addCaptureDevice(const std::string& deviceName);
    static bool removeCaptureDevice(const std::string& deviceName);
    // Enumerate available displays and return an id of the internal display
    static uint64_t enumerateDisplays();
    static ActiveDisplays& mutableActiveDisplays();

    // NOTE: All members values are static so that all clients operate on the same state
    // That is to say, this is effectively a singleton despite the fact that HIDL
    // constructs a new instance for each client.
    // Because our server has a single thread in the thread pool, these values are
    // never accessed concurrently despite potentially having multiple instance objects
    // using them.
    static std::unordered_map<std::string, CameraRecord> sCameraList;
    // Object destructs if client dies.
    static std::mutex sLock;                       // Mutex on shared camera device list.
    static std::condition_variable sCameraSignal;  // Signal on camera device addition.
    static std::unique_ptr<ConfigManager> sConfigManager;  // ConfigManager
    static std::shared_ptr<::aidl::android::frameworks::automotive::display::ICarDisplayProxy>
            sDisplayProxy;
    static std::unordered_map<uint8_t, uint64_t> sDisplayPortList;

    // Id of the internal display found during display enumeration.
    uint64_t mInternalDisplayId;
    // Client-registered status callback, if any.
    std::shared_ptr<evs::IEvsEnumeratorStatusCallback> mCallback;
};
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@@ -0,0 +1,89 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "GlWrapper.h"
#include <aidl/android/frameworks/automotive/display/ICarDisplayProxy.h>
#include <aidl/android/hardware/automotive/evs/BnEvsDisplay.h>
#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
#include <aidl/android/hardware/automotive/evs/DisplayDesc.h>
#include <aidl/android/hardware/automotive/evs/DisplayState.h>
#include <thread>
namespace aidl::android::hardware::automotive::evs::implementation {
/*
 * IEvsDisplay implementation that renders incoming buffers to a physical
 * display through EGL/GLES (via GlWrapper) on a dedicated render thread.
 */
class EvsGlDisplay final : public BnEvsDisplay {
  public:
    // Methods from ::aidl::android::hardware::automotive::evs::IEvsDisplay
    // follow.
    ndk::ScopedAStatus getDisplayInfo(evs::DisplayDesc* _aidl_return) override;
    ndk::ScopedAStatus getDisplayState(evs::DisplayState* _aidl_return) override;
    ndk::ScopedAStatus getTargetBuffer(evs::BufferDesc* _aidl_return) override;
    ndk::ScopedAStatus returnTargetBufferForDisplay(const evs::BufferDesc& buffer) override;
    ndk::ScopedAStatus setDisplayState(evs::DisplayState state) override;

    // Implementation details
    // Binds this display object to the physical display identified by
    // displayId, accessed through the given car display proxy service.
    EvsGlDisplay(const std::shared_ptr<automotivedisplay::ICarDisplayProxy>& service,
                 uint64_t displayId);
    virtual ~EvsGlDisplay() override;

    // This gets called if another caller "steals" ownership of the display
    void forceShutdown();

  private:
    // A graphics buffer into which we'll store images. This member variable
    // will be protected by semaphores.
    struct BufferRecord {
        ::aidl::android::hardware::graphics::common::HardwareBufferDescription description;
        buffer_handle_t handle;
        // Presumably a unique id to recognize the buffer handed back by the
        // client — TODO confirm against the implementation.
        int fingerprint;
    } mBuffer;

    // State of a rendering thread
    enum RenderThreadStates {
        STOPPED = 0,
        STOPPING = 1,
        RUN = 2,
    };

    uint64_t mDisplayId;                // Physical display this object drives
    evs::DisplayDesc mInfo;             // Cached display description
    evs::DisplayState mRequestedState GUARDED_BY(mLock) = evs::DisplayState::NOT_VISIBLE;
    std::shared_ptr<automotivedisplay::ICarDisplayProxy> mDisplayProxy;
    GlWrapper mGlWrapper;               // EGL/GLES helper doing the actual rendering
    mutable std::mutex mLock;

    // This tells us whether or not our buffer is in use. Protected by
    // semaphores.
    bool mBufferBusy = false;

    // Variables to synchronize a rendering thread w/ main and binder threads
    std::thread mRenderThread;
    RenderThreadStates mState GUARDED_BY(mLock) = STOPPED;
    bool mBufferReady = false;
    void renderFrames();
    bool initializeGlContextLocked() REQUIRES(mLock);

    std::condition_variable mBufferReadyToUse;
    std::condition_variable mBufferReadyToRender;
    std::condition_variable mBufferDone;
};
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@@ -0,0 +1,152 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "ConfigManager.h"
#include <aidl/android/hardware/automotive/evs/BnEvsCamera.h>
#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraDesc.h>
#include <aidl/android/hardware/automotive/evs/CameraParam.h>
#include <aidl/android/hardware/automotive/evs/EvsResult.h>
#include <aidl/android/hardware/automotive/evs/IEvsCameraStream.h>
#include <aidl/android/hardware/automotive/evs/IEvsDisplay.h>
#include <aidl/android/hardware/automotive/evs/ParameterRange.h>
#include <aidl/android/hardware/automotive/evs/Stream.h>
// #include <android-base/result.h>
#include <android/hardware_buffer.h>
#include <ui/GraphicBuffer.h>
#include <functional>
#include <thread>
namespace aidl::android::hardware::automotive::evs::implementation {
/*
 * IEvsCamera implementation that synthesizes frames in software instead of
 * capturing from real hardware, for use by the default/mock EVS HAL.
 */
class EvsMockCamera : public evs::BnEvsCamera {
    // This prevents constructors from direct access while it allows this class to
    // be instantiated via ndk::SharedRefBase::make<>.
  private:
    struct Sigil {
        explicit Sigil() = default;
    };

  public:
    // Methods from ::android::hardware::automotive::evs::IEvsCamera follow.
    ndk::ScopedAStatus doneWithFrame(const std::vector<evs::BufferDesc>& buffers) override;
    ndk::ScopedAStatus forcePrimaryClient(
            const std::shared_ptr<evs::IEvsDisplay>& display) override;
    ndk::ScopedAStatus getCameraInfo(evs::CameraDesc* _aidl_return) override;
    ndk::ScopedAStatus getExtendedInfo(int32_t opaqueIdentifier,
                                       std::vector<uint8_t>* value) override;
    ndk::ScopedAStatus getIntParameter(evs::CameraParam id, std::vector<int32_t>* value) override;
    ndk::ScopedAStatus getIntParameterRange(evs::CameraParam id,
                                            evs::ParameterRange* _aidl_return) override;
    ndk::ScopedAStatus getParameterList(std::vector<evs::CameraParam>* _aidl_return) override;
    ndk::ScopedAStatus getPhysicalCameraInfo(const std::string& deviceId,
                                             evs::CameraDesc* _aidl_return) override;
    ndk::ScopedAStatus importExternalBuffers(const std::vector<evs::BufferDesc>& buffers,
                                             int32_t* _aidl_return) override;
    ndk::ScopedAStatus pauseVideoStream() override;
    ndk::ScopedAStatus resumeVideoStream() override;
    ndk::ScopedAStatus setExtendedInfo(int32_t opaqueIdentifier,
                                       const std::vector<uint8_t>& opaqueValue) override;
    ndk::ScopedAStatus setIntParameter(evs::CameraParam id, int32_t value,
                                       std::vector<int32_t>* effectiveValue) override;
    ndk::ScopedAStatus setPrimaryClient() override;
    ndk::ScopedAStatus setMaxFramesInFlight(int32_t bufferCount) override;
    ndk::ScopedAStatus startVideoStream(
            const std::shared_ptr<evs::IEvsCameraStream>& receiver) override;
    ndk::ScopedAStatus stopVideoStream() override;
    ndk::ScopedAStatus unsetPrimaryClient() override;

    // Factory methods; construction goes through ndk::SharedRefBase::make<>
    // (see the Sigil idiom above).
    static std::shared_ptr<EvsMockCamera> Create(const char* deviceName);
    static std::shared_ptr<EvsMockCamera> Create(
            const char* deviceName, std::unique_ptr<ConfigManager::CameraInfo>& camInfo,
            const evs::Stream* streamCfg = nullptr);
    EvsMockCamera(const EvsMockCamera&) = delete;
    EvsMockCamera& operator=(const EvsMockCamera&) = delete;

    virtual ~EvsMockCamera() override;
    void shutdown();

    const evs::CameraDesc& getDesc() { return mDescription; }

    // Constructors
    EvsMockCamera(Sigil sigil, const char* deviceName,
                  std::unique_ptr<ConfigManager::CameraInfo>& camInfo);

  private:
    // These three functions are expected to be called while mAccessLock is held
    bool setAvailableFrames_Locked(unsigned bufferCount);
    unsigned increaseAvailableFrames_Locked(unsigned numToAdd);
    unsigned decreaseAvailableFrames_Locked(unsigned numToRemove);

    void generateFrames();
    void fillMockFrame(buffer_handle_t handle, const AHardwareBuffer_Desc* pDesc);
    void returnBufferLocked(const uint32_t bufferId);
    ndk::ScopedAStatus stopVideoStream_impl();

    CameraDesc mDescription = {};  // The properties of this camera

    std::thread mCaptureThread;  // The thread we'll use to synthesize frames

    // The callback used to deliver each frame
    std::shared_ptr<evs::IEvsCameraStream> mStream;

    // Horizontal pixel count in the buffers
    uint32_t mWidth = 0;
    // Vertical pixel count in the buffers
    uint32_t mHeight = 0;
    // Values from android_pixel_format_t
    uint32_t mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
    // Values from from Gralloc.h
    uint64_t mUsage =
            GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
    // Bytes per line in the buffers
    uint32_t mStride = 0;

    // Bookkeeping for one synthesized graphics buffer.
    struct BufferRecord {
        buffer_handle_t handle;
        bool inUse;

        explicit BufferRecord(buffer_handle_t h) : handle(h), inUse(false){};
    };

    std::vector<BufferRecord> mBuffers;  // Graphics buffers to transfer images
    unsigned mFramesAllowed;             // How many buffers are we currently using
    unsigned mFramesInUse;               // How many buffers are currently outstanding

    // Lifecycle of the synthesized video stream.
    enum StreamStateValues {
        STOPPED,
        RUNNING,
        STOPPING,
        DEAD,
    };
    StreamStateValues mStreamState;

    // Synchronization necessary to deconflict mCaptureThread from the main service thread
    std::mutex mAccessLock;

    // Static camera module information
    std::unique_ptr<ConfigManager::CameraInfo>& mCameraInfo;

    // For the extended info
    std::unordered_map<uint32_t, std::vector<uint8_t>> mExtInfo;
    std::unordered_map<CameraParam, int32_t> mParams;
};
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@ -0,0 +1,79 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <GLES3/gl3.h>
#include <GLES3/gl3ext.h>
#include <aidl/android/frameworks/automotive/display/ICarDisplayProxy.h>
#include <aidl/android/hardware/automotive/evs/BufferDesc.h>
#include <android-base/logging.h>
#include <bufferqueueconverter/BufferQueueConverter.h>
namespace aidl::android::hardware::automotive::evs::implementation {
namespace automotivedisplay = ::aidl::android::frameworks::automotive::display;
// Owns the EGL/GLES state used to render EVS camera frames to a display
// surface obtained from ICarDisplayProxy.  Method bodies live in the
// corresponding .cpp file, which is not visible here.
class GlWrapper {
  public:
    // Starts with an empty surface holder; resources are acquired in initialize().
    GlWrapper() : mSurfaceHolder(::android::SurfaceHolderUniquePtr(nullptr, nullptr)) {}

    // Sets up the EGL display/surface/context for |displayId| via |svc|.
    // Returns false on failure.  NOTE(review): exact failure conditions are in
    // the implementation file — confirm there.
    bool initialize(const std::shared_ptr<automotivedisplay::ICarDisplayProxy>& svc,
                    uint64_t displayId);

    // Releases every EGL/GLES resource acquired by initialize().
    void shutdown();

    // Updates the source texture from |handle| described by |description|.
    bool updateImageTexture(
            buffer_handle_t handle,
            const ::aidl::android::hardware::graphics::common::HardwareBufferDescription&
                    description);

    // Draws the current texture to the display surface.
    void renderImageToScreen();

    // Shows or hides the render window on the display identified by |displayId|.
    void showWindow(const std::shared_ptr<automotivedisplay::ICarDisplayProxy>& svc,
                    uint64_t displayId);
    void hideWindow(const std::shared_ptr<automotivedisplay::ICarDisplayProxy>& svc,
                    uint64_t displayId);

    // Surface dimensions; meaningful only after a successful initialize().
    unsigned getWidth() { return mWidth; };
    unsigned getHeight() { return mHeight; };

  private:
    // Producer end of the buffer queue backing the display surface.
    ::android::sp<::android::hardware::graphics::bufferqueue::V2_0::IGraphicBufferProducer>
            mGfxBufferProducer;

    EGLDisplay mDisplay;
    EGLSurface mSurface;
    EGLContext mContext;

    unsigned mWidth = 0;
    unsigned mHeight = 0;

    EGLImageKHR mKHRimage = EGL_NO_IMAGE_KHR;

    GLuint mTextureMap = 0;
    GLuint mShaderProgram = 0;

    // Opaque handle for a native hardware buffer defined in
    // frameworks/native/opengl/include/EGL/eglplatform.h
    ANativeWindow* mWindow;

    // Pointer to a Surface wrapper.
    ::android::SurfaceHolderUniquePtr mSurfaceHolder;
};
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@ -0,0 +1,6 @@
<manifest version="1.0" type="device">
<hal format="aidl">
<name>android.hardware.automotive.evs</name>
<fqname>IEvsEnumerator/hw/0</fqname>
</hal>
</manifest>

View file

@ -0,0 +1,68 @@
<?xml version='1.0' encoding='utf-8'?>
<!-- Copyright (C) 2019 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Exterior View System Example Configuration
Android Automotive axes are used to define coordinates.
See https://source.android.com/devices/sensors/sensor-types#auto_axes
Use evs_configuration.dtd with xmllint tool, to validate XML configuration file
-->
<configuration>
<!-- system configuration -->
<system>
<!-- number of cameras available to EVS -->
<num_cameras value='1'/>
</system>
<!-- camera information -->
<camera>
<!-- camera device starts -->
<device id='/dev/video10' position='rear'>
<caps>
<!-- list of supported controls -->
<supported_controls>
<control name='BRIGHTNESS' min='0' max='255'/>
<control name='CONTRAST' min='0' max='255'/>
</supported_controls>
<stream id='0' width='640' height='360' format='RGBA_8888' framerate='30'/>
</caps>
<!-- list of parameters -->
<characteristics>
<!-- Camera intrinsic calibration matrix. See
https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#LENS_INTRINSIC_CALIBRATION
-->
<parameter
name='LENS_INTRINSIC_CALIBRATION'
type='float'
size='5'
value='0.0,0.0,0.0,0.0,0.0'
/>
</characteristics>
</device>
</camera>
<display>
<device id='display0' position='driver'>
<caps>
                <!-- list of supported input stream configurations -->
<stream id='0' width='1280' height='720' format='RGBA_8888' framerate='30'/>
</caps>
</device>
</display>
</configuration>

View file

@ -0,0 +1,992 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ConfigManager.h"
#include <android-base/parseint.h>
#include <hardware/gralloc.h>
#include <utils/SystemClock.h>
#include <fstream>
#include <sstream>
#include <string_view>
#include <thread>
namespace {
using ::aidl::android::hardware::automotive::evs::CameraParam;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::tinyxml2::XMLAttribute;
using ::tinyxml2::XMLDocument;
using ::tinyxml2::XMLElement;
} // namespace
std::string_view ConfigManager::sConfigDefaultPath =
"/vendor/etc/automotive/evs/evs_mock_hal_configuration.xml";
std::string_view ConfigManager::sConfigOverridePath =
"/vendor/etc/automotive/evs/evs_configuration_override.xml";
void ConfigManager::printElementNames(const XMLElement* rootElem, const std::string& prefix) const {
const XMLElement* curElem = rootElem;
while (curElem != nullptr) {
LOG(VERBOSE) << "[ELEM] " << prefix << curElem->Name();
const XMLAttribute* curAttr = curElem->FirstAttribute();
while (curAttr) {
LOG(VERBOSE) << "[ATTR] " << prefix << curAttr->Name() << ": " << curAttr->Value();
curAttr = curAttr->Next();
}
/* recursively go down to descendants */
printElementNames(curElem->FirstChildElement(), prefix + "\t");
curElem = curElem->NextSiblingElement();
}
}
void ConfigManager::readCameraInfo(const XMLElement* const aCameraElem) {
if (aCameraElem == nullptr) {
LOG(WARNING) << "XML file does not have required camera element";
return;
}
const XMLElement* curElem = aCameraElem->FirstChildElement();
while (curElem != nullptr) {
if (!strcmp(curElem->Name(), "group")) {
/* camera group identifier */
const char* id = curElem->FindAttribute("id")->Value();
/* create a camera group to be filled */
CameraGroupInfo* aCamera = new CameraGroupInfo();
/* read camera device information */
if (!readCameraDeviceInfo(aCamera, curElem)) {
LOG(WARNING) << "Failed to read a camera information of " << id;
delete aCamera;
continue;
}
/* camera group synchronization */
const char* sync = curElem->FindAttribute("synchronized")->Value();
if (!strcmp(sync, "CALIBRATED")) {
aCamera->synchronized = ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED;
} else if (!strcmp(sync, "APPROXIMATE")) {
aCamera->synchronized = ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE;
} else {
aCamera->synchronized = 0; // Not synchronized
}
/* add a group to hash map */
mCameraGroups.insert_or_assign(id, std::unique_ptr<CameraGroupInfo>(aCamera));
} else if (!std::strcmp(curElem->Name(), "device")) {
/* camera unique identifier */
const char* id = curElem->FindAttribute("id")->Value();
/* camera mount location */
const char* pos = curElem->FindAttribute("position")->Value();
/* create a camera device to be filled */
CameraInfo* aCamera = new CameraInfo();
/* read camera device information */
if (!readCameraDeviceInfo(aCamera, curElem)) {
LOG(WARNING) << "Failed to read a camera information of " << id;
delete aCamera;
continue;
}
/* store read camera module information */
mCameraInfo.insert_or_assign(id, std::unique_ptr<CameraInfo>(aCamera));
/* assign a camera device to a position group */
mCameraPosition[pos].insert(id);
} else {
/* ignore other device types */
LOG(DEBUG) << "Unknown element " << curElem->Name() << " is ignored";
}
curElem = curElem->NextSiblingElement();
}
}
/* Fills |aCamera| from a <device> (or <group>) element: reads its
 * capabilities and characteristics, then builds the camera_metadata_t.
 * Returns false only when either argument is null; metadata construction
 * failures are logged but tolerated. */
bool ConfigManager::readCameraDeviceInfo(CameraInfo* aCamera, const XMLElement* aDeviceElem) {
    if (aCamera == nullptr || aDeviceElem == nullptr) {
        return false;
    }

    /* Accumulate the sizing information needed to allocate camera_metadata_t.
     * Note readCameraCapabilities() assigns |dataSize| while
     * readCameraMetadata() adds to it, so this order matters. */
    size_t entryCount = 0;
    size_t dataSize = 0;
    entryCount +=
            readCameraCapabilities(aDeviceElem->FirstChildElement("caps"), aCamera, dataSize);
    entryCount += readCameraMetadata(aDeviceElem->FirstChildElement("characteristics"), aCamera,
                                     dataSize);

    /* construct camera_metadata_t */
    if (!constructCameraMetadata(aCamera, entryCount, dataSize)) {
        LOG(WARNING) << "Either failed to allocate memory or "
                     << "allocated memory was not large enough";
    }
    return true;
}
/* Parses a <caps> element into |aCamera|: supported controls and stream
 * configurations.  Sets (overwrites) |dataSize| with the byte size of the
 * single stream-configuration metadata entry and returns the number of
 * metadata entries contributed (0 or 1).
 *
 * Cleanup: removed the unused local `std::string token` and the redundant
 * std::move() around a prvalue make_tuple().  Assumes required attributes
 * exist per the DTD; FindAttribute() results are not null-checked. */
size_t ConfigManager::readCameraCapabilities(const XMLElement* const aCapElem, CameraInfo* aCamera,
                                             size_t& dataSize) {
    if (aCapElem == nullptr || aCamera == nullptr) {
        return 0;
    }
    const XMLElement* curElem = nullptr;
    /* a list of supported camera parameters/controls */
    curElem = aCapElem->FirstChildElement("supported_controls");
    if (curElem != nullptr) {
        const XMLElement* ctrlElem = curElem->FirstChildElement("control");
        while (ctrlElem != nullptr) {
            const char* nameAttr = ctrlElem->FindAttribute("name")->Value();
            /* default to the widest possible range if parsing fails */
            int32_t minVal = INT32_MIN, maxVal = INT32_MAX;
            if (!android::base::ParseInt(ctrlElem->FindAttribute("min")->Value(), &minVal)) {
                LOG(WARNING) << "Failed to parse " << ctrlElem->FindAttribute("min")->Value();
            }
            if (!android::base::ParseInt(ctrlElem->FindAttribute("max")->Value(), &maxVal)) {
                LOG(WARNING) << "Failed to parse " << ctrlElem->FindAttribute("max")->Value();
            }
            /* "step" is optional and defaults to 1 */
            int32_t stepVal = 1;
            const XMLAttribute* stepAttr = ctrlElem->FindAttribute("step");
            if (stepAttr != nullptr) {
                if (!android::base::ParseInt(stepAttr->Value(), &stepVal)) {
                    LOG(WARNING) << "Failed to parse " << stepAttr->Value();
                }
            }
            CameraParam aParam;
            if (ConfigManagerUtil::convertToEvsCameraParam(nameAttr, aParam)) {
                aCamera->controls.insert_or_assign(aParam,
                                                   std::make_tuple(minVal, maxVal, stepVal));
            }
            ctrlElem = ctrlElem->NextSiblingElement("control");
        }
    }
    /* a list of camera stream configurations */
    curElem = aCapElem->FirstChildElement("stream");
    while (curElem != nullptr) {
        /* read 5 attributes */
        const XMLAttribute* idAttr = curElem->FindAttribute("id");
        const XMLAttribute* widthAttr = curElem->FindAttribute("width");
        const XMLAttribute* heightAttr = curElem->FindAttribute("height");
        const XMLAttribute* fmtAttr = curElem->FindAttribute("format");
        const XMLAttribute* fpsAttr = curElem->FindAttribute("framerate");
        int32_t id = -1;
        int32_t framerate = 0;
        if (!android::base::ParseInt(idAttr->Value(), &id)) {
            LOG(WARNING) << "Failed to parse " << idAttr->Value();
        }
        if (fpsAttr != nullptr) {
            if (!android::base::ParseInt(fpsAttr->Value(), &framerate)) {
                LOG(WARNING) << "Failed to parse " << fpsAttr->Value();
            }
        }
        PixelFormat format = PixelFormat::UNSPECIFIED;
        if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(), format)) {
            StreamConfiguration cfg = {
                    .id = id,
                    .format = format,
                    .type = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                    .framerate = framerate,
            };
            if (!android::base::ParseInt(widthAttr->Value(), &cfg.width) ||
                !android::base::ParseInt(heightAttr->Value(), &cfg.height)) {
                LOG(WARNING) << "Failed to parse " << widthAttr->Value() << " and "
                             << heightAttr->Value();
            }
            aCamera->streamConfigurations.insert_or_assign(id, cfg);
        }
        curElem = curElem->NextSiblingElement("stream");
    }
    dataSize = calculate_camera_metadata_entry_data_size(
            get_camera_metadata_tag_type(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS),
            aCamera->streamConfigurations.size() * sizeof(StreamConfiguration));
    /* a single camera metadata entry contains multiple stream configurations */
    return dataSize > 0 ? 1 : 0;
}
/* Parses a <characteristics> element into |aCamera|->cameraMetadata,
 * allocating raw buffers (freed by ~CameraInfo()) for each supported tag.
 * Adds the byte cost of each entry to |dataSize| and returns the number of
 * entries parsed.
 *
 * BUG FIX (PHYSICAL_IDS branch): memcpy() copied only |len| characters into
 * a len+1 buffer and never wrote the terminator, so the comma-replacement
 * loop below scanned an uninitialized byte and could run off the end of the
 * allocation.  The buffer is now explicitly null-terminated. */
size_t ConfigManager::readCameraMetadata(const XMLElement* const aParamElem, CameraInfo* aCamera,
                                         size_t& dataSize) {
    if (aParamElem == nullptr || aCamera == nullptr) {
        return 0;
    }
    const XMLElement* curElem = aParamElem->FirstChildElement("parameter");
    size_t numEntries = 0;
    camera_metadata_tag_t tag;
    while (curElem != nullptr) {
        if (ConfigManagerUtil::convertToMetadataTag(curElem->FindAttribute("name")->Value(), tag)) {
            switch (tag) {
                case ANDROID_LENS_DISTORTION:
                case ANDROID_LENS_POSE_ROTATION:
                case ANDROID_LENS_POSE_TRANSLATION:
                case ANDROID_LENS_INTRINSIC_CALIBRATION: {
                    /* float[] */
                    size_t count = 0;
                    void* data = ConfigManagerUtil::convertFloatArray(
                            curElem->FindAttribute("size")->Value(),
                            curElem->FindAttribute("value")->Value(), count);
                    aCamera->cameraMetadata.insert_or_assign(tag, std::make_pair(data, count));
                    ++numEntries;
                    dataSize += calculate_camera_metadata_entry_data_size(
                            get_camera_metadata_tag_type(tag), count);
                    break;
                }
                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
                    camera_metadata_enum_android_request_available_capabilities_t* data =
                            new camera_metadata_enum_android_request_available_capabilities_t[1];
                    if (ConfigManagerUtil::convertToCameraCapability(
                                curElem->FindAttribute("value")->Value(), *data)) {
                        aCamera->cameraMetadata.insert_or_assign(tag,
                                                                 std::make_pair((void*)data, 1));
                        ++numEntries;
                        dataSize += calculate_camera_metadata_entry_data_size(
                                get_camera_metadata_tag_type(tag), 1);
                    }
                    break;
                }
                case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: {
                    /* a comma-separated list of physical camera devices */
                    size_t len = strlen(curElem->FindAttribute("value")->Value());
                    char* data = new char[len + 1];
                    memcpy(data, curElem->FindAttribute("value")->Value(), len * sizeof(char));
                    /* BUG FIX: terminate the copy; memcpy() above copies only
                     * |len| characters, leaving data[len] uninitialized. */
                    data[len] = '\0';
                    /* replace commas with null char */
                    char* p = data;
                    while (*p != '\0') {
                        if (*p == ',') {
                            *p = '\0';
                        }
                        ++p;
                    }
                    /* NOTE(review): the stored count is len + 1 while the data
                     * size accounted below uses len — confirm which the
                     * metadata consumer expects. */
                    aCamera->cameraMetadata.insert_or_assign(tag,
                                                             std::make_pair((void*)data, len + 1));
                    ++numEntries;
                    dataSize += calculate_camera_metadata_entry_data_size(
                            get_camera_metadata_tag_type(tag), len);
                    break;
                }
                /* TODO(b/140416878): add vendor-defined/custom tag support */
                default:
                    LOG(WARNING) << "Parameter " << curElem->FindAttribute("name")->Value()
                                 << " is not supported";
                    break;
            }
        } else {
            LOG(WARNING) << "Unsupported metadata tag " << curElem->FindAttribute("name")->Value()
                         << " is found.";
        }
        curElem = curElem->NextSiblingElement("parameter");
    }
    return numEntries;
}
/* Allocates |aCamera|->characteristics sized for |totalEntries| entries and
 * |totalDataSize| bytes, then populates it with the parsed stream
 * configurations and every entry accumulated in cameraMetadata.
 * Returns false if allocation or any add_camera_metadata_entry() call fails. */
bool ConfigManager::constructCameraMetadata(CameraInfo* aCamera, size_t totalEntries,
                                            size_t totalDataSize) {
    if (aCamera == nullptr || !aCamera->allocate(totalEntries, totalDataSize)) {
        LOG(ERROR) << "Failed to allocate memory for camera metadata";
        return false;
    }
    const size_t numStreamConfigs = aCamera->streamConfigurations.size();
    /* NOTE(review): this allocates sizeof(StreamConfiguration) int32_t
     * elements (i.e. 4x the byte size) per configuration, and the loop below
     * advances |ptr| by the same element stride while memcpy() fills only
     * sizeof(StreamConfiguration) bytes at each position — the configurations
     * end up non-contiguous with uninitialized gaps between them.  Looks like
     * the stride should be sizeof(StreamConfiguration) / sizeof(int32_t);
     * confirm against the metadata consumer before changing. */
    std::unique_ptr<int32_t[]> data(new int32_t[sizeof(StreamConfiguration) * numStreamConfigs]);
    int32_t* ptr = data.get();
    for (auto& cfg : aCamera->streamConfigurations) {
        memcpy(ptr, &cfg.second, sizeof(StreamConfiguration));
        ptr += sizeof(StreamConfiguration);
    }
    /* The whole (strided) buffer is published as one metadata entry. */
    int32_t err = add_camera_metadata_entry(
            aCamera->characteristics, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, data.get(),
            numStreamConfigs * sizeof(StreamConfiguration));
    if (err) {
        LOG(ERROR) << "Failed to add stream configurations to metadata, ignored";
        return false;
    }
    bool success = true;
    for (auto& [tag, entry] : aCamera->cameraMetadata) {
        /* try to add new camera metadata entry */
        int32_t err =
                add_camera_metadata_entry(aCamera->characteristics, tag, entry.first, entry.second);
        if (err) {
            LOG(ERROR) << "Failed to add an entry with a tag, " << std::hex << tag;
            /* may exceed preallocated capacity */
            LOG(ERROR) << "Camera metadata has "
                       << get_camera_metadata_entry_count(aCamera->characteristics) << " / "
                       << get_camera_metadata_entry_capacity(aCamera->characteristics)
                       << " entries and "
                       << get_camera_metadata_data_count(aCamera->characteristics) << " / "
                       << get_camera_metadata_data_capacity(aCamera->characteristics)
                       << " bytes are filled.";
            LOG(ERROR) << "\tCurrent metadata entry requires "
                       << calculate_camera_metadata_entry_data_size(tag, entry.second) << " bytes.";
            success = false;
        }
    }
    /* Final fill-level report for debugging capacity issues. */
    LOG(VERBOSE) << "Camera metadata has "
                 << get_camera_metadata_entry_count(aCamera->characteristics) << " / "
                 << get_camera_metadata_entry_capacity(aCamera->characteristics) << " entries and "
                 << get_camera_metadata_data_count(aCamera->characteristics) << " / "
                 << get_camera_metadata_data_capacity(aCamera->characteristics)
                 << " bytes are filled.";
    return success;
}
void ConfigManager::readSystemInfo(const XMLElement* const aSysElem) {
if (aSysElem == nullptr) {
return;
}
/*
* Please note that this function assumes that a given system XML element
* and its child elements follow DTD. If it does not, it will cause a
* segmentation fault due to the failure of finding expected attributes.
*/
/* read number of cameras available in the system */
const XMLElement* xmlElem = aSysElem->FirstChildElement("num_cameras");
if (xmlElem != nullptr) {
if (!android::base::ParseInt(xmlElem->FindAttribute("value")->Value(),
&mSystemInfo.numCameras)) {
LOG(WARNING) << "Failed to parse " << xmlElem->FindAttribute("value")->Value();
}
}
}
void ConfigManager::readDisplayInfo(const XMLElement* const aDisplayElem) {
if (aDisplayElem == nullptr) {
LOG(WARNING) << "XML file does not have required camera element";
return;
}
const XMLElement* curDev = aDisplayElem->FirstChildElement("device");
while (curDev != nullptr) {
const char* id = curDev->FindAttribute("id")->Value();
std::unique_ptr<DisplayInfo> dpy(new DisplayInfo());
if (dpy == nullptr) {
LOG(ERROR) << "Failed to allocate memory for DisplayInfo";
return;
}
const XMLElement* cap = curDev->FirstChildElement("caps");
if (cap != nullptr) {
const XMLElement* curStream = cap->FirstChildElement("stream");
while (curStream != nullptr) {
/* read 4 attributes */
const XMLAttribute* idAttr = curStream->FindAttribute("id");
const XMLAttribute* widthAttr = curStream->FindAttribute("width");
const XMLAttribute* heightAttr = curStream->FindAttribute("height");
const XMLAttribute* fmtAttr = curStream->FindAttribute("format");
int32_t id = -1;
if (!android::base::ParseInt(idAttr->Value(), &id)) {
LOG(WARNING) << "Failed to parse " << idAttr->Value();
}
PixelFormat format = PixelFormat::UNSPECIFIED;
if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(), format)) {
StreamConfiguration cfg = {
.id = id,
.format = format,
.type = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
};
if (!android::base::ParseInt(widthAttr->Value(), &cfg.width) ||
!android::base::ParseInt(heightAttr->Value(), &cfg.height)) {
LOG(WARNING) << "Failed to parse " << widthAttr->Value() << " and "
<< heightAttr->Value();
}
dpy->streamConfigurations.insert_or_assign(id, cfg);
}
curStream = curStream->NextSiblingElement("stream");
}
}
mDisplayInfo.insert_or_assign(id, std::move(dpy));
curDev = curDev->NextSiblingElement("device");
}
return;
}
/* Loads and parses the configuration XML (preferring the override file over
 * the default one), fills the camera/system/display tables under
 * mConfigLock, then marks the configuration ready and wakes any waiters.
 * Returns false when no file can be parsed or the root element is wrong. */
bool ConfigManager::readConfigDataFromXML() noexcept {
    XMLDocument xmlDoc;
    const int64_t parsingStart = android::elapsedRealtimeNano();

    /* Try the override path first; fall back to the default configuration. */
    if (xmlDoc.LoadFile(sConfigOverridePath.data()) != tinyxml2::XML_SUCCESS &&
        xmlDoc.LoadFile(sConfigDefaultPath.data()) != tinyxml2::XML_SUCCESS) {
        LOG(ERROR) << "Failed to load and/or parse a configuration file, " << xmlDoc.ErrorStr();
        return false;
    }

    const XMLElement* rootElem = xmlDoc.RootElement();
    if (std::strcmp(rootElem->Name(), "configuration") != 0) {
        LOG(ERROR) << "A configuration file is not in the required format. "
                   << "See /etc/automotive/evs/evs_configuration.dtd";
        return false;
    }

    std::unique_lock<std::mutex> lock(mConfigLock);

    /* Cameras must be parsed before the system information. */
    readCameraInfo(rootElem->FirstChildElement("camera"));
    readSystemInfo(rootElem->FirstChildElement("system"));
    readDisplayInfo(rootElem->FirstChildElement("display"));

    /* Publish the configuration and wake any thread waiting for it. */
    mIsReady = true;
    lock.unlock();
    mConfigCond.notify_all();

    const int64_t parsingEnd = android::elapsedRealtimeNano();
    LOG(INFO) << "Parsing configuration file takes " << std::scientific
              << (double)(parsingEnd - parsingStart) / 1000000.0 << " ms.";
    return true;
}
bool ConfigManager::readConfigDataFromBinary() {
/* Temporary buffer to hold configuration data read from a binary file */
char mBuffer[1024];
std::fstream srcFile;
const int64_t readStart = android::elapsedRealtimeNano();
srcFile.open(mBinaryFilePath, std::fstream::in | std::fstream::binary);
if (!srcFile) {
LOG(ERROR) << "Failed to open a source binary file, " << mBinaryFilePath;
return false;
}
std::unique_lock<std::mutex> lock(mConfigLock);
mIsReady = false;
/* read configuration data into the internal buffer */
srcFile.read(mBuffer, sizeof(mBuffer));
LOG(VERBOSE) << __FUNCTION__ << ": " << srcFile.gcount() << " bytes are read.";
char* p = mBuffer;
size_t sz = 0;
/* read number of camera group information entries */
const size_t ngrps = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
/* read each camera information entry */
for (size_t cidx = 0; cidx < ngrps; ++cidx) {
/* read camera identifier */
std::string cameraId = *(reinterpret_cast<std::string*>(p));
p += sizeof(std::string);
/* size of camera_metadata_t */
const size_t num_entry = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
const size_t num_data = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
/* create CameraInfo and add it to hash map */
std::unique_ptr<ConfigManager::CameraGroupInfo> aCamera;
if (aCamera == nullptr || !aCamera->allocate(num_entry, num_data)) {
LOG(ERROR) << "Failed to create new CameraInfo object";
mCameraInfo.clear();
return false;
}
/* controls */
typedef struct {
CameraParam cid;
int32_t min;
int32_t max;
int32_t step;
} CameraCtrl;
sz = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
CameraCtrl* ptr = reinterpret_cast<CameraCtrl*>(p);
for (size_t idx = 0; idx < sz; ++idx) {
CameraCtrl temp = *ptr++;
aCamera->controls.insert_or_assign(
temp.cid, std::move(std::make_tuple(temp.min, temp.max, temp.step)));
}
p = reinterpret_cast<char*>(ptr);
/* stream configurations */
sz = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
int32_t* i32_ptr = reinterpret_cast<int32_t*>(p);
for (size_t idx = 0; idx < sz; ++idx) {
const int32_t id = *i32_ptr++;
StreamConfiguration temp;
memcpy(&temp, i32_ptr, sizeof(StreamConfiguration));
i32_ptr += sizeof(StreamConfiguration);
aCamera->streamConfigurations.insert_or_assign(id, temp);
}
p = reinterpret_cast<char*>(i32_ptr);
/* synchronization */
aCamera->synchronized = *(reinterpret_cast<int32_t*>(p));
p += sizeof(int32_t);
for (size_t idx = 0; idx < num_entry; ++idx) {
/* Read camera metadata entries */
camera_metadata_tag_t tag = *reinterpret_cast<camera_metadata_tag_t*>(p);
p += sizeof(camera_metadata_tag_t);
const size_t count = *reinterpret_cast<size_t*>(p);
p += sizeof(size_t);
const int32_t type = get_camera_metadata_tag_type(tag);
switch (type) {
case TYPE_BYTE: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(uint8_t);
break;
}
case TYPE_INT32: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(int32_t);
break;
}
case TYPE_FLOAT: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(float);
break;
}
case TYPE_INT64: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(int64_t);
break;
}
case TYPE_DOUBLE: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(double);
break;
}
case TYPE_RATIONAL:
p += count * sizeof(camera_metadata_rational_t);
break;
default:
LOG(WARNING) << "Type " << type << " is unknown; "
<< "data may be corrupted.";
break;
}
}
mCameraInfo.insert_or_assign(cameraId, std::move(aCamera));
}
/* read number of camera information entries */
const size_t ncams = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
/* read each camera information entry */
for (size_t cidx = 0; cidx < ncams; ++cidx) {
/* read camera identifier */
std::string cameraId = *(reinterpret_cast<std::string*>(p));
p += sizeof(std::string);
/* size of camera_metadata_t */
const size_t num_entry = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
const size_t num_data = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
/* create CameraInfo and add it to hash map */
std::unique_ptr<ConfigManager::CameraInfo> aCamera;
if (aCamera == nullptr || !aCamera->allocate(num_entry, num_data)) {
LOG(ERROR) << "Failed to create new CameraInfo object";
mCameraInfo.clear();
return false;
}
/* controls */
typedef struct {
CameraParam cid;
int32_t min;
int32_t max;
int32_t step;
} CameraCtrl;
sz = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
CameraCtrl* ptr = reinterpret_cast<CameraCtrl*>(p);
for (size_t idx = 0; idx < sz; ++idx) {
CameraCtrl temp = *ptr++;
aCamera->controls.insert_or_assign(
temp.cid, std::move(std::make_tuple(temp.min, temp.max, temp.step)));
}
p = reinterpret_cast<char*>(ptr);
/* stream configurations */
sz = *(reinterpret_cast<size_t*>(p));
p += sizeof(size_t);
int32_t* i32_ptr = reinterpret_cast<int32_t*>(p);
for (size_t idx = 0; idx < sz; ++idx) {
const int32_t id = *i32_ptr++;
StreamConfiguration temp;
memcpy(&temp, i32_ptr, sizeof(StreamConfiguration));
i32_ptr += sizeof(StreamConfiguration);
aCamera->streamConfigurations.insert_or_assign(id, temp);
}
p = reinterpret_cast<char*>(i32_ptr);
for (size_t idx = 0; idx < num_entry; ++idx) {
/* Read camera metadata entries */
camera_metadata_tag_t tag = *reinterpret_cast<camera_metadata_tag_t*>(p);
p += sizeof(camera_metadata_tag_t);
const size_t count = *reinterpret_cast<size_t*>(p);
p += sizeof(size_t);
const int32_t type = get_camera_metadata_tag_type(tag);
switch (type) {
case TYPE_BYTE: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(uint8_t);
break;
}
case TYPE_INT32: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(int32_t);
break;
}
case TYPE_FLOAT: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(float);
break;
}
case TYPE_INT64: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(int64_t);
break;
}
case TYPE_DOUBLE: {
add_camera_metadata_entry(aCamera->characteristics, tag, p, count);
p += count * sizeof(double);
break;
}
case TYPE_RATIONAL:
p += count * sizeof(camera_metadata_rational_t);
break;
default:
LOG(WARNING) << "Type " << type << " is unknown; "
<< "data may be corrupted.";
break;
}
}
mCameraInfo.insert_or_assign(cameraId, std::move(aCamera));
}
mIsReady = true;
/* notify that configuration data is ready */
lock.unlock();
mConfigCond.notify_all();
int64_t readEnd = android::elapsedRealtimeNano();
LOG(INFO) << __FUNCTION__ << " takes " << std::scientific
<< (double)(readEnd - readStart) / 1000000.0 << " ms.";
return true;
}
bool ConfigManager::writeConfigDataToBinary() {
std::fstream outFile;
const int64_t writeStart = android::elapsedRealtimeNano();
outFile.open(mBinaryFilePath, std::fstream::out | std::fstream::binary);
if (!outFile) {
LOG(ERROR) << "Failed to open a destination binary file, " << mBinaryFilePath;
return false;
}
/* lock a configuration data while it's being written to the filesystem */
std::lock_guard<std::mutex> lock(mConfigLock);
/* write camera group information */
size_t sz = mCameraGroups.size();
outFile.write(reinterpret_cast<const char*>(&sz), sizeof(size_t));
for (auto&& [camId, camInfo] : mCameraGroups) {
LOG(INFO) << "Storing camera group " << camId;
/* write a camera identifier string */
outFile.write(reinterpret_cast<const char*>(&camId), sizeof(std::string));
/* controls */
sz = camInfo->controls.size();
outFile.write(reinterpret_cast<const char*>(&sz), sizeof(size_t));
for (auto&& [ctrl, range] : camInfo->controls) {
outFile.write(reinterpret_cast<const char*>(&ctrl), sizeof(CameraParam));
outFile.write(reinterpret_cast<const char*>(&std::get<0>(range)), sizeof(int32_t));
outFile.write(reinterpret_cast<const char*>(&std::get<1>(range)), sizeof(int32_t));
outFile.write(reinterpret_cast<const char*>(&std::get<2>(range)), sizeof(int32_t));
}
/* stream configurations */
sz = camInfo->streamConfigurations.size();
outFile.write(reinterpret_cast<const char*>(&sz), sizeof(size_t));
for (auto&& [sid, cfg] : camInfo->streamConfigurations) {
outFile.write(reinterpret_cast<const char*>(sid), sizeof(int32_t));
outFile.write(reinterpret_cast<const char*>(&cfg), sizeof(cfg));
}
/* synchronization */
outFile.write(reinterpret_cast<const char*>(&camInfo->synchronized), sizeof(int32_t));
/* size of camera_metadata_t */
size_t num_entry = 0;
size_t num_data = 0;
if (camInfo->characteristics != nullptr) {
num_entry = get_camera_metadata_entry_count(camInfo->characteristics);
num_data = get_camera_metadata_data_count(camInfo->characteristics);
}
outFile.write(reinterpret_cast<const char*>(&num_entry), sizeof(size_t));
outFile.write(reinterpret_cast<const char*>(&num_data), sizeof(size_t));
/* write each camera metadata entry */
if (num_entry > 0) {
camera_metadata_entry_t entry;
for (size_t idx = 0; idx < num_entry; ++idx) {
if (get_camera_metadata_entry(camInfo->characteristics, idx, &entry)) {
LOG(ERROR) << "Failed to retrieve camera metadata entry " << idx;
outFile.close();
return false;
}
outFile.write(reinterpret_cast<const char*>(&entry.tag), sizeof(entry.tag));
outFile.write(reinterpret_cast<const char*>(&entry.count), sizeof(entry.count));
int32_t type = get_camera_metadata_tag_type(entry.tag);
switch (type) {
case TYPE_BYTE:
outFile.write(reinterpret_cast<const char*>(entry.data.u8),
sizeof(uint8_t) * entry.count);
break;
case TYPE_INT32:
outFile.write(reinterpret_cast<const char*>(entry.data.i32),
sizeof(int32_t) * entry.count);
break;
case TYPE_FLOAT:
outFile.write(reinterpret_cast<const char*>(entry.data.f),
sizeof(float) * entry.count);
break;
case TYPE_INT64:
outFile.write(reinterpret_cast<const char*>(entry.data.i64),
sizeof(int64_t) * entry.count);
break;
case TYPE_DOUBLE:
outFile.write(reinterpret_cast<const char*>(entry.data.d),
sizeof(double) * entry.count);
break;
case TYPE_RATIONAL:
[[fallthrough]];
default:
LOG(WARNING) << "Type " << type << " is not supported.";
break;
}
}
}
}
/* write camera device information */
sz = mCameraInfo.size();
outFile.write(reinterpret_cast<const char*>(&sz), sizeof(size_t));
for (auto&& [camId, camInfo] : mCameraInfo) {
LOG(INFO) << "Storing camera " << camId;
/* write a camera identifier string */
outFile.write(reinterpret_cast<const char*>(&camId), sizeof(std::string));
/* controls */
sz = camInfo->controls.size();
outFile.write(reinterpret_cast<const char*>(&sz), sizeof(size_t));
for (auto& [ctrl, range] : camInfo->controls) {
outFile.write(reinterpret_cast<const char*>(&ctrl), sizeof(CameraParam));
outFile.write(reinterpret_cast<const char*>(&std::get<0>(range)), sizeof(int32_t));
outFile.write(reinterpret_cast<const char*>(&std::get<1>(range)), sizeof(int32_t));
outFile.write(reinterpret_cast<const char*>(&std::get<2>(range)), sizeof(int32_t));
}
/* stream configurations */
sz = camInfo->streamConfigurations.size();
outFile.write(reinterpret_cast<const char*>(&sz), sizeof(size_t));
for (auto&& [sid, cfg] : camInfo->streamConfigurations) {
outFile.write(reinterpret_cast<const char*>(sid), sizeof(int32_t));
outFile.write(reinterpret_cast<const char*>(&cfg), sizeof(cfg));
}
/* size of camera_metadata_t */
size_t num_entry = 0;
size_t num_data = 0;
if (camInfo->characteristics != nullptr) {
num_entry = get_camera_metadata_entry_count(camInfo->characteristics);
num_data = get_camera_metadata_data_count(camInfo->characteristics);
}
outFile.write(reinterpret_cast<const char*>(&num_entry), sizeof(size_t));
outFile.write(reinterpret_cast<const char*>(&num_data), sizeof(size_t));
/* write each camera metadata entry */
if (num_entry > 0) {
camera_metadata_entry_t entry;
for (size_t idx = 0; idx < num_entry; ++idx) {
if (get_camera_metadata_entry(camInfo->characteristics, idx, &entry)) {
LOG(ERROR) << "Failed to retrieve camera metadata entry " << idx;
outFile.close();
return false;
}
outFile.write(reinterpret_cast<const char*>(&entry.tag), sizeof(entry.tag));
outFile.write(reinterpret_cast<const char*>(&entry.count), sizeof(entry.count));
int32_t type = get_camera_metadata_tag_type(entry.tag);
switch (type) {
case TYPE_BYTE:
outFile.write(reinterpret_cast<const char*>(entry.data.u8),
sizeof(uint8_t) * entry.count);
break;
case TYPE_INT32:
outFile.write(reinterpret_cast<const char*>(entry.data.i32),
sizeof(int32_t) * entry.count);
break;
case TYPE_FLOAT:
outFile.write(reinterpret_cast<const char*>(entry.data.f),
sizeof(float) * entry.count);
break;
case TYPE_INT64:
outFile.write(reinterpret_cast<const char*>(entry.data.i64),
sizeof(int64_t) * entry.count);
break;
case TYPE_DOUBLE:
outFile.write(reinterpret_cast<const char*>(entry.data.d),
sizeof(double) * entry.count);
break;
case TYPE_RATIONAL:
[[fallthrough]];
default:
LOG(WARNING) << "Type " << type << " is not supported.";
break;
}
}
}
}
outFile.close();
int64_t writeEnd = android::elapsedRealtimeNano();
LOG(INFO) << __FUNCTION__ << " takes " << std::scientific
<< (double)(writeEnd - writeStart) / 1000000.0 << " ms.";
return true;
}
/* Factory: builds a ConfigManager populated from the XML configuration.
 * Returns nullptr when no configuration file can be parsed.
 *
 * If XML parsing turns out to be too slow, readConfigDataFromBinary() and
 * writeConfigDataToBinary() can serialize CameraInfo objects to the
 * filesystem instead; that path was evaluated as roughly 10x faster. */
std::unique_ptr<ConfigManager> ConfigManager::Create() {
    std::unique_ptr<ConfigManager> cfgMgr(new ConfigManager());
    if (!cfgMgr->readConfigDataFromXML()) {
        return nullptr;
    }
    return cfgMgr;
}
/* Releases the camera_metadata_t and every raw buffer held in
 * cameraMetadata.  The buffers were allocated with new[] in
 * readCameraMetadata(), so each supported tag is deleted with the matching
 * array delete; unknown tags are logged and leaked rather than guessed at. */
ConfigManager::CameraInfo::~CameraInfo() {
    free_camera_metadata(characteristics);
    for (auto&& [tag, val] : cameraMetadata) {
        switch (tag) {
            case ANDROID_LENS_DISTORTION:
            case ANDROID_LENS_POSE_ROTATION:
            case ANDROID_LENS_POSE_TRANSLATION:
            case ANDROID_LENS_INTRINSIC_CALIBRATION: {
                /* stored as float[] by readCameraMetadata() */
                delete[] reinterpret_cast<float*>(val.first);
                break;
            }
            case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
                delete[] reinterpret_cast<
                        camera_metadata_enum_android_request_available_capabilities_t*>(val.first);
                break;
            }
            case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: {
                /* stored as a null-separated char[] list */
                delete[] reinterpret_cast<char*>(val.first);
                break;
            }
            default:
                LOG(WARNING) << "Tag " << std::hex << tag << " is not supported. "
                             << "Data may be corrupted?";
                break;
        }
    }
}

View file

@ -0,0 +1,155 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ConfigManagerUtil.h"
#include <android-base/logging.h>
#include <android-base/parsedouble.h>
#include <android-base/parseint.h>
#include <linux/videodev2.h>
#include <sstream>
#include <string>
#include <system/graphics-base-v1.0.h>
using ::aidl::android::hardware::automotive::evs::CameraParam;
using ::aidl::android::hardware::graphics::common::PixelFormat;
// Maps a camera-parameter name from the XML configuration onto the
// corresponding CameraParam enumerator.  Returns false (leaving camParam
// untouched) when the name is not recognized.
bool ConfigManagerUtil::convertToEvsCameraParam(const std::string& id, CameraParam& camParam) {
    const std::string trimmed = ConfigManagerUtil::trimString(id);

    if (trimmed == "BRIGHTNESS") {
        camParam = CameraParam::BRIGHTNESS;
    } else if (trimmed == "CONTRAST") {
        camParam = CameraParam::CONTRAST;
    } else if (trimmed == "AUTOGAIN") {
        camParam = CameraParam::AUTOGAIN;
    } else if (trimmed == "GAIN") {
        camParam = CameraParam::GAIN;
    } else if (trimmed == "AUTO_WHITE_BALANCE") {
        camParam = CameraParam::AUTO_WHITE_BALANCE;
    } else if (trimmed == "WHITE_BALANCE_TEMPERATURE") {
        camParam = CameraParam::WHITE_BALANCE_TEMPERATURE;
    } else if (trimmed == "SHARPNESS") {
        camParam = CameraParam::SHARPNESS;
    } else if (trimmed == "AUTO_EXPOSURE") {
        camParam = CameraParam::AUTO_EXPOSURE;
    } else if (trimmed == "ABSOLUTE_EXPOSURE") {
        camParam = CameraParam::ABSOLUTE_EXPOSURE;
    } else if (trimmed == "ABSOLUTE_FOCUS") {
        camParam = CameraParam::ABSOLUTE_FOCUS;
    } else if (trimmed == "AUTO_FOCUS") {
        camParam = CameraParam::AUTO_FOCUS;
    } else if (trimmed == "ABSOLUTE_ZOOM") {
        camParam = CameraParam::ABSOLUTE_ZOOM;
    } else {
        return false;
    }
    return true;
}
// Maps a pixel-format name onto PixelFormat.  On an unknown name, `out` is set
// to PixelFormat::UNSPECIFIED and false is returned.
bool ConfigManagerUtil::convertToPixelFormat(const std::string& in, PixelFormat& out) {
    const std::string trimmed = ConfigManagerUtil::trimString(in);

    if (trimmed == "RGBA_8888") {
        out = PixelFormat::RGBA_8888;
        return true;
    }
    if (trimmed == "YCRCB_420_SP") {
        out = PixelFormat::YCRCB_420_SP;
        return true;
    }
    if (trimmed == "YCBCR_422_I") {
        out = PixelFormat::YCBCR_422_I;
        return true;
    }

    out = PixelFormat::UNSPECIFIED;
    return false;
}
// Maps a metadata-tag name (without the ANDROID_ prefix) onto the
// corresponding camera_metadata_tag.  Returns false for unknown names.
bool ConfigManagerUtil::convertToMetadataTag(const char* name, camera_metadata_tag& aTag) {
    if (std::strcmp(name, "LENS_DISTORTION") == 0) {
        aTag = ANDROID_LENS_DISTORTION;
        return true;
    }
    if (std::strcmp(name, "LENS_INTRINSIC_CALIBRATION") == 0) {
        aTag = ANDROID_LENS_INTRINSIC_CALIBRATION;
        return true;
    }
    if (std::strcmp(name, "LENS_POSE_ROTATION") == 0) {
        aTag = ANDROID_LENS_POSE_ROTATION;
        return true;
    }
    if (std::strcmp(name, "LENS_POSE_TRANSLATION") == 0) {
        aTag = ANDROID_LENS_POSE_TRANSLATION;
        return true;
    }
    if (std::strcmp(name, "REQUEST_AVAILABLE_CAPABILITIES") == 0) {
        aTag = ANDROID_REQUEST_AVAILABLE_CAPABILITIES;
        return true;
    }
    if (std::strcmp(name, "LOGICAL_MULTI_CAMERA_PHYSICAL_IDS") == 0) {
        aTag = ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS;
        return true;
    }
    return false;
}
// Maps a capability name onto the camera capability enum.  Returns false for
// unknown names.
bool ConfigManagerUtil::convertToCameraCapability(
        const char* name, camera_metadata_enum_android_request_available_capabilities_t& cap) {
    if (std::strcmp(name, "DEPTH_OUTPUT") == 0) {
        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT;
        return true;
    }
    if (std::strcmp(name, "LOGICAL_MULTI_CAMERA") == 0) {
        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA;
        return true;
    }
    if (std::strcmp(name, "MONOCHROME") == 0) {
        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME;
        return true;
    }
    if (std::strcmp(name, "SECURE_IMAGE_DATA") == 0) {
        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA;
        return true;
    }
    return false;
}
// Parses `vals`, a `delimiter`-separated list of floats, into a newly
// allocated array of `count` elements, where `count` is parsed from `sz`.
// The caller owns the returned array and must release it with delete[].
// Returns nullptr when the size cannot be parsed.  Elements that fail to
// parse are left value-uninitialized and a warning is logged.
float* ConfigManagerUtil::convertFloatArray(const char* sz, const char* vals, size_t& count,
                                            const char delimiter) {
    std::string size_string(sz);
    std::string value_string(vals);

    if (!android::base::ParseUint(size_string, &count)) {
        LOG(ERROR) << "Failed to parse " << size_string;
        return nullptr;
    }

    float* result = new float[count];
    std::stringstream values(value_string);
    size_t idx = 0;
    std::string token;
    // Bound the loop by `count`: a value string with more tokens than the
    // declared size must not write past the end of the allocation (the
    // previous code had an unbounded index and could overrun the array).
    while (idx < count && getline(values, token, delimiter)) {
        if (!android::base::ParseFloat(token, &result[idx++])) {
            LOG(WARNING) << "Failed to parse " << token;
        }
    }
    return result;
}
// Returns `src` with leading and trailing characters from `ws` removed.
// A string consisting entirely of `ws` characters trims to "".
std::string ConfigManagerUtil::trimString(const std::string& src, const std::string& ws) {
    const auto first = src.find_first_not_of(ws);
    if (first == std::string::npos) {
        return "";
    }
    const auto last = src.find_last_not_of(ws);
    return src.substr(first, last - first + 1);
}

View file

@ -1,93 +0,0 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TODO(b/203661081): Remove below lines to disable compiler warnings.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-parameter"
#define LOG_TAG "DefaultEvsEnumerator"
#include <DefaultEvsEnumerator.h>
namespace aidl::android::hardware::automotive::evs::implementation {

using ::ndk::ScopedAStatus;

// Skeleton implementation of IEvsEnumerator: every method reports success
// without doing any real work, so the service can come up on targets that
// provide no actual EVS hardware.

ScopedAStatus DefaultEvsEnumerator::isHardware(bool* flag) {
    // This returns true always.
    *flag = true;
    return ScopedAStatus::ok();
}

// Camera enumeration and open/close: no-op stubs that report success.
ScopedAStatus DefaultEvsEnumerator::openCamera(const std::string& cameraId,
                                               const Stream& streamConfig,
                                               std::shared_ptr<IEvsCamera>* obj) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::closeCamera(const std::shared_ptr<IEvsCamera>& obj) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::getCameraList(std::vector<CameraDesc>* list) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::getStreamList(const CameraDesc& desc,
                                                  std::vector<Stream>* _aidl_return) {
    return ScopedAStatus::ok();
}

// Display management: likewise stubbed out.
ScopedAStatus DefaultEvsEnumerator::openDisplay(int32_t displayId,
                                                std::shared_ptr<IEvsDisplay>* obj) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::closeDisplay(const std::shared_ptr<IEvsDisplay>& state) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::getDisplayIdList(std::vector<uint8_t>* list) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::getDisplayState(DisplayState* state) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::registerStatusCallback(
        const std::shared_ptr<IEvsEnumeratorStatusCallback>& callback) {
    return ScopedAStatus::ok();
}

// Ultrasonics: not supported by this default implementation.
ScopedAStatus DefaultEvsEnumerator::openUltrasonicsArray(
        const std::string& id, std::shared_ptr<IEvsUltrasonicsArray>* obj) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::closeUltrasonicsArray(
        const std::shared_ptr<IEvsUltrasonicsArray>& obj) {
    return ScopedAStatus::ok();
}

ScopedAStatus DefaultEvsEnumerator::getUltrasonicsArrayList(
        std::vector<UltrasonicsArrayDesc>* list) {
    return ScopedAStatus::ok();
}

}  // namespace aidl::android::hardware::automotive::evs::implementation
#pragma clang diagnostic pop

View file

@ -0,0 +1,552 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "EvsEnumerator.h"
#include "ConfigManager.h"
#include "EvsGlDisplay.h"
#include "EvsMockCamera.h"
#include <aidl/android/hardware/automotive/evs/EvsResult.h>
#include <aidl/android/hardware/graphics/common/BufferUsage.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <cutils/android_filesystem_config.h>
#include <set>
#include <string_view>
namespace {
using ::aidl::android::frameworks::automotive::display::ICarDisplayProxy;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::ndk::ScopedAStatus;
using std::chrono_literals::operator""s;
// Constants
constexpr std::chrono::seconds kEnumerationTimeout = 10s;
constexpr uint64_t kInvalidDisplayId = std::numeric_limits<uint64_t>::max();
const std::set<uid_t> kAllowedUids = {AID_AUTOMOTIVE_EVS, AID_SYSTEM, AID_ROOT};
} // namespace
namespace aidl::android::hardware::automotive::evs::implementation {
// NOTE: All members values are static so that all clients operate on the same state
//       That is to say, this is effectively a singleton despite the fact that HIDL
//       constructs a new instance for each client.
//       The definitions of those shared static members follow.
std::unordered_map<std::string, EvsEnumerator::CameraRecord> EvsEnumerator::sCameraList;
std::mutex EvsEnumerator::sLock;                      // Guards shared static state below.
std::condition_variable EvsEnumerator::sCameraSignal; // Signaled when sCameraList gains entries.
std::unique_ptr<ConfigManager> EvsEnumerator::sConfigManager;
std::shared_ptr<ICarDisplayProxy> EvsEnumerator::sDisplayProxy;
std::unordered_map<uint8_t, uint64_t> EvsEnumerator::sDisplayPortList;  // port -> display id
// Accessor for the process-wide registry of active displays.  A function-local
// static guarantees a single shared instance across all client objects.
EvsEnumerator::ActiveDisplays& EvsEnumerator::mutableActiveDisplays() {
    static ActiveDisplays sActiveDisplays;
    return sActiveDisplays;
}
// Constructs the enumerator: lazily creates the shared ConfigManager, latches
// the car-display proxy handle, then enumerates cameras and displays up front.
EvsEnumerator::EvsEnumerator(const std::shared_ptr<ICarDisplayProxy>& proxyService) {
    LOG(DEBUG) << "EvsEnumerator is created.";

    if (!sConfigManager) {
        /* loads and initializes ConfigManager in a separate thread */
        sConfigManager = ConfigManager::Create();
    }

    if (!sDisplayProxy) {
        /* sets a car-window service handle */
        sDisplayProxy = proxyService;
    }

    // Enumerate existing devices
    enumerateCameras();
    mInternalDisplayId = enumerateDisplays();
}
bool EvsEnumerator::checkPermission() {
const auto uid = AIBinder_getCallingUid();
if (kAllowedUids.find(uid) == kAllowedUids.end()) {
LOG(ERROR) << "EVS access denied: "
<< "pid = " << AIBinder_getCallingPid() << ", uid = " << uid;
return false;
}
return true;
}
void EvsEnumerator::enumerateCameras() {
if (!sConfigManager) {
return;
}
for (auto id : sConfigManager->getCameraIdList()) {
CameraRecord rec(id.data());
std::unique_ptr<ConfigManager::CameraInfo>& pInfo = sConfigManager->getCameraInfo(id);
if (pInfo) {
uint8_t* ptr = reinterpret_cast<uint8_t*>(pInfo->characteristics);
const size_t len = get_camera_metadata_size(pInfo->characteristics);
rec.desc.metadata.insert(rec.desc.metadata.end(), ptr, ptr + len);
}
sCameraList.insert_or_assign(id, std::move(rec));
}
}
// Queries ICarDisplayProxy for available displays, records a port->display-id
// mapping in sDisplayPortList, and returns the internal display's id, or
// kInvalidDisplayId when no display (or no proxy) is available.
uint64_t EvsEnumerator::enumerateDisplays() {
    LOG(INFO) << __FUNCTION__ << ": Starting display enumeration";
    uint64_t internalDisplayId = kInvalidDisplayId;
    if (!sDisplayProxy) {
        LOG(ERROR) << "ICarDisplayProxy is not available!";
        return internalDisplayId;
    }

    std::vector<int64_t> displayIds;
    if (auto status = sDisplayProxy->getDisplayIdList(&displayIds); !status.isOk()) {
        LOG(ERROR) << "Failed to retrieve a display id list"
                   << ::android::statusToString(status.getStatus());
        return internalDisplayId;
    }

    if (displayIds.size() > 0) {
        // The first entry of the list is the internal display. See
        // SurfaceFlinger::getPhysicalDisplayIds() implementation.
        internalDisplayId = displayIds[0];
        for (const auto& id : displayIds) {
            // The port is taken from the low byte of the physical display id.
            const auto port = id & 0xFF;
            LOG(INFO) << "Display " << std::hex << id << " is detected on the port, " << port;
            sDisplayPortList.insert_or_assign(port, id);
        }
    }

    LOG(INFO) << "Found " << sDisplayPortList.size() << " displays";
    return internalDisplayId;
}
// Methods from ::android::hardware::automotive::evs::IEvsEnumerator follow.
// Returns descriptors for all known cameras.  If none have been enumerated
// yet, blocks up to kEnumerationTimeout waiting for one to appear.  Logical
// camera groups from the ConfigManager are appended (and cached into
// sCameraList) on the fly.
ScopedAStatus EvsEnumerator::getCameraList(std::vector<CameraDesc>* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;
    if (!checkPermission()) {
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::PERMISSION_DENIED));
    }

    {
        std::unique_lock<std::mutex> lock(sLock);
        if (sCameraList.size() < 1) {
            // No qualified device has been found. Wait until new device is ready,
            // for 10 seconds.
            if (!sCameraSignal.wait_for(lock, kEnumerationTimeout,
                                        [] { return sCameraList.size() > 0; })) {
                LOG(DEBUG) << "Timer expired. No new device has been added.";
            }
        }
    }

    // Build up a packed array of CameraDesc for return
    // NOTE(review): sCameraList is read and mutated below after sLock has been
    // released; looks racy if devices can be added concurrently — confirm.
    _aidl_return->resize(sCameraList.size());
    unsigned i = 0;
    for (const auto& [key, cam] : sCameraList) {
        (*_aidl_return)[i++] = cam.desc;
    }

    if (sConfigManager) {
        // Adding camera groups that represent logical camera devices
        auto camGroups = sConfigManager->getCameraGroupIdList();
        for (auto&& id : camGroups) {
            if (sCameraList.find(id) != sCameraList.end()) {
                // Already exists in the _aidl_return
                continue;
            }

            std::unique_ptr<ConfigManager::CameraGroupInfo>& tempInfo =
                    sConfigManager->getCameraGroupInfo(id);
            CameraRecord cam(id.data());
            if (tempInfo) {
                // Attach the group's metadata blob to its descriptor.
                uint8_t* ptr = reinterpret_cast<uint8_t*>(tempInfo->characteristics);
                const size_t len = get_camera_metadata_size(tempInfo->characteristics);
                cam.desc.metadata.insert(cam.desc.metadata.end(), ptr, ptr + len);
            }

            // Cache the group record and append it to the reply.
            sCameraList.insert_or_assign(id, cam);
            _aidl_return->push_back(cam.desc);
        }
    }

    // Send back the results
    LOG(DEBUG) << "Reporting " << sCameraList.size() << " cameras available";
    return ScopedAStatus::ok();
}
// Extracts the available stream configurations from the camera metadata blob
// embedded in `desc` and converts each one into an EVS Stream.  Leaves
// _aidl_return untouched when the metadata has no stream-configuration entry.
ScopedAStatus EvsEnumerator::getStreamList(const CameraDesc& desc,
                                           std::vector<Stream>* _aidl_return) {
    using AidlPixelFormat = ::aidl::android::hardware::graphics::common::PixelFormat;

    camera_metadata_t* pMetadata = const_cast<camera_metadata_t*>(
            reinterpret_cast<const camera_metadata_t*>(desc.metadata.data()));
    camera_metadata_entry_t streamConfig;
    if (!find_camera_metadata_entry(pMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                                    &streamConfig)) {
        // streamConfig.count is the number of int32_t elements in the entry,
        // so the number of configurations is count divided by the number of
        // int32_t fields per StreamConfiguration.  (Dividing by
        // sizeof(StreamConfiguration) — a size in bytes — under-counted the
        // available configurations.)
        constexpr unsigned kStreamCfgSz = sizeof(StreamConfiguration) / sizeof(int32_t);
        const unsigned numStreamConfigs = streamConfig.count / kStreamCfgSz;
        _aidl_return->resize(numStreamConfigs);
        const StreamConfiguration* pCurrentConfig =
                reinterpret_cast<StreamConfiguration*>(streamConfig.data.i32);
        for (unsigned i = 0; i < numStreamConfigs; ++i, ++pCurrentConfig) {
            // Build ::aidl::android::hardware::automotive::evs::Stream from
            // StreamConfiguration.
            Stream current = {
                    .id = pCurrentConfig->id,
                    .streamType =
                            pCurrentConfig->type ==
                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT
                                    ? StreamType::INPUT
                                    : StreamType::OUTPUT,
                    .width = pCurrentConfig->width,
                    .height = pCurrentConfig->height,
                    .format = static_cast<AidlPixelFormat>(pCurrentConfig->format),
                    .usage = BufferUsage::CAMERA_INPUT,
                    .rotation = Rotation::ROTATION_0,
            };

            (*_aidl_return)[i] = current;
        }
    }

    return ScopedAStatus::ok();
}
// Opens the named camera, tearing down any previous instance so the new
// caller gets exclusive access.  Returns INVALID_ARG for unknown ids and
// UNDERLYING_SERVICE_ERROR when the camera object cannot be constructed.
ScopedAStatus EvsEnumerator::openCamera(const std::string& id, const Stream& cfg,
                                        std::shared_ptr<IEvsCamera>* obj) {
    LOG(DEBUG) << __FUNCTION__;
    if (!checkPermission()) {
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::PERMISSION_DENIED));
    }

    // Is this a recognized camera id?
    CameraRecord* pRecord = findCameraById(id);
    if (!pRecord) {
        LOG(ERROR) << id << " does not exist!";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }

    // Has this camera already been instantiated by another caller?
    std::shared_ptr<EvsMockCamera> pActiveCamera = pRecord->activeInstance.lock();
    if (pActiveCamera) {
        LOG(WARNING) << "Killing previous camera because of new caller";
        closeCamera(pActiveCamera);
    }

    // Construct a camera instance for the caller
    if (!sConfigManager) {
        pActiveCamera = EvsMockCamera::Create(id.data());
    } else {
        pActiveCamera = EvsMockCamera::Create(id.data(), sConfigManager->getCameraInfo(id), &cfg);
    }
    if (!pActiveCamera) {
        LOG(ERROR) << "Failed to create new EvsMockCamera object for " << id;
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::UNDERLYING_SERVICE_ERROR));
    }
    // Publish the instance in the record only after construction succeeded
    // (previously a possibly-null instance was recorded before the check).
    pRecord->activeInstance = pActiveCamera;

    *obj = pActiveCamera;
    return ScopedAStatus::ok();
}
// Closes a camera previously returned by openCamera().  A null pointer is
// rejected with INVALID_ARG; a descriptor read failure is reported as
// UNDERLYING_SERVICE_ERROR.
ScopedAStatus EvsEnumerator::closeCamera(const std::shared_ptr<IEvsCamera>& cameraObj) {
    LOG(DEBUG) << __FUNCTION__;

    if (!cameraObj) {
        LOG(ERROR) << "Ignoring call to closeCamera with null camera ptr";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }

    // Ask the camera which device it represents so we can find its record.
    CameraDesc desc;
    if (auto status = cameraObj->getCameraInfo(&desc); !status.isOk()) {
        LOG(ERROR) << "Failed to read a camera descriptor";
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::UNDERLYING_SERVICE_ERROR));
    }

    closeCamera_impl(cameraObj, desc.id);
    return ScopedAStatus::ok();
}
// Opens exclusive access to the display on port `id`.  A previous owner of the
// same port is forcibly shut down first; an unknown port falls back to the
// internal (main) display.
ScopedAStatus EvsEnumerator::openDisplay(int32_t id, std::shared_ptr<IEvsDisplay>* displayObj) {
    LOG(DEBUG) << __FUNCTION__;
    if (!checkPermission()) {
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::PERMISSION_DENIED));
    }

    auto& displays = mutableActiveDisplays();

    if (auto existing_display_search = displays.popDisplay(id)) {
        // If we already have a display active, then we need to shut it down so we can
        // give exclusive access to the new caller.
        std::shared_ptr<EvsGlDisplay> pActiveDisplay = existing_display_search->displayWeak.lock();
        if (pActiveDisplay) {
            LOG(WARNING) << "Killing previous display because of new caller";
            pActiveDisplay->forceShutdown();
        }
    }

    // Resolve the requested port to a physical display id, falling back to
    // the internal display when the port is not in our list.
    uint64_t targetDisplayId = mInternalDisplayId;
    auto it = sDisplayPortList.find(id);
    if (it != sDisplayPortList.end()) {
        targetDisplayId = it->second;
    } else {
        LOG(WARNING) << "No display is available on the port " << static_cast<int32_t>(id)
                     << ". The main display " << mInternalDisplayId << " will be used instead";
    }

    // Create a new display interface and return it.
    std::shared_ptr<EvsGlDisplay> pActiveDisplay =
            ndk::SharedRefBase::make<EvsGlDisplay>(sDisplayProxy, targetDisplayId);
    if (auto insert_result = displays.tryInsert(id, pActiveDisplay); !insert_result) {
        // Lost a race for this port: undo our new display and report busy.
        LOG(ERROR) << "Display ID " << id << " has been used by another caller.";
        pActiveDisplay->forceShutdown();
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::RESOURCE_BUSY));
    }

    LOG(DEBUG) << "Returning new EvsGlDisplay object " << pActiveDisplay.get();
    *displayObj = pActiveDisplay;
    return ScopedAStatus::ok();
}
// Closes a display previously returned by openDisplay(), removing it from the
// active-display registry and shutting it down.
ScopedAStatus EvsEnumerator::closeDisplay(const std::shared_ptr<IEvsDisplay>& obj) {
    LOG(DEBUG) << __FUNCTION__;

    auto& activeDisplays = mutableActiveDisplays();
    auto popped = activeDisplays.popDisplay(obj);
    if (!popped) {
        // The registry no longer tracks this object; nothing to shut down.
        LOG(WARNING) << "Ignoring close of previously orphaned display - why did a client steal?";
        return ScopedAStatus::ok();
    }

    if (auto display = popped->displayWeak.lock()) {
        display->forceShutdown();
        return ScopedAStatus::ok();
    }

    LOG(ERROR) << "Somehow a display is being destroyed "
               << "when the enumerator didn't know one existed";
    return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
}
// Reports the state of the (first) active display; NOT_OPEN with
// OWNERSHIP_LOST when no display is currently open.
ScopedAStatus EvsEnumerator::getDisplayState(DisplayState* state) {
    LOG(DEBUG) << __FUNCTION__;
    if (!checkPermission()) {
        *state = DisplayState::DEAD;
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::PERMISSION_DENIED));
    }

    // TODO(b/262779341): For now we can just return the state of the 1st display. Need to update
    // the API later.
    const auto& all_displays = mutableActiveDisplays().getAllDisplays();

    // Do we still have a display object we think should be active?
    if (all_displays.empty()) {
        *state = DisplayState::NOT_OPEN;
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }

    if (auto display = all_displays.begin()->second.displayWeak.lock()) {
        return display->getDisplayState(state);
    }
    *state = DisplayState::NOT_OPEN;
    return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
}
// Returns the list of display ports, with the internal display's port first.
// NOTE(review): unlike the other entry points, this method performs no
// checkPermission() — confirm whether that is intentional.
ScopedAStatus EvsEnumerator::getDisplayIdList(std::vector<uint8_t>* list) {
    std::vector<uint8_t>& output = *list;
    if (sDisplayPortList.size() > 0) {
        output.resize(sDisplayPortList.size());
        unsigned i = 0;
        // The internal display always occupies the first slot.
        output[i++] = mInternalDisplayId & 0xFF;
        for (const auto& [port, id] : sDisplayPortList) {
            if (mInternalDisplayId != id) {
                output[i++] = port;
            }
        }
    }

    return ScopedAStatus::ok();
}
// Identifies this enumerator as a hardware implementation (always true here).
ScopedAStatus EvsEnumerator::isHardware(bool* flag) {
    *flag = true;
    return ScopedAStatus::ok();
}
void EvsEnumerator::notifyDeviceStatusChange(const std::string_view& deviceName,
DeviceStatusType type) {
std::lock_guard lock(sLock);
if (!mCallback) {
return;
}
std::vector<DeviceStatus> status{{.id = std::string(deviceName), .status = type}};
if (!mCallback->deviceStatusChanged(status).isOk()) {
LOG(WARNING) << "Failed to notify a device status change, name = " << deviceName
<< ", type = " << static_cast<int>(type);
}
}
// Installs (or replaces) the single device-status callback under sLock.
ScopedAStatus EvsEnumerator::registerStatusCallback(
        const std::shared_ptr<IEvsEnumeratorStatusCallback>& callback) {
    std::lock_guard lock(sLock);
    if (mCallback) {
        LOG(INFO) << "Replacing an existing device status callback";
    }
    mCallback = callback;
    return ScopedAStatus::ok();
}
void EvsEnumerator::closeCamera_impl(const std::shared_ptr<IEvsCamera>& pCamera,
const std::string& cameraId) {
// Find the named camera
CameraRecord* pRecord = findCameraById(cameraId);
// Is the display being destroyed actually the one we think is active?
if (!pRecord) {
LOG(ERROR) << "Asked to close a camera whose name isn't recognized";
} else {
std::shared_ptr<EvsMockCamera> pActiveCamera = pRecord->activeInstance.lock();
if (!pActiveCamera) {
LOG(WARNING) << "Somehow a camera is being destroyed "
<< "when the enumerator didn't know one existed";
} else if (pActiveCamera != pCamera) {
// This can happen if the camera was aggressively reopened,
// orphaning this previous instance
LOG(WARNING) << "Ignoring close of previously orphaned camera "
<< "- why did a client steal?";
} else {
// Shutdown the active camera
pActiveCamera->shutdown();
}
}
return;
}
// Looks a camera record up by name; returns nullptr when the id is unknown.
EvsEnumerator::CameraRecord* EvsEnumerator::findCameraById(const std::string& cameraId) {
    auto it = sCameraList.find(cameraId);
    return (it == sCameraList.end()) ? nullptr : &it->second;
}
std::optional<EvsEnumerator::ActiveDisplays::DisplayInfo> EvsEnumerator::ActiveDisplays::popDisplay(
int32_t id) {
std::lock_guard lck(mMutex);
const auto search = mIdToDisplay.find(id);
if (search == mIdToDisplay.end()) {
return std::nullopt;
}
const auto display_info = search->second;
mIdToDisplay.erase(search);
mDisplayToId.erase(display_info.internalDisplayRawAddr);
return display_info;
}
std::optional<EvsEnumerator::ActiveDisplays::DisplayInfo> EvsEnumerator::ActiveDisplays::popDisplay(
const std::shared_ptr<IEvsDisplay>& display) {
const auto display_ptr_val = reinterpret_cast<uintptr_t>(display.get());
std::lock_guard lck(mMutex);
const auto display_to_id_search = mDisplayToId.find(display_ptr_val);
if (display_to_id_search == mDisplayToId.end()) {
LOG(ERROR) << "Unknown display.";
return std::nullopt;
}
const auto id = display_to_id_search->second;
const auto id_to_display_search = mIdToDisplay.find(id);
mDisplayToId.erase(display_to_id_search);
if (id_to_display_search == mIdToDisplay.end()) {
LOG(ERROR) << "No correspsonding ID for the display, probably orphaned.";
return std::nullopt;
}
const auto display_info = id_to_display_search->second;
mIdToDisplay.erase(id);
return display_info;
}
// Returns a snapshot copy of the id->display map, taken under the lock.
std::unordered_map<int32_t, EvsEnumerator::ActiveDisplays::DisplayInfo>
EvsEnumerator::ActiveDisplays::getAllDisplays() {
    std::lock_guard lck(mMutex);
    return mIdToDisplay;
}
bool EvsEnumerator::ActiveDisplays::tryInsert(int32_t id,
const std::shared_ptr<EvsGlDisplay>& display) {
std::lock_guard lck(mMutex);
const auto display_ptr_val = reinterpret_cast<uintptr_t>(display.get());
auto id_to_display_insert_result =
mIdToDisplay.emplace(id, DisplayInfo{
.id = id,
.displayWeak = display,
.internalDisplayRawAddr = display_ptr_val,
});
if (!id_to_display_insert_result.second) {
return false;
}
auto display_to_id_insert_result = mDisplayToId.emplace(display_ptr_val, id);
if (!display_to_id_insert_result.second) {
mIdToDisplay.erase(id);
return false;
}
return true;
}
// Ultrasonics are not yet supported; reports success with no entries added.
ScopedAStatus EvsEnumerator::getUltrasonicsArrayList(
        [[maybe_unused]] std::vector<UltrasonicsArrayDesc>* list) {
    // TODO(b/149874793): Add implementation for EVS Manager and Sample driver
    return ScopedAStatus::ok();
}
// Ultrasonics are not yet supported; reports success without opening anything.
ScopedAStatus EvsEnumerator::openUltrasonicsArray(
        [[maybe_unused]] const std::string& id,
        [[maybe_unused]] std::shared_ptr<IEvsUltrasonicsArray>* obj) {
    // TODO(b/149874793): Add implementation for EVS Manager and Sample driver
    return ScopedAStatus::ok();
}
// Ultrasonics are not yet supported; reports success without closing anything.
ScopedAStatus EvsEnumerator::closeUltrasonicsArray(
        [[maybe_unused]] const std::shared_ptr<IEvsUltrasonicsArray>& obj) {
    // TODO(b/149874793): Add implementation for EVS Manager and Sample driver
    return ScopedAStatus::ok();
}
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@ -0,0 +1,417 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "EvsGlDisplay.h"

#include <aidl/android/hardware/automotive/evs/EvsResult.h>
#include <aidl/android/hardware/graphics/common/BufferUsage.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <android-base/thread_annotations.h>
#include <linux/time.h>
#include <ui/DisplayMode.h>
#include <ui/DisplayState.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <utils/SystemClock.h>

#include <chrono>
#include <cstdint>
namespace {

using ::aidl::android::frameworks::automotive::display::ICarDisplayProxy;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::base::ScopedLockAssertion;
using ::ndk::ScopedAStatus;

constexpr auto kTimeout = std::chrono::seconds(1);

// Set once the first frame has been rendered; used only for timing logs.
bool debugFirstFrameDisplayed = false;

// Derives a cheap identity token from the low 32 bits of a buffer handle's
// address.  std::uintptr_t makes the pointer-to-integer conversion
// well-defined on every ABI (long is narrower than a pointer under LLP64).
int generateFingerPrint(buffer_handle_t handle) {
    return static_cast<int>(reinterpret_cast<std::uintptr_t>(handle) & 0xFFFFFFFF);
}

}  // namespace
namespace aidl::android::hardware::automotive::evs::implementation {
// Constructs the display: fills in the self-description and immediately
// spawns the render thread, which owns all GL work.
EvsGlDisplay::EvsGlDisplay(const std::shared_ptr<ICarDisplayProxy>& pDisplayProxy,
                           uint64_t displayId)
    : mDisplayId(displayId), mDisplayProxy(pDisplayProxy) {
    LOG(DEBUG) << "EvsGlDisplay instantiated";

    // Set up our self description
    // NOTE: These are arbitrary values chosen for testing
    mInfo.id = std::to_string(displayId);
    mInfo.vendorFlags = 3870;

    // Start a thread to render images on this display
    {
        std::lock_guard lock(mLock);
        mState = RUN;
    }
    mRenderThread = std::thread([this]() { renderFrames(); });
}
// Tears the display down; forceShutdown() stops and joins the render thread.
EvsGlDisplay::~EvsGlDisplay() {
    LOG(DEBUG) << "EvsGlDisplay being destroyed";
    forceShutdown();
}
/**
* This gets called if another caller "steals" ownership of the display
*/
void EvsGlDisplay::forceShutdown() {
    LOG(DEBUG) << "EvsGlDisplay forceShutdown";
    {
        std::lock_guard lock(mLock);

        // If the buffer isn't being held by a remote client, release it now as an
        // optimization to release the resources more quickly than the destructor might
        // get called.
        if (mBuffer.handle != nullptr) {
            // Report if we're going away while a buffer is outstanding
            if (mBufferBusy || mState == RUN) {
                LOG(ERROR) << "EvsGlDisplay going down while client is holding a buffer";
            }
            // Ask the render thread to stop; it releases the buffer on exit.
            mState = STOPPING;
        }

        // Put this object into an unrecoverable error state since somebody else
        // is going to own the display now.
        mRequestedState = DisplayState::DEAD;
    }
    // Wake the render thread so it observes the state change, then wait for
    // it to finish before returning.
    mBufferReadyToRender.notify_all();

    if (mRenderThread.joinable()) {
        mRenderThread.join();
    }
}
/**
* Initialize GL in the context of a caller's thread and prepare a graphic
* buffer to use.
*/
// Initializes the GL context and allocates the RGBA_8888 render-target
// buffer.  Must be called with mLock held.  On any failure the GL state is
// unwound and false is returned.
bool EvsGlDisplay::initializeGlContextLocked() {
    // Initialize our display window
    // NOTE: This will cause the display to become "VISIBLE" before a frame is actually
    // returned, which is contrary to the spec and will likely result in a black frame being
    // (briefly) shown.
    if (!mGlWrapper.initialize(mDisplayProxy, mDisplayId)) {
        // Report the failure
        LOG(ERROR) << "Failed to initialize GL display";
        return false;
    }

    // Assemble the buffer description we'll use for our render target
    static_assert(::aidl::android::hardware::graphics::common::PixelFormat::RGBA_8888 ==
                  static_cast<::aidl::android::hardware::graphics::common::PixelFormat>(
                          HAL_PIXEL_FORMAT_RGBA_8888));
    mBuffer.description = {
            .width = static_cast<int>(mGlWrapper.getWidth()),
            .height = static_cast<int>(mGlWrapper.getHeight()),
            .layers = 1,
            .format = PixelFormat::RGBA_8888,
            // FIXME: Below line is not using
            // ::aidl::android::hardware::graphics::common::BufferUsage because
            // BufferUsage enum does not support a bitwise-OR operation; they
            // should be BufferUsage::GPU_RENDER_TARGET |
            // BufferUsage::COMPOSER_OVERLAY
            .usage = static_cast<BufferUsage>(GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_COMPOSER),
    };
    ::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());
    uint32_t stride = static_cast<uint32_t>(mBuffer.description.stride);
    buffer_handle_t handle = nullptr;
    const ::android::status_t result =
            alloc.allocate(mBuffer.description.width, mBuffer.description.height,
                           static_cast<::android::PixelFormat>(mBuffer.description.format),
                           mBuffer.description.layers,
                           static_cast<uint64_t>(mBuffer.description.usage), &handle, &stride,
                           /* requestorName= */ "EvsGlDisplay");
    if (result != ::android::NO_ERROR) {
        LOG(ERROR) << "Error " << result << " allocating " << mBuffer.description.width << " x "
                   << mBuffer.description.height << " graphics buffer.";
        mGlWrapper.shutdown();
        return false;
    }
    if (handle == nullptr) {
        LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
        mGlWrapper.shutdown();
        return false;
    }

    mBuffer.description.stride = stride;
    mBuffer.handle = handle;
    // Fix: compute the fingerprint from the buffer we just allocated.  The
    // previous code hashed mBuffer.handle BEFORE it was assigned, so the
    // fingerprint was derived from a stale (initially null) handle.
    mBuffer.fingerprint = generateFingerPrint(mBuffer.handle);

    LOG(DEBUG) << "Allocated new buffer " << mBuffer.handle << " with stride "
               << mBuffer.description.stride;
    return true;
}
/**
* This method runs in a separate thread and renders the contents of the buffer.
*/
void EvsGlDisplay::renderFrames() {
    {
        std::lock_guard lock(mLock);

        if (!initializeGlContextLocked()) {
            LOG(ERROR) << "Failed to initialize GL context";
            return;
        }

        // Display buffer is ready.
        mBufferBusy = false;
    }
    // Wake any thread waiting for the display buffer to become available.
    mBufferReadyToUse.notify_all();

    while (true) {
        {
            std::unique_lock lock(mLock);
            ScopedLockAssertion lock_assertion(mLock);
            // Sleep until a frame arrives or a shutdown is requested.
            mBufferReadyToRender.wait(
                    lock, [this]() REQUIRES(mLock) { return mBufferReady || mState != RUN; });
            if (mState != RUN) {
                LOG(DEBUG) << "A rendering thread is stopping";
                break;
            }
            mBufferReady = false;
        }

        // Update the texture contents with the provided data
        if (!mGlWrapper.updateImageTexture(mBuffer.handle, mBuffer.description)) {
            LOG(WARNING) << "Failed to update the image texture";
            continue;
        }

        // Put the image on the screen
        mGlWrapper.renderImageToScreen();
        if (!debugFirstFrameDisplayed) {
            LOG(DEBUG) << "EvsFirstFrameDisplayTiming start time: " << ::android::elapsedRealtime()
                       << " ms.";
            debugFirstFrameDisplayed = true;
        }

        // Mark current frame is consumed.
        {
            std::lock_guard lock(mLock);
            mBufferBusy = false;
        }
        mBufferDone.notify_all();
    }

    LOG(DEBUG) << "A rendering thread is stopped.";

    // Drop the graphics buffer we've been using
    ::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());
    alloc.free(mBuffer.handle);
    mBuffer.handle = nullptr;

    // Tear down the GL state and window before reporting the stopped state.
    mGlWrapper.hideWindow(mDisplayProxy, mDisplayId);
    mGlWrapper.shutdown();

    std::lock_guard lock(mLock);
    mState = STOPPED;
}
/**
 * Returns basic information about the EVS display provided by the system.
 * See the description of the DisplayDesc structure for details.
 */
ScopedAStatus EvsGlDisplay::getDisplayInfo(DisplayDesc* _aidl_return) {
    // Without a display proxy we have no way to query the underlying display.
    if (!mDisplayProxy) {
        return ::ndk::ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::UNDERLYING_SERVICE_ERROR));
    }
    // Ask the proxy service for the physical display attributes.
    ::aidl::android::frameworks::automotive::display::DisplayDesc physicalDesc;
    const auto queryStatus = mDisplayProxy->getDisplayInfo(mDisplayId, &physicalDesc);
    if (!queryStatus.isOk()) {
        return ::ndk::ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::UNDERLYING_SERVICE_ERROR));
    }
    // Copy the physical attributes and fill in the EVS-specific fields.
    _aidl_return->width = physicalDesc.width;
    _aidl_return->height = physicalDesc.height;
    _aidl_return->orientation = static_cast<Rotation>(physicalDesc.orientation);
    _aidl_return->id = mInfo.id;  // FIXME: what should be ID here?
    _aidl_return->vendorFlags = mInfo.vendorFlags;
    return ::ndk::ScopedAStatus::ok();
}
/**
 * Clients may set the display state to express their desired state.
 * The HAL implementation must gracefully accept a request for any state
 * while in any other state, although the response may be to ignore the request.
 * The display is defined to start in the NOT_VISIBLE state upon initialization.
 * The client is then expected to request the VISIBLE_ON_NEXT_FRAME state, and
 * then begin providing video. When the display is no longer required, the client
 * is expected to request the NOT_VISIBLE state after passing the last video frame.
 */
ScopedAStatus EvsGlDisplay::setDisplayState(DisplayState state) {
    LOG(DEBUG) << __FUNCTION__;
    std::lock_guard lock(mLock);
    // Once another client has taken over, this instance may no longer drive the display.
    if (mRequestedState == DisplayState::DEAD) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }
    // Reject values that are not members of the DisplayState enum.
    static constexpr ::ndk::enum_range<DisplayState> kDisplayStateRange;
    const bool isKnownState =
            std::any_of(kDisplayStateRange.begin(), kDisplayStateRange.end(),
                        [state](DisplayState knownState) { return knownState == state; });
    if (!isKnownState) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    // Show or hide the window immediately for the two states that demand it;
    // all other states only update the bookkeeping below.
    if (state == DisplayState::NOT_VISIBLE) {
        mGlWrapper.hideWindow(mDisplayProxy, mDisplayId);
    } else if (state == DisplayState::VISIBLE) {
        mGlWrapper.showWindow(mDisplayProxy, mDisplayId);
    }
    // Record the requested state
    mRequestedState = state;
    return ScopedAStatus::ok();
}
/**
 * The HAL implementation should report the actual current state, which might
 * transiently differ from the most recently requested state. Note, however, that
 * the logic responsible for changing display states should generally live above
 * the device layer, making it undesirable for the HAL implementation to
 * spontaneously change display states.
 */
ScopedAStatus EvsGlDisplay::getDisplayState(DisplayState* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;
    // Report the most recently requested state under the state lock.
    std::lock_guard stateLock(mLock);
    *_aidl_return = mRequestedState;
    return ScopedAStatus::ok();
}
/**
 * This call returns a handle to a frame buffer associated with the display.
 * This buffer may be locked and written to by software and/or GL. This buffer
 * must be returned via a call to returnTargetBufferForDisplay() even if the
 * display is no longer visible.
 *
 * Blocks until the single display buffer is available (i.e. the previous
 * frame has been returned and rendered).
 */
ScopedAStatus EvsGlDisplay::getTargetBuffer(BufferDesc* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;
    std::unique_lock lock(mLock);
    ScopedLockAssertion lock_assertion(mLock);
    if (mRequestedState == DisplayState::DEAD) {
        LOG(ERROR) << "Rejecting buffer request from object that lost ownership of the display.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }
    // Wait until the display buffer is free.  The predicate form of wait()
    // only returns once !mBufferBusy holds, so the buffer is guaranteed to be
    // available afterwards (the old post-wait "busy" error branch was
    // unreachable and has been removed).
    mBufferReadyToUse.wait(lock, [this]() REQUIRES(mLock) { return !mBufferBusy; });
    // Mark our buffer as busy
    mBufferBusy = true;
    // Send the buffer to the client.  dupToAidl() already yields an rvalue, so
    // no std::move is needed (the original's move was a no-op).
    LOG(VERBOSE) << "Providing display buffer handle " << mBuffer.handle;
    BufferDesc bufferDescToSend = {
            .buffer =
                    {
                            .handle = ::android::dupToAidl(mBuffer.handle),
                            .description = mBuffer.description,
                    },
            .pixelSizeBytes = 4,  // RGBA_8888 is 4-byte-per-pixel format
            .bufferId = mBuffer.fingerprint,
    };
    *_aidl_return = std::move(bufferDescToSend);
    return ScopedAStatus::ok();
}
/**
 * This call tells the display that the buffer is ready for display.
 * The buffer is no longer valid for use by the client after this call.
 *
 * Hands the returned frame to the render thread and blocks (up to kTimeout)
 * until that thread reports the frame has been consumed.
 */
ScopedAStatus EvsGlDisplay::returnTargetBufferForDisplay(const BufferDesc& buffer) {
    LOG(VERBOSE) << __FUNCTION__;
    std::unique_lock lock(mLock);
    ScopedLockAssertion lock_assertion(mLock);
    // Nobody should call us with a null handle
    if (buffer.buffer.handle.fds.size() < 1) {
        LOG(ERROR) << __FUNCTION__ << " called without a valid buffer handle.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    // The returned frame must be the one handed out by getTargetBuffer().
    if (buffer.bufferId != mBuffer.fingerprint) {
        LOG(ERROR) << "Got an unrecognized frame returned.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    // A return is only legal while a buffer is outstanding.
    if (!mBufferBusy) {
        LOG(ERROR) << "A frame was returned with no outstanding frames.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    // If we've been displaced by another owner of the display, then we can't do anything else
    if (mRequestedState == DisplayState::DEAD) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }
    // If we were waiting for a new frame, this is it!
    if (mRequestedState == DisplayState::VISIBLE_ON_NEXT_FRAME) {
        mRequestedState = DisplayState::VISIBLE;
        mGlWrapper.showWindow(mDisplayProxy, mDisplayId);
    }
    // Validate we're in an expected state
    if (mRequestedState != DisplayState::VISIBLE) {
        // Not sure why a client would send frames back when we're not visible.
        LOG(WARNING) << "Got a frame returned while not visible - ignoring.";
        return ScopedAStatus::ok();
    }
    // Wake the render thread, then wait (bounded by kTimeout) for it to
    // report the frame consumed via mBufferDone.
    mBufferReady = true;
    mBufferReadyToRender.notify_all();
    if (!mBufferDone.wait_for(lock, kTimeout, [this]() REQUIRES(mLock) { return !mBufferBusy; })) {
        // The render thread failed to consume the frame in time.
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::UNDERLYING_SERVICE_ERROR));
    }
    return ScopedAStatus::ok();
}
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@ -0,0 +1,642 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "EvsMockCamera.h"
#include "ConfigManager.h"
#include "EvsEnumerator.h"
#include <aidlcommonsupport/NativeHandle.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <utils/SystemClock.h>
#include <memory>
namespace {
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::ndk::ScopedAStatus;
// Arbitrary limit on number of graphics buffers allowed to be allocated
// Safeguards against unreasonable resource consumption and provides a testable limit
constexpr unsigned kMaxBuffersInFlight = 100;
// Minimum number of buffers to run a video stream
constexpr int kMinimumBuffersInFlight = 1;
// Colors for the colorbar test pattern in ABGR format
constexpr uint32_t kColors[] = {
        0xFFFFFFFF,  // white
        0xFF00FFFF,  // yellow
        0xFFFFFF00,  // cyan
        0xFF00FF00,  // green
        0xFFFF00FF,  // fuchsia
        0xFF0000FF,  // red
        0xFFFF0000,  // blue
        0xFF000000,  // black
};
// Number of bars in the colorbar pattern above.
constexpr size_t kNumColors = sizeof(kColors) / sizeof(kColors[0]);
}  // namespace
namespace aidl::android::hardware::automotive::evs::implementation {
// Constructs a mock camera identified by `id`; `camInfo` may be null when no
// configuration is available (metadata is then left empty).
EvsMockCamera::EvsMockCamera([[maybe_unused]] Sigil sigil, const char* id,
                             std::unique_ptr<ConfigManager::CameraInfo>& camInfo)
    : mFramesAllowed(0), mFramesInUse(0), mStreamState(STOPPED), mCameraInfo(camInfo) {
    LOG(DEBUG) << __FUNCTION__;
    // Remember the identifier this camera was created with.
    mDescription.id = id;
    // Attach the camera metadata blob when a configuration is provided.
    if (camInfo) {
        const uint8_t* const begin = reinterpret_cast<uint8_t*>(camInfo->characteristics);
        const uint8_t* const end = begin + get_camera_metadata_size(camInfo->characteristics);
        mDescription.metadata.insert(mDescription.metadata.end(), begin, end);
    }
}
EvsMockCamera::~EvsMockCamera() {
    LOG(DEBUG) << __FUNCTION__;
    // Release the stream and any graphics buffers still held.
    shutdown();
}
// This gets called if another caller "steals" ownership of the camera
void EvsMockCamera::shutdown() {
LOG(DEBUG) << __FUNCTION__;
// Make sure our output stream is cleaned up
// (It really should be already)
stopVideoStream_impl();
// Claim the lock while we work on internal state
std::lock_guard lock(mAccessLock);
// Drop all the graphics buffers we've been using
if (mBuffers.size() > 0) {
::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());
for (auto&& rec : mBuffers) {
if (rec.inUse) {
LOG(WARNING) << "WARNING: releasing a buffer remotely owned.";
}
alloc.free(rec.handle);
rec.handle = nullptr;
}
mBuffers.clear();
}
// Put this object into an unrecoverable error state since somebody else
// is going to own the underlying camera now
mStreamState = DEAD;
}
// Methods from ::aidl::android::hardware::automotive::evs::IEvsCamera follow.
ScopedAStatus EvsMockCamera::getCameraInfo(CameraDesc* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;
    // Hand back a copy of our self-description.
    *_aidl_return = mDescription;
    return ScopedAStatus::ok();
}
// Resizes the frame pool to `bufferCount` buffers.
// Fails when ownership was lost, the count is non-positive, or allocation fails.
// (A stray empty statement `;` after the LOG line was removed.)
ScopedAStatus EvsMockCamera::setMaxFramesInFlight(int32_t bufferCount) {
    LOG(DEBUG) << __FUNCTION__ << ", bufferCount = " << bufferCount;
    std::lock_guard lock(mAccessLock);
    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (mStreamState == DEAD) {
        LOG(ERROR) << "Ignoring setMaxFramesInFlight call when camera has been lost.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }
    // We cannot function without at least one video buffer to send data
    if (bufferCount < 1) {
        LOG(ERROR) << "Ignoring setMaxFramesInFlight with less than one buffer requested.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    // Update our internal state
    if (!setAvailableFrames_Locked(bufferCount)) {
        LOG(ERROR) << "Failed to adjust the maximum number of frames in flight.";
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));
    }
    return ScopedAStatus::ok();
}
// Starts delivering mock frames to `cb` on a dedicated thread.
// Fails when the callback is null, ownership was lost, a stream is already
// running, or the minimum buffer pool cannot be allocated.
ScopedAStatus EvsMockCamera::startVideoStream(const std::shared_ptr<IEvsCameraStream>& cb) {
    LOG(DEBUG) << __FUNCTION__;
    if (!cb) {
        LOG(ERROR) << "A given stream callback is invalid.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    std::lock_guard lock(mAccessLock);
    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (mStreamState == DEAD) {
        LOG(ERROR) << "Ignoring startVideoStream call when camera has been lost.";
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::OWNERSHIP_LOST));
    }
    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::STREAM_ALREADY_RUNNING));
    }
    // If the client never indicated otherwise, configure ourselves for a single streaming buffer
    if (mFramesAllowed < kMinimumBuffersInFlight &&
        !setAvailableFrames_Locked(kMinimumBuffersInFlight)) {
        LOG(ERROR) << "Failed to start stream because we couldn't get a graphics buffer";
        return ScopedAStatus::fromServiceSpecificError(
                static_cast<int>(EvsResult::BUFFER_NOT_AVAILABLE));
    }
    // Record the user's callback for use when we have a frame ready
    mStream = cb;
    // Start the frame generation thread
    // (mStreamState must be RUNNING before the thread launches, so the
    // generateFrames() loop does not exit immediately.)
    mStreamState = RUNNING;
    mCaptureThread = std::thread([this]() { generateFrames(); });
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::doneWithFrame(const std::vector<BufferDesc>& list) {
    std::lock_guard lock(mAccessLock);
    // Release every returned buffer back into the available pool.
    for (const auto& bufferDesc : list) {
        returnBufferLocked(bufferDesc.bufferId);
    }
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::stopVideoStream() {
    LOG(DEBUG) << __FUNCTION__;
    // Delegate to the shared implementation also used by shutdown().
    return stopVideoStream_impl();
}
// Stops the frame-generation thread and marks the stream STOPPED.
// Shared by stopVideoStream() and shutdown(); a no-op when no stream runs.
ScopedAStatus EvsMockCamera::stopVideoStream_impl() {
    std::unique_lock lock(mAccessLock);
    if (mStreamState != RUNNING) {
        // Safely return here because a stream is not running.
        return ScopedAStatus::ok();
    }
    // Tell the GenerateFrames loop we want it to stop
    mStreamState = STOPPING;
    // Block outside the mutex until the "stop" flag has been acknowledged
    // We won't send any more frames, but the client might still get some already in flight
    LOG(DEBUG) << "Waiting for stream thread to end...";
    lock.unlock();  // released so generateFrames() can take the lock and exit
    if (mCaptureThread.joinable()) {
        mCaptureThread.join();
    }
    lock.lock();
    mStreamState = STOPPED;
    mStream = nullptr;
    LOG(DEBUG) << "Stream marked STOPPED.";
    return ScopedAStatus::ok();
}
// Looks up the opaque blob stored under `opaqueIdentifier`.
// Returns INVALID_ARG when nothing was stored under that identifier.
ScopedAStatus EvsMockCamera::getExtendedInfo(int32_t opaqueIdentifier,
                                             std::vector<uint8_t>* opaqueValue) {
    const auto it = mExtInfo.find(opaqueIdentifier);
    if (it == mExtInfo.end()) {
        return ScopedAStatus::fromServiceSpecificError(static_cast<int>(EvsResult::INVALID_ARG));
    }
    // Reuse the iterator rather than a second lookup through operator[],
    // which would also default-construct an entry if the key were absent.
    *opaqueValue = it->second;
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::setExtendedInfo(int32_t opaqueIdentifier,
                                             const std::vector<uint8_t>& opaqueValue) {
    // Store (or overwrite) the opaque blob under the given identifier.
    mExtInfo[opaqueIdentifier] = opaqueValue;
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::getPhysicalCameraInfo([[maybe_unused]] const std::string& id,
                                                   CameraDesc* _aidl_return) {
    LOG(DEBUG) << __FUNCTION__;
    // The mock camera has no physical sub-cameras, so this reports the same
    // description as getCameraInfo() does.
    *_aidl_return = mDescription;
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::pauseVideoStream() {
    // Pausing is not supported by this mock implementation.
    constexpr auto kNotSupported = static_cast<int>(EvsResult::NOT_SUPPORTED);
    return ScopedAStatus::fromServiceSpecificError(kNotSupported);
}
ScopedAStatus EvsMockCamera::resumeVideoStream() {
    // Resuming is not supported by this mock implementation.
    constexpr auto kNotSupported = static_cast<int>(EvsResult::NOT_SUPPORTED);
    return ScopedAStatus::fromServiceSpecificError(kNotSupported);
}
ScopedAStatus EvsMockCamera::setPrimaryClient() {
    // The reference implementation serves a single client at a time, so any
    // caller may consider itself primary; report success unconditionally.
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::forcePrimaryClient(const std::shared_ptr<IEvsDisplay>&) {
    // Single-client reference implementation: a forced takeover trivially succeeds.
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::unsetPrimaryClient() {
    // With only one client possible, a secondary client can never reach this
    // call; simply report success.
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::getParameterList(std::vector<CameraParam>* _aidl_return) {
    if (mCameraInfo) {
        // Report the name of every control the configuration declares.
        _aidl_return->clear();
        _aidl_return->reserve(mCameraInfo->controls.size());
        for (const auto& [name, range] : mCameraInfo->controls) {
            _aidl_return->push_back(name);
        }
    }
    return ScopedAStatus::ok();
}
ScopedAStatus EvsMockCamera::getIntParameterRange([[maybe_unused]] CameraParam id,
                                                  [[maybe_unused]] ParameterRange* _aidl_return) {
    // Camera parameters are not supported by the mock camera.
    constexpr auto kNotSupported = static_cast<int>(EvsResult::NOT_SUPPORTED);
    return ScopedAStatus::fromServiceSpecificError(kNotSupported);
}
ScopedAStatus EvsMockCamera::setIntParameter(
        [[maybe_unused]] CameraParam id, [[maybe_unused]] int32_t value,
        [[maybe_unused]] std::vector<int32_t>* effectiveValue) {
    // Camera parameters are not supported by the mock camera.
    constexpr auto kNotSupported = static_cast<int>(EvsResult::NOT_SUPPORTED);
    return ScopedAStatus::fromServiceSpecificError(kNotSupported);
}
ScopedAStatus EvsMockCamera::getIntParameter([[maybe_unused]] CameraParam id,
                                             [[maybe_unused]] std::vector<int32_t>* value) {
    // Camera parameters are not supported by the mock camera.
    constexpr auto kNotSupported = static_cast<int>(EvsResult::NOT_SUPPORTED);
    return ScopedAStatus::fromServiceSpecificError(kNotSupported);
}
ScopedAStatus EvsMockCamera::importExternalBuffers(
        [[maybe_unused]] const std::vector<BufferDesc>& buffers,
        [[maybe_unused]] int32_t* _aidl_return) {
    LOG(DEBUG) << "This implementation does not support an external buffer import.";
    constexpr auto kNotSupported = static_cast<int>(EvsResult::NOT_SUPPORTED);
    return ScopedAStatus::fromServiceSpecificError(kNotSupported);
}
// Adjusts the allocated buffer pool so that exactly `bufferCount` frames are
// available; returns false when the request is invalid or allocation fails.
// Caller must hold mAccessLock.
bool EvsMockCamera::setAvailableFrames_Locked(unsigned bufferCount) {
    if (bufferCount < 1) {
        LOG(ERROR) << "Ignoring request to set buffer count to zero";
        return false;
    }
    if (bufferCount > kMaxBuffersInFlight) {
        LOG(ERROR) << "Rejecting buffer request in excess of internal limit";
        return false;
    }
    if (mFramesAllowed < bufferCount) {
        // Grow the pool by the missing amount.
        const unsigned wanted = bufferCount - mFramesAllowed;
        LOG(INFO) << "Allocating " << wanted << " buffers for camera frames";
        const unsigned obtained = increaseAvailableFrames_Locked(wanted);
        if (obtained != wanted) {
            // Partial success: undo what was just added and report failure.
            LOG(ERROR) << "Rolling back to previous frame queue size";
            decreaseAvailableFrames_Locked(obtained);
            return false;
        }
    } else if (mFramesAllowed > bufferCount) {
        // Shrink the pool by the surplus amount.
        const unsigned surplus = mFramesAllowed - bufferCount;
        LOG(INFO) << "Returning " << surplus << " camera frame buffers";
        const unsigned released = decreaseAvailableFrames_Locked(surplus);
        if (released != surplus) {
            // This shouldn't happen with a properly behaving client because the client
            // should only make this call after returning sufficient outstanding buffers
            // to allow a clean resize.
            LOG(ERROR) << "Buffer queue shrink failed -- too many buffers currently in use?";
        }
    }
    return true;
}
// Allocates up to `numToAdd` new graphics buffers and returns how many were
// actually added; stops early on any allocator failure.  The caller is
// responsible for rolling back a partial increase.  Caller holds mAccessLock.
unsigned EvsMockCamera::increaseAvailableFrames_Locked(unsigned numToAdd) {
    // Acquire the graphics buffer allocator
    ::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());
    unsigned added = 0;
    while (added < numToAdd) {
        unsigned pixelsPerLine = 0;
        buffer_handle_t memHandle = nullptr;
        auto result = alloc.allocate(mWidth, mHeight, mFormat, 1, mUsage, &memHandle,
                                     &pixelsPerLine, 0, "EvsMockCamera");
        if (result != ::android::NO_ERROR) {
            LOG(ERROR) << "Error " << result << " allocating " << mWidth << " x " << mHeight
                       << " graphics buffer";
            break;
        }
        if (memHandle == nullptr) {
            LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
            break;
        }
        if (mStride > 0) {
            if (mStride != pixelsPerLine) {
                // All buffers must share one stride; consumers assume a single value.
                LOG(ERROR) << "We did not expect to get buffers with different strides!";
            }
        } else {
            // Gralloc defines stride in terms of pixels per line
            mStride = pixelsPerLine;
        }
        // Find a place to store the new buffer
        auto stored = false;
        for (auto&& rec : mBuffers) {
            if (rec.handle == nullptr) {
                // Use this existing entry
                rec.handle = memHandle;
                rec.inUse = false;
                stored = true;
                break;
            }
        }
        if (!stored) {
            // Add a BufferRecord wrapping this handle to our set of available buffers
            mBuffers.push_back(BufferRecord(memHandle));
        }
        ++mFramesAllowed;
        ++added;
    }
    return added;
}
// Frees up to `numToRemove` idle buffers and returns how many were released.
// Caller must hold mAccessLock.
unsigned EvsMockCamera::decreaseAvailableFrames_Locked(unsigned numToRemove) {
    // Acquire the graphics buffer allocator
    ::android::GraphicBufferAllocator& alloc(::android::GraphicBufferAllocator::get());
    unsigned removed = 0;
    for (auto&& rec : mBuffers) {
        // Check the quota before freeing so that numToRemove == 0 releases
        // nothing.  The original compared only after an increment, so a
        // request to remove zero buffers would have freed every idle buffer.
        if (removed == numToRemove) {
            break;
        }
        // Is this record not in use, but holding a buffer that we can free?
        if ((rec.inUse == false) && (rec.handle != nullptr)) {
            // Release buffer and update the record so we can recognize it as "empty"
            alloc.free(rec.handle);
            rec.handle = nullptr;
            --mFramesAllowed;
            ++removed;
        }
    }
    return removed;
}
// This is the asynchronous frame generation thread that runs in parallel with the
// main serving thread. There is one for each active camera instance.
void EvsMockCamera::generateFrames() {
    LOG(DEBUG) << "Frame generation loop started.";
    unsigned idx = 0;
    while (true) {
        bool timeForFrame = false;
        const nsecs_t startTime = systemTime(SYSTEM_TIME_MONOTONIC);
        // Lock scope for updating shared state
        {
            std::lock_guard lock(mAccessLock);
            if (mStreamState != RUNNING) {
                // Break out of our main thread loop
                break;
            }
            // Are we allowed to issue another buffer?
            if (mFramesInUse >= mFramesAllowed) {
                // Can't do anything right now -- skip this frame
                LOG(WARNING) << "Skipped a frame because too many are in flight.";
            } else {
                // Identify an available buffer to fill
                for (idx = 0; idx < mBuffers.size(); idx++) {
                    if (!mBuffers[idx].inUse) {
                        if (mBuffers[idx].handle != nullptr) {
                            // Found an available record, so stop looking
                            break;
                        }
                    }
                }
                if (idx >= mBuffers.size()) {
                    // This shouldn't happen since we already checked mFramesInUse vs mFramesAllowed
                    ALOGE("Failed to find an available buffer slot\n");
                } else {
                    // We're going to make the frame busy
                    mBuffers[idx].inUse = true;
                    mFramesInUse++;
                    timeForFrame = true;
                }
            }
        }
        if (timeForFrame) {
            using AidlPixelFormat = ::aidl::android::hardware::graphics::common::PixelFormat;
            // Assemble the buffer description we'll transmit below
            buffer_handle_t memHandle = mBuffers[idx].handle;
            BufferDesc newBuffer = {
                    .buffer =
                            {
                                    .description =
                                            {
                                                    .width = static_cast<int32_t>(mWidth),
                                                    .height = static_cast<int32_t>(mHeight),
                                                    .layers = 1,
                                                    .format = static_cast<AidlPixelFormat>(mFormat),
                                                    .usage = static_cast<BufferUsage>(mUsage),
                                                    .stride = static_cast<int32_t>(mStride),
                                            },
                                    .handle = ::android::dupToAidl(memHandle),
                            },
                    .bufferId = static_cast<int32_t>(idx),
                    .deviceId = mDescription.id,
                    // The capture time is reported in microseconds: divide the
                    // nanosecond clock by 1000.  (The original multiplied by
                    // 1e+3, which does not yield microseconds.)
                    .timestamp = ::android::elapsedRealtimeNano() / 1000,
            };
            // Write test data into the image buffer
            fillMockFrame(memHandle, reinterpret_cast<const AHardwareBuffer_Desc*>(
                                             &newBuffer.buffer.description));
            // Issue the (asynchronous) callback to the client -- can't be holding the lock
            auto flag = false;
            if (mStream) {
                std::vector<BufferDesc> frames;
                frames.push_back(std::move(newBuffer));
                flag = mStream->deliverFrame(frames).isOk();
            }
            if (flag) {
                LOG(DEBUG) << "Delivered " << memHandle << ", id = " << mBuffers[idx].handle;
            } else {
                // This can happen if the client dies and is likely unrecoverable.
                // To avoid consuming resources generating failing calls, we stop sending
                // frames. Note, however, that the stream remains in the "STREAMING" state
                // until cleaned up on the main thread.
                LOG(ERROR) << "Frame delivery call failed in the transport layer.";
                // Since we didn't actually deliver it, mark the frame as available
                std::lock_guard<std::mutex> lock(mAccessLock);
                mBuffers[idx].inUse = false;
                mFramesInUse--;
            }
        }
        // We arbitrarily choose to generate frames at 15 fps to ensure we pass the 10fps test
        // requirement
        static const int kTargetFrameRate = 15;
        static const nsecs_t kTargetFrameIntervalUs = 1000 * 1000 / kTargetFrameRate;
        const nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
        const nsecs_t elapsedTimeUs = (now - startTime) / 1000;
        const nsecs_t sleepDurationUs = kTargetFrameIntervalUs - elapsedTimeUs;
        if (sleepDurationUs > 0) {
            usleep(sleepDurationUs);
        }
    }
    // If we've been asked to stop, send an event to signal the actual end of stream
    EvsEventDesc event = {
            .aType = EvsEventType::STREAM_STOPPED,
    };
    // Guard against a cleared callback, matching the null check used when
    // delivering frames above.
    if (!mStream || !mStream->notify(event).isOk()) {
        ALOGE("Error delivering end of stream marker");
    }
    return;
}
// Paints the vertical ABGR colorbar test pattern into the given buffer.
void EvsMockCamera::fillMockFrame(buffer_handle_t handle, const AHardwareBuffer_Desc* pDesc) {
    // Map the buffer so the CPU can write into it.
    uint32_t* pixels = nullptr;
    ::android::GraphicBufferMapper& mapper = ::android::GraphicBufferMapper::get();
    mapper.lock(handle, GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
                ::android::Rect(pDesc->width, pDesc->height), (void**)&pixels);
    // If we failed to lock the pixel buffer, we're about to crash, but log it first
    if (!pixels) {
        ALOGE("Camera failed to gain access to image buffer for writing");
        return;
    }
    // Fill each row with the colorbar pattern.
    uint32_t* rowStart = pixels;
    for (unsigned row = 0; row < pDesc->height; ++row) {
        for (unsigned col = 0; col < pDesc->width; ++col) {
            // Map this column onto one of the kNumColors bars.
            rowStart[col] = kColors[col * kNumColors / pDesc->width];
        }
        // NOTE: stride retrieved from gralloc is in units of pixels
        rowStart += pDesc->stride;
    }
    // Unmap the buffer before handing it off.
    mapper.unlock(handle);
}
// Marks buffer `bufferId` as free again; caller must hold mAccessLock.
// Invalid or already-free ids are logged and ignored.
void EvsMockCamera::returnBufferLocked(const uint32_t bufferId) {
    if (bufferId >= mBuffers.size()) {
        ALOGE("ignoring doneWithFrame called with invalid bufferId %d (max is %zu)", bufferId,
              mBuffers.size() - 1);
        return;
    }
    if (!mBuffers[bufferId].inUse) {
        ALOGE("ignoring doneWithFrame called on frame %d which is already free", bufferId);
        return;
    }
    // Mark the frame as available
    mBuffers[bufferId].inUse = false;
    mFramesInUse--;
    // If this frame's index is high in the array, try to move it down
    // to improve locality after mFramesAllowed has been reduced.
    if (bufferId >= mFramesAllowed) {
        // Find an empty slot lower in the array (which should always exist in this case)
        for (auto&& rec : mBuffers) {
            if (rec.handle == nullptr) {
                rec.handle = mBuffers[bufferId].handle;
                mBuffers[bufferId].handle = nullptr;
                break;
            }
        }
    }
}
// Convenience factory used when no camera configuration is available.
std::shared_ptr<EvsMockCamera> EvsMockCamera::Create(const char* deviceName) {
    // NOTE(review): forwards a null configuration; verify that the
    // two-argument overload tolerates a null camInfo before using this path.
    std::unique_ptr<ConfigManager::CameraInfo> nullCamInfo = nullptr;
    return Create(deviceName, nullCamInfo);
}
// Factory: builds an EvsMockCamera and applies the first stream configuration
// from the given camera description.  Returns nullptr on failure.
std::shared_ptr<EvsMockCamera> EvsMockCamera::Create(
        const char* deviceName, std::unique_ptr<ConfigManager::CameraInfo>& camInfo,
        [[maybe_unused]] const Stream* streamCfg) {
    std::shared_ptr<EvsMockCamera> c =
            ndk::SharedRefBase::make<EvsMockCamera>(Sigil{}, deviceName, camInfo);
    if (!c) {
        LOG(ERROR) << "Failed to instantiate EvsMockCamera.";
        return nullptr;
    }
    // The code below reads camInfo; the original dereferenced it
    // unconditionally, which crashed when Create(deviceName) passed nullptr.
    if (!camInfo || camInfo->streamConfigurations.empty()) {
        LOG(ERROR) << "No stream configuration is available for " << deviceName;
        return nullptr;
    }
    // Use the first resolution from the list for the testing
    // TODO(b/214835237): Uses a given Stream configuration to choose the best
    // stream configuration.
    auto it = camInfo->streamConfigurations.begin();
    c->mWidth = it->second.width;
    c->mHeight = it->second.height;
    c->mDescription.vendorFlags = 0xFFFFFFFF;  // Arbitrary test value
    // RGBA_8888 with CPU- and GPU-friendly usage bits for the mock renderer.
    c->mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
    c->mUsage = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_CAMERA_WRITE |
                GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_RARELY;
    return c;
}
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@ -0,0 +1,465 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "GlWrapper.h"

#include <aidl/android/frameworks/automotive/display/DisplayDesc.h>
#include <aidl/android/hardware/graphics/common/HardwareBufferDescription.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <ui/DisplayMode.h>
#include <ui/DisplayState.h>
#include <ui/GraphicBuffer.h>

#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>

#include <string>
#include <utility>
namespace {
using ::aidl::android::frameworks::automotive::display::DisplayDesc;
using ::aidl::android::frameworks::automotive::display::ICarDisplayProxy;
using ::aidl::android::frameworks::automotive::display::Rotation;
using ::aidl::android::hardware::common::NativeHandle;
using ::aidl::android::hardware::graphics::common::HardwareBufferDescription;
using ::android::GraphicBuffer;
using ::android::sp;
// Trivial pass-through vertex shader: forwards the position unchanged and
// hands the texture coordinate to the fragment stage.
constexpr const char vertexShaderSource[] =
        "attribute vec4 pos; \n"
        "attribute vec2 tex; \n"
        "varying vec2 uv; \n"
        "void main() \n"
        "{ \n"
        " gl_Position = pos; \n"
        " uv = tex; \n"
        "} \n";
// Fragment shader: samples the bound 2D texture at the interpolated coordinate.
constexpr const char pixelShaderSource[] =
        "precision mediump float; \n"
        "uniform sampler2D tex; \n"
        "varying vec2 uv; \n"
        "void main() \n"
        "{ \n"
        " gl_FragColor = texture2D(tex, uv);\n"
        "} \n";
const char* getEGLError(void) {
switch (eglGetError()) {
case EGL_SUCCESS:
return "EGL_SUCCESS";
case EGL_NOT_INITIALIZED:
return "EGL_NOT_INITIALIZED";
case EGL_BAD_ACCESS:
return "EGL_BAD_ACCESS";
case EGL_BAD_ALLOC:
return "EGL_BAD_ALLOC";
case EGL_BAD_ATTRIBUTE:
return "EGL_BAD_ATTRIBUTE";
case EGL_BAD_CONTEXT:
return "EGL_BAD_CONTEXT";
case EGL_BAD_CONFIG:
return "EGL_BAD_CONFIG";
case EGL_BAD_CURRENT_SURFACE:
return "EGL_BAD_CURRENT_SURFACE";
case EGL_BAD_DISPLAY:
return "EGL_BAD_DISPLAY";
case EGL_BAD_SURFACE:
return "EGL_BAD_SURFACE";
case EGL_BAD_MATCH:
return "EGL_BAD_MATCH";
case EGL_BAD_PARAMETER:
return "EGL_BAD_PARAMETER";
case EGL_BAD_NATIVE_PIXMAP:
return "EGL_BAD_NATIVE_PIXMAP";
case EGL_BAD_NATIVE_WINDOW:
return "EGL_BAD_NATIVE_WINDOW";
case EGL_CONTEXT_LOST:
return "EGL_CONTEXT_LOST";
default:
return "Unknown error";
}
}
// Given shader source, load and compile it.
// Returns the GL shader object, or 0 on failure (the failure is logged).
GLuint loadShader(GLenum type, const char* shaderSrc) {
    // Create the shader object
    GLuint shader = glCreateShader(type);
    if (shader == 0) {
        return 0;
    }
    // Load and compile the shader
    glShaderSource(shader, 1, &shaderSrc, nullptr);
    glCompileShader(shader);
    // Verify the compilation worked as expected
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (!compiled) {
        LOG(ERROR) << "Error compiling shader";
        GLint size = 0;
        glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &size);
        if (size > 0) {
            // Get and report the error message using an RAII buffer
            // (the original used raw malloc/free with no null check).
            std::string infoLog(static_cast<size_t>(size), '\0');
            glGetShaderInfoLog(shader, size, nullptr, infoLog.data());
            LOG(ERROR) << " msg:" << std::endl << infoLog;
        }
        glDeleteShader(shader);
        return 0;
    }
    return shader;
}
// Create a program object given vertex and pixels shader source.
// Returns the linked GL program, or 0 on failure (the failure is logged).
GLuint buildShaderProgram(const char* vtxSrc, const char* pxlSrc) {
    GLuint program = glCreateProgram();
    if (program == 0) {
        LOG(ERROR) << "Failed to allocate program object";
        return 0;
    }
    // Compile the shaders and bind them to this program
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vtxSrc);
    if (vertexShader == 0) {
        LOG(ERROR) << "Failed to load vertex shader";
        glDeleteProgram(program);
        return 0;
    }
    GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pxlSrc);
    if (pixelShader == 0) {
        LOG(ERROR) << "Failed to load pixel shader";
        glDeleteProgram(program);
        glDeleteShader(vertexShader);
        return 0;
    }
    glAttachShader(program, vertexShader);
    glAttachShader(program, pixelShader);
    glBindAttribLocation(program, 0, "pos");
    glBindAttribLocation(program, 1, "tex");
    // Link the program
    glLinkProgram(program);
    GLint linked = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if (!linked) {
        LOG(ERROR) << "Error linking program";
        GLint size = 0;
        glGetProgramiv(program, GL_INFO_LOG_LENGTH, &size);
        if (size > 0) {
            // Get and report the error message using an RAII buffer
            // (the original used raw malloc/free with no null check).
            std::string infoLog(static_cast<size_t>(size), '\0');
            glGetProgramInfoLog(program, size, nullptr, infoLog.data());
            LOG(ERROR) << " msg: " << infoLog;
        }
        glDeleteProgram(program);
        glDeleteShader(vertexShader);
        glDeleteShader(pixelShader);
        return 0;
    }
    // Flag the shader objects for deletion; they remain alive while attached
    // to the linked program and are released together with it.  The original
    // leaked both shader objects on the success path.
    glDeleteShader(vertexShader);
    glDeleteShader(pixelShader);
    return program;
}
// Converts an AIDL NativeHandle that carries a HIDL token into a strong
// reference to the IGraphicBufferProducer it identifies.
// The handle is expected to carry no file descriptors and at least enough
// ints to hold the token length (data[0]) followed by the token bytes.
// Returns nullptr on any failure.
::android::sp<HGraphicBufferProducer> convertNativeHandleToHGBP(const NativeHandle& aidlHandle) {
    native_handle_t* handle = ::android::dupFromAidl(aidlHandle);
    // dupFromAidl can fail; the original code dereferenced the result without
    // checking it.
    if (handle == nullptr) {
        LOG(ERROR) << "Invalid native handle";
        return nullptr;
    }

    // Minimum number of ints needed to store a size_t, rounded UP.  The
    // previous expression, std::ceil(sizeof(size_t) / sizeof(int)), performed
    // integer division first, so std::ceil() was a no-op on the truncated
    // quotient.
    constexpr int kMinNumInts =
            static_cast<int>((sizeof(size_t) + sizeof(int) - 1) / sizeof(int));
    if (handle->numFds != 0 || handle->numInts < kMinNumInts) {
        LOG(ERROR) << "Invalid native handle";
        native_handle_delete(handle);
        return nullptr;
    }

    // halToken aliases the handle's int array (no copy is made), so the
    // handle must stay alive until the token has been consumed below.
    ::android::hardware::hidl_vec<uint8_t> halToken;
    halToken.setToExternal(reinterpret_cast<uint8_t*>(const_cast<int*>(&(handle->data[1]))),
                           handle->data[0]);
    ::android::sp<HGraphicBufferProducer> hgbp =
            HGraphicBufferProducer::castFrom(::android::retrieveHalInterface(halToken));

    // The duplicated handle was leaked before.  It holds no file descriptors
    // (checked above), so native_handle_delete() alone is sufficient.
    native_handle_delete(handle);

    // Plain return (not std::move) so copy elision can apply.
    return hgbp;
}
} // namespace
namespace aidl::android::hardware::automotive::evs::implementation {
// Main entry point
//
// Brings up the EGL/GLES rendering pipeline on the display identified by
// |displayId|, which is resolved through |pWindowProxy|.  On success,
// mDisplay/mSurface/mContext are current on the calling thread and
// mShaderProgram/mTextureMap are ready for rendering; returns false on any
// failure.  NOTE(review): resources acquired before a failing step are not
// rolled back here — presumably shutdown() is expected to clean up; confirm
// with callers.
bool GlWrapper::initialize(const std::shared_ptr<ICarDisplayProxy>& pWindowProxy,
                           uint64_t displayId) {
    LOG(DEBUG) << __FUNCTION__;
    if (!pWindowProxy) {
        LOG(ERROR) << "Could not get ICarDisplayProxy.";
        return false;
    }
    // Query the display metadata to size our render target.
    DisplayDesc displayDesc;
    auto status = pWindowProxy->getDisplayInfo(displayId, &displayDesc);
    if (!status.isOk()) {
        LOG(ERROR) << "Failed to read the display information";
        return false;
    }
    mWidth = displayDesc.width;
    mHeight = displayDesc.height;
    // Swap width/height when the display is rotated 90 or 270 degrees so the
    // render target matches the presented orientation.
    if ((displayDesc.orientation != Rotation::ROTATION_0) &&
        (displayDesc.orientation != Rotation::ROTATION_180)) {
        std::swap(mWidth, mHeight);
    }
    LOG(INFO) << "Display resolution is " << mWidth << "x" << mHeight;
    // The display service hands us its buffer producer as a HIDL token packed
    // in a NativeHandle; unwrap it into an HGBP, then a Surface, then the
    // ANativeWindow we can create an EGL surface on.
    NativeHandle aidlHandle;
    status = pWindowProxy->getHGraphicBufferProducer(displayId, &aidlHandle);
    if (!status.isOk()) {
        LOG(ERROR) << "Failed to get IGraphicBufferProducer from ICarDisplayProxy.";
        return false;
    }
    mGfxBufferProducer = convertNativeHandleToHGBP(aidlHandle);
    if (!mGfxBufferProducer) {
        LOG(ERROR) << "Failed to convert a NativeHandle to HGBP.";
        return false;
    }
    mSurfaceHolder = getSurfaceFromHGBP(mGfxBufferProducer);
    if (mSurfaceHolder == nullptr) {
        LOG(ERROR) << "Failed to get a Surface from HGBP.";
        return false;
    }
    mWindow = getNativeWindow(mSurfaceHolder.get());
    if (mWindow == nullptr) {
        LOG(ERROR) << "Failed to get a native window from Surface.";
        return false;
    }
    // Set up our OpenGL ES context associated with the default display
    mDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (mDisplay == EGL_NO_DISPLAY) {
        LOG(ERROR) << "Failed to get egl display";
        return false;
    }
    // eglInitialize writes the actual version back into major/minor.
    EGLint major = 2;
    EGLint minor = 0;
    if (!eglInitialize(mDisplay, &major, &minor)) {
        LOG(ERROR) << "Failed to initialize EGL: " << getEGLError();
        return false;
    }
    const EGLint config_attribs[] = {
            // clang-format off
            // Tag                  Value
            EGL_RED_SIZE,           8,
            EGL_GREEN_SIZE,         8,
            EGL_BLUE_SIZE,          8,
            EGL_DEPTH_SIZE,         0,
            EGL_NONE
            // clang-format on
    };
    // Pick the default configuration without constraints (is this good enough?)
    EGLConfig egl_config = {0};
    EGLint numConfigs = -1;
    eglChooseConfig(mDisplay, config_attribs, &egl_config, 1, &numConfigs);
    if (numConfigs != 1) {
        LOG(ERROR) << "Didn't find a suitable format for our display window, " << getEGLError();
        return false;
    }
    // Create the EGL render target surface
    mSurface = eglCreateWindowSurface(mDisplay, egl_config, mWindow, nullptr);
    if (mSurface == EGL_NO_SURFACE) {
        LOG(ERROR) << "eglCreateWindowSurface failed, " << getEGLError();
        return false;
    }
    // Create the EGL context
    // NOTE:  Our shader is (currently at least) written to require version 3, so this
    //        is required.
    const EGLint context_attribs[] = {EGL_CONTEXT_CLIENT_VERSION, 3, EGL_NONE};
    mContext = eglCreateContext(mDisplay, egl_config, EGL_NO_CONTEXT, context_attribs);
    if (mContext == EGL_NO_CONTEXT) {
        LOG(ERROR) << "Failed to create OpenGL ES Context: " << getEGLError();
        return false;
    }
    // Activate our render target for drawing
    if (!eglMakeCurrent(mDisplay, mSurface, mSurface, mContext)) {
        LOG(ERROR) << "Failed to make the OpenGL ES Context current: " << getEGLError();
        return false;
    }
    // Create the shader program for our simple pipeline
    mShaderProgram = buildShaderProgram(vertexShaderSource, pixelShaderSource);
    if (!mShaderProgram) {
        LOG(ERROR) << "Failed to build shader program: " << getEGLError();
        return false;
    }
    // Create a GL texture that will eventually wrap our externally created texture surface(s)
    glGenTextures(1, &mTextureMap);
    if (mTextureMap <= 0) {
        LOG(ERROR) << "Didn't get a texture handle allocated: " << getEGLError();
        return false;
    }
    // Turn off mip-mapping for the created texture surface
    // (the inbound camera imagery doesn't have MIPs)
    glBindTexture(GL_TEXTURE_2D, mTextureMap);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glBindTexture(GL_TEXTURE_2D, 0);
    return true;
}
// Tears down everything initialize() set up: the cached EGLImage, the EGL
// surface/context/display, and the Surface that backs the native window.
// Safe to call after a partially failed initialize(); members are reset to
// their EGL_NO_* values so a later initialize() can run again.
void GlWrapper::shutdown() {
    // Drop our device textures
    if (mKHRimage != EGL_NO_IMAGE_KHR) {
        eglDestroyImageKHR(mDisplay, mKHRimage);
        mKHRimage = EGL_NO_IMAGE_KHR;
    }
    // Release all GL resources.  Unbind the context first (only if it is
    // actually current on this thread) so the destroy calls below do not
    // tear down a context that is still in use.
    if (eglGetCurrentContext() == mContext) {
        eglMakeCurrent(mDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    }
    eglDestroySurface(mDisplay, mSurface);
    eglDestroyContext(mDisplay, mContext);
    eglTerminate(mDisplay);
    mSurface = EGL_NO_SURFACE;
    mContext = EGL_NO_CONTEXT;
    mDisplay = EGL_NO_DISPLAY;
    // Release the window
    mSurfaceHolder = nullptr;
}
// Asks the display proxy service to make the window identified by |id|
// visible.  Logs an error and does nothing when no proxy is available.
void GlWrapper::showWindow(const std::shared_ptr<ICarDisplayProxy>& pWindowProxy, uint64_t id) {
    if (!pWindowProxy) {
        LOG(ERROR) << "ICarDisplayProxy is not available.";
        return;
    }
    pWindowProxy->showWindow(id);
}
// Asks the display proxy service to hide the window identified by |id|.
// Logs an error and does nothing when no proxy is available.
void GlWrapper::hideWindow(const std::shared_ptr<ICarDisplayProxy>& pWindowProxy, uint64_t id) {
    if (!pWindowProxy) {
        LOG(ERROR) << "ICarDisplayProxy is not available.";
        return;
    }
    pWindowProxy->hideWindow(id);
}
// Wraps the gralloc buffer behind |handle| (described by |description|) in an
// EGLImage and binds it to our pre-allocated texture, mTextureMap.
// Returns true on success (including the cached fast path), false otherwise.
//
// NOTE(review): the EGLImage is created exactly once — every later call
// returns true immediately without looking at |handle|.  This appears to
// assume all incoming frames reuse the same underlying buffer; confirm with
// the caller's buffer-management scheme.
bool GlWrapper::updateImageTexture(buffer_handle_t handle,
                                   const HardwareBufferDescription& description) {
    if (mKHRimage != EGL_NO_IMAGE_KHR) {
        return true;
    }
    // Create a temporary GraphicBuffer to wrap the provided handle.
    // keepOwnership=false: the caller retains ownership of |handle|; the
    // GraphicBuffer must not close it when destroyed.
    sp<GraphicBuffer> pGfxBuffer =
            new GraphicBuffer(description.width, description.height,
                              static_cast<::android::PixelFormat>(description.format),
                              description.layers, static_cast<uint32_t>(description.usage),
                              description.stride, const_cast<native_handle_t*>(handle),
                              /* keepOwnership= */ false);
    if (!pGfxBuffer) {
        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap our native handle";
        return false;
    }
    // Get a GL compatible reference to the graphics buffer we've been given.
    // EGL_IMAGE_PRESERVED_KHR keeps the buffer contents intact when the image
    // is bound as a texture target.
    EGLint eglImageAttributes[] = {EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE};
    EGLClientBuffer cbuf = static_cast<EGLClientBuffer>(pGfxBuffer->getNativeBuffer());
    mKHRimage = eglCreateImageKHR(mDisplay, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, cbuf,
                                  eglImageAttributes);
    if (mKHRimage == EGL_NO_IMAGE_KHR) {
        LOG(ERROR) << "Error creating EGLImage: " << getEGLError();
        return false;
    }
    // Update the texture handle we already created to refer to this gralloc buffer
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, mTextureMap);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, static_cast<GLeglImageOES>(mKHRimage));
    return true;
}
void GlWrapper::renderImageToScreen() {
// Set the viewport
glViewport(0, 0, mWidth, mHeight);
// Clear the color buffer
glClearColor(0.1f, 0.5f, 0.1f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// Select our screen space simple texture shader
glUseProgram(mShaderProgram);
// Bind the texture and assign it to the shader's sampler
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, mTextureMap);
GLint sampler = glGetUniformLocation(mShaderProgram, "tex");
glUniform1i(sampler, 0);
// We want our image to show up opaque regardless of alpha values
glDisable(GL_BLEND);
// Draw a rectangle on the screen
GLfloat vertsCarPos[] = {
// clang-format off
-0.8, 0.8, 0.0f, // left top in window space
0.8, 0.8, 0.0f, // right top
-0.8, -0.8, 0.0f, // left bottom
0.8, -0.8, 0.0f // right bottom
// clang-format on
};
// NOTE: We didn't flip the image in the texture, so V=0 is actually the top of the image
GLfloat vertsCarTex[] = {
// clang-format off
0.0f, 0.0f, // left top
1.0f, 0.0f, // right top
0.0f, 1.0f, // left bottom
1.0f, 1.0f // right bottom
// clang-format on
};
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertsCarPos);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, vertsCarTex);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
// Clean up and flip the rendered result to the front so it is visible
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glFinish();
if (eglSwapBuffers(mDisplay, mSurface) == EGL_FALSE) {
LOG(WARNING) << "Failed to swap EGL buffers, " << getEGLError();
}
}
} // namespace aidl::android::hardware::automotive::evs::implementation

View file

@ -14,38 +14,75 @@
* limitations under the License. * limitations under the License.
*/ */
#define LOG_TAG "EvsService" #include "EvsEnumerator.h"
#include "EvsGlDisplay.h"
#include <DefaultEvsEnumerator.h>
#include <android/binder_manager.h> #include <android/binder_manager.h>
#include <android/binder_process.h> #include <android/binder_process.h>
#include <utils/Log.h> #include <utils/Log.h>
using ::aidl::android::hardware::automotive::evs::implementation::DefaultEvsEnumerator; #include <unistd.h>
int main([[maybe_unused]] int argc, [[maybe_unused]] char* argv[]) { #include <atomic>
std::shared_ptr<DefaultEvsEnumerator> vhal = ndk::SharedRefBase::make<DefaultEvsEnumerator>(); #include <cstdlib>
#include <string_view>
ALOGI("Registering as service..."); namespace {
binder_exception_t err =
AServiceManager_addService(vhal->asBinder().get(), "android.hardware.automotive.evs"); using ::aidl::android::frameworks::automotive::display::ICarDisplayProxy;
using ::aidl::android::hardware::automotive::evs::implementation::EvsEnumerator;
constexpr std::string_view kDisplayServiceInstanceName = "/default";
constexpr std::string_view kHwInstanceName = "/hw/0";
constexpr int kNumBinderThreads = 1;
} // namespace
int main() {
LOG(INFO) << "EVS Hardware Enumerator service is starting";
const std::string displayServiceInstanceName =
std::string(ICarDisplayProxy::descriptor) + std::string(kDisplayServiceInstanceName);
if (!AServiceManager_isDeclared(displayServiceInstanceName.data())) {
// TODO: We may just want to disable EVS display.
LOG(ERROR) << displayServiceInstanceName << " is required.";
return EXIT_FAILURE;
}
std::shared_ptr<ICarDisplayProxy> displayService = ICarDisplayProxy::fromBinder(
::ndk::SpAIBinder(AServiceManager_waitForService(displayServiceInstanceName.data())));
if (!displayService) {
LOG(ERROR) << "Cannot use " << displayServiceInstanceName << ". Exiting.";
return EXIT_FAILURE;
}
// Register our service -- if somebody is already registered by our name,
// they will be killed (their thread pool will throw an exception).
std::shared_ptr<EvsEnumerator> service =
ndk::SharedRefBase::make<EvsEnumerator>(displayService);
if (!service) {
LOG(ERROR) << "Failed to instantiate the service";
return EXIT_FAILURE;
}
const std::string instanceName =
std::string(EvsEnumerator::descriptor) + std::string(kHwInstanceName);
auto err = AServiceManager_addService(service->asBinder().get(), instanceName.data());
if (err != EX_NONE) { if (err != EX_NONE) {
ALOGE("failed to register android.hardware.automotive.evs service, exception: %d", err); LOG(ERROR) << "Failed to register " << instanceName << ", exception = " << err;
return 1; return EXIT_FAILURE;
} }
if (!ABinderProcess_setThreadPoolMaxThreadCount(1)) { if (!ABinderProcess_setThreadPoolMaxThreadCount(kNumBinderThreads)) {
ALOGE("%s", "failed to set thread pool max thread count"); LOG(ERROR) << "Failed to set thread pool";
return 1; return EXIT_FAILURE;
} }
ABinderProcess_startThreadPool(); ABinderProcess_startThreadPool();
LOG(INFO) << "EVS Hardware Enumerator is ready";
ALOGI("Evs Service Ready");
ABinderProcess_joinThreadPool(); ABinderProcess_joinThreadPool();
// In normal operation, we don't expect the thread pool to exit
ALOGI("Evs Service Exiting"); LOG(INFO) << "EVS Hardware Enumerator is shutting down";
return EXIT_SUCCESS;
return 0;
} }

View file

@ -84,15 +84,6 @@
<regex-instance>[a-z]+/[0-9]+</regex-instance> <regex-instance>[a-z]+/[0-9]+</regex-instance>
</interface> </interface>
</hal> </hal>
<hal format="hidl" optional="true">
<name>android.hardware.automotive.evs</name>
<version>1.0-1</version>
<interface>
<name>IEvsEnumerator</name>
<instance>default</instance>
<regex-instance>[a-z]+/[0-9]+</regex-instance>
</interface>
</hal>
<hal format="aidl" optional="true"> <hal format="aidl" optional="true">
<name>android.hardware.automotive.occupant_awareness</name> <name>android.hardware.automotive.occupant_awareness</name>
<version>1</version> <version>1</version>