Migrate reference external HAL implementation to AIDL

Android T migrated the HAL interface to AIDL, but the reference HAL
was never updated to use AIDL. This CL migrates the reference
HAL implementation for external cameras to use AIDL as well. The
external HAL uses the V4L2 standard to expose USB cameras to the
cameraserver.

The reference HAL implementation for internal cameras was dropped
because it is not possible to write a generic HAL that works with any
large percentage of internal cameras.

Bug: 219974678
Test: Existing CTS tests pass with external camera connected.
Change-Id: I35f3dc32c16670eca7735a4ac00fed3daf36aa65
This commit is contained in:
Avichal Rakesh 2022-06-08 17:47:23 -07:00
parent 1f5dec26ef
commit e1857f8dbd
22 changed files with 7393 additions and 0 deletions

View file

@ -18,6 +18,7 @@ cc_library_static {
"VendorTagDescriptor.cpp",
"HandleImporter.cpp",
"Exif.cpp",
"SimpleThread.cpp",
],
cflags: [
"-Werror",

View file

@ -0,0 +1,65 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "SimpleThread.h"
namespace android {
namespace hardware {
namespace camera {
namespace common {
namespace helper {
SimpleThread::SimpleThread() : mDone(true), mThread() {}
SimpleThread::~SimpleThread() {
    // Safe to call requestExitAndWait() from the destructor because requestExitAndWait() ensures
    // that the thread is joinable before joining on it. This is different from how
    // android::Thread worked. Guarantees the worker is fully stopped before
    // members (mDone, mThread) are destroyed.
    requestExitAndWait();
}
void SimpleThread::run() {
requestExitAndWait(); // Exit current execution, if any.
// start thread
mDone.store(false, std::memory_order_release);
mThread = std::thread(&SimpleThread::runLoop, this);
}
void SimpleThread::requestExitAndWait() {
    // Signal thread to stop. Must happen before the join below so the worker
    // observes the flag on its next exitPending() check.
    mDone.store(true, std::memory_order_release);

    // Wait for thread to exit if needed. This should happen in no more than one iteration of
    // threadLoop. The joinable() guard makes this safe to call when no thread
    // is running (join() on a non-joinable thread would be undefined).
    if (mThread.joinable()) {
        mThread.join();
    }

    // Reset to a default (non-joinable) thread so repeated calls are no-ops.
    mThread = std::thread();
}
void SimpleThread::runLoop() {
    // Keep invoking threadLoop() until either an exit has been requested or
    // the loop body signals completion by returning false. Short-circuit
    // evaluation preserves the original order: exitPending() is checked
    // before each threadLoop() call.
    while (!exitPending() && threadLoop()) {
    }
}
} // namespace helper
} // namespace common
} // namespace camera
} // namespace hardware
} // namespace android

View file

@ -0,0 +1,64 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HARDWARE_INTERFACES_CAMERA_COMMON_SIMPLETHREAD_H_
#define HARDWARE_INTERFACES_CAMERA_COMMON_SIMPLETHREAD_H_
#include <atomic>
#include <thread>
namespace android {
namespace hardware {
namespace camera {
namespace common {
namespace helper {
// A simple looper based on std::thread, used as a lightweight replacement for
// android::Thread. Subclasses implement threadLoop(), which is called
// repeatedly until it returns false or until an exit is requested via
// requestExitAndWait().
//
// NOTE(review): run() and requestExitAndWait() are not synchronized against
// each other; callers are expected to drive the lifecycle from a single
// controlling thread — confirm against call sites.
class SimpleThread {
  public:
    // Constructs in the "exited" state; no std::thread is created here.
    SimpleThread();
    virtual ~SimpleThread();

    // Explicit call to start execution of the thread. No thread is created before this function
    // is called.
    virtual void run() final;

    // Signals the worker to stop and joins it if one is running. Safe to call
    // repeatedly, and from the destructor.
    virtual void requestExitAndWait() final;

  protected:
    // Main logic of the thread. This function is called repeatedly until it returns false.
    // Thread execution stops if this function returns false.
    virtual bool threadLoop() = 0;

    // Returns true if the thread execution should stop. Should be used by threadLoop to check if
    // the thread has been requested to exit.
    virtual inline bool exitPending() final { return mDone.load(std::memory_order_acquire); }

  private:
    // Wraps threadLoop in a simple while loop that allows safe exit
    virtual void runLoop() final;

    // Flag to signal end of thread execution. This flag is checked before every iteration
    // of threadLoop. Written with release ordering, read with acquire.
    std::atomic_bool mDone;
    std::thread mThread;
};
} // namespace helper
} // namespace common
} // namespace camera
} // namespace hardware
} // namespace android
#endif // HARDWARE_INTERFACES_CAMERA_COMMON_SIMPLETHREAD_H_

View file

@ -0,0 +1,71 @@
//
// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package {
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Reference external-camera HAL implementation: exposes USB cameras to the
// cameraserver through the AIDL camera.device interface, using the V4L2
// kernel API underneath. The HIDL graphics.mapper / hidl memory deps below
// are still needed by the shared HandleImporter helper.
cc_library_shared {
    name: "camera.device-external-impl",
    defaults: ["hidl_defaults"],
    proprietary: true,
    srcs: [
        "ExternalCameraDevice.cpp",
        "ExternalCameraDeviceSession.cpp",
        "ExternalCameraOfflineSession.cpp",
        "ExternalCameraUtils.cpp",
        "convert.cpp",
    ],
    shared_libs: [
        "android.hardware.camera.common-V1-ndk",
        "android.hardware.camera.device-V1-ndk",
        "android.hardware.graphics.allocator-V1-ndk",
        "android.hardware.graphics.common-V4-ndk",
        "android.hardware.graphics.mapper@2.0",
        "android.hardware.graphics.mapper@3.0",
        "android.hardware.graphics.mapper@4.0",
        "android.hidl.allocator@1.0",
        "android.hidl.memory@1.0",
        "libbinder_ndk",
        "libcamera_metadata",
        "libcutils",
        "libexif",
        "libfmq",
        "libgralloctypes",
        "libhardware",
        "libhidlbase",
        "libhidlmemory",
        "libjpeg",
        "liblog",
        "libsync",
        "libtinyxml2",
        "libutils",
        "libyuv",
    ],
    static_libs: [
        "android.hardware.camera.common@1.0-helper",
        "libaidlcommonsupport",
    ],
    header_libs: [
        "media_plugin_headers",
    ],
    // Headers (ExternalCameraDevice.h etc.) are consumed by the provider HAL.
    export_include_dirs: ["."],
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,191 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICE_H_
#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICE_H_
#include <ExternalCameraDeviceSession.h>
#include <ExternalCameraUtils.h>
#include <aidl/android/hardware/camera/device/BnCameraDevice.h>
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {
using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::device::BnCameraDevice;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraDeviceSession;
using ::aidl::android::hardware::camera::device::ICameraInjectionSession;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::android::hardware::camera::external::common::ExternalCameraConfig;
// AIDL ICameraDevice implementation backed by a single V4L2 device node
// (mDevicePath). Static characteristics are synthesized at init time by
// probing the V4L2 device for its supported formats/sizes/frame rates.
class ExternalCameraDevice : public BnCameraDevice {
  public:
    // Called by external camera provider HAL.
    // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could
    // be multiple CameraDevice trying to access the same physical camera. Also, provider will have
    // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying
    // camera is detached.
    ExternalCameraDevice(const std::string& devicePath, const ExternalCameraConfig& config);
    ~ExternalCameraDevice() override;

    // ICameraDevice interface (AIDL-generated pure virtuals).
    ndk::ScopedAStatus getCameraCharacteristics(CameraMetadata* _aidl_return) override;
    ndk::ScopedAStatus getPhysicalCameraCharacteristics(const std::string& in_physicalCameraId,
                                                        CameraMetadata* _aidl_return) override;
    ndk::ScopedAStatus getResourceCost(CameraResourceCost* _aidl_return) override;
    ndk::ScopedAStatus isStreamCombinationSupported(const StreamConfiguration& in_streams,
                                                    bool* _aidl_return) override;
    ndk::ScopedAStatus open(const std::shared_ptr<ICameraDeviceCallback>& in_callback,
                            std::shared_ptr<ICameraDeviceSession>* _aidl_return) override;
    ndk::ScopedAStatus openInjectionSession(
            const std::shared_ptr<ICameraDeviceCallback>& in_callback,
            std::shared_ptr<ICameraInjectionSession>* _aidl_return) override;
    ndk::ScopedAStatus setTorchMode(bool in_on) override;
    ndk::ScopedAStatus turnOnTorchWithStrengthLevel(int32_t in_torchStrength) override;
    ndk::ScopedAStatus getTorchStrengthLevel(int32_t* _aidl_return) override;

    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;

    // Caller must use this method to check if CameraDevice ctor failed
    bool isInitFailed();

    // Device version to be used by the external camera provider.
    // Should be of the form <major>.<minor>
    static std::string kDeviceVersion;

  private:
    // Session factory; virtual so subclasses/tests can supply a different
    // session implementation.
    virtual std::shared_ptr<ExternalCameraDeviceSession> createSession(
            const std::shared_ptr<ICameraDeviceCallback>&, const ExternalCameraConfig& cfg,
            const std::vector<SupportedV4L2Format>& sortedFormats, const CroppingType& croppingType,
            const common::V1_0::helper::CameraMetadata& chars, const std::string& cameraId,
            unique_fd v4l2Fd);

    // Same check as isInitFailed(); caller must hold mLock.
    bool isInitFailedLocked();

    // Init supported w/h/format/fps in mSupportedFormats. Caller still owns fd
    void initSupportedFormatsLocked(int fd);

    // Calls into virtual member function. Do not use it in constructor
    status_t initCameraCharacteristics();
    // Init available capabilities keys
    virtual status_t initAvailableCapabilities(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
    // Init non-device dependent keys
    virtual status_t initDefaultCharsKeys(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
    // Init camera control chars keys. Caller still owns fd
    status_t initCameraControlsCharsKeys(
            int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
    // Init camera output configuration related keys. Caller still owns fd
    status_t initOutputCharsKeys(
            int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata*);
    // Helper function for initOutputCharskeys
    template <size_t SIZE>
    status_t initOutputCharsKeysByFormat(
            ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata,
            uint32_t fourcc, const std::array<int, SIZE>& halFormats, int streamConfigTag,
            int streamConfiguration, int minFrameDuration, int stallDuration);

    status_t calculateMinFps(::android::hardware::camera::common::V1_0::helper::CameraMetadata*);

    static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format);

    static void updateFpsBounds(int fd, CroppingType cropType,
                                const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
                                SupportedV4L2Format format,
                                std::vector<SupportedV4L2Format>& outFmts);

    // Get candidate supported formats list of input cropping type.
    static std::vector<SupportedV4L2Format> getCandidateSupportedFormatsLocked(
            int fd, CroppingType cropType,
            const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
            const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
            const Size& minStreamSize, bool depthEnabled);
    // Trim supported format list by the cropping type. Also sort output formats by width/height
    static void trimSupportedFormats(CroppingType cropType,
                                     /*inout*/ std::vector<SupportedV4L2Format>* pFmts);

    Mutex mLock;
    bool mInitialized = false;
    bool mInitFailed = false;
    std::string mCameraId;
    std::string mDevicePath;
    // Owned by the provider; assumed to outlive this device object.
    const ExternalCameraConfig& mCfg;
    std::vector<SupportedV4L2Format> mSupportedFormats;
    CroppingType mCroppingType;

    // Currently-open session, if any. Held weakly so the framework controls
    // session lifetime; open() can detect a still-live previous session.
    std::weak_ptr<ExternalCameraDeviceSession> mSession =
            std::weak_ptr<ExternalCameraDeviceSession>();

    ::android::hardware::camera::common::V1_0::helper::CameraMetadata mCameraCharacteristics;

    // Static metadata keys advertised for every external camera.
    const std::vector<int32_t> AVAILABLE_CHARACTERISTICS_KEYS = {
            ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
            ANDROID_CONTROL_AE_AVAILABLE_MODES,
            ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            ANDROID_CONTROL_AE_COMPENSATION_STEP,
            ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            ANDROID_CONTROL_AF_AVAILABLE_MODES,
            ANDROID_CONTROL_AVAILABLE_EFFECTS,
            ANDROID_CONTROL_AVAILABLE_MODES,
            ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
            ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
            ANDROID_CONTROL_AWB_AVAILABLE_MODES,
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            ANDROID_CONTROL_MAX_REGIONS,
            ANDROID_FLASH_INFO_AVAILABLE,
            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
            ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
            ANDROID_LENS_FACING,
            ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
            ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
            ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
            ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
            ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
            ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            ANDROID_SCALER_CROPPING_TYPE,
            ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
            ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
            ANDROID_SENSOR_ORIENTATION,
            ANDROID_SHADING_AVAILABLE_MODES,
            ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
            ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
            ANDROID_SYNC_MAX_LATENCY};
};
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android
#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICE_H_

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,399 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICESESSION_H_
#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICESESSION_H_
#include <ExternalCameraUtils.h>
#include <SimpleThread.h>
#include <aidl/android/hardware/camera/common/Status.h>
#include <aidl/android/hardware/camera/device/BnCameraDeviceSession.h>
#include <aidl/android/hardware/camera/device/BufferRequest.h>
#include <aidl/android/hardware/camera/device/Stream.h>
#include <android-base/unique_fd.h>
#include <fmq/AidlMessageQueue.h>
#include <utils/Thread.h>
#include <deque>
#include <list>
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BnCameraDeviceSession;
using ::aidl::android::hardware::camera::device::BufferCache;
using ::aidl::android::hardware::camera::device::BufferRequest;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::CameraOfflineSessionInfo;
using ::aidl::android::hardware::camera::device::CaptureRequest;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::ICameraOfflineSession;
using ::aidl::android::hardware::camera::device::RequestTemplate;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::android::AidlMessageQueue;
using ::android::base::unique_fd;
using ::android::hardware::camera::common::helper::SimpleThread;
using ::android::hardware::camera::external::common::ExternalCameraConfig;
using ::android::hardware::camera::external::common::SizeHasher;
using ::ndk::ScopedAStatus;
// AIDL ICameraDeviceSession implementation for an external (V4L2/USB)
// camera. Dequeues frames from the V4L2 device and fans them out to the
// configured streams via an OutputThread; also implements
// OutputThreadInterface so its worker threads can call back into it.
class ExternalCameraDeviceSession : public BnCameraDeviceSession, public OutputThreadInterface {
  public:
    ExternalCameraDeviceSession(const std::shared_ptr<ICameraDeviceCallback>&,
                                const ExternalCameraConfig& cfg,
                                const std::vector<SupportedV4L2Format>& sortedFormats,
                                const CroppingType& croppingType,
                                const common::V1_0::helper::CameraMetadata& chars,
                                const std::string& cameraId, unique_fd v4l2Fd);
    ~ExternalCameraDeviceSession() override;

    // Caller must use this method to check if CameraDeviceSession ctor failed
    bool isInitFailed();

    bool isClosed();

    // ICameraDeviceSession interface (AIDL-generated pure virtuals).
    ScopedAStatus close() override;
    ScopedAStatus configureStreams(const StreamConfiguration& in_requestedConfiguration,
                                   std::vector<HalStream>* _aidl_return) override;
    ScopedAStatus constructDefaultRequestSettings(RequestTemplate in_type,
                                                  CameraMetadata* _aidl_return) override;
    ScopedAStatus flush() override;
    ScopedAStatus getCaptureRequestMetadataQueue(
            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
    ScopedAStatus getCaptureResultMetadataQueue(
            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
    ScopedAStatus isReconfigurationRequired(const CameraMetadata& in_oldSessionParams,
                                            const CameraMetadata& in_newSessionParams,
                                            bool* _aidl_return) override;
    ScopedAStatus processCaptureRequest(const std::vector<CaptureRequest>& in_requests,
                                        const std::vector<BufferCache>& in_cachesToRemove,
                                        int32_t* _aidl_return) override;
    ScopedAStatus signalStreamFlush(const std::vector<int32_t>& in_streamIds,
                                    int32_t in_streamConfigCounter) override;
    ScopedAStatus switchToOffline(const std::vector<int32_t>& in_streamsToKeep,
                                  CameraOfflineSessionInfo* out_offlineSessionInfo,
                                  std::shared_ptr<ICameraOfflineSession>* _aidl_return) override;
    ScopedAStatus repeatingRequestEnd(int32_t in_frameNumber,
                                      const std::vector<int32_t>& in_streamIds) override;

    // OutputThreadInterface overrides, invoked by the worker threads below.
    Status importBuffer(int32_t streamId, uint64_t bufId, buffer_handle_t buf,
                        buffer_handle_t** outBufPtr) override;
    void notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) override;
    Status processCaptureRequestError(const std::shared_ptr<HalRequest>& ptr,
                                      std::vector<NotifyMsg>* msgs,
                                      std::vector<CaptureResult>* results) override;
    Status processCaptureResult(std::shared_ptr<HalRequest>& ptr) override;
    ssize_t getJpegBufferSize(int32_t width, int32_t height) const override;

    // Called by CameraDevice to dump active device states
    binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;

    // Stateless validity check shared with ExternalCameraDevice.
    static Status isStreamCombinationSupported(
            const StreamConfiguration& config,
            const std::vector<SupportedV4L2Format>& supportedFormats,
            const ExternalCameraConfig& devCfg);

    static const int kMaxProcessedStream = 2;
    static const int kMaxStallStream = 1;
    static const uint32_t kMaxBytesPerPixel = 2;

    // Worker that requests output buffers from the framework callback in
    // parallel with frame processing.
    class BufferRequestThread : public SimpleThread {
      public:
        BufferRequestThread(std::weak_ptr<OutputThreadInterface> parent,
                            std::shared_ptr<ICameraDeviceCallback> callbacks);

        int requestBufferStart(const std::vector<HalStreamBuffer>&);
        int waitForBufferRequestDone(
                /*out*/ std::vector<HalStreamBuffer>*);

        bool threadLoop() override;

      private:
        void waitForNextRequest();

        const std::weak_ptr<OutputThreadInterface> mParent;
        const std::shared_ptr<ICameraDeviceCallback> mCallbacks;

        std::mutex mLock;
        bool mRequestingBuffer = false;

        std::vector<HalStreamBuffer> mBufferReqs;
        std::vector<HalStreamBuffer> mPendingReturnBufferReqs;
        // mHalBufferReqs is not under mLock protection during the AIDL transaction
        std::vector<BufferRequest> mHalBufferReqs;

        // request buffers takes much less time in steady state, but can take much longer
        // when requesting 1st buffer from a stream.
        // TODO: consider a separate timeout for new vs. steady state?
        // TODO: or make sure framework is warming up the pipeline during configure new stream?
        static const int kReqProcTimeoutMs = 66;

        static const int kReqWaitTimeoutMs = 33;
        static const int kReqWaitTimesWarn = 90;  // 33ms * 90 ~= 3 sec

        std::condition_variable mRequestCond;      // signaled when a new buffer request incoming
        std::condition_variable mRequestDoneCond;  // signaled when a request is done
    };

    // Worker that converts dequeued V4L2 frames (MJPEG decode, scale,
    // format-convert, JPEG encode) into the client's output buffers.
    class OutputThread : public SimpleThread {
      public:
        OutputThread(std::weak_ptr<OutputThreadInterface> parent, CroppingType,
                     const common::V1_0::helper::CameraMetadata&,
                     std::shared_ptr<BufferRequestThread> bufReqThread);
        ~OutputThread();

        Status allocateIntermediateBuffers(const Size& v4lSize, const Size& thumbSize,
                                           const std::vector<Stream>& streams,
                                           uint32_t blobBufferSize);
        Status submitRequest(const std::shared_ptr<HalRequest>&);
        void flush();
        void dump(int fd);

        bool threadLoop() override;

        void setExifMakeModel(const std::string& make, const std::string& model);

        // The remaining request list is returned for offline processing
        std::list<std::shared_ptr<HalRequest>> switchToOffline();

      protected:
        static const int kFlushWaitTimeoutSec = 3;  // 3 sec
        static const int kReqWaitTimeoutMs = 33;    // 33ms
        static const int kReqWaitTimesMax = 90;     // 33ms * 90 ~= 3 sec

        // Methods to request output buffer in parallel
        int requestBufferStart(const std::vector<HalStreamBuffer>&);
        int waitForBufferRequestDone(
                /*out*/ std::vector<HalStreamBuffer>*);

        void waitForNextRequest(std::shared_ptr<HalRequest>* out);
        void signalRequestDone();

        int cropAndScaleLocked(std::shared_ptr<AllocatedFrame>& in, const Size& outSize,
                               YCbCrLayout* out);

        int cropAndScaleThumbLocked(std::shared_ptr<AllocatedFrame>& in, const Size& outSize,
                                    YCbCrLayout* out);

        int createJpegLocked(HalStreamBuffer& halBuf,
                             const common::V1_0::helper::CameraMetadata& settings);

        void clearIntermediateBuffers();

        const std::weak_ptr<OutputThreadInterface> mParent;
        const CroppingType mCroppingType;
        const common::V1_0::helper::CameraMetadata mCameraCharacteristics;

        mutable std::mutex mRequestListLock;       // Protect access to mRequestList,
                                                   // mProcessingRequest and mProcessingFrameNumber
        std::condition_variable mRequestCond;      // signaled when a new request is submitted
        std::condition_variable mRequestDoneCond;  // signaled when a request is done processing
        std::list<std::shared_ptr<HalRequest>> mRequestList;
        bool mProcessingRequest = false;
        uint32_t mProcessingFrameNumber = 0;

        // V4L2 frameIn
        // (MJPG decode)-> mYu12Frame
        //     (Scale)-> mScaledYu12Frames
        //     (Format convert) -> output gralloc frames
        mutable std::mutex mBufferLock;  // Protect access to intermediate buffers
        std::shared_ptr<AllocatedFrame> mYu12Frame;
        std::shared_ptr<AllocatedFrame> mYu12ThumbFrame;
        std::unordered_map<Size, std::shared_ptr<AllocatedFrame>, SizeHasher> mIntermediateBuffers;
        std::unordered_map<Size, std::shared_ptr<AllocatedFrame>, SizeHasher> mScaledYu12Frames;
        YCbCrLayout mYu12FrameLayout;
        YCbCrLayout mYu12ThumbFrameLayout;
        std::vector<uint8_t> mMuteTestPatternFrame;
        uint32_t mTestPatternData[4] = {0, 0, 0, 0};
        bool mCameraMuted = false;
        uint32_t mBlobBufferSize = 0;  // 0 -> HAL derive buffer size, else: use given size

        std::string mExifMake;
        std::string mExifModel;

        const std::shared_ptr<BufferRequestThread> mBufferRequestThread;
    };

  private:
    // Returns true on FAILURE (isInitFailed() convention).
    bool initialize();
    // To init/close different version of output thread
    void initOutputThread();
    void closeOutputThread();
    void closeOutputThreadImpl();

    void close(bool callerIsDtor);
    Status initStatus() const;
    status_t initDefaultRequests();

    status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp);
    int configureV4l2StreamLocked(const SupportedV4L2Format& fmt, double fps = 0.0);
    int v4l2StreamOffLocked();

    int setV4l2FpsLocked(double fps);

    std::unique_ptr<V4L2Frame> dequeueV4l2FrameLocked(
            /*out*/ nsecs_t* shutterTs);  // Called with mLock held

    void enqueueV4l2Frame(const std::shared_ptr<V4L2Frame>&);

    // Check if input Stream is one of supported stream setting on this device
    static bool isSupported(const Stream& stream,
                            const std::vector<SupportedV4L2Format>& supportedFormats,
                            const ExternalCameraConfig& cfg);

    // Validate and import request's output buffers and acquire fence
    Status importRequestLocked(const CaptureRequest& request,
                               std::vector<buffer_handle_t*>& allBufPtrs,
                               std::vector<int>& allFences);

    Status importRequestLockedImpl(const CaptureRequest& request,
                                   std::vector<buffer_handle_t*>& allBufPtrs,
                                   std::vector<int>& allFences);

    Status importBufferLocked(int32_t streamId, uint64_t bufId, buffer_handle_t buf,
                              /*out*/ buffer_handle_t** outBufPtr);
    static void cleanupInflightFences(std::vector<int>& allFences, size_t numFences);
    void cleanupBuffersLocked(int id);

    void updateBufferCaches(const std::vector<BufferCache>& cachesToRemove);

    Status processOneCaptureRequest(const CaptureRequest& request);

    void notifyShutter(int32_t frameNumber, nsecs_t shutterTs);
    void invokeProcessCaptureResultCallback(std::vector<CaptureResult>& results, bool tryWriteFmq);

    Size getMaxJpegResolution() const;
    Size getMaxThumbResolution() const;

    int waitForV4L2BufferReturnLocked(std::unique_lock<std::mutex>& lk);

    // Main body of switchToOffline. This method does not invoke any callbacks
    // but instead returns the necessary callbacks in output arguments so callers
    // can callback later without holding any locks
    Status switchToOffline(const std::vector<int32_t>& offlineStreams,
                           /*out*/ std::vector<NotifyMsg>* msgs,
                           /*out*/ std::vector<CaptureResult>* results,
                           /*out*/ CameraOfflineSessionInfo* info,
                           /*out*/ std::shared_ptr<ICameraOfflineSession>* session);

    bool supportOfflineLocked(int32_t streamId);

    // Whether a request can be completely dropped when switching to offline
    bool canDropRequest(const std::vector<int32_t>& offlineStreams,
                        std::shared_ptr<HalRequest> halReq);

    void fillOfflineSessionInfo(const std::vector<int32_t>& offlineStreams,
                                std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
                                const std::map<int, CirculatingBuffers>& circulatingBuffers,
                                /*out*/ CameraOfflineSessionInfo* info);

    // Protect (most of) AIDL interface methods from synchronized-entering
    mutable Mutex mInterfaceLock;

    mutable Mutex mLock;  // Protect all private members except otherwise noted
    const std::shared_ptr<ICameraDeviceCallback> mCallback;
    const ExternalCameraConfig& mCfg;
    const common::V1_0::helper::CameraMetadata mCameraCharacteristics;
    const std::vector<SupportedV4L2Format> mSupportedFormats;
    const CroppingType mCroppingType;
    const std::string mCameraId;

    // Not protected by mLock, this is almost a const.
    // Setup in constructor, reset in close() after OutputThread is joined
    unique_fd mV4l2Fd;

    // device is closed either
    //    - closed by user
    //    - init failed
    //    - camera disconnected
    bool mClosed = false;
    bool mInitialized = false;
    bool mInitFail = false;
    bool mFirstRequest = false;
    common::V1_0::helper::CameraMetadata mLatestReqSetting;

    bool mV4l2Streaming = false;
    SupportedV4L2Format mV4l2StreamingFmt;
    double mV4l2StreamingFps = 0.0;
    size_t mV4L2BufferCount = 0;

    static const int kBufferWaitTimeoutSec = 3;  // TODO: handle long exposure (or not allowing)
    std::mutex mV4l2BufferLock;                  // protect the buffer count and condition below
    std::condition_variable mV4L2BufferReturned;
    size_t mNumDequeuedV4l2Buffers = 0;
    uint32_t mMaxV4L2BufferSize = 0;

    // Not protected by mLock (but might be used when mLock is locked)
    std::shared_ptr<OutputThread> mOutputThread;

    // Stream ID -> Stream cache
    std::unordered_map<int, Stream> mStreamMap;

    std::mutex mInflightFramesLock;  // protect mInflightFrames
    std::unordered_set<uint32_t> mInflightFrames;

    // Stream ID -> circulating buffers map
    std::map<int, CirculatingBuffers> mCirculatingBuffers;
    // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock
    mutable Mutex mCbsLock;

    std::mutex mAfTriggerLock;  // protect mAfTrigger
    bool mAfTrigger = false;

    uint32_t mBlobBufferSize = 0;

    static HandleImporter sHandleImporter;

    bool mSupportBufMgr;
    std::shared_ptr<BufferRequestThread> mBufferRequestThread;

    /* Beginning of members not changed after initialize() */
    using RequestMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
    std::unique_ptr<RequestMetadataQueue> mRequestMetadataQueue;
    using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
    std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;

    // Protect against invokeProcessCaptureResultCallback()
    Mutex mProcessCaptureResultLock;

    // tracks last seen stream config counter
    int32_t mLastStreamConfigCounter = -1;

    std::unordered_map<RequestTemplate, CameraMetadata> mDefaultRequests;

    const Size mMaxThumbResolution;
    const Size mMaxJpegResolution;

    std::string mExifMake;
    std::string mExifModel;
    /* End of members not changed after initialize() */
};
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android
#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERADEVICESESSION_H_

View file

@ -0,0 +1,547 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "ExtCamOfflnSsn"
#include <android/log.h>
#include "ExternalCameraOfflineSession.h"
#include <aidl/android/hardware/camera/device/BufferStatus.h>
#include <aidl/android/hardware/camera/device/ErrorMsg.h>
#include <aidl/android/hardware/camera/device/ShutterMsg.h>
#include <aidl/android/hardware/camera/device/StreamBuffer.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <convert.h>
#include <linux/videodev2.h>
#include <sync/sync.h>
#include <utils/Trace.h>
#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs
#include <libyuv.h>
namespace {
// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
// NOTE(review): presumably kept equal to the live session's queue size so
// offline results fit the same FMQ — confirm against ExternalCameraDeviceSession.cpp.
constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;
}  // anonymous namespace
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::camera::device::StreamBuffer;
// Static instance
HandleImporter ExternalCameraOfflineSession::sHandleImporter;
// Snapshots everything the offline session needs from the live device session:
// static characteristics, EXIF identity, the remaining in-flight requests and
// the gralloc buffers already imported for the offline streams. Processing
// does not start here — threads are created in setCallback()/initOutputThread().
ExternalCameraOfflineSession::ExternalCameraOfflineSession(
        const CroppingType& croppingType, const common::V1_0::helper::CameraMetadata& chars,
        const std::string& cameraId, const std::string& exifMake, const std::string& exifModel,
        uint32_t blobBufferSize, bool afTrigger, const std::vector<Stream>& offlineStreams,
        std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
        const std::map<int, CirculatingBuffers>& circulatingBuffers)
    : mCroppingType(croppingType),
      mChars(chars),
      mCameraId(cameraId),
      mExifMake(exifMake),
      mExifModel(exifModel),
      mBlobBufferSize(blobBufferSize),
      mAfTrigger(afTrigger),
      mOfflineStreams(offlineStreams),
      mOfflineReqs(offlineReqs),
      mCirculatingBuffers(circulatingBuffers) {}

// close() is idempotent, so destroying an already-closed session is safe.
ExternalCameraOfflineSession::~ExternalCameraOfflineSession() {
    close();
}
// Allocates the fast message queue used to hand capture-result metadata back
// to the framework without a binder round trip.
// NOTE: returns true on FAILURE and false on success — inverted from the usual
// bool convention (presumably mirroring the device session's initialize();
// confirm against the caller before changing).
bool ExternalCameraOfflineSession::initialize() {
    mResultMetadataQueue =
            std::make_shared<ResultMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);
    if (!mResultMetadataQueue->isValid()) {
        ALOGE("%s: invalid result fmq", __FUNCTION__);
        return true;
    }
    return false;
}
// OutputThreadInterface: imports (or fetches a cached copy of) the buffer
// identified by (streamId, bufId) into the per-stream circulating-buffer map.
// Delegates to the shared importBufferImpl() helper while holding mCbsLock.
Status ExternalCameraOfflineSession::importBuffer(int32_t streamId, uint64_t bufId,
                                                  buffer_handle_t buf,
                                                  buffer_handle_t** outBufPtr) {
    Mutex::Autolock _l(mCbsLock);
    return importBufferImpl(mCirculatingBuffers, sHandleImporter, streamId, bufId, buf, outBufPtr);
}
// OutputThreadInterface: builds the CaptureResult for a finished request and
// delivers it to the framework. Per output buffer this sets OK/ERROR status
// (ERROR when the acquire fence timed out, which also triggers an
// ERROR_BUFFER notification) and hands the unconsumed acquire fence back as
// the release fence. Result metadata is converted from the request settings.
Status ExternalCameraOfflineSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {
    ATRACE_CALL();
    // Fill output buffers
    std::vector<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req->frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req->buffers.size());
    for (size_t i = 0; i < req->buffers.size(); i++) {
        StreamBuffer& outputBuffer = result.outputBuffers[i];
        outputBuffer.streamId = req->buffers[i].streamId;
        outputBuffer.bufferId = req->buffers[i].bufferId;
        outputBuffer.status =
                req->buffers[i].fenceTimeout ? BufferStatus::ERROR : BufferStatus::OK;
        // Common to both the OK and the error path: wrap the still-pending
        // acquire fence into a release fence so the framework waits on it.
        // (The NativeHandles created here are released by freeReleaseFences().)
        if (req->buffers[i].acquireFence >= 0) {
            native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
            handle->data[0] = req->buffers[i].acquireFence;
            outputBuffer.releaseFence = android::makeToAidl(handle);
        }
        if (req->buffers[i].fenceTimeout) {
            notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);
        }
    }

    // Fill capture result metadata
    fillCaptureResult(req->setting, req->shutterTs);
    const camera_metadata_t* rawResult = req->setting.getAndLock();
    convertToAidl(rawResult, &result.result);
    req->setting.unlock(rawResult);

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
    freeReleaseFences(results);
    return Status::OK;
}
// Writes a single metadata tag into `md`; on failure, logs the tag name and
// makes the *enclosing function* return BAD_VALUE. Only usable inside
// functions whose return type accepts BAD_VALUE (e.g. status_t).
#define UPDATE(md, tag, data, size)               \
    do {                                          \
        if ((md).update((tag), (data), (size))) { \
            ALOGE("Update " #tag " failed!");     \
            return BAD_VALUE;                     \
        }                                         \
    } while (0)
// Populates the result metadata for one request. The USB camera controls
// focus itself, so AF state is synthesized from the AF trigger carried in the
// request settings: a START trigger latches "focused/locked" until a CANCEL
// trigger clears it. Remaining result tags come from fillCaptureResultCommon().
status_t ExternalCameraOfflineSession::fillCaptureResult(common::V1_0::helper::CameraMetadata md,
                                                         nsecs_t timestamp) {
    bool afTrigger;
    {
        std::lock_guard<std::mutex> lock(mAfTriggerLock);
        if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
            camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
            switch (entry.data.u8[0]) {
                case ANDROID_CONTROL_AF_TRIGGER_START:
                    mAfTrigger = true;
                    break;
                case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
                    mAfTrigger = false;
                    break;
                default:
                    break;
            }
        }
        afTrigger = mAfTrigger;
    }

    // For USB camera, the USB camera handles everything and we don't have control
    // over AF. We only simply fake the AF metadata based on the request
    // received here.
    const uint8_t afState = afTrigger ? ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
                                      : ANDROID_CONTROL_AF_STATE_INACTIVE;
    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);

    camera_metadata_ro_entry activeArraySize = mChars.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    return fillCaptureResultCommon(md, timestamp, activeArraySize);
}
// Delivers a batch of CaptureResults to the framework callback, serialized by
// mProcessCaptureResultLock so results arrive in order. When tryWriteFmq is
// set and the FMQ has room, metadata is shipped through the fast message
// queue (fmqResultSize > 0 and the inline metadata vector cleared) instead of
// through binder; on FMQ write failure it falls back to inline metadata.
void ExternalCameraOfflineSession::invokeProcessCaptureResultCallback(
        std::vector<CaptureResult>& results, bool tryWriteFmq) {
    // Callbacks must not overlap; give a previous in-flight callback up to 1s
    // to finish before dropping these results.
    if (mProcessCaptureResultLock.tryLock() != OK) {
        const nsecs_t NS_TO_SECOND = 1E9;
        ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
        if (mProcessCaptureResultLock.timedLock(/* 1s */ NS_TO_SECOND) != OK) {
            ALOGE("%s: cannot acquire lock in 1s, cannot proceed", __FUNCTION__);
            return;
        }
    }
    if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
        for (CaptureResult& result : results) {
            if (!result.result.metadata.empty()) {
                if (mResultMetadataQueue->write(
                            reinterpret_cast<int8_t*>(result.result.metadata.data()),
                            result.result.metadata.size())) {
                    result.fmqResultSize = result.result.metadata.size();
                    result.result.metadata.clear();
                } else {
                    ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
                    result.fmqResultSize = 0;
                }
            } else {
                // No metadata to send; fmqResultSize 0 tells the framework to
                // read metadata inline (here: none).
                result.fmqResultSize = 0;
            }
        }
    }
    auto status = mCallback->processCaptureResult(results);
    if (!status.isOk()) {
        ALOGE("%s: processCaptureResult ERROR : %d:%d", __FUNCTION__, status.getExceptionCode(),
              status.getServiceSpecificError());
    }
    mProcessCaptureResultLock.unlock();
}
// OutputThreadInterface: reports a request that could not be processed.
// Every output buffer is returned with ERROR status (acquire fences handed
// back as release fences). If outMsgs is null the ERROR_REQUEST notification
// is sent immediately; otherwise shutter + error messages are appended to
// outMsgs for the caller to deliver. Likewise the CaptureResult is either
// sent immediately (outResults == null) or appended to outResults.
Status ExternalCameraOfflineSession::processCaptureRequestError(
        const std::shared_ptr<HalRequest>& req, std::vector<NotifyMsg>* outMsgs,
        std::vector<CaptureResult>* outResults) {
    ATRACE_CALL();

    if (outMsgs == nullptr) {
        notifyError(/*frameNum*/ req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_REQUEST);
    } else {
        NotifyMsg shutter;
        shutter.set<NotifyMsg::Tag::shutter>(ShutterMsg{
                .frameNumber = req->frameNumber,
                .timestamp = req->shutterTs,
        });

        NotifyMsg error;
        error.set<NotifyMsg::Tag::error>(ErrorMsg{.frameNumber = req->frameNumber,
                                                  .errorStreamId = -1,
                                                  .errorCode = ErrorCode::ERROR_REQUEST});
        outMsgs->push_back(shutter);
        outMsgs->push_back(error);
    }

    // Fill output buffers
    CaptureResult result;
    result.frameNumber = req->frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req->buffers.size());
    for (size_t i = 0; i < req->buffers.size(); i++) {
        StreamBuffer& outputBuffer = result.outputBuffers[i];
        outputBuffer.streamId = req->buffers[i].streamId;
        outputBuffer.bufferId = req->buffers[i].bufferId;
        outputBuffer.status = BufferStatus::ERROR;
        if (req->buffers[i].acquireFence >= 0) {
            native_handle_t* handle = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
            handle->data[0] = req->buffers[i].acquireFence;
            // Unqualified call resolves to android::makeToAidl (we are inside
            // namespace android).
            outputBuffer.releaseFence = makeToAidl(handle);
        }
    }

    if (outResults == nullptr) {
        // Callback into framework
        std::vector<CaptureResult> results(1);
        results[0] = std::move(result);
        invokeProcessCaptureResultCallback(results, /* tryWriteFmq */ true);
        freeReleaseFences(results);
    } else {
        outResults->push_back(std::move(result));
    }
    return Status::OK;
}
// OutputThreadInterface: the BLOB (JPEG) buffer size for an offline session
// is fixed at construction time (mBlobBufferSize), so this query is a stub.
ssize_t ExternalCameraOfflineSession::getJpegBufferSize(int32_t, int32_t) const {
    // Empty implementation here as the jpeg buffer size is passed in by ctor
    return 0;
}
void ExternalCameraOfflineSession::notifyError(int32_t frameNumber, int32_t streamId,
ErrorCode ec) {
NotifyMsg msg;
msg.set<NotifyMsg::Tag::error>(
ErrorMsg{.frameNumber = frameNumber, .errorStreamId = streamId, .errorCode = ec});
mCallback->notify({msg});
}
// Registers the framework callback and starts the worker threads that drain
// the offline request queue.
// NOTE(review): replacing an already-set callback with another non-null one
// is rejected with only an error log, yet Status::OK is still returned —
// presumably mirroring the legacy behavior; confirm this is intentional.
ScopedAStatus ExternalCameraOfflineSession::setCallback(
        const std::shared_ptr<ICameraDeviceCallback>& in_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    if (mCallback != nullptr && in_cb != nullptr) {
        ALOGE("%s: callback must not be set twice!", __FUNCTION__);
        return fromStatus(Status::OK);
    }
    mCallback = in_cb;

    initOutputThread();
    if (mOutputThread == nullptr) {
        ALOGE("%s: init OutputThread failed!", __FUNCTION__);
    }
    return fromStatus(Status::OK);
}
// Creates and starts the BufferRequestThread and OutputThread that process
// the snapshotted offline requests. Called from setCallback() once the
// framework callback is available; a failure leaves mOutputThread null,
// which setCallback() logs. No-op (with an error log) if already started.
void ExternalCameraOfflineSession::initOutputThread() {
    if (mOutputThread != nullptr) {
        ALOGE("%s: OutputThread already exist!", __FUNCTION__);
        return;
    }

    // Guard against an empty request queue: the intermediate-buffer sizing
    // below indexes mOfflineReqs[0], which would be undefined behavior on an
    // empty deque. An offline session with no requests has nothing to do.
    if (mOfflineReqs.empty()) {
        ALOGE("%s: no offline requests to process!", __FUNCTION__);
        return;
    }

    // Grab a shared_ptr to 'this' from ndk::SharedRefBase::ref()
    std::shared_ptr<ExternalCameraOfflineSession> thiz = ref<ExternalCameraOfflineSession>();

    mBufferRequestThread = std::make_shared<ExternalCameraDeviceSession::BufferRequestThread>(
            /*parent=*/thiz, mCallback);
    mBufferRequestThread->run();

    mOutputThread = std::make_shared<OutputThread>(/*parent=*/thiz, mCroppingType, mChars,
                                                   mBufferRequestThread, mOfflineReqs);

    mOutputThread->setExifMakeModel(mExifMake, mExifModel);

    // Intermediate buffers are sized from the first pending frame; all frames
    // of one session share the same input dimensions.
    Size inputSize = {mOfflineReqs[0]->frameIn->mWidth, mOfflineReqs[0]->frameIn->mHeight};
    Size maxThumbSize = getMaxThumbnailResolution(mChars);
    mOutputThread->allocateIntermediateBuffers(inputSize, maxThumbSize, mOfflineStreams,
                                               mBlobBufferSize);
    mOutputThread->run();
}
// Returns a duplicate of the result-metadata FMQ descriptor so the framework
// can map the same queue on its side and read metadata without binder copies.
ScopedAStatus ExternalCameraOfflineSession::getCaptureResultMetadataQueue(
        MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) {
    Mutex::Autolock _il(mInterfaceLock);
    *_aidl_return = mResultMetadataQueue->dupeDesc();
    return fromStatus(Status::OK);
}
// Shuts the offline session down: stops both worker threads, frees every
// cached gralloc buffer, and drops the framework callback. Idempotent — a
// second call only logs a warning. Also invoked from the destructor.
ScopedAStatus ExternalCameraOfflineSession::close() {
    Mutex::Autolock _il(mInterfaceLock);
    {
        Mutex::Autolock _l(mLock);
        if (mClosed) {
            ALOGW("%s: offline session already closed!", __FUNCTION__);
            return fromStatus(Status::OK);
        }
    }
    // Stop the worker threads without holding mLock; requestExitAndWait()
    // blocks until the thread joins.
    if (mBufferRequestThread != nullptr) {
        mBufferRequestThread->requestExitAndWait();
        mBufferRequestThread.reset();
    }
    if (mOutputThread) {
        mOutputThread->flush();
        mOutputThread->requestExitAndWait();
        mOutputThread.reset();
    }

    Mutex::Autolock _l(mLock);
    // free all buffers
    {
        // Lock order: mLock is held here before mCbsLock, consistent with the
        // "must not lock mLock after acquiring mCbsLock" rule on mCbsLock.
        Mutex::Autolock _cbl(mCbsLock);
        for (auto& stream : mOfflineStreams) {
            cleanupBuffersLocked(stream.id);
        }
    }
    mCallback.reset();
    mClosed = true;
    return fromStatus(Status::OK);
}
// Frees every gralloc buffer cached for stream `id` and removes the stream's
// entry from the circulating-buffer map. Caller must hold mCbsLock.
void ExternalCameraOfflineSession::cleanupBuffersLocked(int32_t id) {
    for (auto& pair : mCirculatingBuffers.at(id)) {
        sHandleImporter.freeBuffer(pair.second);
    }
    // erase() removes the whole per-stream map; a separate clear() first
    // (as the old code did, via operator[] no less) is redundant.
    mCirculatingBuffers.erase(id);
}
// Processes one queued offline request per loop iteration: decodes the
// captured V4L2 frame, fills every requested output buffer, and reports the
// result to the parent session. Returns false to stop the thread once all
// requests are drained or the parent session has gone away; returns true to
// be called again.
bool ExternalCameraOfflineSession::OutputThread::threadLoop() {
    auto parent = mParent.lock();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return false;
    }

    if (mOfflineReqs.empty()) {
        ALOGI("%s: all offline requests are processed. Stopping.", __FUNCTION__);
        return false;
    }

    std::shared_ptr<HalRequest> req = mOfflineReqs.front();
    mOfflineReqs.pop_front();

    // Unrecoverable failures notify ERROR_DEVICE and stop the thread.
    auto onDeviceError = [&](auto... args) {
        ALOGE(args...);
        parent->notifyError(req->frameNumber, /*stream*/ -1, ErrorCode::ERROR_DEVICE);
        signalRequestDone();
        return false;
    };

    // Only MJPEG (compressed) and Z16 (depth) input formats are handled.
    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
        return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
                             req->frameIn->mFourcc & 0xFF, (req->frameIn->mFourcc >> 8) & 0xFF,
                             (req->frameIn->mFourcc >> 16) & 0xFF,
                             (req->frameIn->mFourcc >> 24) & 0xFF);
    }

    // Kick off the output-buffer request early so it can complete in parallel
    // with the MJPEG decode below; we wait for it afterwards.
    int res = requestBufferStart(req->buffers);
    if (res != 0) {
        ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res);
        return onDeviceError("%s: failed to send buffer request!", __FUNCTION__);
    }

    std::unique_lock<std::mutex> lk(mBufferLock);

    // Convert input V4L2 frame to YU12 of the same size
    // TODO: see if we can save some computation by converting to YV12 here
    uint8_t* inData;
    size_t inDataSize;
    if (req->frameIn->getData(&inData, &inDataSize) != 0) {
        lk.unlock();
        return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__);
    }

    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
    if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) {
        ATRACE_BEGIN("MJPGtoI420");
        int convRes = libyuv::MJPGToI420(
                inData, inDataSize, static_cast<uint8_t*>(mYu12FrameLayout.y),
                mYu12FrameLayout.yStride, static_cast<uint8_t*>(mYu12FrameLayout.cb),
                mYu12FrameLayout.cStride, static_cast<uint8_t*>(mYu12FrameLayout.cr),
                mYu12FrameLayout.cStride, mYu12Frame->mWidth, mYu12Frame->mHeight,
                mYu12Frame->mWidth, mYu12Frame->mHeight);
        ATRACE_END();

        if (convRes != 0) {
            // For some webcam, the first few V4L2 frames might be malformed...
            // A decode failure is a per-request error, not a device error.
            ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, convRes);
            lk.unlock();
            Status st = parent->processCaptureRequestError(req);
            if (st != Status::OK) {
                return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
            }
            signalRequestDone();
            return true;
        }
    }

    ATRACE_BEGIN("Wait for BufferRequest done");
    res = waitForBufferRequestDone(&req->buffers);
    ATRACE_END();

    if (res != 0) {
        ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res);
        lk.unlock();
        return onDeviceError("%s: failed to process buffer request error!", __FUNCTION__);
    }

    ALOGV("%s processing new request", __FUNCTION__);
    const int kSyncWaitTimeoutMs = 500;
    for (auto& halBuf : req->buffers) {
        if (*(halBuf.bufPtr) == nullptr) {
            ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId);
            halBuf.fenceTimeout = true;
        } else if (halBuf.acquireFence >= 0) {
            // Wait for the acquire fence before writing into the buffer.
            int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
            if (ret) {
                halBuf.fenceTimeout = true;
            } else {
                ::close(halBuf.acquireFence);
                halBuf.acquireFence = -1;
            }
        }

        // A timed-out buffer is skipped; processCaptureResult() will mark it
        // as BufferStatus::ERROR via the fenceTimeout flag.
        if (halBuf.fenceTimeout) {
            continue;
        }

        // Gralloc lockYCbCr the buffer
        switch (halBuf.format) {
            case PixelFormat::BLOB: {
                int ret = createJpegLocked(halBuf, req->setting);

                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: createJpegLocked failed with %d", __FUNCTION__, ret);
                }
            } break;
            case PixelFormat::Y16: {
                // Depth (Z16) frames are copied to the output verbatim.
                void* outLayout = sHandleImporter.lock(
                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), inDataSize);

                std::memcpy(outLayout, inData, inDataSize);

                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence >= 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: {
                IMapper::Rect outRect{0, 0, static_cast<int32_t>(halBuf.width),
                                      static_cast<int32_t>(halBuf.height)};
                YCbCrLayout outLayout = sHandleImporter.lockYCbCr(
                        *(halBuf.bufPtr), static_cast<uint64_t>(halBuf.usage), outRect);
                ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", __FUNCTION__,
                      outLayout.y, outLayout.cb, outLayout.cr, outLayout.yStride, outLayout.cStride,
                      outLayout.chromaStep);

                // Convert to output buffer size/format
                uint32_t outputFourcc = getFourCcFromLayout(outLayout);
                ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, outputFourcc & 0xFF,
                      (outputFourcc >> 8) & 0xFF, (outputFourcc >> 16) & 0xFF,
                      (outputFourcc >> 24) & 0xFF);

                YCbCrLayout cropAndScaled;
                ATRACE_BEGIN("cropAndScaleLocked");
                int ret = cropAndScaleLocked(mYu12Frame, Size{halBuf.width, halBuf.height},
                                             &cropAndScaled);
                ATRACE_END();
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
                }

                Size sz{halBuf.width, halBuf.height};
                ATRACE_BEGIN("formatConvert");
                ret = formatConvert(cropAndScaled, outLayout, sz, outputFourcc);
                ATRACE_END();
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: format coversion failed!", __FUNCTION__);
                }

                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence >= 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            default:
                lk.unlock();
                return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
        }
    }  // for each buffer
    mScaledYu12Frames.clear();

    // Don't hold the lock while calling back to parent
    lk.unlock();
    Status st = parent->processCaptureResult(req);
    if (st != Status::OK) {
        return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
    }
    signalRequestDone();
    return true;
}
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android

View file

@ -0,0 +1,143 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAOFFLINESESSION_H_
#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAOFFLINESESSION_H_
#include <ExternalCameraDeviceSession.h>
#include <ExternalCameraUtils.h>
#include <aidl/android/hardware/camera/common/Status.h>
#include <aidl/android/hardware/camera/device/BnCameraOfflineSession.h>
#include <aidl/android/hardware/camera/device/Stream.h>
#include <fmq/AidlMessageQueue.h>
#include <utils/RefBase.h>
#include <deque>
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BnCameraOfflineSession;
using ::aidl::android::hardware::camera::device::ICameraDeviceCallback;
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
// AIDL ICameraOfflineSession implementation for external (USB/V4L2) cameras.
// Takes over the requests that were still in flight when the live device
// session was switched to offline mode and finishes processing them on its
// own worker threads.
class ExternalCameraOfflineSession : public BnCameraOfflineSession,
                                     public virtual RefBase,
                                     public virtual OutputThreadInterface {
  public:
    ExternalCameraOfflineSession(const CroppingType& croppingType,
                                 const common::V1_0::helper::CameraMetadata& chars,
                                 const std::string& cameraId, const std::string& exifMake,
                                 const std::string& exifModel, uint32_t blobBufferSize,
                                 bool afTrigger, const std::vector<Stream>& offlineStreams,
                                 std::deque<std::shared_ptr<HalRequest>>& offlineReqs,
                                 const std::map<int, CirculatingBuffers>& circulatingBuffers);

    ~ExternalCameraOfflineSession() override;

    // Allocates the result-metadata FMQ. NOTE: returns true on FAILURE.
    bool initialize();

    // Methods from OutputThreadInterface
    Status importBuffer(int32_t streamId, uint64_t bufId, buffer_handle_t buf,
                        /*out*/ buffer_handle_t** outBufPtr) override;

    Status processCaptureResult(std::shared_ptr<HalRequest>&) override;

    Status processCaptureRequestError(const std::shared_ptr<HalRequest>&,
                                      /*out*/ std::vector<NotifyMsg>* msgs,
                                      /*out*/ std::vector<CaptureResult>* results) override;

    ssize_t getJpegBufferSize(int32_t width, int32_t height) const override;

    void notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) override;
    // End of OutputThreadInterface methods

    ScopedAStatus setCallback(const std::shared_ptr<ICameraDeviceCallback>& in_cb) override;
    ScopedAStatus getCaptureResultMetadataQueue(
            MQDescriptor<int8_t, SynchronizedReadWrite>* _aidl_return) override;
    ScopedAStatus close() override;

  private:
    // Drains mOfflineReqs; shares the conversion machinery of the live
    // session's OutputThread.
    class OutputThread : public ExternalCameraDeviceSession::OutputThread {
      public:
        OutputThread(std::weak_ptr<OutputThreadInterface> parent, CroppingType ct,
                     const common::V1_0::helper::CameraMetadata& chars,
                     std::shared_ptr<ExternalCameraDeviceSession::BufferRequestThread> bufReqThread,
                     std::deque<std::shared_ptr<HalRequest>>& offlineReqs)
            : ExternalCameraDeviceSession::OutputThread(std::move(parent), ct, chars,
                                                        std::move(bufReqThread)),
              mOfflineReqs(offlineReqs) {}

        bool threadLoop() override;

      protected:
        std::deque<std::shared_ptr<HalRequest>> mOfflineReqs;
    };  // OutputThread

    status_t fillCaptureResult(common::V1_0::helper::CameraMetadata md, nsecs_t timestamp);
    void invokeProcessCaptureResultCallback(std::vector<CaptureResult>& results, bool tryWriteFmq);
    void initOutputThread();
    void cleanupBuffersLocked(int32_t id);

    // Protect (most of) AIDL interface methods from synchronized-entering
    mutable Mutex mInterfaceLock;

    mutable Mutex mLock;  // Protect all data members except otherwise noted

    bool mClosed = false;
    const CroppingType mCroppingType;
    const common::V1_0::helper::CameraMetadata mChars;
    const std::string mCameraId;
    const std::string mExifMake;
    const std::string mExifModel;
    const uint32_t mBlobBufferSize;

    std::mutex mAfTriggerLock;  // protect mAfTrigger
    bool mAfTrigger;

    const std::vector<Stream> mOfflineStreams;
    std::deque<std::shared_ptr<HalRequest>> mOfflineReqs;

    // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock
    mutable Mutex mCbsLock;
    std::map<int, CirculatingBuffers> mCirculatingBuffers;

    static HandleImporter sHandleImporter;

    using ResultMetadataQueue = AidlMessageQueue<int8_t, SynchronizedReadWrite>;
    std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;

    // Protect against invokeProcessCaptureResultCallback()
    Mutex mProcessCaptureResultLock;

    std::shared_ptr<ICameraDeviceCallback> mCallback;

    std::shared_ptr<ExternalCameraDeviceSession::BufferRequestThread> mBufferRequestThread;
    std::shared_ptr<OutputThread> mOutputThread;
};
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android
#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAOFFLINESESSION_H_

View file

@ -0,0 +1,860 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "ExtCamUtils"
// #define LOG_NDEBUG 0
#include "ExternalCameraUtils.h"
#include <aidlcommonsupport/NativeHandle.h>
#include <jpeglib.h>
#include <linux/videodev2.h>
#include <log/log.h>
#include <algorithm>
#include <cinttypes>
#include <cmath>
#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs
#include <libyuv.h>
namespace android {
namespace hardware {
namespace camera {
namespace external {
namespace common {
namespace {
// Defaults used when the external camera config XML is missing or omits the
// corresponding fields (see ExternalCameraConfig::loadFromCfg below).
const int kDefaultCameraIdOffset = 100;
const int kDefaultJpegBufSize = 5 << 20;  // 5MB
const int kDefaultNumVideoBuffer = 4;
const int kDefaultNumStillBuffer = 2;
const int kDefaultOrientation = 0;  // suitable for natural landscape displays like tablet/TV
                                    // For phone devices 270 is better
}  // anonymous namespace

const char* ExternalCameraConfig::kDefaultCfgPath = "/vendor/etc/external_camera_config.xml";
// Parses the external camera config XML at cfgPath into an
// ExternalCameraConfig. Any missing file, element, or attribute falls back to
// the compiled-in defaults (the partially-filled `ret` is returned on every
// early-out, never an error).
ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) {
    using namespace tinyxml2;
    ExternalCameraConfig ret;

    XMLDocument configXml;
    XMLError err = configXml.LoadFile(cfgPath);
    if (err != XML_SUCCESS) {
        ALOGE("%s: Unable to load external camera config file '%s'. Error: %s", __FUNCTION__,
              cfgPath, XMLDocument::ErrorIDToName(err));
        return ret;
    } else {
        ALOGI("%s: load external camera config succeeded!", __FUNCTION__);
    }

    XMLElement* extCam = configXml.FirstChildElement("ExternalCamera");
    if (extCam == nullptr) {
        ALOGI("%s: no external camera config specified", __FUNCTION__);
        return ret;
    }

    XMLElement* providerCfg = extCam->FirstChildElement("Provider");
    if (providerCfg == nullptr) {
        ALOGI("%s: no external camera provider config specified", __FUNCTION__);
        return ret;
    }

    XMLElement* cameraIdOffset = providerCfg->FirstChildElement("CameraIdOffset");
    // GetText() returns nullptr for an empty element (<CameraIdOffset/>);
    // guard it — passing nullptr to atoi is undefined behavior.
    if (cameraIdOffset != nullptr && cameraIdOffset->GetText() != nullptr) {
        ret.cameraIdOffset = std::atoi(cameraIdOffset->GetText());
    }

    XMLElement* ignore = providerCfg->FirstChildElement("ignore");
    if (ignore == nullptr) {
        ALOGI("%s: no internal ignored device specified", __FUNCTION__);
        return ret;
    }

    // Collect the /dev/video node ids the provider must skip (internal cams).
    XMLElement* id = ignore->FirstChildElement("id");
    while (id != nullptr) {
        const char* text = id->GetText();
        if (text != nullptr) {
            ret.mInternalDevices.insert(text);
            ALOGI("%s: device %s will be ignored by external camera provider", __FUNCTION__, text);
        }
        id = id->NextSiblingElement("id");
    }

    XMLElement* deviceCfg = extCam->FirstChildElement("Device");
    if (deviceCfg == nullptr) {
        ALOGI("%s: no external camera device config specified", __FUNCTION__);
        return ret;
    }

    XMLElement* jpegBufSz = deviceCfg->FirstChildElement("MaxJpegBufferSize");
    if (jpegBufSz == nullptr) {
        ALOGI("%s: no max jpeg buffer size specified", __FUNCTION__);
    } else {
        ret.maxJpegBufSize = jpegBufSz->UnsignedAttribute("bytes", /*Default*/ kDefaultJpegBufSize);
    }

    XMLElement* numVideoBuf = deviceCfg->FirstChildElement("NumVideoBuffers");
    if (numVideoBuf == nullptr) {
        ALOGI("%s: no num video buffers specified", __FUNCTION__);
    } else {
        ret.numVideoBuffers =
                numVideoBuf->UnsignedAttribute("count", /*Default*/ kDefaultNumVideoBuffer);
    }

    XMLElement* numStillBuf = deviceCfg->FirstChildElement("NumStillBuffers");
    if (numStillBuf == nullptr) {
        ALOGI("%s: no num still buffers specified", __FUNCTION__);
    } else {
        ret.numStillBuffers =
                numStillBuf->UnsignedAttribute("count", /*Default*/ kDefaultNumStillBuffer);
    }

    XMLElement* fpsList = deviceCfg->FirstChildElement("FpsList");
    if (fpsList == nullptr) {
        ALOGI("%s: no fps list specified", __FUNCTION__);
    } else {
        if (!updateFpsList(fpsList, ret.fpsLimits)) {
            return ret;
        }
    }

    XMLElement* depth = deviceCfg->FirstChildElement("Depth16Supported");
    if (depth == nullptr) {
        ret.depthEnabled = false;
        ALOGI("%s: depth output is not enabled", __FUNCTION__);
    } else {
        ret.depthEnabled = depth->BoolAttribute("enabled", false);
    }

    if (ret.depthEnabled) {
        XMLElement* depthFpsList = deviceCfg->FirstChildElement("DepthFpsList");
        if (depthFpsList == nullptr) {
            ALOGW("%s: no depth fps list specified", __FUNCTION__);
        } else {
            if (!updateFpsList(depthFpsList, ret.depthFpsLimits)) {
                return ret;
            }
        }
    }

    XMLElement* minStreamSize = deviceCfg->FirstChildElement("MinimumStreamSize");
    if (minStreamSize == nullptr) {
        ALOGI("%s: no minimum stream size specified", __FUNCTION__);
    } else {
        ret.minStreamSize = {
                static_cast<int32_t>(minStreamSize->UnsignedAttribute("width", /*Default*/ 0)),
                static_cast<int32_t>(minStreamSize->UnsignedAttribute("height", /*Default*/ 0))};
    }

    XMLElement* orientation = deviceCfg->FirstChildElement("Orientation");
    if (orientation == nullptr) {
        ALOGI("%s: no sensor orientation specified", __FUNCTION__);
    } else {
        ret.orientation = orientation->IntAttribute("degree", /*Default*/ kDefaultOrientation);
    }

    ALOGI("%s: external camera cfg loaded: maxJpgBufSize %d,"
          " num video buffers %d, num still buffers %d, orientation %d",
          __FUNCTION__, ret.maxJpegBufSize, ret.numVideoBuffers, ret.numStillBuffers,
          ret.orientation);
    for (const auto& limit : ret.fpsLimits) {
        ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
              limit.fpsUpperBound);
    }
    for (const auto& limit : ret.depthFpsLimits) {
        ALOGI("%s: depthFpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height,
              limit.fpsUpperBound);
    }
    ALOGI("%s: minStreamSize: %dx%d", __FUNCTION__, ret.minStreamSize.width,
          ret.minStreamSize.height);
    return ret;
}
// Parses an <FpsList> element into fpsLimits. Each <Limit> row must have a
// strictly larger size and a strictly smaller fps bound than the previous
// row; on violation the function logs, leaves fpsLimits untouched, and
// returns false.
bool ExternalCameraConfig::updateFpsList(tinyxml2::XMLElement* fpsList,
                                         std::vector<FpsLimitation>& fpsLimits) {
    using namespace tinyxml2;
    std::vector<FpsLimitation> limits;
    // Track the previously accepted row ACROSS iterations. (Declaring this
    // inside the loop reset it every iteration, reducing the monotonicity
    // check below to a per-row comparison against the {0,0}@1000 sentinel —
    // the ordering the error message demands was never actually enforced.)
    FpsLimitation prevLimit{{0, 0}, 1000.0};
    XMLElement* row = fpsList->FirstChildElement("Limit");
    while (row != nullptr) {
        FpsLimitation limit = {
                {/* width */ static_cast<int32_t>(row->UnsignedAttribute("width", /*Default*/ 0)),
                 /* height */ static_cast<int32_t>(
                         row->UnsignedAttribute("height", /*Default*/ 0))},
                /* fpsUpperBound */ row->DoubleAttribute("fpsBound", /*Default*/ 1000.0)};
        if (limit.size.width <= prevLimit.size.width ||
            limit.size.height <= prevLimit.size.height ||
            limit.fpsUpperBound >= prevLimit.fpsUpperBound) {
            ALOGE("%s: FPS limit list must have increasing size and decreasing fps!"
                  " Prev %dx%d@%f, Current %dx%d@%f",
                  __FUNCTION__, prevLimit.size.width, prevLimit.size.height,
                  prevLimit.fpsUpperBound, limit.size.width, limit.size.height,
                  limit.fpsUpperBound);
            return false;
        }
        limits.push_back(limit);
        prevLimit = limit;
        row = row->NextSiblingElement("Limit");
    }
    fpsLimits = limits;
    return true;
}
// Builds the default configuration used when no XML overrides are present:
// a conservative three-entry fps limit list (larger resolutions capped at
// lower frame rates) and no minimum stream size.
ExternalCameraConfig::ExternalCameraConfig()
    : cameraIdOffset(kDefaultCameraIdOffset),
      maxJpegBufSize(kDefaultJpegBufSize),
      numVideoBuffers(kDefaultNumVideoBuffer),
      numStillBuffers(kDefaultNumStillBuffer),
      depthEnabled(false),
      orientation(kDefaultOrientation) {
    fpsLimits.push_back({/* size */ {/* width */ 640, /* height */ 480}, /* fpsUpperBound */ 30.0});
    fpsLimits.push_back({/* size */ {/* width */ 1280, /* height */ 720}, /* fpsUpperBound */ 7.5});
    fpsLimits.push_back(
            {/* size */ {/* width */ 1920, /* height */ 1080}, /* fpsUpperBound */ 5.0});
    minStreamSize = {0, 0};
}
} // namespace common
} // namespace external
namespace device {
namespace implementation {
// Frame interval is stored as a numerator/denominator fraction (seconds per
// frame), so frames-per-second is denominator/numerator.
double SupportedV4L2Format::FrameRate::getFramesPerSecond() const {
    return static_cast<double>(durationDenominator) / durationNumerator;
}

// Base class for a single frame of pixel data (dimensions + FourCC format).
Frame::Frame(uint32_t width, uint32_t height, uint32_t fourcc)
    : mWidth(width), mHeight(height), mFourcc(fourcc) {}

Frame::~Frame() {}

// A frame backed by a V4L2 driver buffer, mmap'd lazily on first data access.
V4L2Frame::V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize,
                     uint64_t offset)
    : Frame(w, h, fourcc), mBufferIndex(bufIdx), mFd(fd), mDataSize(dataSize), mOffset(offset) {}

// Ensure the mapping is released when the frame goes away.
V4L2Frame::~V4L2Frame() {
    unmap();
}

// Maps the buffer on first use and returns a pointer to its raw bytes.
int V4L2Frame::getData(uint8_t** outData, size_t* dataSize) {
    return map(outData, dataSize);
}
// mmap()s the V4L2 buffer read-only (once; subsequent calls reuse the
// existing mapping) and returns the pointer/size through the out-params.
// Thread-safe via mLock. Returns 0 on success, -EINVAL on bad args or mmap
// failure.
int V4L2Frame::map(uint8_t** data, size_t* dataSize) {
    if (data == nullptr || dataSize == nullptr) {
        // NOTE(review): logged at INFO although this is an error path.
        ALOGI("%s: V4L2 buffer map bad argument: data %p, dataSize %p", __FUNCTION__, data,
              dataSize);
        return -EINVAL;
    }

    std::lock_guard<std::mutex> lk(mLock);
    if (!mMapped) {
        void* addr = mmap(nullptr, mDataSize, PROT_READ, MAP_SHARED, mFd, mOffset);
        if (addr == MAP_FAILED) {
            ALOGE("%s: V4L2 buffer map failed: %s", __FUNCTION__, strerror(errno));
            return -EINVAL;
        }
        mData = static_cast<uint8_t*>(addr);
        mMapped = true;
    }
    *data = mData;
    *dataSize = mDataSize;
    ALOGV("%s: V4L map FD %d, data %p size %zu", __FUNCTION__, mFd, mData, mDataSize);
    return 0;
}
// Releases the mmap'd view of the V4L2 buffer, if one exists. Safe to call
// repeatedly; returns 0 on success (or nothing to do), -EINVAL on munmap
// failure.
int V4L2Frame::unmap() {
    std::lock_guard<std::mutex> guard(mLock);
    if (!mMapped) {
        return 0;  // Never mapped, or already unmapped.
    }
    ALOGV("%s: V4L unmap data %p size %zu", __FUNCTION__, mData, mDataSize);
    if (munmap(mData, mDataSize) != 0) {
        ALOGE("%s: V4L2 buffer unmap failed: %s", __FUNCTION__, strerror(errno));
        return -EINVAL;
    }
    mMapped = false;
    return 0;
}
// A heap-backed YUV420 (YU12) frame; the pixel storage is allocated lazily
// on first access (see allocate()).
AllocatedFrame::AllocatedFrame(uint32_t w, uint32_t h) : Frame(w, h, V4L2_PIX_FMT_YUV420) {}

AllocatedFrame::~AllocatedFrame() {}
int AllocatedFrame::getData(uint8_t** outData, size_t* dataSize) {
YCbCrLayout layout;
int ret = allocate(&layout);
if (ret != 0) {
return ret;
}
*outData = mData.data();
*dataSize = mBufferSize;
return 0;
}
// Ensures mData holds a YUV420 buffer for mWidth x mHeight (with jpeglib-safe
// padding, see below) and, when out is non-null, fills it with the planar
// YU12 layout. Dimensions must be even. Returns 0 on success, -EINVAL on a
// bad dimension.
int AllocatedFrame::allocate(YCbCrLayout* out) {
    std::lock_guard<std::mutex> lk(mLock);
    if ((mWidth % 2) || (mHeight % 2)) {
        ALOGE("%s: bad dimension %dx%d (not multiple of 2)", __FUNCTION__, mWidth, mHeight);
        return -EINVAL;
    }

    // This frame might be sent to jpeglib to be encoded. Since AllocatedFrame only contains YUV420,
    // jpeglib expects height and width of Y component to be an integral multiple of 2*DCTSIZE,
    // and heights and widths of Cb and Cr components to be an integral multiple of DCTSIZE. If the
    // image size does not meet this requirement, libjpeg expects its input to be padded to meet the
    // constraints. This padding is removed from the final encoded image so the content in the
    // padding doesn't matter. What matters is that the memory is accessible to jpeglib at the time
    // of encoding.
    // For example, if the image size is 1500x844 and DCTSIZE is 8, jpeglib expects a YUV 420
    // frame with components of following sizes:
    //   Y:      1504x848 because 1504 and 848 are the next smallest multiples of 2*8
    //   Cb/Cr:  752x424 which are the next smallest multiples of 8

    // jpeglib takes an array of row pointers which makes vertical padding trivial when setting up
    // the pointers. Padding horizontally is a bit more complicated. AllocatedFrame holds the data
    // in a flattened buffer, which means memory accesses past a row will flow into the next logical
    // row. For any row of a component, we can consider the first few bytes of the next row as
    // padding for the current one. This is true for Y and Cb components and all but last row of the
    // Cr component. Reading past the last row of Cr component will lead to undefined behavior as
    // libjpeg attempts to read memory past the allocated buffer. To prevent undefined behavior,
    // the buffer allocated here is padded such that libjpeg never accesses unallocated memory when
    // reading the last row. Effectively, we only need to ensure that the last row of Cr component
    // has width that is an integral multiple of DCTSIZE.
    size_t dataSize = mWidth * mHeight * 3 / 2;  // YUV420

    size_t cbWidth = mWidth / 2;
    size_t requiredCbWidth = DCTSIZE * ((cbWidth + DCTSIZE - 1) / DCTSIZE);
    size_t padding = requiredCbWidth - cbWidth;
    size_t finalSize = dataSize + padding;

    if (mData.size() != finalSize) {
        mData.resize(finalSize);
        // mBufferSize stays the logical YUV420 size; the padding is invisible
        // to getData() consumers.
        mBufferSize = dataSize;
    }

    if (out != nullptr) {
        out->y = mData.data();
        out->yStride = mWidth;
        uint8_t* cbStart = mData.data() + mWidth * mHeight;
        uint8_t* crStart = cbStart + mWidth * mHeight / 4;
        out->cb = cbStart;
        out->cr = crStart;
        out->cStride = mWidth / 2;
        out->chromaStep = 1;
    }
    return 0;
}
int AllocatedFrame::getLayout(YCbCrLayout* out) {
    // The full-frame layout is simply a cropped layout whose crop rectangle
    // covers the entire frame.
    IMapper::Rect fullFrame = {0, 0, static_cast<int32_t>(mWidth), static_cast<int32_t>(mHeight)};
    return getCroppedLayout(fullFrame, out);
}
// Fills |out| with the plane layout of the sub-rectangle |rect| of this frame.
// The rectangle must lie fully inside the frame and have even left/top/width/
// height (chroma is subsampled 2x2, so odd values cannot be addressed).
// Returns 0 on success, -1 for a null |out| or a bad rectangle.
int AllocatedFrame::getCroppedLayout(const IMapper::Rect& rect, YCbCrLayout* out) {
    if (out == nullptr) {
        ALOGE("%s: null out", __FUNCTION__);
        return -1;
    }

    std::lock_guard<std::mutex> lk(mLock);
    if ((rect.left + rect.width) > static_cast<int>(mWidth) ||
        (rect.top + rect.height) > static_cast<int>(mHeight) || (rect.left % 2) ||
        (rect.top % 2) || (rect.width % 2) || (rect.height % 2)) {
        ALOGE("%s: bad rect left %d top %d w %d h %d", __FUNCTION__, rect.left, rect.top,
              rect.width, rect.height);
        return -1;
    }

    out->y = mData.data() + mWidth * rect.top + rect.left;
    out->yStride = mWidth;
    uint8_t* cbStart = mData.data() + mWidth * mHeight;
    uint8_t* crStart = cbStart + mWidth * mHeight / 4;
    // Chroma offset: row (rect.top / 2) times stride (mWidth / 2) equals
    // mWidth * rect.top / 4; column offset is rect.left / 2.
    out->cb = cbStart + mWidth * rect.top / 4 + rect.left / 2;
    out->cr = crStart + mWidth * rect.top / 4 + rect.left / 2;
    out->cStride = mWidth / 2;
    out->chromaStep = 1;
    return 0;
}
// Returns true when the two aspect ratios differ by less than a small fixed
// threshold. The threshold is chosen to tell apart the common sensor ratios
// 4:3 (~1.33), 16:9 (~1.78) and 20:9 (~2) without tripping on rounding noise.
bool isAspectRatioClose(float ar1, float ar2) {
    constexpr float kAspectRatioMatchThres = 0.025f;
    const float delta = (ar1 > ar2) ? (ar1 - ar2) : (ar2 - ar1);
    return delta < kAspectRatioMatchThres;
}
// Looks up (and on first sight, imports and caches) the buffer identified by
// (streamId, bufId) in |circulatingBuffers|, returning a pointer to the cached
// handle through |outBufPtr|. |buf| may be null for a buffer that was already
// imported earlier; a null |buf| together with BUFFER_ID_NO_BUFFER is rejected.
// Returns Status::OK on success, ILLEGAL_ARGUMENT/INTERNAL_ERROR otherwise.
aidl::android::hardware::camera::common::Status importBufferImpl(
        /*inout*/ std::map<int, CirculatingBuffers>& circulatingBuffers,
        /*inout*/ HandleImporter& handleImporter, int32_t streamId, uint64_t bufId,
        buffer_handle_t buf,
        /*out*/ buffer_handle_t** outBufPtr) {
    using ::aidl::android::hardware::camera::common::Status;
    if (buf == nullptr && bufId == BUFFER_ID_NO_BUFFER) {
        ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
        return Status::ILLEGAL_ARGUMENT;
    }

    // operator[] creates the per-stream map on first use.
    CirculatingBuffers& cbs = circulatingBuffers[streamId];
    if (cbs.count(bufId) == 0) {
        if (buf == nullptr) {
            ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
            return Status::ILLEGAL_ARGUMENT;
        }
        // Register a newly seen buffer. importBuffer() replaces the handle
        // in-place; a null result means the import failed.
        buffer_handle_t importedBuf = buf;
        handleImporter.importBuffer(importedBuf);
        if (importedBuf == nullptr) {
            ALOGE("%s: output buffer for stream %d is invalid!", __FUNCTION__, streamId);
            return Status::INTERNAL_ERROR;
        } else {
            cbs[bufId] = importedBuf;
        }
    }
    // Point the caller at the cached entry (stable for unordered_map values).
    *outBufPtr = &cbs[bufId];
    return Status::OK;
}
// Classifies a YUV420 plane layout by how its chroma planes are arranged and
// returns the matching V4L2 four-character code, or FLEX_YUV_GENERIC when the
// layout matches none of NV12/NV21/YV12/YU12.
uint32_t getFourCcFromLayout(const YCbCrLayout& layout) {
    const intptr_t cbAddr = reinterpret_cast<intptr_t>(layout.cb);
    const intptr_t crAddr = reinterpret_cast<intptr_t>(layout.cr);
    if (layout.chromaStep == 2 && std::abs(cbAddr - crAddr) == 1) {
        // Semi-planar: Cb and Cr interleaved in one plane. Which sample comes
        // first decides NV21 (VU) vs NV12 (UV).
        return (layout.cb > layout.cr) ? V4L2_PIX_FMT_NV21 : V4L2_PIX_FMT_NV12;
    }
    if (layout.chromaStep == 1) {
        // Fully planar. Plane order decides YV12 (Cr first) vs YU12 (Cb first).
        return (layout.cb > layout.cr) ? V4L2_PIX_FMT_YVU420 : V4L2_PIX_FMT_YUV420;
    }
    return FLEX_YUV_GENERIC;
}
// Computes the centered crop rectangle of |inSize| that matches the aspect
// ratio of |outSize|, cropping vertically or horizontally per |ct|. All edges
// are forced to even values. Returns 0 on success, -1 when |out| is null or
// the requested output cannot be cropped from the input in that direction.
int getCropRect(CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) {
    if (out == nullptr) {
        ALOGE("%s: out is null", __FUNCTION__);
        return -1;
    }

    uint32_t inW = inSize.width;
    uint32_t inH = inSize.height;
    uint32_t outW = outSize.width;
    uint32_t outH = outSize.height;

    // Handle special case where aspect ratio is close to input but scaled
    // dimension is slightly larger than input
    float arIn = ASPECT_RATIO(inSize);
    float arOut = ASPECT_RATIO(outSize);
    if (isAspectRatioClose(arIn, arOut)) {
        out->left = 0;
        out->top = 0;
        out->width = static_cast<int32_t>(inW);
        out->height = static_cast<int32_t>(inH);
        return 0;
    }

    if (ct == VERTICAL) {
        // Keep full input width; compute the input height that yields the
        // output aspect ratio (64-bit math avoids overflow in the product).
        uint64_t scaledOutH = static_cast<uint64_t>(outH) * inW / outW;
        if (scaledOutH > inH) {
            ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d",
                  __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutH = scaledOutH & ~0x1;  // make it multiple of 2
        out->left = 0;
        out->top = static_cast<int32_t>((inH - scaledOutH) / 2) & ~0x1;
        out->width = static_cast<int32_t>(inW);
        out->height = static_cast<int32_t>(scaledOutH);
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", __FUNCTION__, inW, inH, outW, outH,
              out->top, static_cast<int32_t>(scaledOutH));
    } else {
        // HORIZONTAL: keep full input height; compute the matching width.
        uint64_t scaledOutW = static_cast<uint64_t>(outW) * inH / outH;
        if (scaledOutW > inW) {
            ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d",
                  __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutW = scaledOutW & ~0x1;  // make it multiple of 2
        out->left = static_cast<int32_t>((inW - scaledOutW) / 2) & ~0x1;
        out->top = 0;
        out->width = static_cast<int32_t>(scaledOutW);
        out->height = static_cast<int32_t>(inH);
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d", __FUNCTION__, inW, inH, outW, outH,
              out->top, static_cast<int32_t>(scaledOutW));
    }

    return 0;
}
// Converts a planar I420 (YU12) input layout into the output layout whose
// four-cc is |format| (as classified by getFourCcFromLayout). Both layouts
// must describe a |sz|-sized image. Returns 0 on success, non-zero on
// conversion failure or an unsupported output format.
int formatConvert(const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) {
    int ret = 0;
    switch (format) {
        case V4L2_PIX_FMT_NV21:
            // NOTE: libyuv's I420ToNV21 writes the interleaved VU plane through
            // the |out.cr| pointer.
            ret = libyuv::I420ToNV21(
                    static_cast<uint8_t*>(in.y), static_cast<int32_t>(in.yStride),
                    static_cast<uint8_t*>(in.cb), static_cast<int32_t>(in.cStride),
                    static_cast<uint8_t*>(in.cr), static_cast<int32_t>(in.cStride),
                    static_cast<uint8_t*>(out.y), static_cast<int32_t>(out.yStride),
                    static_cast<uint8_t*>(out.cr), static_cast<int32_t>(out.cStride),
                    static_cast<int32_t>(sz.width), static_cast<int32_t>(sz.height));
            if (ret != 0) {
                ALOGE("%s: convert to NV21 buffer failed! ret %d", __FUNCTION__, ret);
                return ret;
            }
            break;
        case V4L2_PIX_FMT_NV12:
            // The interleaved UV plane is written through |out.cb|.
            ret = libyuv::I420ToNV12(
                    static_cast<uint8_t*>(in.y), static_cast<int32_t>(in.yStride),
                    static_cast<uint8_t*>(in.cb), static_cast<int32_t>(in.cStride),
                    static_cast<uint8_t*>(in.cr), static_cast<int32_t>(in.cStride),
                    static_cast<uint8_t*>(out.y), static_cast<int32_t>(out.yStride),
                    static_cast<uint8_t*>(out.cb), static_cast<int32_t>(out.cStride),
                    static_cast<int32_t>(sz.width), static_cast<int32_t>(sz.height));
            if (ret != 0) {
                ALOGE("%s: convert to NV12 buffer failed! ret %d", __FUNCTION__, ret);
                return ret;
            }
            break;
        case V4L2_PIX_FMT_YVU420:  // YV12
        case V4L2_PIX_FMT_YUV420:  // YU12
            // Plane order (YV12 vs YU12) is already encoded in the out layout's
            // cb/cr pointers, so a plane-wise copy handles both.
            // TODO: maybe we can speed up here by somehow save this copy?
            ret = libyuv::I420Copy(static_cast<uint8_t*>(in.y), static_cast<int32_t>(in.yStride),
                                   static_cast<uint8_t*>(in.cb), static_cast<int32_t>(in.cStride),
                                   static_cast<uint8_t*>(in.cr), static_cast<int32_t>(in.cStride),
                                   static_cast<uint8_t*>(out.y), static_cast<int32_t>(out.yStride),
                                   static_cast<uint8_t*>(out.cb), static_cast<int32_t>(out.cStride),
                                   static_cast<uint8_t*>(out.cr), static_cast<int32_t>(out.cStride),
                                   static_cast<int32_t>(sz.width), static_cast<int32_t>(sz.height));
            if (ret != 0) {
                ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", __FUNCTION__, ret);
                return ret;
            }
            break;
        case FLEX_YUV_GENERIC:
            // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow.
            ALOGE("%s: unsupported flexible yuv layout"
                  " y %p cb %p cr %p y_str %d c_str %d c_step %d",
                  __FUNCTION__, out.y, out.cb, out.cr, out.yStride, out.cStride, out.chromaStep);
            return -1;
        default:
            ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format);
            return -1;
    }
    return 0;
}
// Encodes a YU12 frame of size |inSz| described by |inLayout| into the JPEG
// output buffer |out| (capacity |maxOutSize|) at |jpegQuality|, optionally
// embedding |app1Buffer| (EXIF) as an APP1 marker. On success returns 0 and
// sets |actualCodeSize| to the number of bytes written; returns -1 when
// jpeg_write_raw_data falls short.
int encodeJpegYU12(const Size& inSz, const YCbCrLayout& inLayout, int jpegQuality,
                   const void* app1Buffer, size_t app1Size, void* out, size_t maxOutSize,
                   size_t& actualCodeSize) {
    /* libjpeg is a C library so we use C-style "inheritance" by
     * putting libjpeg's jpeg_destination_mgr first in our custom
     * struct. This allows us to cast jpeg_destination_mgr* to
     * CustomJpegDestMgr* when we get it passed to us in a callback */
    struct CustomJpegDestMgr {
        struct jpeg_destination_mgr mgr;
        JOCTET* mBuffer;
        size_t mBufferSize;
        size_t mEncodedSize;
        bool mSuccess;
    } dmgr;

    jpeg_compress_struct cinfo = {};
    jpeg_error_mgr jerr;

    /* Initialize error handling with standard callbacks, but
     * then override output_message (to print to ALOG) and
     * error_exit to set a flag and print a message instead
     * of killing the whole process.
     * NOTE(review): libjpeg documents that error_exit must not return to the
     * caller (the default longjmps); this override returns, so libjpeg may
     * continue after a fatal error with mSuccess merely flagged false —
     * confirm this is the intended trade-off. */
    cinfo.err = jpeg_std_error(&jerr);
    cinfo.err->output_message = [](j_common_ptr cinfo) {
        char buffer[JMSG_LENGTH_MAX];

        /* Create the message */
        (*cinfo->err->format_message)(cinfo, buffer);
        ALOGE("libjpeg error: %s", buffer);
    };

    cinfo.err->error_exit = [](j_common_ptr cinfo) {
        (*cinfo->err->output_message)(cinfo);
        if (cinfo->client_data) {
            auto& dmgr = *reinterpret_cast<CustomJpegDestMgr*>(cinfo->client_data);
            dmgr.mSuccess = false;
        }
    };

    /* Now that we initialized some callbacks, let's create our compressor */
    jpeg_create_compress(&cinfo);

    /* Initialize our destination manager */
    dmgr.mBuffer = static_cast<JOCTET*>(out);
    dmgr.mBufferSize = maxOutSize;
    dmgr.mEncodedSize = 0;
    dmgr.mSuccess = true;
    cinfo.client_data = static_cast<void*>(&dmgr);

    /* These lambdas become C-style function pointers and as per C++11 spec
     * may not capture anything */
    dmgr.mgr.init_destination = [](j_compress_ptr cinfo) {
        auto& dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mgr.next_output_byte = dmgr.mBuffer;
        dmgr.mgr.free_in_buffer = dmgr.mBufferSize;
        ALOGV("%s:%d jpeg start: %p [%zu]", __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize);
    };

    /* Returning FALSE (0) signals libjpeg we cannot provide more output space. */
    dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) {
        ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__);
        return 0;
    };

    dmgr.mgr.term_destination = [](j_compress_ptr cinfo) {
        auto& dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer;
        ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize);
    };
    cinfo.dest = reinterpret_cast<struct jpeg_destination_mgr*>(&dmgr);

    /* We are going to be using JPEG in raw data mode, so we are passing
     * straight subsampled planar YCbCr and it will not touch our pixel
     * data or do any scaling or anything */
    cinfo.image_width = inSz.width;
    cinfo.image_height = inSz.height;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_YCbCr;

    /* Initialize defaults and then override what we want */
    jpeg_set_defaults(&cinfo);
    jpeg_set_quality(&cinfo, jpegQuality, 1);
    jpeg_set_colorspace(&cinfo, JCS_YCbCr);
    cinfo.raw_data_in = 1;
    cinfo.dct_method = JDCT_IFAST;

    /* Configure sampling factors. The sampling factor is JPEG subsampling 420
     * because the source format is YUV420. Note that libjpeg sampling factors
     * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and
     * 1 V value for each 2 Y values */
    cinfo.comp_info[0].h_samp_factor = 2;
    cinfo.comp_info[0].v_samp_factor = 2;
    cinfo.comp_info[1].h_samp_factor = 1;
    cinfo.comp_info[1].v_samp_factor = 1;
    cinfo.comp_info[2].h_samp_factor = 1;
    cinfo.comp_info[2].v_samp_factor = 1;

    /* Start the compressor */
    jpeg_start_compress(&cinfo, TRUE);

    /* Let's not hardcode YUV420 in 6 places... 5 was enough */
    int maxVSampFactor = cinfo.max_v_samp_factor;
    int cVSubSampling = cinfo.comp_info[0].v_samp_factor / cinfo.comp_info[1].v_samp_factor;

    /* Compute our macroblock height, so we can pad our input to be vertically
     * macroblock aligned. No need to for horizontal alignment since AllocatedFrame already
     * pads horizontally */
    size_t mcuV = DCTSIZE * maxVSampFactor;
    size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV);

    /* libjpeg uses arrays of row pointers, which makes it really easy to pad
     * data vertically (unfortunately doesn't help horizontally) */
    std::vector<JSAMPROW> yLines(paddedHeight);
    std::vector<JSAMPROW> cbLines(paddedHeight / cVSubSampling);
    std::vector<JSAMPROW> crLines(paddedHeight / cVSubSampling);

    uint8_t* py = static_cast<uint8_t*>(inLayout.y);
    uint8_t* pcb = static_cast<uint8_t*>(inLayout.cb);
    uint8_t* pcr = static_cast<uint8_t*>(inLayout.cr);

    for (int32_t i = 0; i < paddedHeight; i++) {
        /* Once we are in the padding territory we still point to the last line
         * effectively replicating it several times ~ CLAMP_TO_EDGE */
        int li = std::min(i, inSz.height - 1);
        yLines[i] = static_cast<JSAMPROW>(py + li * inLayout.yStride);
        if (i < paddedHeight / cVSubSampling) {
            li = std::min(i, (inSz.height - 1) / cVSubSampling);
            cbLines[i] = static_cast<JSAMPROW>(pcb + li * inLayout.cStride);
            crLines[i] = static_cast<JSAMPROW>(pcr + li * inLayout.cStride);
        }
    }

    /* If APP1 data was passed in, use it */
    if (app1Buffer && app1Size) {
        jpeg_write_marker(&cinfo, JPEG_APP0 + 1, static_cast<const JOCTET*>(app1Buffer), app1Size);
    }

    /* While we still have padded height left to go, keep giving it one
     * macroblock at a time. */
    while (cinfo.next_scanline < cinfo.image_height) {
        const uint32_t batchSize = DCTSIZE * maxVSampFactor;
        const uint32_t nl = cinfo.next_scanline;
        JSAMPARRAY planes[3]{&yLines[nl], &cbLines[nl / cVSubSampling],
                             &crLines[nl / cVSubSampling]};

        uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize);
        if (done != batchSize) {
            ALOGE("%s: compressed %u lines, expected %u (total %u/%u)", __FUNCTION__, done,
                  batchSize, cinfo.next_scanline, cinfo.image_height);
            return -1;
        }
    }

    /* This will flush everything */
    jpeg_finish_compress(&cinfo);

    /* Grab the actual code size and set it */
    actualCodeSize = dmgr.mEncodedSize;

    return 0;
}
// Returns the largest (by pixel count) thumbnail size advertised in
// ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES of |chars|, or {0, 0} (with a
// warning logged) when no non-zero size is listed.
Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata& chars) {
    Size thumbSize{0, 0};
    camera_metadata_ro_entry entry = chars.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
    // Entries come in (width, height) pairs. Require a full pair (i + 1 in
    // range) so a malformed odd-length entry cannot cause a read past the
    // end of the array.
    for (uint32_t i = 0; i + 1 < entry.count; i += 2) {
        Size sz{.width = entry.data.i32[i], .height = entry.data.i32[i + 1]};
        if (sz.width * sz.height > thumbSize.width * thumbSize.height) {
            thumbSize = sz;
        }
    }

    if (thumbSize.width * thumbSize.height == 0) {
        ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__);
    }

    return thumbSize;
}
// Closes and frees every release fence handle (input buffer and all output
// buffers) contained in |results|.
// NOTE(review): makeFromAidl presumably materializes a native_handle_t
// referring to the AIDL fence's fds without duplicating them — confirm, since
// native_handle_close() closes those fds here.
void freeReleaseFences(std::vector<CaptureResult>& results) {
    for (auto& result : results) {
        native_handle_t* inputReleaseFence =
                ::android::makeFromAidl(result.inputBuffer.releaseFence);
        if (inputReleaseFence != nullptr) {
            native_handle_close(inputReleaseFence);
            native_handle_delete(inputReleaseFence);
        }
        for (auto& buf : result.outputBuffers) {
            native_handle_t* outReleaseFence = ::android::makeFromAidl(buf.releaseFence);
            if (outReleaseFence != nullptr) {
                native_handle_close(outReleaseFence);
                native_handle_delete(outReleaseFence);
            }
        }
    }
}
#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

// Updates |md| with (tag, data) and, on failure, logs and returns BAD_VALUE
// from the enclosing function. Only usable inside functions returning status_t.
#define UPDATE(md, tag, data, size)               \
    do {                                          \
        if ((md).update((tag), (data), (size))) { \
            ALOGE("Update " #tag " failed!");     \
            return BAD_VALUE;                     \
        }                                         \
    } while (0)

// Fills |md| with the capture-result tags common to every frame produced by
// this HAL (3A state, flash, pipeline depth, crop region, sensor timestamp,
// statistics). |activeArraySize| must hold at least the 4 ints of the active
// array rect. Returns OK, -EINVAL for a short entry, or BAD_VALUE when a
// metadata update fails.
status_t fillCaptureResultCommon(CameraMetadata& md, nsecs_t timestamp,
                                 camera_metadata_ro_entry& activeArraySize) {
    if (activeArraySize.count < 4) {
        ALOGE("%s: cannot find active array size!", __FUNCTION__);
        return -EINVAL;
    }

    // android.control
    // For USB camera, we don't know the AE state. Set the state to converged to
    // indicate the frame should be good to use. Then apps don't have to wait the
    // AE state.
    const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1);

    const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);

    // Set AWB state to converged to indicate the frame should be good to use.
    const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1);

    const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
    UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1);

    // This means pipeline latency of X frame intervals. The maximum number is 4.
    const uint8_t requestPipelineMaxDepth = 4;
    UPDATE(md, ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1);

    // android.scaler: report the full active array as the crop region.
    const int32_t crop_region[] = {
            activeArraySize.data.i32[0],
            activeArraySize.data.i32[1],
            activeArraySize.data.i32[2],
            activeArraySize.data.i32[3],
    };
    UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region));

    // android.sensor
    UPDATE(md, ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);

    // android.statistics
    const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);

    const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
    UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1);

    return OK;
}

#undef ARRAY_SIZE
#undef UPDATE
// Takes a CPU copy of |frameIn|'s bytes (mapping it via getData if needed).
// On mapping failure an error is logged and mData stays empty; a later
// getData() on this object will then report a zero-sized buffer.
AllocatedV4L2Frame::AllocatedV4L2Frame(std::shared_ptr<V4L2Frame> frameIn)
    : Frame(frameIn->mWidth, frameIn->mHeight, frameIn->mFourcc) {
    uint8_t* dataIn;
    size_t dataSize;
    if (frameIn->getData(&dataIn, &dataSize) != 0) {
        ALOGE("%s: map input V4L2 frame failed!", __FUNCTION__);
        return;
    }

    mData.resize(dataSize);
    std::memcpy(mData.data(), dataIn, dataSize);
}

AllocatedV4L2Frame::~AllocatedV4L2Frame() {}
int AllocatedV4L2Frame::getData(uint8_t** outData, size_t* dataSize) {
    // Both output parameters are mandatory; no mapping is needed because the
    // frame data is already an owned CPU copy.
    if (!(outData != nullptr && dataSize != nullptr)) {
        ALOGE("%s: outData(%p)/dataSize(%p) must not be null", __FUNCTION__, outData, dataSize);
        return -1;
    }

    *dataSize = mData.size();
    *outData = mData.data();
    return 0;
}
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android

View file

@ -0,0 +1,300 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAUTILS_H_
#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAUTILS_H_
#include <CameraMetadata.h>
#include <HandleImporter.h>
#include <aidl/android/hardware/camera/common/Status.h>
#include <aidl/android/hardware/camera/device/CaptureResult.h>
#include <aidl/android/hardware/camera/device/ErrorCode.h>
#include <aidl/android/hardware/camera/device/NotifyMsg.h>
#include <aidl/android/hardware/graphics/common/BufferUsage.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <tinyxml2.h>
#include <unordered_map>
#include <unordered_set>
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::CaptureResult;
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using ::android::hardware::camera::common::V1_0::helper::HandleImporter;
namespace android {
namespace hardware {
namespace camera {
namespace external {
namespace common {
// Simple width/height pair used throughout the external camera HAL to
// describe stream and frame dimensions.
struct Size {
    int32_t width;
    int32_t height;

    bool operator==(const Size& other) const {
        return (width == other.width && height == other.height);
    }
};
// Hash functor so Size can key unordered containers. Classic 31-based
// polynomial hash over (width, height).
struct SizeHasher {
    size_t operator()(const Size& sz) const {
        // Equivalent to seeding with 1 and folding in width then height:
        // ((1 * 31) + width) * 31 + height.
        const size_t widthTerm = 31u * (31u + static_cast<size_t>(sz.width));
        return widthTerm + static_cast<size_t>(sz.height);
    }
};
// Tunable configuration of the external camera HAL, loaded from an XML file
// (see loadFromCfg). Instances are only created through loadFromCfg(), which
// falls back to built-in defaults (via the private constructor).
struct ExternalCameraConfig {
    static const char* kDefaultCfgPath;
    static ExternalCameraConfig loadFromCfg(const char* cfgPath = kDefaultCfgPath);

    // CameraId base offset for numerical representation
    uint32_t cameraIdOffset;

    // List of internal V4L2 video nodes external camera HAL must ignore.
    std::unordered_set<std::string> mInternalDevices;

    // Maximal size of a JPEG buffer, in bytes
    int32_t maxJpegBufSize;

    // Maximum Size that can sustain 30fps streaming
    Size maxVideoSize;

    // Size of v4l2 buffer queue when streaming <= kMaxVideoSize
    uint32_t numVideoBuffers;

    // Size of v4l2 buffer queue when streaming > kMaxVideoSize
    uint32_t numStillBuffers;

    // Indication that the device connected supports depth output
    bool depthEnabled;

    // An upper FPS bound that applies to all sizes up to (and including)
    // |size|.
    struct FpsLimitation {
        Size size;
        double fpsUpperBound;
    };
    std::vector<FpsLimitation> fpsLimits;
    std::vector<FpsLimitation> depthFpsLimits;

    // Minimum output stream size
    Size minStreamSize;

    // The value of android.sensor.orientation
    int32_t orientation;

  private:
    ExternalCameraConfig();
    // Parses an XML fps-limit list into |fpsLimits|; returns false on malformed input.
    static bool updateFpsList(tinyxml2::XMLElement* fpsList, std::vector<FpsLimitation>& fpsLimits);
};
} // namespace common
} // namespace external
namespace device {
namespace implementation {
// One V4L2 (width, height, fourcc) combination reported by the device,
// together with every frame rate it supports.
struct SupportedV4L2Format {
    int32_t width;
    int32_t height;
    uint32_t fourcc;
    // All supported frame rate for this w/h/fourcc combination
    struct FrameRate {
        // Duration (in seconds) of a single frame.
        // Numerator and denominator of the frame duration are stored separately.
        // For ex. a frame lasting 1/30 of a second will be stored as {1, 30}
        uint32_t durationNumerator;         // frame duration numerator.   Ex: 1
        uint32_t durationDenominator;       // frame duration denominator. Ex: 30
        double getFramesPerSecond() const;  // FPS as double.              Ex: 30.0
    };
    std::vector<FrameRate> frameRates;
};
// A Base class with basic information about a frame. Concrete subclasses
// decide where the pixel data lives (V4L2-mapped or CPU-allocated).
// NOTE(review): the constructor takes uint32_t while the members are int32_t,
// so values above INT32_MAX would wrap — confirm upstream sizes stay small.
struct Frame : public std::enable_shared_from_this<Frame> {
  public:
    Frame(uint32_t width, uint32_t height, uint32_t fourcc);
    virtual ~Frame();
    const int32_t mWidth;
    const int32_t mHeight;
    const uint32_t mFourcc;

    // getData might involve map/allocation
    virtual int getData(uint8_t** outData, size_t* dataSize) = 0;
};
// A class provide access to a dequeued V4L2 frame buffer (mostly in MJPG format)
// Also contains necessary information to enqueue the buffer back to V4L2 buffer queue
class V4L2Frame : public Frame {
  public:
    V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize,
              uint64_t offset);
    virtual ~V4L2Frame();

    virtual int getData(uint8_t** outData, size_t* dataSize) override;

    const int mBufferIndex;  // for later enqueue

    // Maps (respectively unmaps) the underlying buffer into this process.
    int map(uint8_t** data, size_t* dataSize);
    int unmap();

  private:
    std::mutex mLock;              // guards the map/unmap state below
    const int mFd;                 // used for mmap but doesn't claim ownership
    const size_t mDataSize;
    const uint64_t mOffset;        // used for mmap
    uint8_t* mData = nullptr;      // set while mapped
    bool mMapped = false;
};
// A RAII class representing a CPU allocated YUV frame used as intermediate buffers
// when generating output images.
class AllocatedFrame : public Frame {
  public:
    AllocatedFrame(uint32_t w, uint32_t h);  // only support V4L2_PIX_FMT_YUV420 for now
    ~AllocatedFrame() override;

    virtual int getData(uint8_t** outData, size_t* dataSize) override;

    // Lazily allocates the backing buffer; optionally returns the plane layout.
    int allocate(YCbCrLayout* out = nullptr);
    int getLayout(YCbCrLayout* out);
    int getCroppedLayout(const IMapper::Rect&, YCbCrLayout* out);  // return non-zero for bad input

  private:
    std::mutex mLock;
    std::vector<uint8_t> mData;
    size_t mBufferSize;  // size of mData before padding. Actual size of mData might be slightly
                         // bigger to horizontally pad the frame for jpeglib.
};
// Direction in which a source frame must be cropped to reach a target aspect
// ratio (see getCropRect()).
enum CroppingType { HORIZONTAL = 0, VERTICAL = 1 };

// Aspect ratio is defined as width/height here and ExternalCameraDevice
// will guarantee all supported sizes has width >= height (so aspect ratio >= 1.0)
#define ASPECT_RATIO(sz) (static_cast<float>((sz).width) / (sz).height)
const float kMaxAspectRatio = std::numeric_limits<float>::max();
const float kMinAspectRatio = 1.f;

// True when the two ratios differ by less than a small fixed threshold.
bool isAspectRatioClose(float ar1, float ar2);
// One output buffer of a capture request, together with the stream metadata
// needed to render into it.
struct HalStreamBuffer {
    int32_t streamId;
    int64_t bufferId;
    int32_t width;
    int32_t height;
    ::aidl::android::hardware::graphics::common::PixelFormat format;
    ::aidl::android::hardware::graphics::common::BufferUsage usage;
    buffer_handle_t* bufPtr;
    int acquireFence;   // presumably a sync fence fd — confirm with the users of this struct
    bool fenceTimeout;  // set when waiting on the fence timed out
};
// An in-flight capture request as tracked by the HAL: the request settings,
// the captured input frame, its shutter timestamp, and the output buffers
// still to be filled.
struct HalRequest {
    int32_t frameNumber;
    common::V1_0::helper::CameraMetadata setting;
    std::shared_ptr<Frame> frameIn;
    nsecs_t shutterTs;
    std::vector<HalStreamBuffer> buffers;
};
// Sentinel bufferId meaning "no buffer attached".
static const uint64_t BUFFER_ID_NO_BUFFER = 0;

// buffers currently circulating between HAL and camera service
// key: bufferId sent via the AIDL interface
// value: imported buffer_handle_t
// Buffer will be imported during processCaptureRequest (or requestStreamBuffer
// in the case of HAL buffer manager is enabled) and will be freed
// when the stream is deleted or camera device session is closed
typedef std::unordered_map<uint64_t, buffer_handle_t> CirculatingBuffers;

// Imports (and caches) the buffer identified by (streamId, bufId); see the
// definition in ExternalCameraUtils.cpp for the full contract.
aidl::android::hardware::camera::common::Status importBufferImpl(
        /*inout*/ std::map<int, CirculatingBuffers>& circulatingBuffers,
        /*inout*/ HandleImporter& handleImporter, int32_t streamId, uint64_t bufId,
        buffer_handle_t buf,
        /*out*/ buffer_handle_t** outBufPtr);

// Synthetic fourcc 'FLEX' used for flexible YUV layouts that match none of
// the standard V4L2 420 formats.
static const uint32_t FLEX_YUV_GENERIC =
        static_cast<uint32_t>('F') | static_cast<uint32_t>('L') << 8 |
        static_cast<uint32_t>('E') << 16 | static_cast<uint32_t>('X') << 24;

// returns FLEX_YUV_GENERIC for formats other than YV12/YU12/NV12/NV21
uint32_t getFourCcFromLayout(const YCbCrLayout&);

using ::android::hardware::camera::external::common::Size;
// Computes the centered crop of inSize matching outSize's aspect ratio.
int getCropRect(CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out);

// Converts/copies an I420 input layout into the given output format.
int formatConvert(const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format);

// Encodes a YU12 frame to JPEG, optionally embedding app1Buffer as APP1 (EXIF).
int encodeJpegYU12(const Size& inSz, const YCbCrLayout& inLayout, int jpegQuality,
                   const void* app1Buffer, size_t app1Size, void* out, size_t maxOutSize,
                   size_t& actualCodeSize);

Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata&);

void freeReleaseFences(std::vector<CaptureResult>&);

// Fills the capture-result tags common to every frame (3A state, crop,
// timestamp, statistics).
status_t fillCaptureResultCommon(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp,
                                 camera_metadata_ro_entry& activeArraySize);
// Interface for OutputThread calling back to parent
struct OutputThreadInterface {
    virtual ~OutputThreadInterface() {}

    // Imports (or looks up) the given stream buffer; see importBufferImpl.
    virtual aidl::android::hardware::camera::common::Status importBuffer(
            int32_t streamId, uint64_t bufId, buffer_handle_t buf,
            /*out*/ buffer_handle_t** outBufPtr) = 0;

    virtual void notifyError(int32_t frameNumber, int32_t streamId, ErrorCode ec) = 0;

    // Callbacks are fired within the method if msgs/results are nullptr.
    // Otherwise the callbacks will be returned and caller is responsible to
    // fire the callback later
    virtual aidl::android::hardware::camera::common::Status processCaptureRequestError(
            const std::shared_ptr<HalRequest>&,
            /*out*/ std::vector<NotifyMsg>* msgs,
            /*out*/ std::vector<CaptureResult>* results) = 0;

    // Convenience overload that fires the callbacks immediately; final so
    // implementers only override the three-argument form above.
    virtual aidl::android::hardware::camera::common::Status processCaptureRequestError(
            const std::shared_ptr<HalRequest>& reqs) final {
        return processCaptureRequestError(reqs, nullptr, nullptr);
    }

    virtual aidl::android::hardware::camera::common::Status processCaptureResult(
            std::shared_ptr<HalRequest>&) = 0;

    virtual ssize_t getJpegBufferSize(int32_t width, int32_t height) const = 0;
};
// A CPU copy of a mapped V4L2Frame. Will map the input V4L2 frame.
class AllocatedV4L2Frame : public Frame {
  public:
    AllocatedV4L2Frame(std::shared_ptr<V4L2Frame> frameIn);
    ~AllocatedV4L2Frame() override;
    virtual int getData(uint8_t** outData, size_t* dataSize) override;

  private:
    std::vector<uint8_t> mData;  // owned copy of the source frame's bytes
};
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android
#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_EXTERNALCAMERAUTILS_H_

View file

@ -0,0 +1 @@
include platform/frameworks/av:/camera/OWNERS

View file

@ -0,0 +1,71 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "android.hardware.camera.device@3.4-convert-impl"
#include <log/log.h>
#include "convert.h"
#include <aidl/android/hardware/graphics/common/BufferUsage.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <hardware/camera_common.h>
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {
using ::aidl::android::hardware::camera::device::ErrorCode;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::ShutterMsg;
using ::aidl::android::hardware::graphics::common::BufferUsage;
using ::aidl::android::hardware::graphics::common::PixelFormat;
void convertToAidl(const camera_metadata_t* src, CameraMetadata* dest) {
if (src == nullptr) {
return;
}
size_t size = get_camera_metadata_size(src);
auto* src_start = (uint8_t*)src;
uint8_t* src_end = src_start + size;
dest->metadata.assign(src_start, src_end);
}
bool convertFromAidl(const CameraMetadata& src, const camera_metadata_t** dst) {
const std::vector<uint8_t>& metadata = src.metadata;
if (metadata.empty()) {
// Special case for null metadata
*dst = nullptr;
return true;
}
const uint8_t* data = metadata.data();
// check that the size of CameraMetadata match underlying camera_metadata_t
if (get_camera_metadata_size((camera_metadata_t*)data) != metadata.size()) {
ALOGE("%s: input CameraMetadata is corrupt!", __FUNCTION__);
return false;
}
*dst = (camera_metadata_t*)data;
return true;
}
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android

View file

@ -0,0 +1,58 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_CONVERT_H_
#define HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_CONVERT_H_
#include <aidl/android/hardware/camera/common/Status.h>
#include <aidl/android/hardware/camera/device/BufferStatus.h>
#include <aidl/android/hardware/camera/device/CameraMetadata.h>
#include <aidl/android/hardware/camera/device/HalStream.h>
#include <aidl/android/hardware/camera/device/NotifyMsg.h>
#include <aidl/android/hardware/camera/device/Stream.h>
#include <hardware/camera3.h>
#include <system/camera_metadata.h>
namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace implementation {
using ::aidl::android::hardware::camera::common::Status;
using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::HalStream;
using ::aidl::android::hardware::camera::device::NotifyMsg;
using ::aidl::android::hardware::camera::device::Stream;
void convertToAidl(const camera_metadata_t* src, CameraMetadata* dest);
bool convertFromAidl(const CameraMetadata& src, const camera_metadata_t** dst);
// Maps a camera Status code onto the ndk::ScopedAStatus used by AIDL stubs:
// Status::OK becomes the binder "ok" status, anything else is carried as a
// service-specific error whose code is the raw Status value.
inline ndk::ScopedAStatus fromStatus(Status status) {
    if (status == Status::OK) {
        return ndk::ScopedAStatus::ok();
    }
    return ndk::ScopedAStatus::fromServiceSpecificError(static_cast<int32_t>(status));
}
} // namespace implementation
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android
#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_DEFAULT_CONVERT_H_

View file

@ -0,0 +1,104 @@
//
// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package {
// See: http://go/android-license-faq
// A large-scale-change added 'default_applicable_licenses' to import
// all of the 'license_kinds' from "hardware_interfaces_license"
// to get the below license kinds:
// SPDX-license-identifier-Apache-2.0
default_applicable_licenses: ["hardware_interfaces_license"],
}
// Implementation library for the AIDL external (USB/V4L2) camera provider.
// Linked by both service binaries defined below; exports its headers so the
// service sources can include ExternalCameraProvider.h directly.
cc_library_shared {
    name: "android.hardware.camera.provider-V1-external-impl",
    // NOTE(review): still uses "hidl_defaults" even though the interface is
    // AIDL now — presumably kept for the common compiler flags; confirm.
    defaults: ["hidl_defaults"],
    proprietary: true,
    srcs: [
        "ExternalCameraProvider.cpp",
    ],
    shared_libs: [
        "android.hardware.camera.common-V1-ndk",
        "android.hardware.camera.device-V1-ndk",
        "android.hardware.camera.provider-V1-ndk",
        // Graphics mapper HALs are still HIDL; all supported versions are linked.
        "android.hardware.graphics.mapper@2.0",
        "android.hardware.graphics.mapper@3.0",
        "android.hardware.graphics.mapper@4.0",
        "android.hidl.allocator@1.0",
        "android.hidl.memory@1.0",
        "camera.device-external-impl",
        "libbinder_ndk",
        "libcamera_metadata",
        "libcutils",
        "libfmq",
        "libhardware",
        "libhidlbase",
        "liblog",
        "libtinyxml2",
        "libutils",
    ],
    static_libs: [
        "android.hardware.camera.common@1.0-helper",
    ],
    export_include_dirs: ["."],
}
// Shared settings for the eager and lazy external camera provider services.
// Both binaries compile the same external-service.cpp; the lazy variant only
// differs by the LAZY_SERVICE flag and its init .rc file.
cc_defaults {
    name: "camera_external_service_defaults",
    defaults: ["hidl_defaults"],
    proprietary: true,
    // Installed under /vendor/bin/hw, the conventional path for HAL services.
    relative_install_path: "hw",
    srcs: ["external-service.cpp"],
    compile_multilib: "first",
    shared_libs: [
        "android.hardware.camera.common-V1-ndk",
        "android.hardware.camera.device-V1-ndk",
        "android.hardware.camera.provider-V1-ndk",
        "android.hardware.camera.provider-V1-external-impl",
        "android.hardware.graphics.mapper@2.0",
        "android.hardware.graphics.mapper@3.0",
        "android.hardware.graphics.mapper@4.0",
        "android.hidl.allocator@1.0",
        "android.hidl.memory@1.0",
        "camera.device-external-impl",
        "libbinder_ndk",
        "libcamera_metadata",
        "libcutils",
        "libfmq",
        "libhardware",
        "libhidlbase",
        "liblog",
        "libtinyxml2",
        "libutils",
    ],
    static_libs: [
        "android.hardware.camera.common@1.0-helper",
    ],
}
// Always-running external camera provider service.
cc_binary {
    name: "android.hardware.camera.provider-V1-external-service",
    defaults: ["camera_external_service_defaults"],
    init_rc: ["android.hardware.camera.provider-V1-external-service.rc"],
}

// Lazy variant: registered with AServiceManager_registerLazyService so init
// can start it on demand and stop it when idle. "overrides" ensures only one
// of the two services ships in a given build.
cc_binary {
    name: "android.hardware.camera.provider-V1-external-service-lazy",
    overrides: ["android.hardware.camera.provider-V1-external-service"],
    defaults: ["camera_external_service_defaults"],
    init_rc: ["android.hardware.camera.provider-V1-external-service-lazy.rc"],
    cflags: ["-DLAZY_SERVICE"],
}

View file

@ -0,0 +1,382 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "ExtCamPrvdr"
// #define LOG_NDEBUG 0
#include "ExternalCameraProvider.h"
#include <ExternalCameraDevice.h>
#include <aidl/android/hardware/camera/common/Status.h>
#include <convert.h>
#include <cutils/properties.h>
#include <linux/videodev2.h>
#include <log/log.h>
#include <sys/inotify.h>
#include <regex>
namespace android {
namespace hardware {
namespace camera {
namespace provider {
namespace implementation {
using ::aidl::android::hardware::camera::common::Status;
using ::android::hardware::camera::device::implementation::ExternalCameraDevice;
using ::android::hardware::camera::device::implementation::fromStatus;
using ::android::hardware::camera::external::common::ExternalCameraConfig;
namespace {
// "device@<version>/external/<id>"
const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/external/(.+)");
const int kMaxDevicePathLen = 256;
constexpr char kDevicePath[] = "/dev/";
constexpr char kPrefix[] = "video";
constexpr int kPrefixLen = sizeof(kPrefix) - 1;
// Offset of the numeric id inside a full device path such as "/dev/video0",
// i.e. strlen("/dev/video") == 10. sizeof(kDevicePath) counts the trailing
// NUL, so subtract 1 for the string length before adding the prefix length.
// (The previous "+ 1" evaluated to 12, making atoi(devName + kDevicePrefixLen)
// read past the end of "/dev/video<N>" and derive wrong camera ids.)
constexpr int kDevicePrefixLen = sizeof(kDevicePath) + kPrefixLen - 1;

// Parses an external camera device name of the form
// "device@<version>/external/<id>". On a match, optionally returns the
// version string and the /dev/video* path obtained by removing the configured
// camera id offset from <id>. Returns false if the name does not match.
// NOTE(review): std::stoi throws on a non-numeric <id>; the regex allows any
// characters there — confirm callers only pass ids produced by this HAL.
bool matchDeviceName(int cameraIdOffset, const std::string& deviceName, std::string* deviceVersion,
                     std::string* cameraDevicePath) {
    std::smatch sm;
    if (std::regex_match(deviceName, sm, kDeviceNameRE)) {
        if (deviceVersion != nullptr) {
            *deviceVersion = sm[1];
        }
        if (cameraDevicePath != nullptr) {
            *cameraDevicePath = "/dev/video" + std::to_string(std::stoi(sm[2]) - cameraIdOffset);
        }
        return true;
    }
    return false;
}
}  // namespace
// Loads the external camera configuration and starts the hotplug thread that
// watches /dev for V4L2 video nodes appearing and disappearing.
ExternalCameraProvider::ExternalCameraProvider() : mCfg(ExternalCameraConfig::loadFromCfg()) {
    mHotPlugThread = std::make_shared<HotplugThread>(this);
    mHotPlugThread->run();
}
// Stops and joins the hotplug thread before the provider state it references
// is destroyed.
ExternalCameraProvider::~ExternalCameraProvider() {
    mHotPlugThread->requestExitAndWait();
}
// Registers (or clears) the camera service callback and immediately replays
// the current status of every known device to the new callback.
//
// mLock is held for the entire sequence: the original code dropped the lock
// before iterating mCameraStatusMap, racing with the hotplug thread, which
// mutates the map (and fires callbacks) under mLock in deviceAdded() /
// deviceRemoved(). Holding the lock across the notification matches how
// addExternalCamera() already invokes the callback.
ndk::ScopedAStatus ExternalCameraProvider::setCallback(
        const std::shared_ptr<ICameraProviderCallback>& in_callback) {
    Mutex::Autolock _l(mLock);
    mCallback = in_callback;
    if (mCallback == nullptr) {
        return fromStatus(Status::OK);
    }
    for (const auto& pair : mCameraStatusMap) {
        mCallback->cameraDeviceStatusChange(pair.first, pair.second);
    }
    return fromStatus(Status::OK);
}
// USB cameras expose no vendor tags, so the returned section list is always
// empty.
ndk::ScopedAStatus ExternalCameraProvider::getVendorTags(
        std::vector<VendorTagSection>* _aidl_return) {
    if (_aidl_return == nullptr) {
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }
    _aidl_return->clear();
    return fromStatus(Status::OK);
}
// Deliberately reports zero cameras: external devices are hot-pluggable, so
// they are announced exclusively through cameraDeviceStatusChange callbacks
// rather than a static id list.
ndk::ScopedAStatus ExternalCameraProvider::getCameraIdList(std::vector<std::string>* _aidl_return) {
    if (_aidl_return == nullptr) {
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }
    _aidl_return->clear();
    return fromStatus(Status::OK);
}
// Resolves a device name ("device@<version>/external/<id>") to a freshly
// constructed ICameraDevice backed by the corresponding /dev/video* node.
// Fails with ILLEGAL_ARGUMENT for malformed names or devices that are not
// currently PRESENT, and INTERNAL_ERROR if the device fails to initialize.
ndk::ScopedAStatus ExternalCameraProvider::getCameraDeviceInterface(
        const std::string& in_cameraDeviceName, std::shared_ptr<ICameraDevice>* _aidl_return) {
    if (_aidl_return == nullptr) {
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }
    std::string cameraDevicePath, deviceVersion;
    bool match = matchDeviceName(mCfg.cameraIdOffset, in_cameraDeviceName, &deviceVersion,
                                 &cameraDevicePath);
    if (!match) {
        *_aidl_return = nullptr;
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }
    {
        // Read the status map under mLock: the hotplug thread updates it
        // concurrently. A single find() replaces the original count() +
        // operator[] pair, avoiding a second lookup and any possibility of
        // default-inserting an entry via operator[].
        Mutex::Autolock _l(mLock);
        auto statusIt = mCameraStatusMap.find(in_cameraDeviceName);
        if (statusIt == mCameraStatusMap.end() ||
            statusIt->second != CameraDeviceStatus::PRESENT) {
            *_aidl_return = nullptr;
            return fromStatus(Status::ILLEGAL_ARGUMENT);
        }
    }
    ALOGV("Constructing external camera device");
    std::shared_ptr<ExternalCameraDevice> deviceImpl =
            ndk::SharedRefBase::make<ExternalCameraDevice>(cameraDevicePath, mCfg);
    if (deviceImpl == nullptr || deviceImpl->isInitFailed()) {
        ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraDevicePath.c_str());
        *_aidl_return = nullptr;
        return fromStatus(Status::INTERNAL_ERROR);
    }
    IF_ALOGV() {
        int interfaceVersion;
        deviceImpl->getInterfaceVersion(&interfaceVersion);
        ALOGV("%s: device interface version: %d", __FUNCTION__, interfaceVersion);
    }
    *_aidl_return = deviceImpl;
    return fromStatus(Status::OK);
}
// Device state changes (folding, orientation, etc.) require no action for USB
// cameras; the notification is acknowledged and otherwise ignored.
ndk::ScopedAStatus ExternalCameraProvider::notifyDeviceStateChange(int64_t) {
    return fromStatus(Status::OK);
}
// No camera id combinations can stream concurrently on this provider, so the
// result list is always empty.
ndk::ScopedAStatus ExternalCameraProvider::getConcurrentCameraIds(
        std::vector<ConcurrentCameraIdCombination>* _aidl_return) {
    if (_aidl_return == nullptr) {
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }
    _aidl_return->clear();
    return fromStatus(Status::OK);
}
// Concurrent stream combinations are never supported by the external
// provider; any queried combination yields false.
ndk::ScopedAStatus ExternalCameraProvider::isConcurrentStreamCombinationSupported(
        const std::vector<CameraIdAndStreamCombination>&, bool* _aidl_return) {
    if (_aidl_return == nullptr) {
        return fromStatus(Status::ILLEGAL_ARGUMENT);
    }
    *_aidl_return = false;
    return fromStatus(Status::OK);
}
// Registers a validated V4L2 node as an external camera: marks it PRESENT in
// mCameraStatusMap and, if a callback is registered, notifies the camera
// service. devName must be a full "/dev/video<N>" path.
void ExternalCameraProvider::addExternalCamera(const char* devName) {
    ALOGV("%s: ExtCam: adding %s to External Camera HAL!", __FUNCTION__, devName);
    Mutex::Autolock _l(mLock);
    std::string deviceName;
    // The exposed camera id is the numeric suffix of /dev/video<N> shifted by
    // the configured offset (keeps external ids clear of internal-camera ids).
    std::string cameraId =
            std::to_string(mCfg.cameraIdOffset + std::atoi(devName + kDevicePrefixLen));
    deviceName =
            std::string("device@") + ExternalCameraDevice::kDeviceVersion + "/external/" + cameraId;
    mCameraStatusMap[deviceName] = CameraDeviceStatus::PRESENT;
    if (mCallback != nullptr) {
        // NOTE: the status-change callback is invoked while mLock is held.
        mCallback->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::PRESENT);
    }
}
// Handles a /dev/video* node discovered at startup or via inotify. Probes it
// with V4L2 to confirm it is a usable capture device, then registers it
// through addExternalCamera(). Nodes that fail any check are silently skipped
// (logged only).
void ExternalCameraProvider::deviceAdded(const char* devName) {
    {
        // Scoped so the probe fd is closed before ExternalCameraDevice opens
        // the node itself below.
        base::unique_fd fd(::open(devName, O_RDWR));
        if (fd.get() < 0) {
            ALOGE("%s open v4l2 device %s failed:%s", __FUNCTION__, devName, strerror(errno));
            return;
        }
        struct v4l2_capability capability;
        int ret = ioctl(fd.get(), VIDIOC_QUERYCAP, &capability);
        if (ret < 0) {
            ALOGE("%s v4l2 QUERYCAP %s failed", __FUNCTION__, devName);
            return;
        }
        if (!(capability.device_caps & V4L2_CAP_VIDEO_CAPTURE)) {
            // Not a capture node (e.g. a metadata or output node); ignore it.
            ALOGW("%s device %s does not support VIDEO_CAPTURE", __FUNCTION__, devName);
            return;
        }
    }
    // See if we can initialize ExternalCameraDevice correctly
    std::shared_ptr<ExternalCameraDevice> deviceImpl =
            ndk::SharedRefBase::make<ExternalCameraDevice>(devName, mCfg);
    if (deviceImpl == nullptr || deviceImpl->isInitFailed()) {
        ALOGW("%s: Attempt to init camera device %s failed!", __FUNCTION__, devName);
        return;
    }
    // The device was created only to validate the node; release it before
    // announcing the camera.
    deviceImpl.reset();
    addExternalCamera(devName);
}
// Handles removal of a /dev/video* node: erases the corresponding entry from
// mCameraStatusMap and reports NOT_PRESENT to the registered callback, if any.
void ExternalCameraProvider::deviceRemoved(const char* devName) {
    Mutex::Autolock _l(mLock);
    std::string deviceName;
    // Must mirror the name construction in addExternalCamera() so the same
    // node maps back to the same device name.
    std::string cameraId =
            std::to_string(mCfg.cameraIdOffset + std::atoi(devName + kDevicePrefixLen));
    deviceName =
            std::string("device@") + ExternalCameraDevice::kDeviceVersion + "/external/" + cameraId;
    if (mCameraStatusMap.erase(deviceName) == 0) {
        // Unknown device, do not fire callback
        ALOGE("%s: cannot find camera device to remove %s", __FUNCTION__, devName);
        return;
    }
    if (mCallback != nullptr) {
        mCallback->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::NOT_PRESENT);
    }
}
// One-shot scan of /dev for video nodes that existed before the inotify watch
// was established, feeding each non-internal node through deviceAdded().
// Called from HotplugThread::initialize().
void ExternalCameraProvider::updateAttachedCameras() {
    ALOGV("%s start scanning for existing V4L2 devices", __FUNCTION__);
    // Find existing /dev/video* devices
    DIR* devdir = opendir(kDevicePath);
    if (devdir == nullptr) {
        ALOGE("%s: cannot open %s! Exiting threadloop", __FUNCTION__, kDevicePath);
        return;
    }
    struct dirent* de;
    while ((de = readdir(devdir)) != nullptr) {
        // Find external v4l devices that's existing before we start watching and add them
        if (!strncmp(kPrefix, de->d_name, kPrefixLen)) {
            std::string deviceId(de->d_name + kPrefixLen);
            // Nodes listed as internal in the config belong to the built-in
            // camera HAL and must not be exposed by this provider.
            if (mCfg.mInternalDevices.count(deviceId) == 0) {
                ALOGV("Non-internal v4l device %s found", de->d_name);
                char v4l2DevicePath[kMaxDevicePathLen];
                snprintf(v4l2DevicePath, kMaxDevicePathLen, "%s%s", kDevicePath, de->d_name);
                deviceAdded(v4l2DevicePath);
            }
        }
    }
    closedir(devdir);
}
// Start ExternalCameraProvider::HotplugThread functions
// Caches the parent's internal-device set; the thread itself only starts when
// the owner calls run(). The raw mParent pointer is safe because the parent
// joins this thread in its destructor.
ExternalCameraProvider::HotplugThread::HotplugThread(ExternalCameraProvider* parent)
    : mParent(parent), mInternalDevices(parent->mCfg.mInternalDevices) {}
// Releases the inotify file descriptor opened by initialize(); closing the fd
// also drops the /dev watch.
ExternalCameraProvider::HotplugThread::~HotplugThread() {
    // Clean up inotify descriptor if needed.
    if (mINotifyFD >= 0) {
        close(mINotifyFD);
    }
}
// Lazy one-time setup, run from threadLoop(): registers cameras that are
// already attached, then creates an inotify watch on /dev for node
// creation/deletion and prepares the pollfd used by the loop. Returns false
// (leaving mIsInitialized unset, so it is retried) when any step fails.
bool ExternalCameraProvider::HotplugThread::initialize() {
    // Update existing cameras
    mParent->updateAttachedCameras();
    // Set up non-blocking fd. The threadLoop will be responsible for polling read at the
    // desired frequency
    mINotifyFD = inotify_init();
    if (mINotifyFD < 0) {
        ALOGE("%s: inotify init failed! Exiting threadloop", __FUNCTION__);
        return false;
    }
    // Start watching /dev/ directory for created and deleted files
    mWd = inotify_add_watch(mINotifyFD, kDevicePath, IN_CREATE | IN_DELETE);
    if (mWd < 0) {
        ALOGE("%s: inotify add watch failed! Exiting threadloop", __FUNCTION__);
        return false;
    }
    mPollFd = {.fd = mINotifyFD, .events = POLLIN};
    mIsInitialized = true;
    return true;
}
// One iteration of the hotplug loop: lazily runs initialize(), polls the
// inotify fd with a 250ms timeout, then dispatches IN_CREATE/IN_DELETE events
// for non-internal /dev/video* nodes to the parent provider. Always returns
// true so SimpleThread keeps looping; all failures are logged and retried.
bool ExternalCameraProvider::HotplugThread::threadLoop() {
    // Initialize inotify descriptors if needed.
    if (!mIsInitialized && !initialize()) {
        return true;
    }
    // poll /dev/* and handle timeouts and error
    int pollRet = poll(&mPollFd, /* fd_count= */ 1, /* timeout= */ 250);
    if (pollRet == 0) {
        // no read event in 250ms (comment previously said 100ms; the timeout
        // above is the source of truth)
        mPollFd.revents = 0;
        return true;
    } else if (pollRet < 0) {
        ALOGE("%s: error while polling for /dev/*: %d", __FUNCTION__, errno);
        mPollFd.revents = 0;
        return true;
    } else if (mPollFd.revents & POLLERR) {
        ALOGE("%s: polling /dev/ returned POLLERR", __FUNCTION__);
        mPollFd.revents = 0;
        return true;
    } else if (mPollFd.revents & POLLHUP) {
        ALOGE("%s: polling /dev/ returned POLLHUP", __FUNCTION__);
        mPollFd.revents = 0;
        return true;
    } else if (mPollFd.revents & POLLNVAL) {
        ALOGE("%s: polling /dev/ returned POLLNVAL", __FUNCTION__);
        mPollFd.revents = 0;
        return true;
    }
    // mPollFd.revents must contain POLLIN, so safe to reset it before reading
    mPollFd.revents = 0;
    ssize_t ret = read(mINotifyFD, mEventBuf, sizeof(mEventBuf));
    if (ret < static_cast<ssize_t>(sizeof(struct inotify_event))) {
        // Covers both a failed read (ret < 0) and a short/invalid event.
        // The cast matters: the original signed/unsigned comparison promoted
        // a negative ret to a huge unsigned value, skipped this guard, and
        // parsed garbage below.
        return true;
    }
    size_t numBytes = static_cast<size_t>(ret);
    size_t offset = 0;
    while (offset < numBytes) {
        struct inotify_event* event = (struct inotify_event*)&mEventBuf[offset];
        offset += sizeof(struct inotify_event) + event->len;
        if (event->wd != mWd) {
            // event for an unrelated descriptor. ignore.
            continue;
        }
        ALOGV("%s inotify_event %s", __FUNCTION__, event->name);
        if (strncmp(kPrefix, event->name, kPrefixLen) != 0) {
            // event not for /dev/video*. ignore.
            continue;
        }
        std::string deviceId = event->name + kPrefixLen;
        if (mInternalDevices.count(deviceId) != 0) {
            // update to an internal device. ignore.
            continue;
        }
        char v4l2DevicePath[kMaxDevicePathLen];
        snprintf(v4l2DevicePath, kMaxDevicePathLen, "%s%s", kDevicePath, event->name);
        if (event->mask & IN_CREATE) {
            mParent->deviceAdded(v4l2DevicePath);
        } else if (event->mask & IN_DELETE) {
            mParent->deviceRemoved(v4l2DevicePath);
        }
    }
    return true;
}
// End ExternalCameraProvider::HotplugThread functions
} // namespace implementation
} // namespace provider
} // namespace camera
} // namespace hardware
} // namespace android

View file

@ -0,0 +1,117 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef HARDWARE_INTERFACES_CAMERA_PROVIDER_DEFAULT_EXTERNALCAMERAPROVIDER_H_
#define HARDWARE_INTERFACES_CAMERA_PROVIDER_DEFAULT_EXTERNALCAMERAPROVIDER_H_
#include <ExternalCameraUtils.h>
#include <SimpleThread.h>
#include <aidl/android/hardware/camera/common/CameraDeviceStatus.h>
#include <aidl/android/hardware/camera/common/VendorTagSection.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidl/android/hardware/camera/provider/BnCameraProvider.h>
#include <aidl/android/hardware/camera/provider/CameraIdAndStreamCombination.h>
#include <aidl/android/hardware/camera/provider/ConcurrentCameraIdCombination.h>
#include <aidl/android/hardware/camera/provider/ICameraProviderCallback.h>
#include <poll.h>
#include <utils/Mutex.h>
#include <utils/Thread.h>
#include <thread>
#include <unordered_map>
#include <unordered_set>
namespace android {
namespace hardware {
namespace camera {
namespace provider {
namespace implementation {
using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::provider::BnCameraProvider;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination;
using ::aidl::android::hardware::camera::provider::ICameraProviderCallback;
using ::android::hardware::camera::common::helper::SimpleThread;
using ::android::hardware::camera::external::common::ExternalCameraConfig;
/**
 * AIDL ICameraProvider implementation that exposes external USB cameras via
 * the V4L2 API. Devices are discovered dynamically by a hotplug thread that
 * watches /dev for video nodes, so getCameraIdList() is intentionally empty
 * and cameras are announced through cameraDeviceStatusChange callbacks.
 */
class ExternalCameraProvider : public BnCameraProvider {
  public:
    ExternalCameraProvider();
    ~ExternalCameraProvider() override;

    // Registers the camera service callback and replays the current status of
    // all known devices to it.
    ndk::ScopedAStatus setCallback(
            const std::shared_ptr<ICameraProviderCallback>& in_callback) override;
    // Always returns an empty list: USB cameras have no vendor tags.
    ndk::ScopedAStatus getVendorTags(std::vector<VendorTagSection>* _aidl_return) override;
    // Always returns an empty list: devices are reported via callbacks only.
    ndk::ScopedAStatus getCameraIdList(std::vector<std::string>* _aidl_return) override;
    // Constructs an ICameraDevice for a "device@<version>/external/<id>" name.
    ndk::ScopedAStatus getCameraDeviceInterface(
            const std::string& in_cameraDeviceName,
            std::shared_ptr<ICameraDevice>* _aidl_return) override;
    // No-op: device state changes do not affect USB cameras.
    ndk::ScopedAStatus notifyDeviceStateChange(int64_t in_deviceState) override;
    // Always returns an empty list: no concurrent streaming support.
    ndk::ScopedAStatus getConcurrentCameraIds(
            std::vector<ConcurrentCameraIdCombination>* _aidl_return) override;
    // Always reports false: no concurrent stream combination is supported.
    ndk::ScopedAStatus isConcurrentStreamCombinationSupported(
            const std::vector<CameraIdAndStreamCombination>& in_configs,
            bool* _aidl_return) override;

  private:
    // Marks devName ("/dev/video<N>") PRESENT and notifies mCallback.
    void addExternalCamera(const char* devName);
    // Probes a new /dev/video* node and registers it if it is a usable
    // capture device.
    void deviceAdded(const char* devName);
    // Unregisters a removed /dev/video* node and notifies mCallback.
    void deviceRemoved(const char* devName);
    // One-shot scan of /dev for nodes attached before the watch started.
    void updateAttachedCameras();

    // A separate thread to monitor '/dev' directory for '/dev/video*' entries
    // This thread calls back into ExternalCameraProvider when an actionable change is detected.
    class HotplugThread : public SimpleThread {
      public:
        explicit HotplugThread(ExternalCameraProvider* parent);
        ~HotplugThread() override;

      protected:
        bool threadLoop() override;

      private:
        // Returns true if thread initialization succeeded, and false if thread initialization
        // failed.
        bool initialize();

        ExternalCameraProvider* mParent = nullptr;
        const std::unordered_set<std::string> mInternalDevices;

        bool mIsInitialized = false;
        int mINotifyFD = -1;   // inotify instance watching /dev
        int mWd = -1;          // watch descriptor returned by inotify_add_watch

        // struct to wrap mINotifyFD and poll it with timeout
        struct pollfd mPollFd = {};
        char mEventBuf[512] = {0};  // buffer for batched inotify events
    };

    Mutex mLock;  // guards mCallback and mCameraStatusMap
    std::shared_ptr<ICameraProviderCallback> mCallback = nullptr;
    std::unordered_map<std::string, CameraDeviceStatus> mCameraStatusMap;  // camera id -> status
    const ExternalCameraConfig mCfg;
    std::shared_ptr<HotplugThread> mHotPlugThread;
};
} // namespace implementation
} // namespace provider
} // namespace camera
} // namespace hardware
} // namespace android
#endif // HARDWARE_INTERFACES_CAMERA_PROVIDER_DEFAULT_EXTERNALCAMERAPROVIDER_H_

View file

@ -0,0 +1 @@
include platform/frameworks/av:/camera/OWNERS

View file

@ -0,0 +1,10 @@
# init configuration for the lazy external camera provider. "oneshot" +
# "disabled" keep init from auto-starting it; servicemanager launches it on
# demand when a client requests the declared AIDL interface, and it may exit
# when idle.
service vendor.camera.provider-ext /vendor/bin/hw/android.hardware.camera.provider-V1-external-service-lazy
    interface aidl android.hardware.camera.provider.ICameraProvider/external/0
    class hal
    oneshot
    disabled
    user cameraserver
    group audio camera input drmrpc usb
    ioprio rt 4
    capabilities SYS_NICE
    task_profiles CameraServiceCapacity MaxPerformance

View file

@ -0,0 +1,8 @@
# init configuration for the always-running external camera provider service.
# Runs as cameraserver with the groups needed for USB/V4L2 device access.
service vendor.camera.provider-ext /vendor/bin/hw/android.hardware.camera.provider-V1-external-service
    interface aidl android.hardware.camera.provider.ICameraProvider/external/0
    class hal
    user cameraserver
    group audio camera input drmrpc usb
    ioprio rt 4
    capabilities SYS_NICE
    task_profiles CameraServiceCapacity MaxPerformance

View file

@ -0,0 +1,52 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <ExternalCameraProvider.h>
#include <android-base/logging.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
using ::android::hardware::camera::provider::implementation::ExternalCameraProvider;
namespace {
// Number of threads serving incoming binder calls. Renamed from
// HWBINDER_THREAD_COUNT: this AIDL service uses the NDK binder thread pool
// (ABinderProcess_*), not hwbinder, so the old name was misleading.
constexpr int kBinderThreadCount = 6;
}  // namespace

// Entry point for both the eager and lazy (-DLAZY_SERVICE) external camera
// provider binaries: registers an ExternalCameraProvider instance with
// servicemanager and then joins the binder thread pool forever.
int main() {
    ALOGI("CameraProvider: external webcam service is starting.");
    ABinderProcess_setThreadPoolMaxThreadCount(kBinderThreadCount);
    std::shared_ptr<ExternalCameraProvider> defaultProvider =
            ndk::SharedRefBase::make<ExternalCameraProvider>();
    const std::string serviceName = std::string(ExternalCameraProvider::descriptor) + "/external/0";
#ifdef LAZY_SERVICE
    // Lazy services are started on demand and may be stopped by init when no
    // clients remain.
    binder_exception_t ret = AServiceManager_registerLazyService(defaultProvider->asBinder().get(),
                                                                 serviceName.c_str());
    LOG_ALWAYS_FATAL_IF(ret != EX_NONE,
                        "Error while registering lazy ext camera provider service: %d", ret);
#else
    binder_exception_t ret =
            AServiceManager_addService(defaultProvider->asBinder().get(), serviceName.c_str());
    LOG_ALWAYS_FATAL_IF(ret != EX_NONE, "Error while registering ext camera provider service: %d",
                        ret);
#endif
    // joinThreadPool() does not return under normal operation.
    ABinderProcess_joinThreadPool();
    return EXIT_FAILURE;  // should not reach
}