From 0aa5ca906b81b7c66bc6ec44d3782cb321692c44 Mon Sep 17 00:00:00 2001 From: chemicstry Date: Thu, 9 Nov 2023 05:20:38 -0500 Subject: [PATCH 1/2] Fix clippy and rustfmt warnings --- libcamera-meta/src/bin/generate_c.rs | 9 +- libcamera-meta/src/bin/generate_rust.rs | 11 +- libcamera/src/framebuffer_allocator.rs | 26 +- libcamera/src/generated/controls.rs | 369 +++++++++++---------- libcamera/src/generated/properties.rs | 411 ++++++++++++------------ regenerate.sh | 2 +- rustfmt.toml | 4 +- 7 files changed, 432 insertions(+), 400 deletions(-) diff --git a/libcamera-meta/src/bin/generate_c.rs b/libcamera-meta/src/bin/generate_c.rs index fb08b33..3ac7558 100644 --- a/libcamera-meta/src/bin/generate_c.rs +++ b/libcamera-meta/src/bin/generate_c.rs @@ -1,3 +1,5 @@ +use std::fmt::Write; + use libcamera_meta::{control_ids, property_ids, Control}; /// Converts `ExampleName` to `example_name` @@ -66,9 +68,10 @@ fn format_docstring(desc: &str, indent: usize) -> String { } out.push(" */".to_string()); - out.iter() - .map(|line| format!("{}{}\n", " ".repeat(indent), line)) - .collect() + out.iter().fold(String::new(), |mut output, line| { + writeln!(output, "{}{}", " ".repeat(indent), line).unwrap(); + output + }) } fn generate_controls(controls: &[Control], name: &str) { diff --git a/libcamera-meta/src/bin/generate_rust.rs b/libcamera-meta/src/bin/generate_rust.rs index 50f858c..78d6a7e 100644 --- a/libcamera-meta/src/bin/generate_rust.rs +++ b/libcamera-meta/src/bin/generate_rust.rs @@ -97,12 +97,13 @@ fn generate_controls(controls: &[Control], ty: ControlsType) { printdoc! {" impl TryFrom for {ctrl_name} {{ type Error = ControlValueError; - + fn try_from(value: ControlValue) -> Result {{ - Self::try_from({ctrl_type}::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from({ctrl_type}::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) }} }} - + impl From<{ctrl_name}> for ControlValue {{ fn from(val: {ctrl_name}) -> Self {{ ControlValue::from(<{ctrl_type}>::from(val)) @@ -116,12 +117,12 @@ fn generate_controls(controls: &[Control], ty: ControlsType) { impl Deref for {ctrl_name} {{ type Target = {ctrl_type}; - + fn deref(&self) -> &Self::Target {{ &self.0 }} }} - + impl DerefMut for {ctrl_name} {{ fn deref_mut(&mut self) -> &mut Self::Target {{ &mut self.0 diff --git a/libcamera/src/framebuffer_allocator.rs b/libcamera/src/framebuffer_allocator.rs index 10d5005..446f1d8 100644 --- a/libcamera/src/framebuffer_allocator.rs +++ b/libcamera/src/framebuffer_allocator.rs @@ -14,14 +14,15 @@ struct FrameBufferAllocatorInstance { ptr: NonNull, /// List of streams for which buffers were allocated. /// We use this list to free buffers on drop. - allocated_streams: Mutex>>, + allocated_streams: Vec>, } +unsafe impl Send for FrameBufferAllocatorInstance {} + impl Drop for FrameBufferAllocatorInstance { fn drop(&mut self) { // Free allocated streams - let mut streams = self.allocated_streams.lock().unwrap(); - for stream in streams.drain(..) { + for stream in self.allocated_streams.drain(..) 
{ unsafe { libcamera_framebuffer_allocator_free(self.ptr.as_ptr(), stream.as_ptr()); } @@ -32,30 +33,31 @@ impl Drop for FrameBufferAllocatorInstance { } pub struct FrameBufferAllocator { - inner: Arc, + inner: Arc>, } impl FrameBufferAllocator { pub fn new(cam: &Camera<'_>) -> Self { Self { - inner: Arc::new(FrameBufferAllocatorInstance { + inner: Arc::new(Mutex::new(FrameBufferAllocatorInstance { ptr: NonNull::new(unsafe { libcamera_framebuffer_allocator_create(cam.ptr.as_ptr()) }).unwrap(), - allocated_streams: Mutex::new(Vec::new()), - }), + allocated_streams: Vec::new(), + })), } } /// Allocate N buffers for a given stream, where N is equal to /// [StreamConfigurationRef::get_buffer_count()](crate::stream::StreamConfigurationRef::get_buffer_count). pub fn alloc(&mut self, stream: &Stream) -> io::Result> { - let ret = unsafe { libcamera_framebuffer_allocator_allocate(self.inner.ptr.as_ptr(), stream.ptr.as_ptr()) }; + let mut inner = self.inner.lock().unwrap(); + + let ret = unsafe { libcamera_framebuffer_allocator_allocate(inner.ptr.as_ptr(), stream.ptr.as_ptr()) }; if ret < 0 { Err(io::Error::from_raw_os_error(ret)) } else { - self.inner.allocated_streams.lock().unwrap().push(stream.ptr); + inner.allocated_streams.push(stream.ptr); - let buffers = - unsafe { libcamera_framebuffer_allocator_buffers(self.inner.ptr.as_ptr(), stream.ptr.as_ptr()) }; + let buffers = unsafe { libcamera_framebuffer_allocator_buffers(inner.ptr.as_ptr(), stream.ptr.as_ptr()) }; let len = unsafe { libcamera_framebuffer_list_size(buffers) }; @@ -86,7 +88,7 @@ impl FrameBufferAllocator { pub struct FrameBuffer { ptr: NonNull, - _alloc: Arc, + _alloc: Arc>, } impl core::fmt::Debug for FrameBuffer { diff --git a/libcamera/src/generated/controls.rs b/libcamera/src/generated/controls.rs index ed70972..6c5c668 100644 --- a/libcamera/src/generated/controls.rs +++ b/libcamera/src/generated/controls.rs @@ -1,25 +1,25 @@ //! Generated by `cargo run --bin generate_rust controls` +use std::ops::{Deref, DerefMut}; +use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] use crate::geometry::{Rectangle, Size}; -use num_enum::{IntoPrimitive, TryFromPrimitive}; -use std::ops::{Deref, DerefMut}; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(u32)] pub enum ControlId { /// Enable or disable the AE. - /// + /// /// \sa ExposureTime AnalogueGain AeEnable = 1, /// Report the lock status of a running AE algorithm. - /// + /// /// If the AE algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AE algorithm is not /// running the control shall not be present in the metadata control list. - /// + /// /// \sa AeEnable AeLocked = 2, /// Specify a metering mode for the AE algorithm to use. The metering @@ -39,22 +39,22 @@ pub enum ControlId { AeExposureMode = 5, /// Specify an Exposure Value (EV) parameter. The EV parameter will only be /// applied if the AE algorithm is currently enabled. - /// + /// /// By convention EV adjusts the exposure as log2. For example /// EV = [-2, -1, 0.5, 0, 0.5, 1, 2] results in an exposure adjustment /// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. - /// + /// /// \sa AeEnable ExposureValue = 6, /// Exposure time (shutter speed) for the frame applied in the sensor /// device. This value is specified in micro-seconds. 
- /// + /// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. - /// + /// /// \sa AnalogueGain AeEnable - /// + /// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -64,13 +64,13 @@ pub enum ControlId { /// Analogue gain value applied in the sensor device. /// The value of the control specifies the gain multiplier applied to all /// colour channels. This value cannot be lower than 1.0. - /// + /// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. - /// + /// /// \sa ExposureTime AeEnable - /// + /// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -88,24 +88,24 @@ pub enum ControlId { /// control can only be returned in metadata. Lux = 11, /// Enable or disable the AWB. - /// + /// /// \sa ColourGains AwbEnable = 12, /// Specify the range of illuminants to use for the AWB algorithm. The modes /// supported are platform specific, and not all modes may be supported. AwbMode = 13, /// Report the lock status of a running AWB algorithm. - /// + /// /// If the AWB algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AWB algorithm is not /// running the control shall not be present in the metadata control list. - /// + /// /// \sa AwbEnable AwbLocked = 14, /// Pair of gain values for the Red and Blue colour channels, in that /// order. ColourGains can only be applied in a Request when the AWB is /// disabled. - /// + /// /// \sa AwbEnable ColourGains = 15, /// Report the current estimate of the colour temperature, in kelvin, for this frame. The ColourTemperature control can only be returned in metadata. @@ -147,19 +147,19 @@ pub enum ControlId { /// relative to the PixelArrayActiveAreas that is being used. The units /// remain native sensor pixels, even if the sensor is being used in /// a binning or skipping mode. - /// + /// /// This control is only present when the pipeline supports scaling. Its /// maximum valid value is given by the properties::ScalerCropMaximum /// property, and the two can be used to implement digital zoom. ScalerCrop = 22, /// Digital gain value applied during the processing steps applied /// to the image as captured from the sensor. - /// + /// /// The global digital gain factor is applied to all the colour channels /// of the RAW image. Different pipeline models are free to /// specify how the global gain factor applies to each separate /// channel. - /// + /// /// If an imaging pipeline applies digital gain in distinct /// processing steps, this value indicates their total sum. /// Pipelines are free to decide how to adjust each processing @@ -172,7 +172,7 @@ pub enum ControlId { FrameDuration = 24, /// The minimum and maximum (in that order) frame duration, /// expressed in microseconds. - /// + /// /// When provided by applications, the control specifies the sensor frame /// duration interval the pipeline has to use. This limits the largest /// exposure time the sensor can use. 
For example, if a maximum frame @@ -181,7 +181,7 @@ pub enum ControlId { /// A fixed frame duration is achieved by setting the minimum and maximum /// values to be the same. Setting both values to 0 reverts to using the /// IPA provided defaults. - /// + /// /// The maximum frame duration provides the absolute limit to the shutter /// speed computed by the AE algorithm and it overrides any exposure mode /// setting specified with controls::AeExposureMode. Similarly, when a @@ -190,38 +190,38 @@ pub enum ControlId { /// metadata, the control expresses the minimum and maximum frame /// durations used after being clipped to the sensor provided frame /// duration limits. - /// + /// /// \sa AeExposureMode /// \sa ExposureTime - /// + /// /// \todo Define how to calculate the capture frame rate by /// defining controls to report additional delays introduced by /// the capture pipeline or post-processing stages (ie JPEG /// conversion, frame scaling). - /// + /// /// \todo Provide an explicit definition of default control values, for /// this and all other controls. FrameDurationLimits = 25, /// Temperature measure from the camera sensor in Celsius. This is typically /// obtained by a thermal sensor present on-die or in the camera module. The /// range of reported temperatures is device dependent. - /// + /// /// The SensorTemperature control will only be returned in metadata if a /// themal sensor is present. SensorTemperature = 26, /// The time when the first row of the image sensor active array is exposed. - /// + /// /// The timestamp, expressed in nanoseconds, represents a monotonically /// increasing counter since the system boot time, as defined by the /// Linux-specific CLOCK_BOOTTIME clock id. - /// + /// /// The SensorTimestamp control can only be returned in metadata. - /// + /// /// \todo Define how the sensor timestamp has to be used in the reprocessing /// use case. SensorTimestamp = 27, /// Control to set the mode of the AF (autofocus) algorithm. - /// + /// /// An implementation may choose not to implement all the modes. AfMode = 28, /// Control to set the range of focus distances that is scanned. An @@ -239,19 +239,19 @@ pub enum ControlId { /// Sets the focus windows used by the AF algorithm when AfMetering is set /// to AfMeteringWindows. The units used are pixels within the rectangle /// returned by the ScalerCropMaximum property. - /// + /// /// In order to be activated, a rectangle must be programmed with non-zero /// width and height. Internally, these rectangles are intersected with the /// ScalerCropMaximum rectangle. If the window becomes empty after this /// operation, then the window is ignored. If all the windows end up being /// ignored, then the behaviour is platform dependent. - /// + /// /// On platforms that support the ScalerCrop control (for implementing /// digital zoom, for example), no automatic recalculation or adjustment of /// AF windows is performed internally if the ScalerCrop is changed. If any /// window lies outside the output image after the scaler crop has been /// applied, it is up to the application to recalculate them. - /// + /// /// The details of how the windows are used are platform dependent. We note /// that when there is more than one AF window, a typical implementation /// might find the optimal focus position for each one and finally select @@ -260,7 +260,7 @@ pub enum ControlId { AfWindows = 32, /// This control starts an autofocus scan when AfMode is set to AfModeAuto, /// and can also be used to terminate a scan early. 
- /// + /// /// It is ignored if AfMode is set to AfModeManual or AfModeContinuous. AfTrigger = 33, /// This control has no effect except when in continuous autofocus mode @@ -270,28 +270,28 @@ pub enum ControlId { AfPause = 34, /// Acts as a control to instruct the lens to move to a particular position /// and also reports back the position of the lens for each frame. - /// + /// /// The LensPosition control is ignored unless the AfMode is set to /// AfModeManual, though the value is reported back unconditionally in all /// modes. - /// + /// /// The units are a reciprocal distance scale like dioptres but normalised /// for the hyperfocal distance. That is, for a lens with hyperfocal /// distance H, and setting it to a focal distance D, the lens position LP, /// which is generally a non-integer, is given by - /// + /// /// \f$LP = \frac{H}{D}\f$ - /// + /// /// For example: - /// + /// /// 0 moves the lens to infinity. /// 0.5 moves the lens to twice the hyperfocal distance. /// 1 moves the lens to the hyperfocal position. /// And larger values will focus the lens ever closer. - /// + /// /// \todo Define a property to report the Hyperforcal distance of calibrated /// lenses. - /// + /// /// \todo Define a property to report the maximum and minimum positions of /// this lens. The minimum value will often be zero (meaning infinity). LensPosition = 35, @@ -299,49 +299,49 @@ pub enum ControlId { /// reported AfMode value and (in continuous AF mode) the AfPauseState /// value. The possible state changes are described below, though we note /// the following state transitions that occur when the AfMode is changed. - /// + /// /// If the AfMode is set to AfModeManual, then the AfState will always /// report AfStateIdle (even if the lens is subsequently moved). Changing to /// the AfModeManual state does not initiate any lens movement. - /// + /// /// If the AfMode is set to AfModeAuto then the AfState will report /// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent together /// then AfState will omit AfStateIdle and move straight to AfStateScanning /// (and start a scan). - /// + /// /// If the AfMode is set to AfModeContinuous then the AfState will initially /// report AfStateScanning. AfState = 36, /// Only applicable in continuous (AfModeContinuous) mode, this reports /// whether the algorithm is currently running, paused or pausing (that is, /// will pause as soon as any in-progress scan completes). - /// + /// /// Any change to AfMode will cause AfPauseStateRunning to be reported. AfPauseState = 37, /// Control for AE metering trigger. Currently identical to /// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. - /// + /// /// Whether the camera device will trigger a precapture metering sequence /// when it processes this request. AePrecaptureTrigger = 38, /// Control to select the noise reduction algorithm mode. Currently /// identical to ANDROID_NOISE_REDUCTION_MODE. - /// + /// /// Mode of operation for the noise reduction algorithm. NoiseReductionMode = 39, /// Control to select the color correction aberration mode. Currently /// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. - /// + /// /// Mode of operation for the chromatic aberration correction algorithm. ColorCorrectionAberrationMode = 40, /// Control to report the current AE algorithm state. Currently identical to /// ANDROID_CONTROL_AE_STATE. - /// + /// /// Current state of the AE algorithm. AeState = 41, /// Control to report the current AWB algorithm state. Currently identical /// to ANDROID_CONTROL_AWB_STATE. 
- /// + /// /// Current state of the AWB algorithm. AwbState = 42, /// Control to report the time between the start of exposure of the first @@ -358,7 +358,7 @@ pub enum ControlId { /// it was exposed to when the final completed result was available to the /// framework. Always less than or equal to PipelineMaxDepth. Currently /// identical to ANDROID_REQUEST_PIPELINE_DEPTH. - /// + /// /// The typical value for this control is 3 as a frame is first exposed, /// captured and then processed in a single pass through the ISP. Any /// additional processing step performed after the ISP pass (in example face @@ -377,7 +377,7 @@ pub enum ControlId { } /// Enable or disable the AE. -/// +/// /// \sa ExposureTime AnalogueGain #[derive(Debug, Clone)] pub struct AeEnable(pub bool); @@ -417,11 +417,11 @@ impl ControlEntry for AeEnable { impl Control for AeEnable {} /// Report the lock status of a running AE algorithm. -/// +/// /// If the AE algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AE algorithm is not /// running the control shall not be present in the metadata control list. -/// +/// /// \sa AeEnable #[derive(Debug, Clone)] pub struct AeLocked(pub bool); @@ -481,7 +481,8 @@ impl TryFrom for AeMeteringMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -517,7 +518,8 @@ impl TryFrom for AeConstraintMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -553,7 +555,8 @@ impl TryFrom for AeExposureMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -570,11 +573,11 @@ impl Control for AeExposureMode {} /// Specify an Exposure Value (EV) parameter. The EV parameter will only be /// applied if the AE algorithm is currently enabled. -/// +/// /// By convention EV adjusts the exposure as log2. For example /// EV = [-2, -1, 0.5, 0, 0.5, 1, 2] results in an exposure adjustment /// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. -/// +/// /// \sa AeEnable #[derive(Debug, Clone)] pub struct ExposureValue(pub f32); @@ -615,13 +618,13 @@ impl Control for ExposureValue {} /// Exposure time (shutter speed) for the frame applied in the sensor /// device. This value is specified in micro-seconds. -/// +/// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. -/// +/// /// \sa AnalogueGain AeEnable -/// +/// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -667,13 +670,13 @@ impl Control for ExposureTime {} /// Analogue gain value applied in the sensor device. 
/// The value of the control specifies the gain multiplier applied to all /// colour channels. This value cannot be lower than 1.0. -/// +/// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. -/// +/// /// \sa ExposureTime AeEnable -/// +/// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -835,7 +838,7 @@ impl ControlEntry for Lux { impl Control for Lux {} /// Enable or disable the AWB. -/// +/// /// \sa ColourGains #[derive(Debug, Clone)] pub struct AwbEnable(pub bool); @@ -901,7 +904,8 @@ impl TryFrom for AwbMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -917,11 +921,11 @@ impl ControlEntry for AwbMode { impl Control for AwbMode {} /// Report the lock status of a running AWB algorithm. -/// +/// /// If the AWB algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AWB algorithm is not /// running the control shall not be present in the metadata control list. -/// +/// /// \sa AwbEnable #[derive(Debug, Clone)] pub struct AwbLocked(pub bool); @@ -963,7 +967,7 @@ impl Control for AwbLocked {} /// Pair of gain values for the Red and Blue colour channels, in that /// order. ColourGains can only be applied in a Request when the AWB is /// disabled. -/// +/// /// \sa AwbEnable #[derive(Debug, Clone)] pub struct ColourGains(pub [f32; 2]); @@ -1257,7 +1261,7 @@ impl Control for ColourCorrectionMatrix {} /// relative to the PixelArrayActiveAreas that is being used. The units /// remain native sensor pixels, even if the sensor is being used in /// a binning or skipping mode. -/// +/// /// This control is only present when the pipeline supports scaling. Its /// maximum valid value is given by the properties::ScalerCropMaximum /// property, and the two can be used to implement digital zoom. @@ -1300,12 +1304,12 @@ impl Control for ScalerCrop {} /// Digital gain value applied during the processing steps applied /// to the image as captured from the sensor. -/// +/// /// The global digital gain factor is applied to all the colour channels /// of the RAW image. Different pipeline models are free to /// specify how the global gain factor applies to each separate /// channel. -/// +/// /// If an imaging pipeline applies digital gain in distinct /// processing steps, this value indicates their total sum. /// Pipelines are free to decide how to adjust each processing @@ -1390,7 +1394,7 @@ impl Control for FrameDuration {} /// The minimum and maximum (in that order) frame duration, /// expressed in microseconds. -/// +/// /// When provided by applications, the control specifies the sensor frame /// duration interval the pipeline has to use. This limits the largest /// exposure time the sensor can use. For example, if a maximum frame @@ -1399,7 +1403,7 @@ impl Control for FrameDuration {} /// A fixed frame duration is achieved by setting the minimum and maximum /// values to be the same. Setting both values to 0 reverts to using the /// IPA provided defaults. 
-/// +/// /// The maximum frame duration provides the absolute limit to the shutter /// speed computed by the AE algorithm and it overrides any exposure mode /// setting specified with controls::AeExposureMode. Similarly, when a @@ -1408,15 +1412,15 @@ impl Control for FrameDuration {} /// metadata, the control expresses the minimum and maximum frame /// durations used after being clipped to the sensor provided frame /// duration limits. -/// +/// /// \sa AeExposureMode /// \sa ExposureTime -/// +/// /// \todo Define how to calculate the capture frame rate by /// defining controls to report additional delays introduced by /// the capture pipeline or post-processing stages (ie JPEG /// conversion, frame scaling). -/// +/// /// \todo Provide an explicit definition of default control values, for /// this and all other controls. #[derive(Debug, Clone)] @@ -1459,7 +1463,7 @@ impl Control for FrameDurationLimits {} /// Temperature measure from the camera sensor in Celsius. This is typically /// obtained by a thermal sensor present on-die or in the camera module. The /// range of reported temperatures is device dependent. -/// +/// /// The SensorTemperature control will only be returned in metadata if a /// themal sensor is present. #[derive(Debug, Clone)] @@ -1500,13 +1504,13 @@ impl ControlEntry for SensorTemperature { impl Control for SensorTemperature {} /// The time when the first row of the image sensor active array is exposed. -/// +/// /// The timestamp, expressed in nanoseconds, represents a monotonically /// increasing counter since the system boot time, as defined by the /// Linux-specific CLOCK_BOOTTIME clock id. -/// +/// /// The SensorTimestamp control can only be returned in metadata. -/// +/// /// \todo Define how the sensor timestamp has to be used in the reprocessing /// use case. #[derive(Debug, Clone)] @@ -1547,7 +1551,7 @@ impl ControlEntry for SensorTimestamp { impl Control for SensorTimestamp {} /// Control to set the mode of the AF (autofocus) algorithm. -/// +/// /// An implementation may choose not to implement all the modes. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -1556,18 +1560,18 @@ pub enum AfMode { /// perform any action nor move the lens of its own accord, but an /// application can specify the desired lens position using the /// LensPosition control. - /// + /// /// In this mode the AfState will always report AfStateIdle. Manual = 0, /// The AF algorithm is in auto mode. This means that the algorithm /// will never move the lens or change state unless the AfTrigger /// control is used. The AfTrigger control can be used to initiate a /// focus scan, the results of which will be reported by AfState. - /// + /// /// If the autofocus algorithm is moved from AfModeAuto to another /// mode while a scan is in progress, the scan is cancelled /// immediately, without waiting for the scan to finish. - /// + /// /// When first entering this mode the AfState will report /// AfStateIdle. When a trigger control is sent, AfState will /// report AfStateScanning for a period before spontaneously @@ -1582,11 +1586,11 @@ pub enum AfMode { /// intervention. The AfState still reports whether the algorithm is /// currently scanning or not, though the application has no ability to /// initiate or cancel scans, nor to move the lens for itself. - /// + /// /// However, applications can pause the AF algorithm from continuously /// scanning by using the AfPause control. 
This allows video or still /// images to be captured whilst guaranteeing that the focus is fixed. - /// + /// /// When set to AfModeContinuous, the system will immediately initiate a /// scan so AfState will report AfStateScanning, and will settle on one /// of AfStateFocused or AfStateFailed, depending on the scan result. @@ -1597,7 +1601,8 @@ impl TryFrom for AfMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1634,7 +1639,8 @@ impl TryFrom for AfRange { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1667,7 +1673,8 @@ impl TryFrom for AfSpeed { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1697,7 +1704,8 @@ impl TryFrom for AfMetering { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1715,19 +1723,19 @@ impl Control for AfMetering {} /// Sets the focus windows used by the AF algorithm when AfMetering is set /// to AfMeteringWindows. The units used are pixels within the rectangle /// returned by the ScalerCropMaximum property. -/// +/// /// In order to be activated, a rectangle must be programmed with non-zero /// width and height. Internally, these rectangles are intersected with the /// ScalerCropMaximum rectangle. If the window becomes empty after this /// operation, then the window is ignored. If all the windows end up being /// ignored, then the behaviour is platform dependent. -/// +/// /// On platforms that support the ScalerCrop control (for implementing /// digital zoom, for example), no automatic recalculation or adjustment of /// AF windows is performed internally if the ScalerCrop is changed. If any /// window lies outside the output image after the scaler crop has been /// applied, it is up to the application to recalculate them. -/// +/// /// The details of how the windows are used are platform dependent. We note /// that when there is more than one AF window, a typical implementation /// might find the optimal focus position for each one and finally select @@ -1772,7 +1780,7 @@ impl Control for AfWindows {} /// This control starts an autofocus scan when AfMode is set to AfModeAuto, /// and can also be used to terminate a scan early. -/// +/// /// It is ignored if AfMode is set to AfModeManual or AfModeContinuous. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -1787,7 +1795,8 @@ impl TryFrom for AfTrigger { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1818,7 +1827,7 @@ pub enum AfPause { /// This is similar to AfPauseImmediate, and if the AfState is currently /// reporting AfStateFocused or AfStateFailed it will remain in that /// state and AfPauseState will report AfPauseStatePaused. - /// + /// /// However, if the algorithm is scanning (AfStateScanning), /// AfPauseState will report AfPauseStatePausing until the scan is /// finished, at which point AfState will report one of AfStateFocused @@ -1835,7 +1844,8 @@ impl TryFrom for AfPause { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1852,28 +1862,28 @@ impl Control for AfPause {} /// Acts as a control to instruct the lens to move to a particular position /// and also reports back the position of the lens for each frame. -/// +/// /// The LensPosition control is ignored unless the AfMode is set to /// AfModeManual, though the value is reported back unconditionally in all /// modes. -/// +/// /// The units are a reciprocal distance scale like dioptres but normalised /// for the hyperfocal distance. That is, for a lens with hyperfocal /// distance H, and setting it to a focal distance D, the lens position LP, /// which is generally a non-integer, is given by -/// +/// /// \f$LP = \frac{H}{D}\f$ -/// +/// /// For example: -/// +/// /// 0 moves the lens to infinity. /// 0.5 moves the lens to twice the hyperfocal distance. /// 1 moves the lens to the hyperfocal position. /// And larger values will focus the lens ever closer. -/// +/// /// \todo Define a property to report the Hyperforcal distance of calibrated /// lenses. -/// +/// /// \todo Define a property to report the maximum and minimum positions of /// this lens. The minimum value will often be zero (meaning infinity). #[derive(Debug, Clone)] @@ -1917,16 +1927,16 @@ impl Control for LensPosition {} /// reported AfMode value and (in continuous AF mode) the AfPauseState /// value. The possible state changes are described below, though we note /// the following state transitions that occur when the AfMode is changed. -/// +/// /// If the AfMode is set to AfModeManual, then the AfState will always /// report AfStateIdle (even if the lens is subsequently moved). Changing to /// the AfModeManual state does not initiate any lens movement. -/// +/// /// If the AfMode is set to AfModeAuto then the AfState will report /// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent together /// then AfState will omit AfStateIdle and move straight to AfStateScanning /// (and start a scan). -/// +/// /// If the AfMode is set to AfModeContinuous then the AfState will initially /// report AfStateScanning. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] @@ -1942,7 +1952,7 @@ pub enum AfState { /// move back to AfStateIdle or, if the scan actually completes before /// the cancel request is processed, to one of AfStateFocused or /// AfStateFailed. - /// + /// /// Alternatively the AF algorithm could be in continuous mode /// (AfModeContinuous) at which point it may enter this state /// spontaneously whenever it determines that a rescan is needed. 
@@ -1961,7 +1971,8 @@ impl TryFrom for AfState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1979,7 +1990,7 @@ impl Control for AfState {} /// Only applicable in continuous (AfModeContinuous) mode, this reports /// whether the algorithm is currently running, paused or pausing (that is, /// will pause as soon as any in-progress scan completes). -/// +/// /// Any change to AfMode will cause AfPauseStateRunning to be reported. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2001,7 +2012,8 @@ impl TryFrom for AfPauseState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2018,7 +2030,7 @@ impl Control for AfPauseState {} /// Control for AE metering trigger. Currently identical to /// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. -/// +/// /// Whether the camera device will trigger a precapture metering sequence /// when it processes this request. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] @@ -2037,7 +2049,8 @@ impl TryFrom for AePrecaptureTrigger { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2054,7 +2067,7 @@ impl Control for AePrecaptureTrigger {} /// Control to select the noise reduction algorithm mode. Currently /// identical to ANDROID_NOISE_REDUCTION_MODE. -/// +/// /// Mode of operation for the noise reduction algorithm. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2075,7 +2088,8 @@ impl TryFrom for NoiseReductionMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2092,7 +2106,7 @@ impl Control for NoiseReductionMode {} /// Control to select the color correction aberration mode. Currently /// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. -/// +/// /// Mode of operation for the chromatic aberration correction algorithm. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2110,7 +2124,8 @@ impl TryFrom for ColorCorrectionAberrationMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2127,7 +2142,7 @@ impl Control for ColorCorrectionAberrationMode {} /// Control to report the current AE algorithm state. Currently identical to /// ANDROID_CONTROL_AE_STATE. -/// +/// /// Current state of the AE algorithm. 
#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2151,7 +2166,8 @@ impl TryFrom for AeState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2168,7 +2184,7 @@ impl Control for AeState {} /// Control to report the current AWB algorithm state. Currently identical /// to ANDROID_CONTROL_AWB_STATE. -/// +/// /// Current state of the AWB algorithm. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2187,7 +2203,8 @@ impl TryFrom for AwbState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2257,7 +2274,8 @@ impl TryFrom for LensShadingMapMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2289,7 +2307,8 @@ impl TryFrom for SceneFlicker { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2308,7 +2327,7 @@ impl Control for SceneFlicker {} /// it was exposed to when the final completed result was available to the /// framework. Always less than or equal to PipelineMaxDepth. Currently /// identical to ANDROID_REQUEST_PIPELINE_DEPTH. -/// +/// /// The typical value for this control is 3 as a frame is first exposed, /// captured and then processed in a single pass through the ISP. Any /// additional processing step performed after the ISP pass (in example face @@ -2442,7 +2461,8 @@ impl TryFrom for TestPatternMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2460,52 +2480,53 @@ impl Control for TestPatternMode {} pub fn make_dyn(id: ControlId, val: ControlValue) -> Result, ControlValueError> { match id { ControlId::AeEnable => Ok(Box::new(AeEnable::try_from(val)?)), - ControlId::AeLocked => Ok(Box::new(AeLocked::try_from(val)?)), - ControlId::AeMeteringMode => Ok(Box::new(AeMeteringMode::try_from(val)?)), - ControlId::AeConstraintMode => Ok(Box::new(AeConstraintMode::try_from(val)?)), - ControlId::AeExposureMode => Ok(Box::new(AeExposureMode::try_from(val)?)), - ControlId::ExposureValue => Ok(Box::new(ExposureValue::try_from(val)?)), - ControlId::ExposureTime => Ok(Box::new(ExposureTime::try_from(val)?)), - ControlId::AnalogueGain => Ok(Box::new(AnalogueGain::try_from(val)?)), - ControlId::Brightness => Ok(Box::new(Brightness::try_from(val)?)), - ControlId::Contrast => Ok(Box::new(Contrast::try_from(val)?)), - ControlId::Lux => Ok(Box::new(Lux::try_from(val)?)), - ControlId::AwbEnable => Ok(Box::new(AwbEnable::try_from(val)?)), - ControlId::AwbMode => Ok(Box::new(AwbMode::try_from(val)?)), - ControlId::AwbLocked => Ok(Box::new(AwbLocked::try_from(val)?)), - ControlId::ColourGains => Ok(Box::new(ColourGains::try_from(val)?)), - ControlId::ColourTemperature => Ok(Box::new(ColourTemperature::try_from(val)?)), - ControlId::Saturation => Ok(Box::new(Saturation::try_from(val)?)), - ControlId::SensorBlackLevels => Ok(Box::new(SensorBlackLevels::try_from(val)?)), - ControlId::Sharpness => Ok(Box::new(Sharpness::try_from(val)?)), - ControlId::FocusFoM => Ok(Box::new(FocusFoM::try_from(val)?)), - ControlId::ColourCorrectionMatrix => Ok(Box::new(ColourCorrectionMatrix::try_from(val)?)), - ControlId::ScalerCrop => Ok(Box::new(ScalerCrop::try_from(val)?)), - ControlId::DigitalGain => Ok(Box::new(DigitalGain::try_from(val)?)), - ControlId::FrameDuration => Ok(Box::new(FrameDuration::try_from(val)?)), - ControlId::FrameDurationLimits => Ok(Box::new(FrameDurationLimits::try_from(val)?)), - ControlId::SensorTemperature => Ok(Box::new(SensorTemperature::try_from(val)?)), - ControlId::SensorTimestamp => Ok(Box::new(SensorTimestamp::try_from(val)?)), - ControlId::AfMode => Ok(Box::new(AfMode::try_from(val)?)), - ControlId::AfRange => Ok(Box::new(AfRange::try_from(val)?)), - ControlId::AfSpeed => Ok(Box::new(AfSpeed::try_from(val)?)), - ControlId::AfMetering => Ok(Box::new(AfMetering::try_from(val)?)), - ControlId::AfWindows => Ok(Box::new(AfWindows::try_from(val)?)), - ControlId::AfTrigger => Ok(Box::new(AfTrigger::try_from(val)?)), - ControlId::AfPause => Ok(Box::new(AfPause::try_from(val)?)), - ControlId::LensPosition => Ok(Box::new(LensPosition::try_from(val)?)), - ControlId::AfState => Ok(Box::new(AfState::try_from(val)?)), - ControlId::AfPauseState => Ok(Box::new(AfPauseState::try_from(val)?)), - ControlId::AePrecaptureTrigger => Ok(Box::new(AePrecaptureTrigger::try_from(val)?)), - ControlId::NoiseReductionMode => Ok(Box::new(NoiseReductionMode::try_from(val)?)), - ControlId::ColorCorrectionAberrationMode => Ok(Box::new(ColorCorrectionAberrationMode::try_from(val)?)), - ControlId::AeState => Ok(Box::new(AeState::try_from(val)?)), - ControlId::AwbState => Ok(Box::new(AwbState::try_from(val)?)), - ControlId::SensorRollingShutterSkew => Ok(Box::new(SensorRollingShutterSkew::try_from(val)?)), - ControlId::LensShadingMapMode => Ok(Box::new(LensShadingMapMode::try_from(val)?)), - ControlId::SceneFlicker => Ok(Box::new(SceneFlicker::try_from(val)?)), - ControlId::PipelineDepth => 
Ok(Box::new(PipelineDepth::try_from(val)?)), - ControlId::MaxLatency => Ok(Box::new(MaxLatency::try_from(val)?)), - ControlId::TestPatternMode => Ok(Box::new(TestPatternMode::try_from(val)?)), +ControlId::AeLocked => Ok(Box::new(AeLocked::try_from(val)?)), +ControlId::AeMeteringMode => Ok(Box::new(AeMeteringMode::try_from(val)?)), +ControlId::AeConstraintMode => Ok(Box::new(AeConstraintMode::try_from(val)?)), +ControlId::AeExposureMode => Ok(Box::new(AeExposureMode::try_from(val)?)), +ControlId::ExposureValue => Ok(Box::new(ExposureValue::try_from(val)?)), +ControlId::ExposureTime => Ok(Box::new(ExposureTime::try_from(val)?)), +ControlId::AnalogueGain => Ok(Box::new(AnalogueGain::try_from(val)?)), +ControlId::Brightness => Ok(Box::new(Brightness::try_from(val)?)), +ControlId::Contrast => Ok(Box::new(Contrast::try_from(val)?)), +ControlId::Lux => Ok(Box::new(Lux::try_from(val)?)), +ControlId::AwbEnable => Ok(Box::new(AwbEnable::try_from(val)?)), +ControlId::AwbMode => Ok(Box::new(AwbMode::try_from(val)?)), +ControlId::AwbLocked => Ok(Box::new(AwbLocked::try_from(val)?)), +ControlId::ColourGains => Ok(Box::new(ColourGains::try_from(val)?)), +ControlId::ColourTemperature => Ok(Box::new(ColourTemperature::try_from(val)?)), +ControlId::Saturation => Ok(Box::new(Saturation::try_from(val)?)), +ControlId::SensorBlackLevels => Ok(Box::new(SensorBlackLevels::try_from(val)?)), +ControlId::Sharpness => Ok(Box::new(Sharpness::try_from(val)?)), +ControlId::FocusFoM => Ok(Box::new(FocusFoM::try_from(val)?)), +ControlId::ColourCorrectionMatrix => Ok(Box::new(ColourCorrectionMatrix::try_from(val)?)), +ControlId::ScalerCrop => Ok(Box::new(ScalerCrop::try_from(val)?)), +ControlId::DigitalGain => Ok(Box::new(DigitalGain::try_from(val)?)), +ControlId::FrameDuration => Ok(Box::new(FrameDuration::try_from(val)?)), +ControlId::FrameDurationLimits => Ok(Box::new(FrameDurationLimits::try_from(val)?)), +ControlId::SensorTemperature => Ok(Box::new(SensorTemperature::try_from(val)?)), +ControlId::SensorTimestamp => Ok(Box::new(SensorTimestamp::try_from(val)?)), +ControlId::AfMode => Ok(Box::new(AfMode::try_from(val)?)), +ControlId::AfRange => Ok(Box::new(AfRange::try_from(val)?)), +ControlId::AfSpeed => Ok(Box::new(AfSpeed::try_from(val)?)), +ControlId::AfMetering => Ok(Box::new(AfMetering::try_from(val)?)), +ControlId::AfWindows => Ok(Box::new(AfWindows::try_from(val)?)), +ControlId::AfTrigger => Ok(Box::new(AfTrigger::try_from(val)?)), +ControlId::AfPause => Ok(Box::new(AfPause::try_from(val)?)), +ControlId::LensPosition => Ok(Box::new(LensPosition::try_from(val)?)), +ControlId::AfState => Ok(Box::new(AfState::try_from(val)?)), +ControlId::AfPauseState => Ok(Box::new(AfPauseState::try_from(val)?)), +ControlId::AePrecaptureTrigger => Ok(Box::new(AePrecaptureTrigger::try_from(val)?)), +ControlId::NoiseReductionMode => Ok(Box::new(NoiseReductionMode::try_from(val)?)), +ControlId::ColorCorrectionAberrationMode => Ok(Box::new(ColorCorrectionAberrationMode::try_from(val)?)), +ControlId::AeState => Ok(Box::new(AeState::try_from(val)?)), +ControlId::AwbState => Ok(Box::new(AwbState::try_from(val)?)), +ControlId::SensorRollingShutterSkew => Ok(Box::new(SensorRollingShutterSkew::try_from(val)?)), +ControlId::LensShadingMapMode => Ok(Box::new(LensShadingMapMode::try_from(val)?)), +ControlId::SceneFlicker => Ok(Box::new(SceneFlicker::try_from(val)?)), +ControlId::PipelineDepth => Ok(Box::new(PipelineDepth::try_from(val)?)), +ControlId::MaxLatency => Ok(Box::new(MaxLatency::try_from(val)?)), +ControlId::TestPatternMode => 
Ok(Box::new(TestPatternMode::try_from(val)?)), + } } diff --git a/libcamera/src/generated/properties.rs b/libcamera/src/generated/properties.rs index bf8f16f..65bb10e 100644 --- a/libcamera/src/generated/properties.rs +++ b/libcamera/src/generated/properties.rs @@ -1,11 +1,11 @@ //! Generated by `cargo run --bin generate_rust properties` -use crate::control::{ControlEntry, DynControlEntry, Property}; +use std::ops::{Deref, DerefMut}; +use num_enum::{IntoPrimitive, TryFromPrimitive}; +use crate::control::{Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] use crate::geometry::{Rectangle, Size}; -use num_enum::{IntoPrimitive, TryFromPrimitive}; -use std::ops::{Deref, DerefMut}; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(u32)] @@ -16,16 +16,16 @@ pub enum PropertyId { /// between two reference systems, one relative to the camera module, and /// one defined on the external world scene to be captured when projected /// on the image sensor pixel array. - /// + /// /// A camera sensor has a 2-dimensional reference system 'Rc' defined by /// its pixel array read-out order. The origin is set to the first pixel /// being read out, the X-axis points along the column read-out direction /// towards the last columns, and the Y-axis along the row read-out /// direction towards the last row. - /// + /// /// A typical example for a sensor with a 2592x1944 pixel array matrix /// observed from the front is - /// + /// /// ```text /// 2591 X-axis 0 /// <------------------------+ 0 @@ -36,18 +36,18 @@ pub enum PropertyId { /// .......... ... ..........! 1943 /// V /// ``` - /// - /// + /// + /// /// The external world scene reference system 'Rs' is a 2-dimensional /// reference system on the focal plane of the camera module. The origin is /// placed on the top-left corner of the visible scene, the X-axis points /// towards the right, and the Y-axis points towards the bottom of the /// scene. The top, bottom, left and right directions are intentionally not /// defined and depend on the environment in which the camera is used. - /// + /// /// A typical example of a (very common) picture of a shark swimming from /// left to right, as seen from the camera, is - /// + /// /// ```text /// 0 X-axis /// 0 +-------------------------------------> @@ -63,9 +63,9 @@ pub enum PropertyId { /// V /// Y-axis /// ``` - /// + /// /// With the reference system 'Rs' placed on the camera focal plane. - /// + /// /// ```text /// ¸.·˙! /// ¸.·˙ ! @@ -77,15 +77,15 @@ pub enum PropertyId { /// ˙·.¸ ! /// ˙·.¸! /// ``` - /// + /// /// When projected on the sensor's pixel array, the image and the associated /// reference system 'Rs' are typically (but not always) inverted, due to /// the camera module's lens optical inversion effect. - /// + /// /// Assuming the above represented scene of the swimming shark, the lens /// inversion projects the scene and its reference system onto the sensor /// pixel array, seen from the front of the camera sensor, as follow - /// + /// /// ```text /// Y-axis /// ^ @@ -101,21 +101,21 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-axis /// ``` - /// + /// /// Note the shark being upside-down. - /// + /// /// The resulting projected reference system is named 'Rp'. 
- /// + /// /// The camera rotation property is then defined as the angular difference /// in the counter-clockwise direction between the camera reference system /// 'Rc' and the projected scene reference system 'Rp'. It is expressed in /// degrees as a number in the range [0, 360[. - /// + /// /// Examples - /// + /// /// 0 degrees camera rotation - /// - /// + /// + /// /// ```text /// Y-Rp /// ^ @@ -133,8 +133,8 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rc /// ``` - /// - /// + /// + /// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -152,9 +152,9 @@ pub enum PropertyId { /// V /// Y-Rp /// ``` - /// + /// /// 90 degrees camera rotation - /// + /// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -178,9 +178,9 @@ pub enum PropertyId { /// V /// X-Rc /// ``` - /// + /// /// 180 degrees camera rotation - /// + /// /// ```text /// 0 /// <------------------------------------+ 0 @@ -198,9 +198,9 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rp /// ``` - /// + /// /// 270 degrees camera rotation - /// + /// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -224,17 +224,17 @@ pub enum PropertyId { /// V /// X-Rc /// ``` - /// - /// + /// + /// /// Example one - Webcam - /// + /// /// A camera module installed on the user facing part of a laptop screen /// casing used for video calls. The captured images are meant to be /// displayed in landscape mode (width > height) on the laptop screen. - /// + /// /// The camera is typically mounted upside-down to compensate the lens /// optical inversion effect. - /// + /// /// ```text /// Y-Rp /// Y-Rc ^ @@ -251,11 +251,11 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rc /// ``` - /// + /// /// The two reference systems are aligned, the resulting camera rotation is /// 0 degrees, no rotation correction needs to be applied to the resulting /// image once captured to memory buffers to correctly display it to users. - /// + /// /// ```text /// +--------------------------------------+ /// ! ! @@ -269,12 +269,12 @@ pub enum PropertyId { /// ! ! /// +--------------------------------------+ /// ``` - /// + /// /// If the camera sensor is not mounted upside-down to compensate for the /// lens optical inversion, the two reference systems will not be aligned, /// with 'Rp' being rotated 180 degrees relatively to 'Rc'. - /// - /// + /// + /// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -292,9 +292,9 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rp /// ``` - /// + /// /// The image once captured to memory will then be rotated by 180 degrees - /// + /// /// ```text /// +--------------------------------------+ /// ! ! @@ -308,10 +308,10 @@ pub enum PropertyId { /// ! ! /// +--------------------------------------+ /// ``` - /// + /// /// A software rotation correction of 180 degrees should be applied to /// correctly display the image. - /// + /// /// ```text /// +--------------------------------------+ /// ! ! @@ -325,18 +325,18 @@ pub enum PropertyId { /// ! ! /// +--------------------------------------+ /// ``` - /// + /// /// Example two - Phone camera - /// + /// /// A camera installed on the back side of a mobile device facing away from /// the user. The captured images are meant to be displayed in portrait mode /// (height > width) to match the device screen orientation and the device /// usage orientation used when taking the picture. 
- /// + /// /// The camera sensor is typically mounted with its pixel array longer side /// aligned to the device longer side, upside-down mounted to compensate for /// the lens optical inversion effect. - /// + /// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -360,13 +360,13 @@ pub enum PropertyId { /// V /// X-Rc /// ``` - /// + /// /// The two reference systems are not aligned and the 'Rp' reference /// system is rotated by 90 degrees in the counter-clockwise direction /// relatively to the 'Rc' reference system. - /// + /// /// The image once captured to memory will be rotated. - /// + /// /// ```text /// +-------------------------------------+ /// | _ _ | @@ -380,11 +380,11 @@ pub enum PropertyId { /// | V | /// +-------------------------------------+ /// ``` - /// + /// /// A correction of 90 degrees in counter-clockwise direction has to be /// applied to correctly display the image in portrait mode on the device /// screen. - /// + /// /// ```text /// +--------------------+ /// | | @@ -409,45 +409,45 @@ pub enum PropertyId { /// camera is part of a larger unit and exposed as a black-box to the /// system. In such cases the model name of the smallest device that /// contains the camera sensor shall be used. - /// + /// /// The model name is not meant to be a camera name displayed to the /// end-user, but may be combined with other camera information to create a /// camera name. - /// + /// /// The model name is not guaranteed to be unique in the system nor is /// it guaranteed to be stable or have any other properties required to make /// it a good candidate to be used as a permanent identifier of a camera. - /// + /// /// The model name shall describe the camera in a human readable format and /// shall be encoded in ASCII. - /// + /// /// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. Model = 3, /// The pixel unit cell physical size, in nanometers. - /// + /// /// The UnitCellSize properties defines the horizontal and vertical sizes of /// a single pixel unit, including its active and non-active parts. In /// other words, it expresses the horizontal and vertical distance between /// the top-left corners of adjacent pixels. - /// + /// /// The property can be used to calculate the physical size of the sensor's /// pixel array area and for calibration purposes. UnitCellSize = 4, /// The camera sensor pixel array readable area vertical and horizontal /// sizes, in pixels. - /// + /// /// The PixelArraySize property defines the size in pixel units of the /// readable part of full pixel array matrix, including optical black /// pixels used for calibration, pixels which are not considered valid for /// capture and active pixels containing valid image data. - /// + /// /// The property describes the maximum size of the raw data captured by the /// camera, which might not correspond to the physical size of the sensor /// pixel array matrix, as some portions of the physical pixel array matrix /// are not accessible and cannot be transmitted out. - /// + /// /// For example, let's consider a pixel array matrix assembled as follows - /// + /// /// ```text /// +--------------------------------------------------+ /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| @@ -460,7 +460,7 @@ pub enum PropertyId { /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| /// ... ... ... ... ... /// ``` - /// + /// /// ```text /// ... ... ... ... ... 
/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| @@ -469,14 +469,14 @@ pub enum PropertyId { /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| /// +--------------------------------------------------+ /// ``` - /// + /// /// starting with two lines of non-readable pixels (x), followed by N lines /// of readable data (D) surrounded by two columns of non-readable pixels on /// each side, and ending with two more lines of non-readable pixels. Only /// the readable portion is transmitted to the receiving side, defining the /// sizes of the largest possible buffer of raw data that can be presented /// to applications. - /// + /// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -493,41 +493,41 @@ pub enum PropertyId { /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | /// +----------------------------------------------+ / /// ``` - /// + /// /// This defines a rectangle whose top-left corner is placed in position (0, /// 0) and whose vertical and horizontal sizes are defined by this property. /// All other rectangles that describe portions of the pixel array, such as /// the optical black pixels rectangles and active pixel areas, are defined /// relatively to this rectangle. - /// + /// /// All the coordinates are expressed relative to the default sensor readout /// direction, without any transformation (such as horizontal and vertical /// flipping) applied. When mapping them to the raw pixel buffer, /// applications shall take any configured transformation into account. - /// + /// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::Size) PixelArraySize = 5, /// The pixel array region(s) which contain optical black pixels /// considered valid for calibration purposes. - /// + /// /// This property describes the position and size of optical black pixel /// regions in the raw data buffer as stored in memory, which might differ /// from their actual physical location in the pixel array matrix. - /// + /// /// It is important to note, in fact, that camera sensors might /// automatically reorder or skip portions of their pixels array matrix when /// transmitting data to the receiver. For instance, a sensor may merge the /// top and bottom optical black rectangles into a single rectangle, /// transmitted at the beginning of the frame. - /// + /// /// The pixel array contains several areas with different purposes, /// interleaved by lines and columns which are said not to be valid for /// capturing purposes. Invalid lines and columns are defined as invalid as /// they could be positioned too close to the chip margins or to the optical /// black shielding placed on top of optical black pixels. 
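As a quick sanity check of what PixelArraySize implies for buffer sizing, a sketch follows. The 2-bytes-per-sample raw container is an assumed example for illustration; the property itself says nothing about pixel formats:

```rust
/// Upper bound on a raw buffer implied by PixelArraySize, assuming a
/// hypothetical unpacked format of `bytes_per_sample` bytes per pixel.
fn max_raw_buffer_bytes(width: u32, height: u32, bytes_per_sample: u32) -> u64 {
    width as u64 * height as u64 * bytes_per_sample as u64
}

fn main() {
    // The 2592x1944 read-out example above, with 10-bit samples in 2 bytes.
    assert_eq!(max_raw_buffer_bytes(2592, 1944, 2), 10_077_696);
}
```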
- /// + /// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -551,16 +551,16 @@ pub enum PropertyId { /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | /// +----------------------------------------------+ / /// ``` - /// + /// /// The readable pixel array matrix is composed by /// 2 invalid lines (I) /// 4 lines of valid optical black pixels (O) /// 2 invalid lines (I) /// n lines of valid pixel data (P) /// 2 invalid lines (I) - /// + /// /// And the position of the optical black pixel rectangles is defined by - /// + /// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, @@ -568,11 +568,11 @@ pub enum PropertyId { /// { x2, y3, 2, y4 - y3 + 1 }, /// }; /// ``` - /// + /// /// If the camera, when capturing the full pixel array matrix, automatically /// skips the invalid lines and columns, producing the following data /// buffer, when captured to memory - /// + /// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -591,12 +591,12 @@ pub enum PropertyId { /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | /// +----------------------------------------------+ / /// ``` - /// + /// /// then the invalid lines and columns should not be reported as part of the /// PixelArraySize property in first place. - /// + /// /// In this case, the position of the black pixel rectangles will be - /// + /// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { 0, 0, y1 + 1, PixelArraySize[0] }, @@ -604,7 +604,7 @@ pub enum PropertyId { /// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, /// }; /// ``` - /// + /// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::OpticalBlackRectangles) @@ -612,22 +612,22 @@ pub enum PropertyId { /// The PixelArrayActiveAreas property defines the (possibly multiple and /// overlapping) portions of the camera sensor readable pixel matrix /// which are considered valid for image acquisition purposes. - /// + /// /// This property describes an arbitrary number of overlapping rectangles, /// with each rectangle representing the maximum image size that the camera /// sensor can produce for a particular aspect ratio. They are defined /// relatively to the PixelArraySize rectangle. - /// + /// /// When multiple rectangles are reported, they shall be ordered from the /// tallest to the shortest. - /// + /// /// Example 1 /// A camera sensor which only produces images in the 4:3 image resolution /// will report a single PixelArrayActiveAreas rectangle, from which all /// other image formats are obtained by either cropping the field-of-view /// and/or applying pixel sub-sampling techniques such as pixel skipping or /// binning. - /// + /// /// ```text /// PixelArraySize.width /// /----------------/ @@ -640,18 +640,18 @@ pub enum PropertyId { /// y2 o +------------+ | | /// +----------------+ / /// ``` - /// + /// /// The property reports a single rectangle - /// + /// /// ```text /// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) /// ``` - /// + /// /// Example 2 /// A camera sensor which can produce images in different native /// resolutions will report several overlapping rectangles, one for each /// natively supported resolution. 
- /// + /// /// ```text /// PixelArraySize.width /// /------------------/ @@ -666,23 +666,23 @@ pub enum PropertyId { /// y4 o +------+ | | /// +----+------+----+ / /// ``` - /// + /// /// The property reports two rectangles - /// + /// /// ```text /// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), /// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) /// ``` - /// + /// /// The first rectangle describes the maximum field-of-view of all image /// formats in the 4:3 resolutions, while the second one describes the /// maximum field of view for all image formats in the 16:9 resolutions. - /// + /// /// Multiple rectangles shall only be reported when the sensor can't capture /// the pixels in the corner regions. If all the pixels in the (x1,y1) - /// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall /// contains the single rectangle (x1,y1) - (x4,y4). - /// + /// /// \todo Rename this property to ActiveAreas once we will have property /// ```text /// categories (i.e. Properties::PixelArray::ActiveAreas) @@ -691,16 +691,16 @@ pub enum PropertyId { /// reflects the minimum mandatory cropping applied in the camera sensor and /// the rest of the pipeline. Just as the ScalerCrop control, it defines a /// rectangle taken from the sensor's active pixel array. - /// + /// /// This property is valid only after the camera has been successfully /// configured and its value may change whenever a new configuration is /// applied. - /// + /// /// \todo Turn this property into a "maximum control value" for the /// ScalerCrop control once "dynamic" controls have been implemented. ScalerCropMaximum = 8, /// The relative sensitivity of the chosen sensor mode. - /// + /// /// Some sensors have readout modes with different sensitivities. For example, /// a binned camera mode might, with the same exposure and gains, produce /// twice the signal level of the full resolution readout. This would be @@ -733,7 +733,8 @@ impl TryFrom for Location { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -752,16 +753,16 @@ impl Property for Location {} /// between two reference systems, one relative to the camera module, and /// one defined on the external world scene to be captured when projected /// on the image sensor pixel array. -/// +/// /// A camera sensor has a 2-dimensional reference system 'Rc' defined by /// its pixel array read-out order. The origin is set to the first pixel /// being read out, the X-axis points along the column read-out direction /// towards the last columns, and the Y-axis along the row read-out /// direction towards the last row. -/// +/// /// A typical example for a sensor with a 2592x1944 pixel array matrix /// observed from the front is -/// +/// /// ```text /// 2591 X-axis 0 /// <------------------------+ 0 @@ -772,18 +773,18 @@ impl Property for Location {} /// .......... ... ..........! 1943 /// V /// ``` -/// -/// +/// +/// /// The external world scene reference system 'Rs' is a 2-dimensional /// reference system on the focal plane of the camera module. The origin is /// placed on the top-left corner of the visible scene, the X-axis points /// towards the right, and the Y-axis points towards the bottom of the /// scene. 
The top, bottom, left and right directions are intentionally not /// defined and depend on the environment in which the camera is used. -/// +/// /// A typical example of a (very common) picture of a shark swimming from /// left to right, as seen from the camera, is -/// +/// /// ```text /// 0 X-axis /// 0 +-------------------------------------> @@ -799,9 +800,9 @@ impl Property for Location {} /// V /// Y-axis /// ``` -/// +/// /// With the reference system 'Rs' placed on the camera focal plane. -/// +/// /// ```text /// ¸.·˙! /// ¸.·˙ ! @@ -813,15 +814,15 @@ impl Property for Location {} /// ˙·.¸ ! /// ˙·.¸! /// ``` -/// +/// /// When projected on the sensor's pixel array, the image and the associated /// reference system 'Rs' are typically (but not always) inverted, due to /// the camera module's lens optical inversion effect. -/// +/// /// Assuming the above represented scene of the swimming shark, the lens /// inversion projects the scene and its reference system onto the sensor /// pixel array, seen from the front of the camera sensor, as follow -/// +/// /// ```text /// Y-axis /// ^ @@ -837,21 +838,21 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-axis /// ``` -/// +/// /// Note the shark being upside-down. -/// +/// /// The resulting projected reference system is named 'Rp'. -/// +/// /// The camera rotation property is then defined as the angular difference /// in the counter-clockwise direction between the camera reference system /// 'Rc' and the projected scene reference system 'Rp'. It is expressed in /// degrees as a number in the range [0, 360[. -/// +/// /// Examples -/// +/// /// 0 degrees camera rotation -/// -/// +/// +/// /// ```text /// Y-Rp /// ^ @@ -869,8 +870,8 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rc /// ``` -/// -/// +/// +/// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -888,9 +889,9 @@ impl Property for Location {} /// V /// Y-Rp /// ``` -/// +/// /// 90 degrees camera rotation -/// +/// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -914,9 +915,9 @@ impl Property for Location {} /// V /// X-Rc /// ``` -/// +/// /// 180 degrees camera rotation -/// +/// /// ```text /// 0 /// <------------------------------------+ 0 @@ -934,9 +935,9 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rp /// ``` -/// +/// /// 270 degrees camera rotation -/// +/// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -960,17 +961,17 @@ impl Property for Location {} /// V /// X-Rc /// ``` -/// -/// +/// +/// /// Example one - Webcam -/// +/// /// A camera module installed on the user facing part of a laptop screen /// casing used for video calls. The captured images are meant to be /// displayed in landscape mode (width > height) on the laptop screen. -/// +/// /// The camera is typically mounted upside-down to compensate the lens /// optical inversion effect. -/// +/// /// ```text /// Y-Rp /// Y-Rc ^ @@ -987,11 +988,11 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rc /// ``` -/// +/// /// The two reference systems are aligned, the resulting camera rotation is /// 0 degrees, no rotation correction needs to be applied to the resulting /// image once captured to memory buffers to correctly display it to users. -/// +/// /// ```text /// +--------------------------------------+ /// ! ! @@ -1005,12 +1006,12 @@ impl Property for Location {} /// ! ! 
/// +--------------------------------------+ /// ``` -/// +/// /// If the camera sensor is not mounted upside-down to compensate for the /// lens optical inversion, the two reference systems will not be aligned, /// with 'Rp' being rotated 180 degrees relatively to 'Rc'. -/// -/// +/// +/// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -1028,9 +1029,9 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rp /// ``` -/// +/// /// The image once captured to memory will then be rotated by 180 degrees -/// +/// /// ```text /// +--------------------------------------+ /// ! ! @@ -1044,10 +1045,10 @@ impl Property for Location {} /// ! ! /// +--------------------------------------+ /// ``` -/// +/// /// A software rotation correction of 180 degrees should be applied to /// correctly display the image. -/// +/// /// ```text /// +--------------------------------------+ /// ! ! @@ -1061,18 +1062,18 @@ impl Property for Location {} /// ! ! /// +--------------------------------------+ /// ``` -/// +/// /// Example two - Phone camera -/// +/// /// A camera installed on the back side of a mobile device facing away from /// the user. The captured images are meant to be displayed in portrait mode /// (height > width) to match the device screen orientation and the device /// usage orientation used when taking the picture. -/// +/// /// The camera sensor is typically mounted with its pixel array longer side /// aligned to the device longer side, upside-down mounted to compensate for /// the lens optical inversion effect. -/// +/// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -1096,13 +1097,13 @@ impl Property for Location {} /// V /// X-Rc /// ``` -/// +/// /// The two reference systems are not aligned and the 'Rp' reference /// system is rotated by 90 degrees in the counter-clockwise direction /// relatively to the 'Rc' reference system. -/// +/// /// The image once captured to memory will be rotated. -/// +/// /// ```text /// +-------------------------------------+ /// | _ _ | @@ -1116,11 +1117,11 @@ impl Property for Location {} /// | V | /// +-------------------------------------+ /// ``` -/// +/// /// A correction of 90 degrees in counter-clockwise direction has to be /// applied to correctly display the image in portrait mode on the device /// screen. -/// +/// /// ```text /// +--------------------+ /// | | @@ -1181,18 +1182,18 @@ impl Property for Rotation {} /// camera is part of a larger unit and exposed as a black-box to the /// system. In such cases the model name of the smallest device that /// contains the camera sensor shall be used. -/// +/// /// The model name is not meant to be a camera name displayed to the /// end-user, but may be combined with other camera information to create a /// camera name. -/// +/// /// The model name is not guaranteed to be unique in the system nor is /// it guaranteed to be stable or have any other properties required to make /// it a good candidate to be used as a permanent identifier of a camera. -/// +/// /// The model name shall describe the camera in a human readable format and /// shall be encoded in ASCII. -/// +/// /// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. #[derive(Debug, Clone)] pub struct Model(pub String); @@ -1232,12 +1233,12 @@ impl ControlEntry for Model { impl Property for Model {} /// The pixel unit cell physical size, in nanometers. 
-/// +/// /// The UnitCellSize properties defines the horizontal and vertical sizes of /// a single pixel unit, including its active and non-active parts. In /// other words, it expresses the horizontal and vertical distance between /// the top-left corners of adjacent pixels. -/// +/// /// The property can be used to calculate the physical size of the sensor's /// pixel array area and for calibration purposes. #[derive(Debug, Clone)] @@ -1279,19 +1280,19 @@ impl Property for UnitCellSize {} /// The camera sensor pixel array readable area vertical and horizontal /// sizes, in pixels. -/// +/// /// The PixelArraySize property defines the size in pixel units of the /// readable part of full pixel array matrix, including optical black /// pixels used for calibration, pixels which are not considered valid for /// capture and active pixels containing valid image data. -/// +/// /// The property describes the maximum size of the raw data captured by the /// camera, which might not correspond to the physical size of the sensor /// pixel array matrix, as some portions of the physical pixel array matrix /// are not accessible and cannot be transmitted out. -/// +/// /// For example, let's consider a pixel array matrix assembled as follows -/// +/// /// ```text /// +--------------------------------------------------+ /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| @@ -1304,7 +1305,7 @@ impl Property for UnitCellSize {} /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| /// ... ... ... ... ... /// ``` -/// +/// /// ```text /// ... ... ... ... ... /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| @@ -1313,14 +1314,14 @@ impl Property for UnitCellSize {} /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| /// +--------------------------------------------------+ /// ``` -/// +/// /// starting with two lines of non-readable pixels (x), followed by N lines /// of readable data (D) surrounded by two columns of non-readable pixels on /// each side, and ending with two more lines of non-readable pixels. Only /// the readable portion is transmitted to the receiving side, defining the /// sizes of the largest possible buffer of raw data that can be presented /// to applications. -/// +/// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -1337,18 +1338,18 @@ impl Property for UnitCellSize {} /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | /// +----------------------------------------------+ / /// ``` -/// +/// /// This defines a rectangle whose top-left corner is placed in position (0, /// 0) and whose vertical and horizontal sizes are defined by this property. /// All other rectangles that describe portions of the pixel array, such as /// the optical black pixels rectangles and active pixel areas, are defined /// relatively to this rectangle. -/// +/// /// All the coordinates are expressed relative to the default sensor readout /// direction, without any transformation (such as horizontal and vertical /// flipping) applied. When mapping them to the raw pixel buffer, /// applications shall take any configured transformation into account. -/// +/// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::Size) @@ -1391,23 +1392,23 @@ impl Property for PixelArraySize {} /// The pixel array region(s) which contain optical black pixels /// considered valid for calibration purposes. 
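Before the optical black details, note how the two properties just described combine: multiplying the nanometre cell pitch of UnitCellSize by the pixel counts of PixelArraySize gives the physical array dimensions mentioned above. A sketch with illustrative values:

```rust
/// Physical array dimensions in millimetres from the unit cell size
/// (nanometres) and the pixel array size (pixels). Values are illustrative.
fn sensor_size_mm(cell_nm: (u64, u64), array_px: (u64, u64)) -> (f64, f64) {
    let to_mm = |nm: u64| nm as f64 / 1_000_000.0;
    (to_mm(cell_nm.0 * array_px.0), to_mm(cell_nm.1 * array_px.1))
}

fn main() {
    // Assumed 1120 nm unit cells on a 2592x1944 array.
    let (w, h) = sensor_size_mm((1120, 1120), (2592, 1944));
    assert!((w - 2.90304).abs() < 1e-9);
    assert!((h - 2.17728).abs() < 1e-9);
}
```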
-/// +/// /// This property describes the position and size of optical black pixel /// regions in the raw data buffer as stored in memory, which might differ /// from their actual physical location in the pixel array matrix. -/// +/// /// It is important to note, in fact, that camera sensors might /// automatically reorder or skip portions of their pixels array matrix when /// transmitting data to the receiver. For instance, a sensor may merge the /// top and bottom optical black rectangles into a single rectangle, /// transmitted at the beginning of the frame. -/// +/// /// The pixel array contains several areas with different purposes, /// interleaved by lines and columns which are said not to be valid for /// capturing purposes. Invalid lines and columns are defined as invalid as /// they could be positioned too close to the chip margins or to the optical /// black shielding placed on top of optical black pixels. -/// +/// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -1431,16 +1432,16 @@ impl Property for PixelArraySize {} /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | /// +----------------------------------------------+ / /// ``` -/// +/// /// The readable pixel array matrix is composed by /// 2 invalid lines (I) /// 4 lines of valid optical black pixels (O) /// 2 invalid lines (I) /// n lines of valid pixel data (P) /// 2 invalid lines (I) -/// +/// /// And the position of the optical black pixel rectangles is defined by -/// +/// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, @@ -1448,11 +1449,11 @@ impl Property for PixelArraySize {} /// { x2, y3, 2, y4 - y3 + 1 }, /// }; /// ``` -/// +/// /// If the camera, when capturing the full pixel array matrix, automatically /// skips the invalid lines and columns, producing the following data /// buffer, when captured to memory -/// +/// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -1471,12 +1472,12 @@ impl Property for PixelArraySize {} /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | /// +----------------------------------------------+ / /// ``` -/// +/// /// then the invalid lines and columns should not be reported as part of the /// PixelArraySize property in first place. -/// +/// /// In this case, the position of the black pixel rectangles will be -/// +/// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { 0, 0, y1 + 1, PixelArraySize[0] }, @@ -1484,7 +1485,7 @@ impl Property for PixelArraySize {} /// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, /// }; /// ``` -/// +/// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::OpticalBlackRectangles) @@ -1528,22 +1529,22 @@ impl Property for PixelArrayOpticalBlackRectangles {} /// The PixelArrayActiveAreas property defines the (possibly multiple and /// overlapping) portions of the camera sensor readable pixel matrix /// which are considered valid for image acquisition purposes. -/// +/// /// This property describes an arbitrary number of overlapping rectangles, /// with each rectangle representing the maximum image size that the camera /// sensor can produce for a particular aspect ratio. They are defined /// relatively to the PixelArraySize rectangle. -/// +/// /// When multiple rectangles are reported, they shall be ordered from the /// tallest to the shortest. 
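A sketch of the selection an application might perform over the reported rectangles, before the two examples that follow. The `(x, y, width, height)` tuples stand in for the crate's `Rectangle` geometry type and the helper is hypothetical:

```rust
/// Picks the reported active area whose aspect ratio is closest to the
/// requested one. Tuples are (x, y, width, height); illustrative only.
fn best_active_area(areas: &[(i32, i32, u32, u32)], target: f64) -> Option<(i32, i32, u32, u32)> {
    areas.iter().copied().min_by(|a, b| {
        let err = |r: &(i32, i32, u32, u32)| (r.2 as f64 / r.3 as f64 - target).abs();
        err(a).partial_cmp(&err(b)).unwrap()
    })
}

fn main() {
    // One 4:3 and one 16:9 native readout, as in Example 2 below.
    let areas = [(400, 0, 1600, 1200), (0, 200, 2560, 1440)];
    assert_eq!(best_active_area(&areas, 16.0 / 9.0), Some((0, 200, 2560, 1440)));
}
```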
-/// +/// /// Example 1 /// A camera sensor which only produces images in the 4:3 image resolution /// will report a single PixelArrayActiveAreas rectangle, from which all /// other image formats are obtained by either cropping the field-of-view /// and/or applying pixel sub-sampling techniques such as pixel skipping or /// binning. -/// +/// /// ```text /// PixelArraySize.width /// /----------------/ @@ -1556,18 +1557,18 @@ impl Property for PixelArrayOpticalBlackRectangles {} /// y2 o +------------+ | | /// +----------------+ / /// ``` -/// +/// /// The property reports a single rectangle -/// +/// /// ```text /// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) /// ``` -/// +/// /// Example 2 /// A camera sensor which can produce images in different native /// resolutions will report several overlapping rectangles, one for each /// natively supported resolution. -/// +/// /// ```text /// PixelArraySize.width /// /------------------/ @@ -1582,23 +1583,23 @@ impl Property for PixelArrayOpticalBlackRectangles {} /// y4 o +------+ | | /// +----+------+----+ / /// ``` -/// +/// /// The property reports two rectangles -/// +/// /// ```text /// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), /// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) /// ``` -/// +/// /// The first rectangle describes the maximum field-of-view of all image /// formats in the 4:3 resolutions, while the second one describes the /// maximum field of view for all image formats in the 16:9 resolutions. -/// +/// /// Multiple rectangles shall only be reported when the sensor can't capture /// the pixels in the corner regions. If all the pixels in the (x1,y1) - /// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall /// contains the single rectangle (x1,y1) - (x4,y4). -/// +/// /// \todo Rename this property to ActiveAreas once we will have property /// ```text /// categories (i.e. Properties::PixelArray::ActiveAreas) @@ -1643,11 +1644,11 @@ impl Property for PixelArrayActiveAreas {} /// reflects the minimum mandatory cropping applied in the camera sensor and /// the rest of the pipeline. Just as the ScalerCrop control, it defines a /// rectangle taken from the sensor's active pixel array. -/// +/// /// This property is valid only after the camera has been successfully /// configured and its value may change whenever a new configuration is /// applied. -/// +/// /// \todo Turn this property into a "maximum control value" for the /// ScalerCrop control once "dynamic" controls have been implemented. #[derive(Debug, Clone)] @@ -1688,7 +1689,7 @@ impl ControlEntry for ScalerCropMaximum { impl Property for ScalerCropMaximum {} /// The relative sensitivity of the chosen sensor mode. -/// +/// /// Some sensors have readout modes with different sensitivities. For example, /// a binned camera mode might, with the same exposure and gains, produce /// twice the signal level of the full resolution readout. This would be @@ -1757,7 +1758,8 @@ impl TryFrom for ColorFilterArrangement { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?) 
+            .map_err(|_| ControlValueError::UnknownVariant(value))
     }
 }
@@ -1775,14 +1777,15 @@ impl Property for ColorFilterArrangement {}
 pub fn make_dyn(id: PropertyId, val: ControlValue) -> Result<Box<dyn DynControlEntry>, ControlValueError> {
     match id {
         PropertyId::Location => Ok(Box::new(Location::try_from(val)?)),
-        PropertyId::Rotation => Ok(Box::new(Rotation::try_from(val)?)),
-        PropertyId::Model => Ok(Box::new(Model::try_from(val)?)),
-        PropertyId::UnitCellSize => Ok(Box::new(UnitCellSize::try_from(val)?)),
-        PropertyId::PixelArraySize => Ok(Box::new(PixelArraySize::try_from(val)?)),
-        PropertyId::PixelArrayOpticalBlackRectangles => Ok(Box::new(PixelArrayOpticalBlackRectangles::try_from(val)?)),
-        PropertyId::PixelArrayActiveAreas => Ok(Box::new(PixelArrayActiveAreas::try_from(val)?)),
-        PropertyId::ScalerCropMaximum => Ok(Box::new(ScalerCropMaximum::try_from(val)?)),
-        PropertyId::SensorSensitivity => Ok(Box::new(SensorSensitivity::try_from(val)?)),
-        PropertyId::ColorFilterArrangement => Ok(Box::new(ColorFilterArrangement::try_from(val)?)),
+        PropertyId::Rotation => Ok(Box::new(Rotation::try_from(val)?)),
+        PropertyId::Model => Ok(Box::new(Model::try_from(val)?)),
+        PropertyId::UnitCellSize => Ok(Box::new(UnitCellSize::try_from(val)?)),
+        PropertyId::PixelArraySize => Ok(Box::new(PixelArraySize::try_from(val)?)),
+        PropertyId::PixelArrayOpticalBlackRectangles =>
+            Ok(Box::new(PixelArrayOpticalBlackRectangles::try_from(val)?)),
+        PropertyId::PixelArrayActiveAreas => Ok(Box::new(PixelArrayActiveAreas::try_from(val)?)),
+        PropertyId::ScalerCropMaximum => Ok(Box::new(ScalerCropMaximum::try_from(val)?)),
+        PropertyId::SensorSensitivity => Ok(Box::new(SensorSensitivity::try_from(val)?)),
+        PropertyId::ColorFilterArrangement => Ok(Box::new(ColorFilterArrangement::try_from(val)?)),
     }
 }
diff --git a/regenerate.sh b/regenerate.sh
index bbb7bb1..51140a0 100755
--- a/regenerate.sh
+++ b/regenerate.sh
@@ -5,4 +5,4 @@ cargo run --bin generate_c > libcamera-sys/c_api/controls_generated.h
 # This could be automated with a procedural macro, but it makes code hard to read and explore.
 cargo run --bin generate_rust controls > libcamera/src/generated/controls.rs
 cargo run --bin generate_rust properties > libcamera/src/generated/properties.rs
-cargo fmt
+cargo +nightly fmt --all
diff --git a/rustfmt.toml b/rustfmt.toml
index c79d8a7..bd85cba 100644
--- a/rustfmt.toml
+++ b/rustfmt.toml
@@ -4,4 +4,6 @@ comment_width = 120
 wrap_comments = true
 format_code_in_doc_comments = true
 group_imports = "StdExternalCrate"
-imports_granularity = "Crate"
\ No newline at end of file
+imports_granularity = "Crate"
+error_on_line_overflow = true
+error_on_unformatted = true

From 88d7c8cbe9be9ba1e2029093c2a11ed2b260f0b3 Mon Sep 17 00:00:00 2001
From: chemicstry
Date: Thu, 9 Nov 2023 05:24:23 -0500
Subject: [PATCH 2/2] Fix generated code formatting

---
 libcamera/src/generated/controls.rs   | 392 ++++++++++++------------
 libcamera/src/generated/properties.rs | 413 +++++++++++++-------------
 libcamera/src/lib.rs                  |   1 -
 3 files changed, 398 insertions(+), 408 deletions(-)

diff --git a/libcamera/src/generated/controls.rs b/libcamera/src/generated/controls.rs
index 6c5c668..52137a7 100644
--- a/libcamera/src/generated/controls.rs
+++ b/libcamera/src/generated/controls.rs
@@ -1,25 +1,29 @@
 //! 
Generated by `cargo run --bin generate_rust controls` use std::ops::{Deref, DerefMut}; + use num_enum::{IntoPrimitive, TryFromPrimitive}; -use crate::control::{Control, ControlEntry, DynControlEntry}; -use crate::control_value::{ControlValue, ControlValueError}; + #[allow(unused_imports)] use crate::geometry::{Rectangle, Size}; +use crate::{ + control::{Control, ControlEntry, DynControlEntry}, + control_value::{ControlValue, ControlValueError}, +}; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(u32)] pub enum ControlId { /// Enable or disable the AE. - /// + /// /// \sa ExposureTime AnalogueGain AeEnable = 1, /// Report the lock status of a running AE algorithm. - /// + /// /// If the AE algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AE algorithm is not /// running the control shall not be present in the metadata control list. - /// + /// /// \sa AeEnable AeLocked = 2, /// Specify a metering mode for the AE algorithm to use. The metering @@ -39,22 +43,22 @@ pub enum ControlId { AeExposureMode = 5, /// Specify an Exposure Value (EV) parameter. The EV parameter will only be /// applied if the AE algorithm is currently enabled. - /// + /// /// By convention EV adjusts the exposure as log2. For example /// EV = [-2, -1, 0.5, 0, 0.5, 1, 2] results in an exposure adjustment /// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. - /// + /// /// \sa AeEnable ExposureValue = 6, /// Exposure time (shutter speed) for the frame applied in the sensor /// device. This value is specified in micro-seconds. - /// + /// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. - /// + /// /// \sa AnalogueGain AeEnable - /// + /// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -64,13 +68,13 @@ pub enum ControlId { /// Analogue gain value applied in the sensor device. /// The value of the control specifies the gain multiplier applied to all /// colour channels. This value cannot be lower than 1.0. - /// + /// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. - /// + /// /// \sa ExposureTime AeEnable - /// + /// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -88,27 +92,28 @@ pub enum ControlId { /// control can only be returned in metadata. Lux = 11, /// Enable or disable the AWB. - /// + /// /// \sa ColourGains AwbEnable = 12, /// Specify the range of illuminants to use for the AWB algorithm. The modes /// supported are platform specific, and not all modes may be supported. AwbMode = 13, /// Report the lock status of a running AWB algorithm. - /// + /// /// If the AWB algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AWB algorithm is not /// running the control shall not be present in the metadata control list. - /// + /// /// \sa AwbEnable AwbLocked = 14, /// Pair of gain values for the Red and Blue colour channels, in that /// order. 
ColourGains can only be applied in a Request when the AWB is /// disabled. - /// + /// /// \sa AwbEnable ColourGains = 15, - /// Report the current estimate of the colour temperature, in kelvin, for this frame. The ColourTemperature control can only be returned in metadata. + /// Report the current estimate of the colour temperature, in kelvin, for this frame. The ColourTemperature control + /// can only be returned in metadata. ColourTemperature = 16, /// Specify a fixed saturation parameter. Normal saturation is given by /// the value 1.0; larger values produce more saturated colours; 0.0 @@ -147,19 +152,19 @@ pub enum ControlId { /// relative to the PixelArrayActiveAreas that is being used. The units /// remain native sensor pixels, even if the sensor is being used in /// a binning or skipping mode. - /// + /// /// This control is only present when the pipeline supports scaling. Its /// maximum valid value is given by the properties::ScalerCropMaximum /// property, and the two can be used to implement digital zoom. ScalerCrop = 22, /// Digital gain value applied during the processing steps applied /// to the image as captured from the sensor. - /// + /// /// The global digital gain factor is applied to all the colour channels /// of the RAW image. Different pipeline models are free to /// specify how the global gain factor applies to each separate /// channel. - /// + /// /// If an imaging pipeline applies digital gain in distinct /// processing steps, this value indicates their total sum. /// Pipelines are free to decide how to adjust each processing @@ -172,7 +177,7 @@ pub enum ControlId { FrameDuration = 24, /// The minimum and maximum (in that order) frame duration, /// expressed in microseconds. - /// + /// /// When provided by applications, the control specifies the sensor frame /// duration interval the pipeline has to use. This limits the largest /// exposure time the sensor can use. For example, if a maximum frame @@ -181,7 +186,7 @@ pub enum ControlId { /// A fixed frame duration is achieved by setting the minimum and maximum /// values to be the same. Setting both values to 0 reverts to using the /// IPA provided defaults. - /// + /// /// The maximum frame duration provides the absolute limit to the shutter /// speed computed by the AE algorithm and it overrides any exposure mode /// setting specified with controls::AeExposureMode. Similarly, when a @@ -190,38 +195,38 @@ pub enum ControlId { /// metadata, the control expresses the minimum and maximum frame /// durations used after being clipped to the sensor provided frame /// duration limits. - /// + /// /// \sa AeExposureMode /// \sa ExposureTime - /// + /// /// \todo Define how to calculate the capture frame rate by /// defining controls to report additional delays introduced by /// the capture pipeline or post-processing stages (ie JPEG /// conversion, frame scaling). - /// + /// /// \todo Provide an explicit definition of default control values, for /// this and all other controls. FrameDurationLimits = 25, /// Temperature measure from the camera sensor in Celsius. This is typically /// obtained by a thermal sensor present on-die or in the camera module. The /// range of reported temperatures is device dependent. - /// + /// /// The SensorTemperature control will only be returned in metadata if a /// themal sensor is present. SensorTemperature = 26, /// The time when the first row of the image sensor active array is exposed. 
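The min == max rule for FrameDurationLimits described above makes a fixed frame rate a one-line computation; a brief sketch, with the helper name being illustrative:

```rust
/// Converts a target frame rate to the microsecond pair expected by
/// FrameDurationLimits. Equal values lock the frame duration.
fn fixed_frame_duration_us(fps: f64) -> [i64; 2] {
    let us = (1_000_000.0 / fps).round() as i64;
    [us, us]
}

fn main() {
    // 30 fps -> a fixed 33333 us frame duration (min == max).
    assert_eq!(fixed_frame_duration_us(30.0), [33333, 33333]);
    // Per the description above, [0, 0] would revert to the IPA defaults.
}
```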
- /// + /// /// The timestamp, expressed in nanoseconds, represents a monotonically /// increasing counter since the system boot time, as defined by the /// Linux-specific CLOCK_BOOTTIME clock id. - /// + /// /// The SensorTimestamp control can only be returned in metadata. - /// + /// /// \todo Define how the sensor timestamp has to be used in the reprocessing /// use case. SensorTimestamp = 27, /// Control to set the mode of the AF (autofocus) algorithm. - /// + /// /// An implementation may choose not to implement all the modes. AfMode = 28, /// Control to set the range of focus distances that is scanned. An @@ -239,19 +244,19 @@ pub enum ControlId { /// Sets the focus windows used by the AF algorithm when AfMetering is set /// to AfMeteringWindows. The units used are pixels within the rectangle /// returned by the ScalerCropMaximum property. - /// + /// /// In order to be activated, a rectangle must be programmed with non-zero /// width and height. Internally, these rectangles are intersected with the /// ScalerCropMaximum rectangle. If the window becomes empty after this /// operation, then the window is ignored. If all the windows end up being /// ignored, then the behaviour is platform dependent. - /// + /// /// On platforms that support the ScalerCrop control (for implementing /// digital zoom, for example), no automatic recalculation or adjustment of /// AF windows is performed internally if the ScalerCrop is changed. If any /// window lies outside the output image after the scaler crop has been /// applied, it is up to the application to recalculate them. - /// + /// /// The details of how the windows are used are platform dependent. We note /// that when there is more than one AF window, a typical implementation /// might find the optimal focus position for each one and finally select @@ -260,7 +265,7 @@ pub enum ControlId { AfWindows = 32, /// This control starts an autofocus scan when AfMode is set to AfModeAuto, /// and can also be used to terminate a scan early. - /// + /// /// It is ignored if AfMode is set to AfModeManual or AfModeContinuous. AfTrigger = 33, /// This control has no effect except when in continuous autofocus mode @@ -270,28 +275,28 @@ pub enum ControlId { AfPause = 34, /// Acts as a control to instruct the lens to move to a particular position /// and also reports back the position of the lens for each frame. - /// + /// /// The LensPosition control is ignored unless the AfMode is set to /// AfModeManual, though the value is reported back unconditionally in all /// modes. - /// + /// /// The units are a reciprocal distance scale like dioptres but normalised /// for the hyperfocal distance. That is, for a lens with hyperfocal /// distance H, and setting it to a focal distance D, the lens position LP, /// which is generally a non-integer, is given by - /// + /// /// \f$LP = \frac{H}{D}\f$ - /// + /// /// For example: - /// + /// /// 0 moves the lens to infinity. /// 0.5 moves the lens to twice the hyperfocal distance. /// 1 moves the lens to the hyperfocal position. /// And larger values will focus the lens ever closer. - /// + /// /// \todo Define a property to report the Hyperforcal distance of calibrated /// lenses. - /// + /// /// \todo Define a property to report the maximum and minimum positions of /// this lens. The minimum value will often be zero (meaning infinity). LensPosition = 35, @@ -299,49 +304,49 @@ pub enum ControlId { /// reported AfMode value and (in continuous AF mode) the AfPauseState /// value. 
The possible state changes are described below, though we note /// the following state transitions that occur when the AfMode is changed. - /// + /// /// If the AfMode is set to AfModeManual, then the AfState will always /// report AfStateIdle (even if the lens is subsequently moved). Changing to /// the AfModeManual state does not initiate any lens movement. - /// + /// /// If the AfMode is set to AfModeAuto then the AfState will report /// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent together /// then AfState will omit AfStateIdle and move straight to AfStateScanning /// (and start a scan). - /// + /// /// If the AfMode is set to AfModeContinuous then the AfState will initially /// report AfStateScanning. AfState = 36, /// Only applicable in continuous (AfModeContinuous) mode, this reports /// whether the algorithm is currently running, paused or pausing (that is, /// will pause as soon as any in-progress scan completes). - /// + /// /// Any change to AfMode will cause AfPauseStateRunning to be reported. AfPauseState = 37, /// Control for AE metering trigger. Currently identical to /// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. - /// + /// /// Whether the camera device will trigger a precapture metering sequence /// when it processes this request. AePrecaptureTrigger = 38, /// Control to select the noise reduction algorithm mode. Currently /// identical to ANDROID_NOISE_REDUCTION_MODE. - /// + /// /// Mode of operation for the noise reduction algorithm. NoiseReductionMode = 39, /// Control to select the color correction aberration mode. Currently /// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. - /// + /// /// Mode of operation for the chromatic aberration correction algorithm. ColorCorrectionAberrationMode = 40, /// Control to report the current AE algorithm state. Currently identical to /// ANDROID_CONTROL_AE_STATE. - /// + /// /// Current state of the AE algorithm. AeState = 41, /// Control to report the current AWB algorithm state. Currently identical /// to ANDROID_CONTROL_AWB_STATE. - /// + /// /// Current state of the AWB algorithm. AwbState = 42, /// Control to report the time between the start of exposure of the first @@ -358,7 +363,7 @@ pub enum ControlId { /// it was exposed to when the final completed result was available to the /// framework. Always less than or equal to PipelineMaxDepth. Currently /// identical to ANDROID_REQUEST_PIPELINE_DEPTH. - /// + /// /// The typical value for this control is 3 as a frame is first exposed, /// captured and then processed in a single pass through the ISP. Any /// additional processing step performed after the ISP pass (in example face @@ -377,7 +382,7 @@ pub enum ControlId { } /// Enable or disable the AE. -/// +/// /// \sa ExposureTime AnalogueGain #[derive(Debug, Clone)] pub struct AeEnable(pub bool); @@ -417,11 +422,11 @@ impl ControlEntry for AeEnable { impl Control for AeEnable {} /// Report the lock status of a running AE algorithm. -/// +/// /// If the AE algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AE algorithm is not /// running the control shall not be present in the metadata control list. -/// +/// /// \sa AeEnable #[derive(Debug, Clone)] pub struct AeLocked(pub bool); @@ -481,8 +486,7 @@ impl TryFrom for AeMeteringMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -504,11 +508,15 @@ impl Control for AeMeteringMode {} #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] pub enum AeConstraintMode { - /// Default constraint mode. This mode aims to balance the exposure of different parts of the image so as to reach a reasonable average level. However, highlights in the image may appear over-exposed and lowlights may appear under-exposed. + /// Default constraint mode. This mode aims to balance the exposure of different parts of the image so as to reach + /// a reasonable average level. However, highlights in the image may appear over-exposed and lowlights may appear + /// under-exposed. ConstraintNormal = 0, - /// Highlight constraint mode. This mode adjusts the exposure levels in order to try and avoid over-exposing the brightest parts (highlights) of an image. Other non-highlight parts of the image may appear under-exposed. + /// Highlight constraint mode. This mode adjusts the exposure levels in order to try and avoid over-exposing the + /// brightest parts (highlights) of an image. Other non-highlight parts of the image may appear under-exposed. ConstraintHighlight = 1, - /// Shadows constraint mode. This mode adjusts the exposure levels in order to try and avoid under-exposing the dark parts (shadows) of an image. Other normally exposed parts of the image may appear over-exposed. + /// Shadows constraint mode. This mode adjusts the exposure levels in order to try and avoid under-exposing the + /// dark parts (shadows) of an image. Other normally exposed parts of the image may appear over-exposed. ConstraintShadows = 2, /// Custom constraint mode. ConstraintCustom = 3, @@ -518,8 +526,7 @@ impl TryFrom for AeConstraintMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -555,8 +562,7 @@ impl TryFrom for AeExposureMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -573,11 +579,11 @@ impl Control for AeExposureMode {} /// Specify an Exposure Value (EV) parameter. The EV parameter will only be /// applied if the AE algorithm is currently enabled. -/// +/// /// By convention EV adjusts the exposure as log2. For example /// EV = [-2, -1, 0.5, 0, 0.5, 1, 2] results in an exposure adjustment /// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. -/// +/// /// \sa AeEnable #[derive(Debug, Clone)] pub struct ExposureValue(pub f32); @@ -618,13 +624,13 @@ impl Control for ExposureValue {} /// Exposure time (shutter speed) for the frame applied in the sensor /// device. This value is specified in micro-seconds. -/// +/// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. -/// +/// /// \sa AnalogueGain AeEnable -/// +/// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. 
Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -670,13 +676,13 @@ impl Control for ExposureTime {} /// Analogue gain value applied in the sensor device. /// The value of the control specifies the gain multiplier applied to all /// colour channels. This value cannot be lower than 1.0. -/// +/// /// Setting this value means that it is now fixed and the AE algorithm may /// not change it. Setting it back to zero returns it to the control of the /// AE algorithm. -/// +/// /// \sa ExposureTime AeEnable -/// +/// /// \todo Document the interactions between AeEnable and setting a fixed /// value for this control. Consider interactions with other AE features, /// such as aperture and aperture/shutter priority mode, and decide if @@ -838,7 +844,7 @@ impl ControlEntry for Lux { impl Control for Lux {} /// Enable or disable the AWB. -/// +/// /// \sa ColourGains #[derive(Debug, Clone)] pub struct AwbEnable(pub bool); @@ -904,8 +910,7 @@ impl TryFrom for AwbMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -921,11 +926,11 @@ impl ControlEntry for AwbMode { impl Control for AwbMode {} /// Report the lock status of a running AWB algorithm. -/// +/// /// If the AWB algorithm is locked the value shall be set to true, if it's /// converging it shall be set to false. If the AWB algorithm is not /// running the control shall not be present in the metadata control list. -/// +/// /// \sa AwbEnable #[derive(Debug, Clone)] pub struct AwbLocked(pub bool); @@ -967,7 +972,7 @@ impl Control for AwbLocked {} /// Pair of gain values for the Red and Blue colour channels, in that /// order. ColourGains can only be applied in a Request when the AWB is /// disabled. -/// +/// /// \sa AwbEnable #[derive(Debug, Clone)] pub struct ColourGains(pub [f32; 2]); @@ -1006,7 +1011,8 @@ impl ControlEntry for ColourGains { impl Control for ColourGains {} -/// Report the current estimate of the colour temperature, in kelvin, for this frame. The ColourTemperature control can only be returned in metadata. +/// Report the current estimate of the colour temperature, in kelvin, for this frame. The ColourTemperature control can +/// only be returned in metadata. #[derive(Debug, Clone)] pub struct ColourTemperature(pub i32); @@ -1261,7 +1267,7 @@ impl Control for ColourCorrectionMatrix {} /// relative to the PixelArrayActiveAreas that is being used. The units /// remain native sensor pixels, even if the sensor is being used in /// a binning or skipping mode. -/// +/// /// This control is only present when the pipeline supports scaling. Its /// maximum valid value is given by the properties::ScalerCropMaximum /// property, and the two can be used to implement digital zoom. @@ -1304,12 +1310,12 @@ impl Control for ScalerCrop {} /// Digital gain value applied during the processing steps applied /// to the image as captured from the sensor. -/// +/// /// The global digital gain factor is applied to all the colour channels /// of the RAW image. Different pipeline models are free to /// specify how the global gain factor applies to each separate /// channel. -/// +/// /// If an imaging pipeline applies digital gain in distinct /// processing steps, this value indicates their total sum. 
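For reference, the log2 ExposureValue convention quoted earlier in this file maps to a linear multiplier on the exposure, alongside whatever digital gain the pipeline applies. A minimal sketch of that arithmetic:

```rust
/// Exposure multiplier implied by an EV offset, per the log2 convention
/// (EV = 1 doubles exposure, EV = -2 quarters it).
fn ev_to_multiplier(ev: f32) -> f32 {
    2f32.powf(ev)
}

fn main() {
    assert!((ev_to_multiplier(1.0) - 2.0).abs() < 1e-6);
    assert!((ev_to_multiplier(-2.0) - 0.25).abs() < 1e-6);
    assert!((ev_to_multiplier(0.5) - 2f32.sqrt()).abs() < 1e-6);
}
```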
/// Pipelines are free to decide how to adjust each processing @@ -1394,7 +1400,7 @@ impl Control for FrameDuration {} /// The minimum and maximum (in that order) frame duration, /// expressed in microseconds. -/// +/// /// When provided by applications, the control specifies the sensor frame /// duration interval the pipeline has to use. This limits the largest /// exposure time the sensor can use. For example, if a maximum frame @@ -1403,7 +1409,7 @@ impl Control for FrameDuration {} /// A fixed frame duration is achieved by setting the minimum and maximum /// values to be the same. Setting both values to 0 reverts to using the /// IPA provided defaults. -/// +/// /// The maximum frame duration provides the absolute limit to the shutter /// speed computed by the AE algorithm and it overrides any exposure mode /// setting specified with controls::AeExposureMode. Similarly, when a @@ -1412,15 +1418,15 @@ impl Control for FrameDuration {} /// metadata, the control expresses the minimum and maximum frame /// durations used after being clipped to the sensor provided frame /// duration limits. -/// +/// /// \sa AeExposureMode /// \sa ExposureTime -/// +/// /// \todo Define how to calculate the capture frame rate by /// defining controls to report additional delays introduced by /// the capture pipeline or post-processing stages (ie JPEG /// conversion, frame scaling). -/// +/// /// \todo Provide an explicit definition of default control values, for /// this and all other controls. #[derive(Debug, Clone)] @@ -1463,7 +1469,7 @@ impl Control for FrameDurationLimits {} /// Temperature measure from the camera sensor in Celsius. This is typically /// obtained by a thermal sensor present on-die or in the camera module. The /// range of reported temperatures is device dependent. -/// +/// /// The SensorTemperature control will only be returned in metadata if a /// themal sensor is present. #[derive(Debug, Clone)] @@ -1504,13 +1510,13 @@ impl ControlEntry for SensorTemperature { impl Control for SensorTemperature {} /// The time when the first row of the image sensor active array is exposed. -/// +/// /// The timestamp, expressed in nanoseconds, represents a monotonically /// increasing counter since the system boot time, as defined by the /// Linux-specific CLOCK_BOOTTIME clock id. -/// +/// /// The SensorTimestamp control can only be returned in metadata. -/// +/// /// \todo Define how the sensor timestamp has to be used in the reprocessing /// use case. #[derive(Debug, Clone)] @@ -1551,7 +1557,7 @@ impl ControlEntry for SensorTimestamp { impl Control for SensorTimestamp {} /// Control to set the mode of the AF (autofocus) algorithm. -/// +/// /// An implementation may choose not to implement all the modes. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -1560,18 +1566,18 @@ pub enum AfMode { /// perform any action nor move the lens of its own accord, but an /// application can specify the desired lens position using the /// LensPosition control. - /// + /// /// In this mode the AfState will always report AfStateIdle. Manual = 0, /// The AF algorithm is in auto mode. This means that the algorithm /// will never move the lens or change state unless the AfTrigger /// control is used. The AfTrigger control can be used to initiate a /// focus scan, the results of which will be reported by AfState. 
- /// + /// /// If the autofocus algorithm is moved from AfModeAuto to another /// mode while a scan is in progress, the scan is cancelled /// immediately, without waiting for the scan to finish. - /// + /// /// When first entering this mode the AfState will report /// AfStateIdle. When a trigger control is sent, AfState will /// report AfStateScanning for a period before spontaneously @@ -1586,11 +1592,11 @@ pub enum AfMode { /// intervention. The AfState still reports whether the algorithm is /// currently scanning or not, though the application has no ability to /// initiate or cancel scans, nor to move the lens for itself. - /// + /// /// However, applications can pause the AF algorithm from continuously /// scanning by using the AfPause control. This allows video or still /// images to be captured whilst guaranteeing that the focus is fixed. - /// + /// /// When set to AfModeContinuous, the system will immediately initiate a /// scan so AfState will report AfStateScanning, and will settle on one /// of AfStateFocused or AfStateFailed, depending on the scan result. @@ -1601,8 +1607,7 @@ impl TryFrom for AfMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1639,8 +1644,7 @@ impl TryFrom for AfRange { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1673,8 +1677,7 @@ impl TryFrom for AfSpeed { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1696,7 +1699,8 @@ impl Control for AfSpeed {} pub enum AfMetering { /// The AF algorithm should decide for itself where it will measure focus. Auto = 0, - /// The AF algorithm should use the rectangles defined by the AfWindows control to measure focus. If no windows are specified the behaviour is platform dependent. + /// The AF algorithm should use the rectangles defined by the AfWindows control to measure focus. If no windows are + /// specified the behaviour is platform dependent. Windows = 1, } @@ -1704,8 +1708,7 @@ impl TryFrom for AfMetering { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1723,19 +1726,19 @@ impl Control for AfMetering {} /// Sets the focus windows used by the AF algorithm when AfMetering is set /// to AfMeteringWindows. The units used are pixels within the rectangle /// returned by the ScalerCropMaximum property. -/// +/// /// In order to be activated, a rectangle must be programmed with non-zero /// width and height. Internally, these rectangles are intersected with the /// ScalerCropMaximum rectangle. If the window becomes empty after this /// operation, then the window is ignored. 
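The activation rule just stated (non-zero size, intersect with ScalerCropMaximum, drop empty results) can be sketched directly. The tuple layout mirrors the crate's `Rectangle` geometry type, but the function itself is hypothetical:

```rust
/// Clips an AF window against ScalerCropMaximum, returning None when the
/// intersection is empty and the window would therefore be ignored.
fn clip_af_window(win: (i32, i32, u32, u32), crop: (i32, i32, u32, u32)) -> Option<(i32, i32, u32, u32)> {
    let x1 = win.0.max(crop.0);
    let y1 = win.1.max(crop.1);
    let x2 = (win.0 + win.2 as i32).min(crop.0 + crop.2 as i32);
    let y2 = (win.1 + win.3 as i32).min(crop.1 + crop.3 as i32);
    (x2 > x1 && y2 > y1).then(|| (x1, y1, (x2 - x1) as u32, (y2 - y1) as u32))
}

fn main() {
    let crop = (0, 0, 1920, 1080);
    // Partially outside: reduced to the overlapping region.
    assert_eq!(clip_af_window((1800, 900, 400, 400), crop), Some((1800, 900, 120, 180)));
    // Fully outside: empty intersection, so the window is ignored.
    assert_eq!(clip_af_window((2000, 1200, 100, 100), crop), None);
}
```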
If all the windows end up being /// ignored, then the behaviour is platform dependent. -/// +/// /// On platforms that support the ScalerCrop control (for implementing /// digital zoom, for example), no automatic recalculation or adjustment of /// AF windows is performed internally if the ScalerCrop is changed. If any /// window lies outside the output image after the scaler crop has been /// applied, it is up to the application to recalculate them. -/// +/// /// The details of how the windows are used are platform dependent. We note /// that when there is more than one AF window, a typical implementation /// might find the optimal focus position for each one and finally select @@ -1780,7 +1783,7 @@ impl Control for AfWindows {} /// This control starts an autofocus scan when AfMode is set to AfModeAuto, /// and can also be used to terminate a scan early. -/// +/// /// It is ignored if AfMode is set to AfModeManual or AfModeContinuous. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -1795,8 +1798,7 @@ impl TryFrom for AfTrigger { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1827,7 +1829,7 @@ pub enum AfPause { /// This is similar to AfPauseImmediate, and if the AfState is currently /// reporting AfStateFocused or AfStateFailed it will remain in that /// state and AfPauseState will report AfPauseStatePaused. - /// + /// /// However, if the algorithm is scanning (AfStateScanning), /// AfPauseState will report AfPauseStatePausing until the scan is /// finished, at which point AfState will report one of AfStateFocused @@ -1844,8 +1846,7 @@ impl TryFrom for AfPause { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1862,28 +1863,28 @@ impl Control for AfPause {} /// Acts as a control to instruct the lens to move to a particular position /// and also reports back the position of the lens for each frame. -/// +/// /// The LensPosition control is ignored unless the AfMode is set to /// AfModeManual, though the value is reported back unconditionally in all /// modes. -/// +/// /// The units are a reciprocal distance scale like dioptres but normalised /// for the hyperfocal distance. That is, for a lens with hyperfocal /// distance H, and setting it to a focal distance D, the lens position LP, /// which is generally a non-integer, is given by -/// +/// /// \f$LP = \frac{H}{D}\f$ -/// +/// /// For example: -/// +/// /// 0 moves the lens to infinity. /// 0.5 moves the lens to twice the hyperfocal distance. /// 1 moves the lens to the hyperfocal position. /// And larger values will focus the lens ever closer. -/// +/// /// \todo Define a property to report the Hyperforcal distance of calibrated /// lenses. -/// +/// /// \todo Define a property to report the maximum and minimum positions of /// this lens. The minimum value will often be zero (meaning infinity). #[derive(Debug, Clone)] @@ -1927,16 +1928,16 @@ impl Control for LensPosition {} /// reported AfMode value and (in continuous AF mode) the AfPauseState /// value. 
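The LensPosition mapping above is easy to get wrong in application code, so a worked example of the quoted formula LP = H/D may help; this is a sketch over plain numbers, not a crate API:

```rust
/// LensPosition units: LP = H / D, where H is the hyperfocal distance and D
/// the desired focal distance, both in the same unit (metres here).
fn lens_position(hyperfocal: f64, distance: f64) -> f64 {
    if distance.is_infinite() {
        0.0 // 0 moves the lens to infinity
    } else {
        hyperfocal / distance
    }
}

fn main() {
    let h = 2.0; // illustrative hyperfocal distance in metres
    assert_eq!(lens_position(h, f64::INFINITY), 0.0); // infinity
    assert_eq!(lens_position(h, 2.0 * h), 0.5); // twice the hyperfocal distance
    assert_eq!(lens_position(h, h), 1.0); // the hyperfocal position
}
```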
The possible state changes are described below, though we note /// the following state transitions that occur when the AfMode is changed. -/// +/// /// If the AfMode is set to AfModeManual, then the AfState will always /// report AfStateIdle (even if the lens is subsequently moved). Changing to /// the AfModeManual state does not initiate any lens movement. -/// +/// /// If the AfMode is set to AfModeAuto then the AfState will report /// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent together /// then AfState will omit AfStateIdle and move straight to AfStateScanning /// (and start a scan). -/// +/// /// If the AfMode is set to AfModeContinuous then the AfState will initially /// report AfStateScanning. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] @@ -1952,7 +1953,7 @@ pub enum AfState { /// move back to AfStateIdle or, if the scan actually completes before /// the cancel request is processed, to one of AfStateFocused or /// AfStateFailed. - /// + /// /// Alternatively the AF algorithm could be in continuous mode /// (AfModeContinuous) at which point it may enter this state /// spontaneously whenever it determines that a rescan is needed. @@ -1971,8 +1972,7 @@ impl TryFrom for AfState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -1990,7 +1990,7 @@ impl Control for AfState {} /// Only applicable in continuous (AfModeContinuous) mode, this reports /// whether the algorithm is currently running, paused or pausing (that is, /// will pause as soon as any in-progress scan completes). -/// +/// /// Any change to AfMode will cause AfPauseStateRunning to be reported. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2012,8 +2012,7 @@ impl TryFrom for AfPauseState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2030,7 +2029,7 @@ impl Control for AfPauseState {} /// Control for AE metering trigger. Currently identical to /// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. -/// +/// /// Whether the camera device will trigger a precapture metering sequence /// when it processes this request. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] @@ -2049,8 +2048,7 @@ impl TryFrom for AePrecaptureTrigger { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2067,7 +2065,7 @@ impl Control for AePrecaptureTrigger {} /// Control to select the noise reduction algorithm mode. Currently /// identical to ANDROID_NOISE_REDUCTION_MODE. -/// +/// /// Mode of operation for the noise reduction algorithm. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2088,8 +2086,7 @@ impl TryFrom for NoiseReductionMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2106,7 +2103,7 @@ impl Control for NoiseReductionMode {} /// Control to select the color correction aberration mode. Currently /// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. -/// +/// /// Mode of operation for the chromatic aberration correction algorithm. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2124,8 +2121,7 @@ impl TryFrom for ColorCorrectionAberrationMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2142,7 +2138,7 @@ impl Control for ColorCorrectionAberrationMode {} /// Control to report the current AE algorithm state. Currently identical to /// ANDROID_CONTROL_AE_STATE. -/// +/// /// Current state of the AE algorithm. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2166,8 +2162,7 @@ impl TryFrom for AeState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2184,7 +2179,7 @@ impl Control for AeState {} /// Control to report the current AWB algorithm state. Currently identical /// to ANDROID_CONTROL_AWB_STATE. -/// +/// /// Current state of the AWB algorithm. #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] @@ -2203,8 +2198,7 @@ impl TryFrom for AwbState { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2274,8 +2268,7 @@ impl TryFrom for LensShadingMapMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2307,8 +2300,7 @@ impl TryFrom for SceneFlicker { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2327,7 +2319,7 @@ impl Control for SceneFlicker {} /// it was exposed to when the final completed result was available to the /// framework. Always less than or equal to PipelineMaxDepth. Currently /// identical to ANDROID_REQUEST_PIPELINE_DEPTH. -/// +/// /// The typical value for this control is 3 as a frame is first exposed, /// captured and then processed in a single pass through the ISP. Any /// additional processing step performed after the ISP pass (in example face @@ -2461,8 +2453,7 @@ impl TryFrom for TestPatternMode { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -2480,53 +2471,52 @@ impl Control for TestPatternMode {} pub fn make_dyn(id: ControlId, val: ControlValue) -> Result, ControlValueError> { match id { ControlId::AeEnable => Ok(Box::new(AeEnable::try_from(val)?)), -ControlId::AeLocked => Ok(Box::new(AeLocked::try_from(val)?)), -ControlId::AeMeteringMode => Ok(Box::new(AeMeteringMode::try_from(val)?)), -ControlId::AeConstraintMode => Ok(Box::new(AeConstraintMode::try_from(val)?)), -ControlId::AeExposureMode => Ok(Box::new(AeExposureMode::try_from(val)?)), -ControlId::ExposureValue => Ok(Box::new(ExposureValue::try_from(val)?)), -ControlId::ExposureTime => Ok(Box::new(ExposureTime::try_from(val)?)), -ControlId::AnalogueGain => Ok(Box::new(AnalogueGain::try_from(val)?)), -ControlId::Brightness => Ok(Box::new(Brightness::try_from(val)?)), -ControlId::Contrast => Ok(Box::new(Contrast::try_from(val)?)), -ControlId::Lux => Ok(Box::new(Lux::try_from(val)?)), -ControlId::AwbEnable => Ok(Box::new(AwbEnable::try_from(val)?)), -ControlId::AwbMode => Ok(Box::new(AwbMode::try_from(val)?)), -ControlId::AwbLocked => Ok(Box::new(AwbLocked::try_from(val)?)), -ControlId::ColourGains => Ok(Box::new(ColourGains::try_from(val)?)), -ControlId::ColourTemperature => Ok(Box::new(ColourTemperature::try_from(val)?)), -ControlId::Saturation => Ok(Box::new(Saturation::try_from(val)?)), -ControlId::SensorBlackLevels => Ok(Box::new(SensorBlackLevels::try_from(val)?)), -ControlId::Sharpness => Ok(Box::new(Sharpness::try_from(val)?)), -ControlId::FocusFoM => Ok(Box::new(FocusFoM::try_from(val)?)), -ControlId::ColourCorrectionMatrix => Ok(Box::new(ColourCorrectionMatrix::try_from(val)?)), -ControlId::ScalerCrop => Ok(Box::new(ScalerCrop::try_from(val)?)), -ControlId::DigitalGain => Ok(Box::new(DigitalGain::try_from(val)?)), -ControlId::FrameDuration => Ok(Box::new(FrameDuration::try_from(val)?)), -ControlId::FrameDurationLimits => Ok(Box::new(FrameDurationLimits::try_from(val)?)), -ControlId::SensorTemperature => Ok(Box::new(SensorTemperature::try_from(val)?)), -ControlId::SensorTimestamp => Ok(Box::new(SensorTimestamp::try_from(val)?)), -ControlId::AfMode => Ok(Box::new(AfMode::try_from(val)?)), -ControlId::AfRange => Ok(Box::new(AfRange::try_from(val)?)), -ControlId::AfSpeed => Ok(Box::new(AfSpeed::try_from(val)?)), -ControlId::AfMetering => Ok(Box::new(AfMetering::try_from(val)?)), -ControlId::AfWindows => Ok(Box::new(AfWindows::try_from(val)?)), -ControlId::AfTrigger => Ok(Box::new(AfTrigger::try_from(val)?)), -ControlId::AfPause => Ok(Box::new(AfPause::try_from(val)?)), -ControlId::LensPosition => Ok(Box::new(LensPosition::try_from(val)?)), -ControlId::AfState => Ok(Box::new(AfState::try_from(val)?)), -ControlId::AfPauseState => Ok(Box::new(AfPauseState::try_from(val)?)), -ControlId::AePrecaptureTrigger => Ok(Box::new(AePrecaptureTrigger::try_from(val)?)), -ControlId::NoiseReductionMode => Ok(Box::new(NoiseReductionMode::try_from(val)?)), -ControlId::ColorCorrectionAberrationMode => Ok(Box::new(ColorCorrectionAberrationMode::try_from(val)?)), -ControlId::AeState => Ok(Box::new(AeState::try_from(val)?)), -ControlId::AwbState => Ok(Box::new(AwbState::try_from(val)?)), -ControlId::SensorRollingShutterSkew => Ok(Box::new(SensorRollingShutterSkew::try_from(val)?)), -ControlId::LensShadingMapMode => Ok(Box::new(LensShadingMapMode::try_from(val)?)), -ControlId::SceneFlicker => 
Ok(Box::new(SceneFlicker::try_from(val)?)), -ControlId::PipelineDepth => Ok(Box::new(PipelineDepth::try_from(val)?)), -ControlId::MaxLatency => Ok(Box::new(MaxLatency::try_from(val)?)), -ControlId::TestPatternMode => Ok(Box::new(TestPatternMode::try_from(val)?)), - + ControlId::AeLocked => Ok(Box::new(AeLocked::try_from(val)?)), + ControlId::AeMeteringMode => Ok(Box::new(AeMeteringMode::try_from(val)?)), + ControlId::AeConstraintMode => Ok(Box::new(AeConstraintMode::try_from(val)?)), + ControlId::AeExposureMode => Ok(Box::new(AeExposureMode::try_from(val)?)), + ControlId::ExposureValue => Ok(Box::new(ExposureValue::try_from(val)?)), + ControlId::ExposureTime => Ok(Box::new(ExposureTime::try_from(val)?)), + ControlId::AnalogueGain => Ok(Box::new(AnalogueGain::try_from(val)?)), + ControlId::Brightness => Ok(Box::new(Brightness::try_from(val)?)), + ControlId::Contrast => Ok(Box::new(Contrast::try_from(val)?)), + ControlId::Lux => Ok(Box::new(Lux::try_from(val)?)), + ControlId::AwbEnable => Ok(Box::new(AwbEnable::try_from(val)?)), + ControlId::AwbMode => Ok(Box::new(AwbMode::try_from(val)?)), + ControlId::AwbLocked => Ok(Box::new(AwbLocked::try_from(val)?)), + ControlId::ColourGains => Ok(Box::new(ColourGains::try_from(val)?)), + ControlId::ColourTemperature => Ok(Box::new(ColourTemperature::try_from(val)?)), + ControlId::Saturation => Ok(Box::new(Saturation::try_from(val)?)), + ControlId::SensorBlackLevels => Ok(Box::new(SensorBlackLevels::try_from(val)?)), + ControlId::Sharpness => Ok(Box::new(Sharpness::try_from(val)?)), + ControlId::FocusFoM => Ok(Box::new(FocusFoM::try_from(val)?)), + ControlId::ColourCorrectionMatrix => Ok(Box::new(ColourCorrectionMatrix::try_from(val)?)), + ControlId::ScalerCrop => Ok(Box::new(ScalerCrop::try_from(val)?)), + ControlId::DigitalGain => Ok(Box::new(DigitalGain::try_from(val)?)), + ControlId::FrameDuration => Ok(Box::new(FrameDuration::try_from(val)?)), + ControlId::FrameDurationLimits => Ok(Box::new(FrameDurationLimits::try_from(val)?)), + ControlId::SensorTemperature => Ok(Box::new(SensorTemperature::try_from(val)?)), + ControlId::SensorTimestamp => Ok(Box::new(SensorTimestamp::try_from(val)?)), + ControlId::AfMode => Ok(Box::new(AfMode::try_from(val)?)), + ControlId::AfRange => Ok(Box::new(AfRange::try_from(val)?)), + ControlId::AfSpeed => Ok(Box::new(AfSpeed::try_from(val)?)), + ControlId::AfMetering => Ok(Box::new(AfMetering::try_from(val)?)), + ControlId::AfWindows => Ok(Box::new(AfWindows::try_from(val)?)), + ControlId::AfTrigger => Ok(Box::new(AfTrigger::try_from(val)?)), + ControlId::AfPause => Ok(Box::new(AfPause::try_from(val)?)), + ControlId::LensPosition => Ok(Box::new(LensPosition::try_from(val)?)), + ControlId::AfState => Ok(Box::new(AfState::try_from(val)?)), + ControlId::AfPauseState => Ok(Box::new(AfPauseState::try_from(val)?)), + ControlId::AePrecaptureTrigger => Ok(Box::new(AePrecaptureTrigger::try_from(val)?)), + ControlId::NoiseReductionMode => Ok(Box::new(NoiseReductionMode::try_from(val)?)), + ControlId::ColorCorrectionAberrationMode => Ok(Box::new(ColorCorrectionAberrationMode::try_from(val)?)), + ControlId::AeState => Ok(Box::new(AeState::try_from(val)?)), + ControlId::AwbState => Ok(Box::new(AwbState::try_from(val)?)), + ControlId::SensorRollingShutterSkew => Ok(Box::new(SensorRollingShutterSkew::try_from(val)?)), + ControlId::LensShadingMapMode => Ok(Box::new(LensShadingMapMode::try_from(val)?)), + ControlId::SceneFlicker => Ok(Box::new(SceneFlicker::try_from(val)?)), + ControlId::PipelineDepth => 
Ok(Box::new(PipelineDepth::try_from(val)?)), + ControlId::MaxLatency => Ok(Box::new(MaxLatency::try_from(val)?)), + ControlId::TestPatternMode => Ok(Box::new(TestPatternMode::try_from(val)?)), } } diff --git a/libcamera/src/generated/properties.rs b/libcamera/src/generated/properties.rs index 65bb10e..f5d4e11 100644 --- a/libcamera/src/generated/properties.rs +++ b/libcamera/src/generated/properties.rs @@ -1,11 +1,15 @@ //! Generated by `cargo run --bin generate_rust properties` use std::ops::{Deref, DerefMut}; + use num_enum::{IntoPrimitive, TryFromPrimitive}; -use crate::control::{Property, ControlEntry, DynControlEntry}; -use crate::control_value::{ControlValue, ControlValueError}; + #[allow(unused_imports)] use crate::geometry::{Rectangle, Size}; +use crate::{ + control::{ControlEntry, DynControlEntry, Property}, + control_value::{ControlValue, ControlValueError}, +}; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(u32)] @@ -16,16 +20,16 @@ pub enum PropertyId { /// between two reference systems, one relative to the camera module, and /// one defined on the external world scene to be captured when projected /// on the image sensor pixel array. - /// + /// /// A camera sensor has a 2-dimensional reference system 'Rc' defined by /// its pixel array read-out order. The origin is set to the first pixel /// being read out, the X-axis points along the column read-out direction /// towards the last columns, and the Y-axis along the row read-out /// direction towards the last row. - /// + /// /// A typical example for a sensor with a 2592x1944 pixel array matrix /// observed from the front is - /// + /// /// ```text /// 2591 X-axis 0 /// <------------------------+ 0 @@ -36,18 +40,18 @@ pub enum PropertyId { /// .......... ... ..........! 1943 /// V /// ``` - /// - /// + /// + /// /// The external world scene reference system 'Rs' is a 2-dimensional /// reference system on the focal plane of the camera module. The origin is /// placed on the top-left corner of the visible scene, the X-axis points /// towards the right, and the Y-axis points towards the bottom of the /// scene. The top, bottom, left and right directions are intentionally not /// defined and depend on the environment in which the camera is used. - /// + /// /// A typical example of a (very common) picture of a shark swimming from /// left to right, as seen from the camera, is - /// + /// /// ```text /// 0 X-axis /// 0 +-------------------------------------> @@ -63,9 +67,9 @@ pub enum PropertyId { /// V /// Y-axis /// ``` - /// + /// /// With the reference system 'Rs' placed on the camera focal plane. - /// + /// /// ```text /// ¸.·˙! /// ¸.·˙ ! @@ -77,15 +81,15 @@ pub enum PropertyId { /// ˙·.¸ ! /// ˙·.¸! /// ``` - /// + /// /// When projected on the sensor's pixel array, the image and the associated /// reference system 'Rs' are typically (but not always) inverted, due to /// the camera module's lens optical inversion effect. - /// + /// /// Assuming the above represented scene of the swimming shark, the lens /// inversion projects the scene and its reference system onto the sensor /// pixel array, seen from the front of the camera sensor, as follow - /// + /// /// ```text /// Y-axis /// ^ @@ -101,21 +105,21 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-axis /// ``` - /// + /// /// Note the shark being upside-down. - /// + /// /// The resulting projected reference system is named 'Rp'. 
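A usage sketch for the controls make_dyn factory above. The module paths and the ControlValue::from(i32) constructor are assumptions, not taken from this patch:

```rust
use libcamera::control::DynControlEntry;
use libcamera::control_value::{ControlValue, ControlValueError};
use libcamera::controls::{make_dyn, ControlId};

/// make_dyn dispatches on the control id and runs the matching
/// TryFrom<ControlValue> impl, so an unknown enum discriminant surfaces as
/// ControlValueError::UnknownVariant instead of passing through silently.
fn decode(id: ControlId, raw: ControlValue) -> Result<Box<dyn DynControlEntry>, ControlValueError> {
    make_dyn(id, raw)
}

fn main() {
    let raw = ControlValue::from(0i32); // 0 == AfModeManual in the enum above
    let _entry = decode(ControlId::AfMode, raw).expect("known AfMode variant");
}
```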
- /// + /// /// The camera rotation property is then defined as the angular difference /// in the counter-clockwise direction between the camera reference system /// 'Rc' and the projected scene reference system 'Rp'. It is expressed in /// degrees as a number in the range [0, 360[. - /// + /// /// Examples - /// + /// /// 0 degrees camera rotation - /// - /// + /// + /// /// ```text /// Y-Rp /// ^ @@ -133,8 +137,8 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rc /// ``` - /// - /// + /// + /// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -152,9 +156,9 @@ pub enum PropertyId { /// V /// Y-Rp /// ``` - /// + /// /// 90 degrees camera rotation - /// + /// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -178,9 +182,9 @@ pub enum PropertyId { /// V /// X-Rc /// ``` - /// + /// /// 180 degrees camera rotation - /// + /// /// ```text /// 0 /// <------------------------------------+ 0 @@ -198,9 +202,9 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rp /// ``` - /// + /// /// 270 degrees camera rotation - /// + /// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -224,17 +228,17 @@ pub enum PropertyId { /// V /// X-Rc /// ``` - /// - /// + /// + /// /// Example one - Webcam - /// + /// /// A camera module installed on the user facing part of a laptop screen /// casing used for video calls. The captured images are meant to be /// displayed in landscape mode (width > height) on the laptop screen. - /// + /// /// The camera is typically mounted upside-down to compensate the lens /// optical inversion effect. - /// + /// /// ```text /// Y-Rp /// Y-Rc ^ @@ -251,11 +255,11 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rc /// ``` - /// + /// /// The two reference systems are aligned, the resulting camera rotation is /// 0 degrees, no rotation correction needs to be applied to the resulting /// image once captured to memory buffers to correctly display it to users. - /// + /// /// ```text /// +--------------------------------------+ /// ! ! @@ -269,12 +273,12 @@ pub enum PropertyId { /// ! ! /// +--------------------------------------+ /// ``` - /// + /// /// If the camera sensor is not mounted upside-down to compensate for the /// lens optical inversion, the two reference systems will not be aligned, /// with 'Rp' being rotated 180 degrees relatively to 'Rc'. - /// - /// + /// + /// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -292,9 +296,9 @@ pub enum PropertyId { /// 0 +-------------------------------------> /// 0 X-Rp /// ``` - /// + /// /// The image once captured to memory will then be rotated by 180 degrees - /// + /// /// ```text /// +--------------------------------------+ /// ! ! @@ -308,10 +312,10 @@ pub enum PropertyId { /// ! ! /// +--------------------------------------+ /// ``` - /// + /// /// A software rotation correction of 180 degrees should be applied to /// correctly display the image. - /// + /// /// ```text /// +--------------------------------------+ /// ! ! @@ -325,18 +329,18 @@ pub enum PropertyId { /// ! ! /// +--------------------------------------+ /// ``` - /// + /// /// Example two - Phone camera - /// + /// /// A camera installed on the back side of a mobile device facing away from /// the user. The captured images are meant to be displayed in portrait mode /// (height > width) to match the device screen orientation and the device /// usage orientation used when taking the picture. 
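Putting the rotation examples into numbers: the property is a counter-clockwise angle in [0, 360), so a display stack that rotates clockwise needs the complement. A sketch over plain integers, not a crate API:

```rust
/// Correcting the captured image means rotating it by the reported angle
/// counter-clockwise; for APIs that take clockwise angles, the equivalent
/// is (360 - rotation) % 360.
fn clockwise_display_correction(rotation_ccw_deg: u32) -> u32 {
    (360 - (rotation_ccw_deg % 360)) % 360
}

fn main() {
    assert_eq!(clockwise_display_correction(0), 0); // aligned webcam: no correction
    assert_eq!(clockwise_display_correction(180), 180); // non-inverted webcam module
    assert_eq!(clockwise_display_correction(90), 270); // phone example: 90 CCW == 270 CW
}
```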
- /// + /// /// The camera sensor is typically mounted with its pixel array longer side /// aligned to the device longer side, upside-down mounted to compensate for /// the lens optical inversion effect. - /// + /// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -360,13 +364,13 @@ pub enum PropertyId { /// V /// X-Rc /// ``` - /// + /// /// The two reference systems are not aligned and the 'Rp' reference /// system is rotated by 90 degrees in the counter-clockwise direction /// relatively to the 'Rc' reference system. - /// + /// /// The image once captured to memory will be rotated. - /// + /// /// ```text /// +-------------------------------------+ /// | _ _ | @@ -380,11 +384,11 @@ pub enum PropertyId { /// | V | /// +-------------------------------------+ /// ``` - /// + /// /// A correction of 90 degrees in counter-clockwise direction has to be /// applied to correctly display the image in portrait mode on the device /// screen. - /// + /// /// ```text /// +--------------------+ /// | | @@ -409,45 +413,45 @@ pub enum PropertyId { /// camera is part of a larger unit and exposed as a black-box to the /// system. In such cases the model name of the smallest device that /// contains the camera sensor shall be used. - /// + /// /// The model name is not meant to be a camera name displayed to the /// end-user, but may be combined with other camera information to create a /// camera name. - /// + /// /// The model name is not guaranteed to be unique in the system nor is /// it guaranteed to be stable or have any other properties required to make /// it a good candidate to be used as a permanent identifier of a camera. - /// + /// /// The model name shall describe the camera in a human readable format and /// shall be encoded in ASCII. - /// + /// /// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. Model = 3, /// The pixel unit cell physical size, in nanometers. - /// + /// /// The UnitCellSize properties defines the horizontal and vertical sizes of /// a single pixel unit, including its active and non-active parts. In /// other words, it expresses the horizontal and vertical distance between /// the top-left corners of adjacent pixels. - /// + /// /// The property can be used to calculate the physical size of the sensor's /// pixel array area and for calibration purposes. UnitCellSize = 4, /// The camera sensor pixel array readable area vertical and horizontal /// sizes, in pixels. - /// + /// /// The PixelArraySize property defines the size in pixel units of the /// readable part of full pixel array matrix, including optical black /// pixels used for calibration, pixels which are not considered valid for /// capture and active pixels containing valid image data. - /// + /// /// The property describes the maximum size of the raw data captured by the /// camera, which might not correspond to the physical size of the sensor /// pixel array matrix, as some portions of the physical pixel array matrix /// are not accessible and cannot be transmitted out. - /// + /// /// For example, let's consider a pixel array matrix assembled as follows - /// + /// /// ```text /// +--------------------------------------------------+ /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| @@ -460,7 +464,7 @@ pub enum PropertyId { /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| /// ... ... ... ... ... /// ``` - /// + /// /// ```text /// ... ... ... ... ... 
/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| @@ -469,14 +473,14 @@ pub enum PropertyId { /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| /// +--------------------------------------------------+ /// ``` - /// + /// /// starting with two lines of non-readable pixels (x), followed by N lines /// of readable data (D) surrounded by two columns of non-readable pixels on /// each side, and ending with two more lines of non-readable pixels. Only /// the readable portion is transmitted to the receiving side, defining the /// sizes of the largest possible buffer of raw data that can be presented /// to applications. - /// + /// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -493,41 +497,41 @@ pub enum PropertyId { /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | /// +----------------------------------------------+ / /// ``` - /// + /// /// This defines a rectangle whose top-left corner is placed in position (0, /// 0) and whose vertical and horizontal sizes are defined by this property. /// All other rectangles that describe portions of the pixel array, such as /// the optical black pixels rectangles and active pixel areas, are defined /// relatively to this rectangle. - /// + /// /// All the coordinates are expressed relative to the default sensor readout /// direction, without any transformation (such as horizontal and vertical /// flipping) applied. When mapping them to the raw pixel buffer, /// applications shall take any configured transformation into account. - /// + /// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::Size) PixelArraySize = 5, /// The pixel array region(s) which contain optical black pixels /// considered valid for calibration purposes. - /// + /// /// This property describes the position and size of optical black pixel /// regions in the raw data buffer as stored in memory, which might differ /// from their actual physical location in the pixel array matrix. - /// + /// /// It is important to note, in fact, that camera sensors might /// automatically reorder or skip portions of their pixels array matrix when /// transmitting data to the receiver. For instance, a sensor may merge the /// top and bottom optical black rectangles into a single rectangle, /// transmitted at the beginning of the frame. - /// + /// /// The pixel array contains several areas with different purposes, /// interleaved by lines and columns which are said not to be valid for /// capturing purposes. Invalid lines and columns are defined as invalid as /// they could be positioned too close to the chip margins or to the optical /// black shielding placed on top of optical black pixels. 
- /// + /// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -551,16 +555,16 @@ pub enum PropertyId { /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | /// +----------------------------------------------+ / /// ``` - /// + /// /// The readable pixel array matrix is composed by /// 2 invalid lines (I) /// 4 lines of valid optical black pixels (O) /// 2 invalid lines (I) /// n lines of valid pixel data (P) /// 2 invalid lines (I) - /// + /// /// And the position of the optical black pixel rectangles is defined by - /// + /// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, @@ -568,11 +572,11 @@ pub enum PropertyId { /// { x2, y3, 2, y4 - y3 + 1 }, /// }; /// ``` - /// + /// /// If the camera, when capturing the full pixel array matrix, automatically /// skips the invalid lines and columns, producing the following data /// buffer, when captured to memory - /// + /// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -591,12 +595,12 @@ pub enum PropertyId { /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | /// +----------------------------------------------+ / /// ``` - /// + /// /// then the invalid lines and columns should not be reported as part of the /// PixelArraySize property in first place. - /// + /// /// In this case, the position of the black pixel rectangles will be - /// + /// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { 0, 0, y1 + 1, PixelArraySize[0] }, @@ -604,7 +608,7 @@ pub enum PropertyId { /// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, /// }; /// ``` - /// + /// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::OpticalBlackRectangles) @@ -612,22 +616,22 @@ pub enum PropertyId { /// The PixelArrayActiveAreas property defines the (possibly multiple and /// overlapping) portions of the camera sensor readable pixel matrix /// which are considered valid for image acquisition purposes. - /// + /// /// This property describes an arbitrary number of overlapping rectangles, /// with each rectangle representing the maximum image size that the camera /// sensor can produce for a particular aspect ratio. They are defined /// relatively to the PixelArraySize rectangle. - /// + /// /// When multiple rectangles are reported, they shall be ordered from the /// tallest to the shortest. - /// + /// /// Example 1 /// A camera sensor which only produces images in the 4:3 image resolution /// will report a single PixelArrayActiveAreas rectangle, from which all /// other image formats are obtained by either cropping the field-of-view /// and/or applying pixel sub-sampling techniques such as pixel skipping or /// binning. - /// + /// /// ```text /// PixelArraySize.width /// /----------------/ @@ -640,18 +644,18 @@ pub enum PropertyId { /// y2 o +------------+ | | /// +----------------+ / /// ``` - /// + /// /// The property reports a single rectangle - /// + /// /// ```text /// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) /// ``` - /// + /// /// Example 2 /// A camera sensor which can produce images in different native /// resolutions will report several overlapping rectangles, one for each /// natively supported resolution. 
- /// + /// /// ```text /// PixelArraySize.width /// /------------------/ @@ -666,23 +670,23 @@ pub enum PropertyId { /// y4 o +------+ | | /// +----+------+----+ / /// ``` - /// + /// /// The property reports two rectangles - /// + /// /// ```text /// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), /// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) /// ``` - /// + /// /// The first rectangle describes the maximum field-of-view of all image /// formats in the 4:3 resolutions, while the second one describes the /// maximum field of view for all image formats in the 16:9 resolutions. - /// + /// /// Multiple rectangles shall only be reported when the sensor can't capture /// the pixels in the corner regions. If all the pixels in the (x1,y1) - /// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall /// contains the single rectangle (x1,y1) - (x4,y4). - /// + /// /// \todo Rename this property to ActiveAreas once we will have property /// ```text /// categories (i.e. Properties::PixelArray::ActiveAreas) @@ -691,16 +695,16 @@ pub enum PropertyId { /// reflects the minimum mandatory cropping applied in the camera sensor and /// the rest of the pipeline. Just as the ScalerCrop control, it defines a /// rectangle taken from the sensor's active pixel array. - /// + /// /// This property is valid only after the camera has been successfully /// configured and its value may change whenever a new configuration is /// applied. - /// + /// /// \todo Turn this property into a "maximum control value" for the /// ScalerCrop control once "dynamic" controls have been implemented. ScalerCropMaximum = 8, /// The relative sensitivity of the chosen sensor mode. - /// + /// /// Some sensors have readout modes with different sensitivities. For example, /// a binned camera mode might, with the same exposure and gains, produce /// twice the signal level of the full resolution readout. This would be @@ -733,8 +737,7 @@ impl TryFrom for Location { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) + Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value)) } } @@ -753,16 +756,16 @@ impl Property for Location {} /// between two reference systems, one relative to the camera module, and /// one defined on the external world scene to be captured when projected /// on the image sensor pixel array. -/// +/// /// A camera sensor has a 2-dimensional reference system 'Rc' defined by /// its pixel array read-out order. The origin is set to the first pixel /// being read out, the X-axis points along the column read-out direction /// towards the last columns, and the Y-axis along the row read-out /// direction towards the last row. -/// +/// /// A typical example for a sensor with a 2592x1944 pixel array matrix /// observed from the front is -/// +/// /// ```text /// 2591 X-axis 0 /// <------------------------+ 0 @@ -773,18 +776,18 @@ impl Property for Location {} /// .......... ... ..........! 1943 /// V /// ``` -/// -/// +/// +/// /// The external world scene reference system 'Rs' is a 2-dimensional /// reference system on the focal plane of the camera module. The origin is /// placed on the top-left corner of the visible scene, the X-axis points /// towards the right, and the Y-axis points towards the bottom of the /// scene. 
The top, bottom, left and right directions are intentionally not /// defined and depend on the environment in which the camera is used. -/// +/// /// A typical example of a (very common) picture of a shark swimming from /// left to right, as seen from the camera, is -/// +/// /// ```text /// 0 X-axis /// 0 +-------------------------------------> @@ -800,9 +803,9 @@ impl Property for Location {} /// V /// Y-axis /// ``` -/// +/// /// With the reference system 'Rs' placed on the camera focal plane. -/// +/// /// ```text /// ¸.·˙! /// ¸.·˙ ! @@ -814,15 +817,15 @@ impl Property for Location {} /// ˙·.¸ ! /// ˙·.¸! /// ``` -/// +/// /// When projected on the sensor's pixel array, the image and the associated /// reference system 'Rs' are typically (but not always) inverted, due to /// the camera module's lens optical inversion effect. -/// +/// /// Assuming the above represented scene of the swimming shark, the lens /// inversion projects the scene and its reference system onto the sensor /// pixel array, seen from the front of the camera sensor, as follow -/// +/// /// ```text /// Y-axis /// ^ @@ -838,21 +841,21 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-axis /// ``` -/// +/// /// Note the shark being upside-down. -/// +/// /// The resulting projected reference system is named 'Rp'. -/// +/// /// The camera rotation property is then defined as the angular difference /// in the counter-clockwise direction between the camera reference system /// 'Rc' and the projected scene reference system 'Rp'. It is expressed in /// degrees as a number in the range [0, 360[. -/// +/// /// Examples -/// +/// /// 0 degrees camera rotation -/// -/// +/// +/// /// ```text /// Y-Rp /// ^ @@ -870,8 +873,8 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rc /// ``` -/// -/// +/// +/// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -889,9 +892,9 @@ impl Property for Location {} /// V /// Y-Rp /// ``` -/// +/// /// 90 degrees camera rotation -/// +/// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -915,9 +918,9 @@ impl Property for Location {} /// V /// X-Rc /// ``` -/// +/// /// 180 degrees camera rotation -/// +/// /// ```text /// 0 /// <------------------------------------+ 0 @@ -935,9 +938,9 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rp /// ``` -/// +/// /// 270 degrees camera rotation -/// +/// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -961,17 +964,17 @@ impl Property for Location {} /// V /// X-Rc /// ``` -/// -/// +/// +/// /// Example one - Webcam -/// +/// /// A camera module installed on the user facing part of a laptop screen /// casing used for video calls. The captured images are meant to be /// displayed in landscape mode (width > height) on the laptop screen. -/// +/// /// The camera is typically mounted upside-down to compensate the lens /// optical inversion effect. -/// +/// /// ```text /// Y-Rp /// Y-Rc ^ @@ -988,11 +991,11 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rc /// ``` -/// +/// /// The two reference systems are aligned, the resulting camera rotation is /// 0 degrees, no rotation correction needs to be applied to the resulting /// image once captured to memory buffers to correctly display it to users. -/// +/// /// ```text /// +--------------------------------------+ /// ! ! @@ -1006,12 +1009,12 @@ impl Property for Location {} /// ! ! 
/// +--------------------------------------+ /// ``` -/// +/// /// If the camera sensor is not mounted upside-down to compensate for the /// lens optical inversion, the two reference systems will not be aligned, /// with 'Rp' being rotated 180 degrees relatively to 'Rc'. -/// -/// +/// +/// /// ```text /// X-Rc 0 /// <------------------------------------+ 0 @@ -1029,9 +1032,9 @@ impl Property for Location {} /// 0 +-------------------------------------> /// 0 X-Rp /// ``` -/// +/// /// The image once captured to memory will then be rotated by 180 degrees -/// +/// /// ```text /// +--------------------------------------+ /// ! ! @@ -1045,10 +1048,10 @@ impl Property for Location {} /// ! ! /// +--------------------------------------+ /// ``` -/// +/// /// A software rotation correction of 180 degrees should be applied to /// correctly display the image. -/// +/// /// ```text /// +--------------------------------------+ /// ! ! @@ -1062,18 +1065,18 @@ impl Property for Location {} /// ! ! /// +--------------------------------------+ /// ``` -/// +/// /// Example two - Phone camera -/// +/// /// A camera installed on the back side of a mobile device facing away from /// the user. The captured images are meant to be displayed in portrait mode /// (height > width) to match the device screen orientation and the device /// usage orientation used when taking the picture. -/// +/// /// The camera sensor is typically mounted with its pixel array longer side /// aligned to the device longer side, upside-down mounted to compensate for /// the lens optical inversion effect. -/// +/// /// ```text /// 0 Y-Rc /// 0 +--------------------> @@ -1097,13 +1100,13 @@ impl Property for Location {} /// V /// X-Rc /// ``` -/// +/// /// The two reference systems are not aligned and the 'Rp' reference /// system is rotated by 90 degrees in the counter-clockwise direction /// relatively to the 'Rc' reference system. -/// +/// /// The image once captured to memory will be rotated. -/// +/// /// ```text /// +-------------------------------------+ /// | _ _ | @@ -1117,11 +1120,11 @@ impl Property for Location {} /// | V | /// +-------------------------------------+ /// ``` -/// +/// /// A correction of 90 degrees in counter-clockwise direction has to be /// applied to correctly display the image in portrait mode on the device /// screen. -/// +/// /// ```text /// +--------------------+ /// | | @@ -1182,18 +1185,18 @@ impl Property for Rotation {} /// camera is part of a larger unit and exposed as a black-box to the /// system. In such cases the model name of the smallest device that /// contains the camera sensor shall be used. -/// +/// /// The model name is not meant to be a camera name displayed to the /// end-user, but may be combined with other camera information to create a /// camera name. -/// +/// /// The model name is not guaranteed to be unique in the system nor is /// it guaranteed to be stable or have any other properties required to make /// it a good candidate to be used as a permanent identifier of a camera. -/// +/// /// The model name shall describe the camera in a human readable format and /// shall be encoded in ASCII. -/// +/// /// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. #[derive(Debug, Clone)] pub struct Model(pub String); @@ -1233,12 +1236,12 @@ impl ControlEntry for Model { impl Property for Model {} /// The pixel unit cell physical size, in nanometers. 
-/// +/// /// The UnitCellSize properties defines the horizontal and vertical sizes of /// a single pixel unit, including its active and non-active parts. In /// other words, it expresses the horizontal and vertical distance between /// the top-left corners of adjacent pixels. -/// +/// /// The property can be used to calculate the physical size of the sensor's /// pixel array area and for calibration purposes. #[derive(Debug, Clone)] @@ -1280,19 +1283,19 @@ impl Property for UnitCellSize {} /// The camera sensor pixel array readable area vertical and horizontal /// sizes, in pixels. -/// +/// /// The PixelArraySize property defines the size in pixel units of the /// readable part of full pixel array matrix, including optical black /// pixels used for calibration, pixels which are not considered valid for /// capture and active pixels containing valid image data. -/// +/// /// The property describes the maximum size of the raw data captured by the /// camera, which might not correspond to the physical size of the sensor /// pixel array matrix, as some portions of the physical pixel array matrix /// are not accessible and cannot be transmitted out. -/// +/// /// For example, let's consider a pixel array matrix assembled as follows -/// +/// /// ```text /// +--------------------------------------------------+ /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| @@ -1305,7 +1308,7 @@ impl Property for UnitCellSize {} /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| /// ... ... ... ... ... /// ``` -/// +/// /// ```text /// ... ... ... ... ... /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| @@ -1314,14 +1317,14 @@ impl Property for UnitCellSize {} /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| /// +--------------------------------------------------+ /// ``` -/// +/// /// starting with two lines of non-readable pixels (x), followed by N lines /// of readable data (D) surrounded by two columns of non-readable pixels on /// each side, and ending with two more lines of non-readable pixels. Only /// the readable portion is transmitted to the receiving side, defining the /// sizes of the largest possible buffer of raw data that can be presented /// to applications. -/// +/// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -1338,18 +1341,18 @@ impl Property for UnitCellSize {} /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | /// +----------------------------------------------+ / /// ``` -/// +/// /// This defines a rectangle whose top-left corner is placed in position (0, /// 0) and whose vertical and horizontal sizes are defined by this property. /// All other rectangles that describe portions of the pixel array, such as /// the optical black pixels rectangles and active pixel areas, are defined /// relatively to this rectangle. -/// +/// /// All the coordinates are expressed relative to the default sensor readout /// direction, without any transformation (such as horizontal and vertical /// flipping) applied. When mapping them to the raw pixel buffer, /// applications shall take any configured transformation into account. -/// +/// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::Size) @@ -1392,23 +1395,23 @@ impl Property for PixelArraySize {} /// The pixel array region(s) which contain optical black pixels /// considered valid for calibration purposes. 
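The calibration note on UnitCellSize above suggests a small worked example: the physical pixel-array size is the pixel pitch times the pixel count. A sketch over plain integers; field access on the crate's Size type is not shown in this patch:

```rust
/// Physical sensor dimensions from UnitCellSize (pitch in nanometres) and
/// PixelArraySize (pixels), converted to millimetres.
fn sensor_size_mm(cell_nm: (u64, u64), pixels: (u64, u64)) -> (f64, f64) {
    const NM_TO_MM: f64 = 1e-6;
    (
        cell_nm.0 as f64 * pixels.0 as f64 * NM_TO_MM,
        cell_nm.1 as f64 * pixels.1 as f64 * NM_TO_MM,
    )
}

fn main() {
    // Hypothetical 1400 nm unit cells on the 2592x1944 array used in the
    // documentation examples: roughly 3.63 mm x 2.72 mm.
    let (w, h) = sensor_size_mm((1400, 1400), (2592, 1944));
    assert!((w - 3.6288).abs() < 1e-6 && (h - 2.7216).abs() < 1e-6);
}
```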
-/// +/// /// This property describes the position and size of optical black pixel /// regions in the raw data buffer as stored in memory, which might differ /// from their actual physical location in the pixel array matrix. -/// +/// /// It is important to note, in fact, that camera sensors might /// automatically reorder or skip portions of their pixels array matrix when /// transmitting data to the receiver. For instance, a sensor may merge the /// top and bottom optical black rectangles into a single rectangle, /// transmitted at the beginning of the frame. -/// +/// /// The pixel array contains several areas with different purposes, /// interleaved by lines and columns which are said not to be valid for /// capturing purposes. Invalid lines and columns are defined as invalid as /// they could be positioned too close to the chip margins or to the optical /// black shielding placed on top of optical black pixels. -/// +/// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -1432,16 +1435,16 @@ impl Property for PixelArraySize {} /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | /// +----------------------------------------------+ / /// ``` -/// +/// /// The readable pixel array matrix is composed by /// 2 invalid lines (I) /// 4 lines of valid optical black pixels (O) /// 2 invalid lines (I) /// n lines of valid pixel data (P) /// 2 invalid lines (I) -/// +/// /// And the position of the optical black pixel rectangles is defined by -/// +/// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, @@ -1449,11 +1452,11 @@ impl Property for PixelArraySize {} /// { x2, y3, 2, y4 - y3 + 1 }, /// }; /// ``` -/// +/// /// If the camera, when capturing the full pixel array matrix, automatically /// skips the invalid lines and columns, producing the following data /// buffer, when captured to memory -/// +/// /// ```text /// PixelArraySize.width /// /----------------------------------------------/ @@ -1472,12 +1475,12 @@ impl Property for PixelArraySize {} /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | /// +----------------------------------------------+ / /// ``` -/// +/// /// then the invalid lines and columns should not be reported as part of the /// PixelArraySize property in first place. -/// +/// /// In this case, the position of the black pixel rectangles will be -/// +/// /// ```text /// PixelArrayOpticalBlackRectangles = { /// { 0, 0, y1 + 1, PixelArraySize[0] }, @@ -1485,7 +1488,7 @@ impl Property for PixelArraySize {} /// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, /// }; /// ``` -/// +/// /// \todo Rename this property to Size once we will have property /// ```text /// categories (i.e. Properties::PixelArray::OpticalBlackRectangles) @@ -1529,22 +1532,22 @@ impl Property for PixelArrayOpticalBlackRectangles {} /// The PixelArrayActiveAreas property defines the (possibly multiple and /// overlapping) portions of the camera sensor readable pixel matrix /// which are considered valid for image acquisition purposes. -/// +/// /// This property describes an arbitrary number of overlapping rectangles, /// with each rectangle representing the maximum image size that the camera /// sensor can produce for a particular aspect ratio. They are defined /// relatively to the PixelArraySize rectangle. -/// +/// /// When multiple rectangles are reported, they shall be ordered from the /// tallest to the shortest. 
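Given that ordering, an application still has to pick one rectangle for its target output. A sketch of selecting by aspect ratio, using plain (x, y, width, height) tuples in the notation of the examples below rather than the crate's Rectangle type:

```rust
/// Pick the active-area rectangle whose width/height ratio is closest to
/// the target aspect ratio.
fn best_active_area(areas: &[(u32, u32, u32, u32)], target_ratio: f64) -> Option<(u32, u32, u32, u32)> {
    areas.iter().copied().min_by(|a, b| {
        let da = (a.2 as f64 / a.3 as f64 - target_ratio).abs();
        let db = (b.2 as f64 / b.3 as f64 - target_ratio).abs();
        da.partial_cmp(&db).expect("aspect ratios are finite")
    })
}

fn main() {
    // Hypothetical 4:3 and 16:9 areas on a 4608x2592 sensor:
    let areas = [(576, 0, 3456, 2592), (0, 0, 4608, 2592)];
    assert_eq!(best_active_area(&areas, 16.0 / 9.0), Some((0, 0, 4608, 2592)));
}
```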
-/// +/// /// Example 1 /// A camera sensor which only produces images in the 4:3 image resolution /// will report a single PixelArrayActiveAreas rectangle, from which all /// other image formats are obtained by either cropping the field-of-view /// and/or applying pixel sub-sampling techniques such as pixel skipping or /// binning. -/// +/// /// ```text /// PixelArraySize.width /// /----------------/ @@ -1557,18 +1560,18 @@ impl Property for PixelArrayOpticalBlackRectangles {} /// y2 o +------------+ | | /// +----------------+ / /// ``` -/// +/// /// The property reports a single rectangle -/// +/// /// ```text /// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) /// ``` -/// +/// /// Example 2 /// A camera sensor which can produce images in different native /// resolutions will report several overlapping rectangles, one for each /// natively supported resolution. -/// +/// /// ```text /// PixelArraySize.width /// /------------------/ @@ -1583,23 +1586,23 @@ impl Property for PixelArrayOpticalBlackRectangles {} /// y4 o +------+ | | /// +----+------+----+ / /// ``` -/// +/// /// The property reports two rectangles -/// +/// /// ```text /// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), /// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) /// ``` -/// +/// /// The first rectangle describes the maximum field-of-view of all image /// formats in the 4:3 resolutions, while the second one describes the /// maximum field of view for all image formats in the 16:9 resolutions. -/// +/// /// Multiple rectangles shall only be reported when the sensor can't capture /// the pixels in the corner regions. If all the pixels in the (x1,y1) - /// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall /// contains the single rectangle (x1,y1) - (x4,y4). -/// +/// /// \todo Rename this property to ActiveAreas once we will have property /// ```text /// categories (i.e. Properties::PixelArray::ActiveAreas) @@ -1644,11 +1647,11 @@ impl Property for PixelArrayActiveAreas {} /// reflects the minimum mandatory cropping applied in the camera sensor and /// the rest of the pipeline. Just as the ScalerCrop control, it defines a /// rectangle taken from the sensor's active pixel array. -/// +/// /// This property is valid only after the camera has been successfully /// configured and its value may change whenever a new configuration is /// applied. -/// +/// /// \todo Turn this property into a "maximum control value" for the /// ScalerCrop control once "dynamic" controls have been implemented. #[derive(Debug, Clone)] @@ -1689,7 +1692,7 @@ impl ControlEntry for ScalerCropMaximum { impl Property for ScalerCropMaximum {} /// The relative sensitivity of the chosen sensor mode. -/// +/// /// Some sensors have readout modes with different sensitivities. For example, /// a binned camera mode might, with the same exposure and gains, produce /// twice the signal level of the full resolution readout. This would be @@ -1758,8 +1761,7 @@ impl TryFrom for ColorFilterArrangement { type Error = ControlValueError; fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
-            .map_err(|_| ControlValueError::UnknownVariant(value))
+        Self::try_from(i32::try_from(value.clone())?).map_err(|_| ControlValueError::UnknownVariant(value))
     }
 }
@@ -1777,15 +1779,14 @@ impl Property for ColorFilterArrangement {}
 pub fn make_dyn(id: PropertyId, val: ControlValue) -> Result<Box<dyn DynControlEntry>, ControlValueError> {
     match id {
         PropertyId::Location => Ok(Box::new(Location::try_from(val)?)),
-PropertyId::Rotation => Ok(Box::new(Rotation::try_from(val)?)),
-PropertyId::Model => Ok(Box::new(Model::try_from(val)?)),
-PropertyId::UnitCellSize => Ok(Box::new(UnitCellSize::try_from(val)?)),
-PropertyId::PixelArraySize => Ok(Box::new(PixelArraySize::try_from(val)?)),
-PropertyId::PixelArrayOpticalBlackRectangles => Ok(Box::new(PixelArrayOpticalBlackRectangles::try_from(val)?)),
-PropertyId::PixelArrayActiveAreas => Ok(Box::new(PixelArrayActiveAreas::try_from(val)?)),
-PropertyId::ScalerCropMaximum => Ok(Box::new(ScalerCropMaximum::try_from(val)?)),
-PropertyId::SensorSensitivity => Ok(Box::new(SensorSensitivity::try_from(val)?)),
-PropertyId::ColorFilterArrangement => Ok(Box::new(ColorFilterArrangement::try_from(val)?)),
-
+        PropertyId::Rotation => Ok(Box::new(Rotation::try_from(val)?)),
+        PropertyId::Model => Ok(Box::new(Model::try_from(val)?)),
+        PropertyId::UnitCellSize => Ok(Box::new(UnitCellSize::try_from(val)?)),
+        PropertyId::PixelArraySize => Ok(Box::new(PixelArraySize::try_from(val)?)),
+        PropertyId::PixelArrayOpticalBlackRectangles => Ok(Box::new(PixelArrayOpticalBlackRectangles::try_from(val)?)),
+        PropertyId::PixelArrayActiveAreas => Ok(Box::new(PixelArrayActiveAreas::try_from(val)?)),
+        PropertyId::ScalerCropMaximum => Ok(Box::new(ScalerCropMaximum::try_from(val)?)),
+        PropertyId::SensorSensitivity => Ok(Box::new(SensorSensitivity::try_from(val)?)),
+        PropertyId::ColorFilterArrangement => Ok(Box::new(ColorFilterArrangement::try_from(val)?)),
     }
 }
diff --git a/libcamera/src/lib.rs b/libcamera/src/lib.rs
index 0639e7f..7759bc8 100644
--- a/libcamera/src/lib.rs
+++ b/libcamera/src/lib.rs
@@ -14,6 +14,5 @@ pub mod request;
 pub mod stream;
 pub mod utils;
 
-#[rustfmt::skip]
 mod generated;
 pub use generated::*;
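A matching usage sketch for the properties factory, mirroring the controls one; the module paths, ControlValue::from(i32), and the assumption that discriminant 0 is a valid Location variant (the enum body is not shown in this hunk) are all hypothetical:

```rust
use libcamera::control_value::ControlValue;
use libcamera::properties::{make_dyn, Location, PropertyId};

fn main() {
    let raw = ControlValue::from(0i32); // assumed-valid Location discriminant
    // Decode through the typed enum (which derives Debug per the patch)...
    let loc = Location::try_from(raw.clone()).expect("known Location variant");
    println!("camera location: {loc:?}");
    // ...or through the dynamic factory keyed on PropertyId.
    let _entry = make_dyn(PropertyId::Location, raw).expect("known Location variant");
}
```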