From 46ac292b8bc05515b98eb0443f163ff50103f509 Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Sat, 7 Dec 2024 22:56:42 -0800 Subject: [PATCH 01/14] Add support for iterating and getting information from ControlInfoMap --- libcamera-sys/Cargo.toml | 6 +- libcamera-sys/c_api/controls.cpp | 130 ++++++++++++++++++++++++++++- libcamera-sys/c_api/controls.h | 48 ++++++++++- libcamera/Cargo.toml | 2 +- libcamera/build.rs | 6 +- libcamera/src/control.rs | 138 +++++++++++++++++++++++++++++++ libcamera/src/framebuffer_map.rs | 1 + 7 files changed, 317 insertions(+), 14 deletions(-) diff --git a/libcamera-sys/Cargo.toml b/libcamera-sys/Cargo.toml index 3fcb338..883da76 100644 --- a/libcamera-sys/Cargo.toml +++ b/libcamera-sys/Cargo.toml @@ -17,6 +17,6 @@ doctest = false [dependencies] [build-dependencies] -bindgen = "0.66.1" -pkg-config = "0.3.26" -cc = "1.0" +bindgen = "0.70" +pkg-config = "0.3" +cc = "1" diff --git a/libcamera-sys/c_api/controls.cpp b/libcamera-sys/c_api/controls.cpp index a07debc..8ae224e 100644 --- a/libcamera-sys/c_api/controls.cpp +++ b/libcamera-sys/c_api/controls.cpp @@ -5,7 +5,27 @@ extern "C" { -const char *libcamera_control_name(enum libcamera_control_id id) { +enum libcamera_control_id_enum libcamera_control_id(libcamera_control_id_t *control){ + return (enum libcamera_control_id_enum)control->id(); +} + +const char *libcamera_control_name(libcamera_control_id_t *control){ + return control->name().c_str(); +} + +enum libcamera_control_type libcamera_control_type(libcamera_control_id_t *control) { + return (enum libcamera_control_type) control->type(); +} + +const libcamera_control_id_t *libcamera_control_from_id(enum libcamera_control_id_enum id){ + auto it = libcamera::controls::controls.find(id); + if (it != libcamera::controls::controls.end()) + return it->second; + else + return nullptr; +} + +const char *libcamera_control_name_from_id(enum libcamera_control_id_enum id) { auto it = 
libcamera::controls::controls.find(id); if (it != libcamera::controls::controls.end()) return it->second->name().c_str(); @@ -13,7 +33,7 @@ const char *libcamera_control_name(enum libcamera_control_id id) { return nullptr; } -enum libcamera_control_type libcamera_control_type(enum libcamera_control_id id) { +enum libcamera_control_type libcamera_control_type_from_id(enum libcamera_control_id_enum id) { auto it = libcamera::controls::controls.find(id); if (it != libcamera::controls::controls.end()) return (enum libcamera_control_type)it->second->type(); @@ -120,4 +140,110 @@ void libcamera_control_value_set(libcamera_control_value_t *val, enum libcamera_ memcpy(storage.data(), data, storage.size()); } +const libcamera_control_value_t *libcamera_control_info_max(libcamera_control_info_t *val){ + return &val->max(); +} +const libcamera_control_value_t *libcamera_control_info_min(libcamera_control_info_t *val){ + return &val->min(); +} +const libcamera_control_value_t *libcamera_control_info_def(libcamera_control_info_t *val){ + return &val->def(); +} + +bool libcamera_control_id_map_add(libcamera_control_id_map_t *idmap, unsigned int key, const libcamera_control_id_t *control_id) +{ + if (!idmap || !control_id) + return false; + + (*idmap)[key] = control_id; + return true; +} + +const libcamera_control_id_t *libcamera_control_id_map_get(libcamera_control_id_map_t *idmap, unsigned int key) +{ + if (!idmap) + return nullptr; + + auto it = idmap->find(key); + if (it != idmap->end()) + return it->second; + return nullptr; +} + + +const libcamera_control_info_t *libcamera_control_info_map_at(libcamera_control_info_map_t *map, unsigned int key) +{ + if (!map) + return nullptr; + + try { + return &map->at(key); + } catch (const std::out_of_range &) { + return nullptr; + } +} + +size_t libcamera_control_info_map_count(const libcamera_control_info_map_t *map, unsigned int key) +{ + if (!map) + return 0; + + return map->count(key); +} + +size_t 
libcamera_control_info_map_size(const libcamera_control_info_map_t *map) +{ + if (!map) + return 0; + + return map->size(); +} + +const libcamera_control_info_t *libcamera_control_info_map_find(const libcamera_control_info_map_t *map, unsigned int key) +{ + if (!map) + return nullptr; + + auto it = map->find(key); + if (it != map->end()) { + return &it->second; + } + + return nullptr; +} + + +libcamera_control_info_map_iter_t* libcamera_control_info_map_iter_create(const libcamera_control_info_map_t* map) { + if (!map) return nullptr; + libcamera_control_info_map_iter_t* iter = new libcamera_control_info_map_iter_t(); + iter->current = map->begin(); + iter->end = map->end(); + return iter; +} + +bool libcamera_control_info_map_iter_has_next(const libcamera_control_info_map_iter_t* iter) { + if (!iter) return false; + return iter->current != iter->end; +} + +unsigned int libcamera_control_info_map_iter_key(const libcamera_control_info_map_iter_t* iter) { + if (!iter || iter->current == iter->end) return 0; + return iter->current->first->id(); +} + +const libcamera_control_info_t* libcamera_control_info_map_iter_value(const libcamera_control_info_map_iter_t* iter) { + if (!iter || iter->current == iter->end) return nullptr; + return &(iter->current->second); +} + +void libcamera_control_info_map_iter_next(libcamera_control_info_map_iter_t* iter) { + if (!iter || iter->current == iter->end) return; + ++(iter->current); +} + +void libcamera_control_info_map_iter_destroy(libcamera_control_info_map_iter_t* iter) { + delete iter; +} + + } diff --git a/libcamera-sys/c_api/controls.h b/libcamera-sys/c_api/controls.h index 3864bf0..52b1b4d 100644 --- a/libcamera-sys/c_api/controls.h +++ b/libcamera-sys/c_api/controls.h @@ -13,20 +13,35 @@ struct libcamera_control_list_iter { libcamera::ControlList::iterator it; }; +struct libcamera_control_info_map_iter { + libcamera::ControlInfoMap::const_iterator current; + libcamera::ControlInfoMap::const_iterator end; +}; + + typedef 
libcamera::ControlValue libcamera_control_value_t; typedef libcamera::ControlList libcamera_control_list_t; typedef struct libcamera_control_list_iter libcamera_control_list_iter_t; +typedef struct libcamera_control_info_map_iter libcamera_control_info_map_iter_t; typedef libcamera::ControlInfoMap libcamera_control_info_map_t; +typedef libcamera::ControlIdMap libcamera_control_id_map_t; +typedef libcamera::ControlId libcamera_control_id_t; +typedef libcamera::ControlInfo libcamera_control_info_t; extern "C" { #else typedef struct libcamera_control_value libcamera_control_value_t; typedef struct libcamera_control_list libcamera_control_list_t; typedef struct libcamera_control_list_iter libcamera_control_list_iter_t; +typedef struct libcamera_control_info_map_iter libcamera_control_info_map_iter_t; typedef struct libcamera_control_info_map libcamera_control_info_map_t; +typedef struct libcamera_control_id libcamera_control_id_t; +typedef struct libcamera_control_info libcamera_control_info_t; +typedef struct libcamera_control_id_map libcamera_control_id_map_t; + #endif -enum libcamera_control_id { libcamera_control_id_DUMMY }; +enum libcamera_control_id_enum { libcamera_control_id_DUMMY }; enum libcamera_property_id { libcamera_property_id_DUMMY }; enum libcamera_control_type { @@ -42,9 +57,13 @@ enum libcamera_control_type { }; // --- libcamera_control_id --- -const char *libcamera_control_name(enum libcamera_control_id id); -enum libcamera_control_type libcamera_control_type(enum libcamera_control_id id); +const libcamera_control_id_t *libcamera_control_from_id(enum libcamera_control_id_enum id); +const char *libcamera_control_name_from_id(enum libcamera_control_id_enum id); +enum libcamera_control_type libcamera_control_type_from_id(enum libcamera_control_id_enum id); +enum libcamera_control_id_enum libcamera_control_id(libcamera_control_id_t *control); +const char *libcamera_control_name(libcamera_control_id_t *control); +enum libcamera_control_type 
libcamera_control_type(libcamera_control_id_t *control); // --- libcamera_property_id --- const char *libcamera_property_name(enum libcamera_property_id id); enum libcamera_control_type libcamera_property_type(enum libcamera_property_id id); @@ -73,6 +92,29 @@ size_t libcamera_control_value_num_elements(const libcamera_control_value_t *val const void *libcamera_control_value_get(const libcamera_control_value_t *val); void libcamera_control_value_set(libcamera_control_value_t *val, enum libcamera_control_type type, const void *data, bool is_array, size_t num_elements); +// --- libcamera_control_info_t --- +const libcamera_control_value_t *libcamera_control_info_max(libcamera_control_info_t *val); +const libcamera_control_value_t *libcamera_control_info_min(libcamera_control_info_t *val); +const libcamera_control_value_t *libcamera_control_info_def(libcamera_control_info_t *val); + +// --- libcamera_control_id_map --- +bool libcamera_control_id_map_add(libcamera_control_id_map_t *idmap, unsigned int key, const libcamera_control_id_t *control_id); +const libcamera_control_id_t *libcamera_control_id_map_get(libcamera_control_id_map_t *idmap, unsigned int key); + +// --- libcamera_control_info_map --- +const libcamera_control_info_t *libcamera_control_info_map_at(libcamera_control_info_map_t *map, unsigned int key); +size_t libcamera_control_info_map_size(const libcamera_control_info_map_t *map); +size_t libcamera_control_info_map_count(const libcamera_control_info_map_t *map, unsigned int key); +const libcamera_control_info_t * libcamera_control_info_map_find(const libcamera_control_info_map_t *map, unsigned int key); + +// --- libcamera_control_info_map_iter_t --- +libcamera_control_info_map_iter_t* libcamera_control_info_map_iter_create(const libcamera_control_info_map_t* map); +bool libcamera_control_info_map_iter_has_next(const libcamera_control_info_map_iter_t* iter); +unsigned int libcamera_control_info_map_iter_key(const libcamera_control_info_map_iter_t* iter); 
+const libcamera_control_info_t* libcamera_control_info_map_iter_value(const libcamera_control_info_map_iter_t* iter); +void libcamera_control_info_map_iter_next(libcamera_control_info_map_iter_t* iter); +void libcamera_control_info_map_iter_destroy(libcamera_control_info_map_iter_t* iter); + #ifdef __cplusplus } #endif diff --git a/libcamera/Cargo.toml b/libcamera/Cargo.toml index 158ff50..de1f407 100644 --- a/libcamera/Cargo.toml +++ b/libcamera/Cargo.toml @@ -31,5 +31,5 @@ smallvec = "1.10" thiserror = "1.0" [build-dependencies] -libcamera-sys = { path = "../libcamera-sys", version = "0.3.0" } +# libcamera-sys = { path = "../libcamera-sys", version = "0.3.0" } semver = "1.0.22" diff --git a/libcamera/build.rs b/libcamera/build.rs index 3417151..77d623a 100644 --- a/libcamera/build.rs +++ b/libcamera/build.rs @@ -6,11 +6,7 @@ use std::{ use semver::{Comparator, Op, Version}; fn main() { - let libcamera_version = Version::new( - libcamera_sys::LIBCAMERA_VERSION_MAJOR as _, - libcamera_sys::LIBCAMERA_VERSION_MINOR as _, - libcamera_sys::LIBCAMERA_VERSION_PATCH as _, - ); + let libcamera_version = Version::new(0, 3, 2); let versioned_files = Path::new("versioned_files"); let mut candidates = std::fs::read_dir(versioned_files) diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index 07e205a..1fa8bec 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -43,6 +43,44 @@ impl DynControlEntry for T { } } +#[repr(transparent)] +pub struct ControlInfo(libcamera_control_info_t); + +impl ControlInfo { + pub(crate) unsafe fn from_ptr<'a>(ptr: NonNull) -> &'a mut Self { + // Safety: we can cast it because of `#[repr(transparent)]` + &mut *(ptr.as_ptr() as *mut Self) + } + + pub(crate) fn ptr(&self) -> *const libcamera_control_info_t { + // Safety: we can cast it because of `#[repr(transparent)]` + &self.0 as *const libcamera_control_info_t + } + + pub fn min(&self) -> ControlValue { + + unsafe { + 
ControlValue::read(NonNull::new(libcamera_control_info_min(self.ptr().cast_mut()).cast_mut()).unwrap()).unwrap() + } + } + + pub fn max(&self) -> ControlValue { + + unsafe { + ControlValue::read(NonNull::new(libcamera_control_info_max(self.ptr().cast_mut()).cast_mut()).unwrap()).unwrap() + } + } + + pub fn def(&self) -> ControlValue { + + unsafe { + ControlValue::read(NonNull::new(libcamera_control_info_def(self.ptr().cast_mut()).cast_mut()).unwrap()).unwrap() + } + } + +} + + #[repr(transparent)] pub struct ControlInfoMap(libcamera_control_info_map_t); @@ -51,6 +89,49 @@ impl ControlInfoMap { // Safety: we can cast it because of `#[repr(transparent)]` &mut *(ptr.as_ptr() as *mut Self) } + + pub(crate) fn ptr(&self) -> *const libcamera_control_info_map_t { + // Safety: we can cast it because of `#[repr(transparent)]` + &self.0 as *const libcamera_control_info_map_t + } + + pub fn at(&self, key: u32) -> &ControlInfo { + unsafe { + + let ptr = libcamera_control_info_map_at(self.ptr().cast_mut(), key); + println!("ptr {:?}", ptr); + ControlInfo::from_ptr(NonNull::new(ptr.cast_mut()).unwrap()) + } + } + + pub fn count(&self, key: u32) -> usize { + unsafe { + libcamera_control_info_map_count(self.ptr().cast_mut(), key) + } + } + + pub fn find(&self, key: u32) -> &ControlInfo { + unsafe { + ControlInfo::from_ptr(NonNull::new(libcamera_control_info_map_find(self.ptr().cast_mut(), key).cast_mut()).unwrap()) + } + } + + pub fn size(&self) -> usize { + unsafe { + libcamera_control_info_map_size(self.ptr().cast_mut()) + } + } + + +} + +impl<'a> IntoIterator for &'a ControlInfoMap { + type Item = (u32, &'a ControlInfo); + type IntoIter = ControlInfoMapIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + ControlInfoMapIter::new(self).expect("Failed to create ControlInfoMap iterator") + } } #[repr(transparent)] @@ -238,3 +319,60 @@ impl<'d> Drop for ControlListRefIterator<'d> { unsafe { libcamera_control_list_iter_destroy(self.it.as_ptr()) } } } + + +pub struct 
ControlInfoMapIter<'a> { + iter: *mut libcamera_control_info_map_iter_t, + marker: PhantomData<&'a libcamera_control_info_map_t>, +} + +impl<'a> ControlInfoMapIter<'a> { + + pub fn new(map: &'a ControlInfoMap) -> Option { + unsafe { + let iter = libcamera_control_info_map_iter_create(map.ptr()); + if iter.is_null() { + None + } else { + Some(ControlInfoMapIter { + iter, + marker: PhantomData, + }) + } + } + } + + pub fn next(&mut self) -> Option<(u32, &'a ControlInfo)> { + unsafe { + if libcamera_control_info_map_iter_has_next(self.iter) { + let key = libcamera_control_info_map_iter_key(self.iter); + let value_ptr = libcamera_control_info_map_iter_value(self.iter); + if value_ptr.is_null() { + None + } else { + let control_info = &*(value_ptr as *const ControlInfo); + libcamera_control_info_map_iter_next(self.iter); + Some((key, control_info)) + } + } else { + None + } + } + } +} + +impl<'a> Iterator for ControlInfoMapIter<'a> { + type Item = (u32, &'a ControlInfo); + + fn next(&mut self) -> Option { + self.next() + } +} + +impl<'a> Drop for ControlInfoMapIter<'a> { + fn drop(&mut self) { + unsafe { + libcamera_control_info_map_iter_destroy(self.iter); + } + } +} \ No newline at end of file diff --git a/libcamera/src/framebuffer_map.rs b/libcamera/src/framebuffer_map.rs index cd46eaf..67c93f2 100644 --- a/libcamera/src/framebuffer_map.rs +++ b/libcamera/src/framebuffer_map.rs @@ -113,6 +113,7 @@ impl MemoryMappedFrameBuffer { }) .collect() } + } impl AsFrameBuffer for MemoryMappedFrameBuffer { From eff5ee937b3e3b6c54f273c8864bff70db2c6252 Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Sat, 7 Dec 2024 23:18:29 -0800 Subject: [PATCH 02/14] cargo fmt --- libcamera/src/control.rs | 37 ++++++++++++-------------------- libcamera/src/framebuffer_map.rs | 1 - 2 files changed, 14 insertions(+), 24 deletions(-) diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index 1fa8bec..a637341 100644 --- a/libcamera/src/control.rs 
+++ b/libcamera/src/control.rs @@ -58,29 +58,27 @@ impl ControlInfo { } pub fn min(&self) -> ControlValue { - unsafe { - ControlValue::read(NonNull::new(libcamera_control_info_min(self.ptr().cast_mut()).cast_mut()).unwrap()).unwrap() + ControlValue::read(NonNull::new(libcamera_control_info_min(self.ptr().cast_mut()).cast_mut()).unwrap()) + .unwrap() } } pub fn max(&self) -> ControlValue { - unsafe { - ControlValue::read(NonNull::new(libcamera_control_info_max(self.ptr().cast_mut()).cast_mut()).unwrap()).unwrap() + ControlValue::read(NonNull::new(libcamera_control_info_max(self.ptr().cast_mut()).cast_mut()).unwrap()) + .unwrap() } } pub fn def(&self) -> ControlValue { - unsafe { - ControlValue::read(NonNull::new(libcamera_control_info_def(self.ptr().cast_mut()).cast_mut()).unwrap()).unwrap() + ControlValue::read(NonNull::new(libcamera_control_info_def(self.ptr().cast_mut()).cast_mut()).unwrap()) + .unwrap() } } - } - #[repr(transparent)] pub struct ControlInfoMap(libcamera_control_info_map_t); @@ -89,7 +87,7 @@ impl ControlInfoMap { // Safety: we can cast it because of `#[repr(transparent)]` &mut *(ptr.as_ptr() as *mut Self) } - + pub(crate) fn ptr(&self) -> *const libcamera_control_info_map_t { // Safety: we can cast it because of `#[repr(transparent)]` &self.0 as *const libcamera_control_info_map_t @@ -97,32 +95,27 @@ impl ControlInfoMap { pub fn at(&self, key: u32) -> &ControlInfo { unsafe { - let ptr = libcamera_control_info_map_at(self.ptr().cast_mut(), key); println!("ptr {:?}", ptr); - ControlInfo::from_ptr(NonNull::new(ptr.cast_mut()).unwrap()) + ControlInfo::from_ptr(NonNull::new(ptr.cast_mut()).unwrap()) } } pub fn count(&self, key: u32) -> usize { - unsafe { - libcamera_control_info_map_count(self.ptr().cast_mut(), key) - } + unsafe { libcamera_control_info_map_count(self.ptr().cast_mut(), key) } } pub fn find(&self, key: u32) -> &ControlInfo { unsafe { - ControlInfo::from_ptr(NonNull::new(libcamera_control_info_map_find(self.ptr().cast_mut(), 
key).cast_mut()).unwrap()) + ControlInfo::from_ptr( + NonNull::new(libcamera_control_info_map_find(self.ptr().cast_mut(), key).cast_mut()).unwrap(), + ) } } pub fn size(&self) -> usize { - unsafe { - libcamera_control_info_map_size(self.ptr().cast_mut()) - } + unsafe { libcamera_control_info_map_size(self.ptr().cast_mut()) } } - - } impl<'a> IntoIterator for &'a ControlInfoMap { @@ -320,14 +313,12 @@ impl<'d> Drop for ControlListRefIterator<'d> { } } - pub struct ControlInfoMapIter<'a> { iter: *mut libcamera_control_info_map_iter_t, marker: PhantomData<&'a libcamera_control_info_map_t>, } impl<'a> ControlInfoMapIter<'a> { - pub fn new(map: &'a ControlInfoMap) -> Option { unsafe { let iter = libcamera_control_info_map_iter_create(map.ptr()); @@ -375,4 +366,4 @@ impl<'a> Drop for ControlInfoMapIter<'a> { libcamera_control_info_map_iter_destroy(self.iter); } } -} \ No newline at end of file +} diff --git a/libcamera/src/framebuffer_map.rs b/libcamera/src/framebuffer_map.rs index 67c93f2..cd46eaf 100644 --- a/libcamera/src/framebuffer_map.rs +++ b/libcamera/src/framebuffer_map.rs @@ -113,7 +113,6 @@ impl MemoryMappedFrameBuffer { }) .collect() } - } impl AsFrameBuffer for MemoryMappedFrameBuffer { From 8e5825c2c4fc90dbe3691b4efb24fba8dea9afe7 Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Sun, 8 Dec 2024 00:32:00 -0800 Subject: [PATCH 03/14] added method to get name and id from ControlId and PropertyId enums directly --- libcamera-meta/src/bin/generate_from_git.rs | 33 +++++++++++++++++-- libcamera-sys/c_api/controls.cpp | 4 +-- libcamera-sys/c_api/controls.h | 4 +-- libcamera/versioned_files/0.0.1/controls.rs | 16 ++++++++- libcamera/versioned_files/0.0.1/properties.rs | 16 ++++++++- libcamera/versioned_files/0.0.2/controls.rs | 16 ++++++++- libcamera/versioned_files/0.0.2/properties.rs | 16 ++++++++- libcamera/versioned_files/0.0.3/controls.rs | 16 ++++++++- libcamera/versioned_files/0.0.3/properties.rs | 16 ++++++++- 
libcamera/versioned_files/0.0.4/controls.rs | 16 ++++++++- libcamera/versioned_files/0.0.4/properties.rs | 16 ++++++++- libcamera/versioned_files/0.0.5/controls.rs | 16 ++++++++- libcamera/versioned_files/0.0.5/properties.rs | 16 ++++++++- libcamera/versioned_files/0.1.0/controls.rs | 16 ++++++++- libcamera/versioned_files/0.1.0/properties.rs | 16 ++++++++- libcamera/versioned_files/0.2.0/controls.rs | 16 ++++++++- libcamera/versioned_files/0.2.0/properties.rs | 16 ++++++++- libcamera/versioned_files/0.3.0/controls.rs | 16 ++++++++- libcamera/versioned_files/0.3.0/properties.rs | 16 ++++++++- libcamera/versioned_files/0.3.1/controls.rs | 16 ++++++++- libcamera/versioned_files/0.3.1/properties.rs | 16 ++++++++- libcamera/versioned_files/0.3.2/controls.rs | 16 ++++++++- libcamera/versioned_files/0.3.2/properties.rs | 16 ++++++++- 23 files changed, 334 insertions(+), 27 deletions(-) diff --git a/libcamera-meta/src/bin/generate_from_git.rs b/libcamera-meta/src/bin/generate_from_git.rs index 5b37f96..5b013c7 100644 --- a/libcamera-meta/src/bin/generate_from_git.rs +++ b/libcamera-meta/src/bin/generate_from_git.rs @@ -301,6 +301,35 @@ mod generate_rust { } out += "}\n"; + let ffi_binding = match ty { + ControlsType::Control => "libcamera_control_name_from_id", + ControlsType::Property => "libcamera_property_name_by_id", + }; + + out += &format!("impl {} {{\n", name); + out += r#" + fn id(&self) -> u32 { + *self as u32 + } + "#; + out += "\n"; + out += r#" + pub fn name(&self) -> String { + unsafe {"#; + out += &format!(" let c_str = {}(self.id());\n", ffi_binding); + out += r#" + if c_str.is_null() { + // Handle null pointer as empty strings + return "".into(); + } + // Convert the C string to a Rust &str + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } + "#; + + out += "}\n"; + let mut dyn_variants = String::new(); for ctrl in controls.iter() { @@ -422,7 +451,7 @@ mod generate_rust { pub fn generate_controls_file(controls: &[Control], ty: ControlsType) -> String 
{ let header = r#" - use std::ops::{{Deref, DerefMut}}; + use std::{ffi::CStr, ops::{{Deref, DerefMut}}}; use num_enum::{{IntoPrimitive, TryFromPrimitive}}; #[allow(unused_imports)] use crate::control::{{Control, Property, ControlEntry, DynControlEntry}}; @@ -431,8 +460,6 @@ mod generate_rust { use crate::geometry::{{Rectangle, Size}}; #[allow(unused_imports)] use libcamera_sys::*; - - "#; let file = format!("{header}\n{}", generate_controls(controls, ty)); diff --git a/libcamera-sys/c_api/controls.cpp b/libcamera-sys/c_api/controls.cpp index 8ae224e..bd3cf08 100644 --- a/libcamera-sys/c_api/controls.cpp +++ b/libcamera-sys/c_api/controls.cpp @@ -41,7 +41,7 @@ enum libcamera_control_type libcamera_control_type_from_id(enum libcamera_contro return LIBCAMERA_CONTROL_TYPE_NONE; } -const char *libcamera_property_name(enum libcamera_property_id id) { +const char *libcamera_property_name_by_id(enum libcamera_property_id id) { auto it = libcamera::properties::properties.find(id); if (it != libcamera::properties::properties.end()) return it->second->name().c_str(); @@ -49,7 +49,7 @@ const char *libcamera_property_name(enum libcamera_property_id id) { return nullptr; } -enum libcamera_control_type libcamera_property_type(enum libcamera_property_id id) { +enum libcamera_control_type libcamera_property_type_by_id(enum libcamera_property_id id) { auto it = libcamera::properties::properties.find(id); if (it != libcamera::properties::properties.end()) return (enum libcamera_control_type)it->second->type(); diff --git a/libcamera-sys/c_api/controls.h b/libcamera-sys/c_api/controls.h index 52b1b4d..8cfe6ae 100644 --- a/libcamera-sys/c_api/controls.h +++ b/libcamera-sys/c_api/controls.h @@ -65,8 +65,8 @@ enum libcamera_control_id_enum libcamera_control_id(libcamera_control_id_t *cont const char *libcamera_control_name(libcamera_control_id_t *control); enum libcamera_control_type libcamera_control_type(libcamera_control_id_t *control); // --- libcamera_property_id --- -const char 
*libcamera_property_name(enum libcamera_property_id id); -enum libcamera_control_type libcamera_property_type(enum libcamera_property_id id); +const char *libcamera_property_name_by_id(enum libcamera_property_id id); +enum libcamera_control_type libcamera_property_type_by_id(enum libcamera_property_id id); // --- libcamera_control_list_t --- libcamera_control_list_t *libcamera_control_list_create(); diff --git a/libcamera/versioned_files/0.0.1/controls.rs b/libcamera/versioned_files/0.0.1/controls.rs index c8f883f..636bdda 100644 --- a/libcamera/versioned_files/0.0.1/controls.rs +++ b/libcamera/versioned_files/0.0.1/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -386,6 +386,20 @@ pub enum ControlId { #[cfg(feature = "vendor_draft")] TestPatternMode = TEST_PATTERN_MODE, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.0.1/properties.rs b/libcamera/versioned_files/0.0.1/properties.rs index 9c119ac..b191233 100644 --- a/libcamera/versioned_files/0.0.1/properties.rs +++ b/libcamera/versioned_files/0.0.1/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -714,6 +714,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.0.2/controls.rs b/libcamera/versioned_files/0.0.2/controls.rs index c8f883f..636bdda 100644 --- a/libcamera/versioned_files/0.0.2/controls.rs +++ b/libcamera/versioned_files/0.0.2/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -386,6 +386,20 @@ pub enum ControlId { #[cfg(feature = "vendor_draft")] TestPatternMode = TEST_PATTERN_MODE, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.0.2/properties.rs b/libcamera/versioned_files/0.0.2/properties.rs index 9c119ac..b191233 100644 --- a/libcamera/versioned_files/0.0.2/properties.rs +++ b/libcamera/versioned_files/0.0.2/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -714,6 +714,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.0.3/controls.rs b/libcamera/versioned_files/0.0.3/controls.rs index 216030c..e74bd1c 100644 --- a/libcamera/versioned_files/0.0.3/controls.rs +++ b/libcamera/versioned_files/0.0.3/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -388,6 +388,20 @@ pub enum ControlId { #[cfg(feature = "vendor_draft")] TestPatternMode = TEST_PATTERN_MODE, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.0.3/properties.rs b/libcamera/versioned_files/0.0.3/properties.rs index 9c119ac..b191233 100644 --- a/libcamera/versioned_files/0.0.3/properties.rs +++ b/libcamera/versioned_files/0.0.3/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -714,6 +714,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.0.4/controls.rs b/libcamera/versioned_files/0.0.4/controls.rs index 216030c..e74bd1c 100644 --- a/libcamera/versioned_files/0.0.4/controls.rs +++ b/libcamera/versioned_files/0.0.4/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -388,6 +388,20 @@ pub enum ControlId { #[cfg(feature = "vendor_draft")] TestPatternMode = TEST_PATTERN_MODE, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.0.4/properties.rs b/libcamera/versioned_files/0.0.4/properties.rs index 9c119ac..b191233 100644 --- a/libcamera/versioned_files/0.0.4/properties.rs +++ b/libcamera/versioned_files/0.0.4/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -714,6 +714,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.0.5/controls.rs b/libcamera/versioned_files/0.0.5/controls.rs index 216030c..e74bd1c 100644 --- a/libcamera/versioned_files/0.0.5/controls.rs +++ b/libcamera/versioned_files/0.0.5/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -388,6 +388,20 @@ pub enum ControlId { #[cfg(feature = "vendor_draft")] TestPatternMode = TEST_PATTERN_MODE, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.0.5/properties.rs b/libcamera/versioned_files/0.0.5/properties.rs index 9c119ac..b191233 100644 --- a/libcamera/versioned_files/0.0.5/properties.rs +++ b/libcamera/versioned_files/0.0.5/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -714,6 +714,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.1.0/controls.rs b/libcamera/versioned_files/0.1.0/controls.rs index 8347092..4edce11 100644 --- a/libcamera/versioned_files/0.1.0/controls.rs +++ b/libcamera/versioned_files/0.1.0/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -388,6 +388,20 @@ pub enum ControlId { #[cfg(feature = "vendor_draft")] TestPatternMode = TEST_PATTERN_MODE, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.1.0/properties.rs b/libcamera/versioned_files/0.1.0/properties.rs index 69a8e77..d6c9368 100644 --- a/libcamera/versioned_files/0.1.0/properties.rs +++ b/libcamera/versioned_files/0.1.0/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -720,6 +720,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.2.0/controls.rs b/libcamera/versioned_files/0.2.0/controls.rs index f3f3d45..ae6daf1 100644 --- a/libcamera/versioned_files/0.2.0/controls.rs +++ b/libcamera/versioned_files/0.2.0/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -446,6 +446,20 @@ pub enum ControlId { #[cfg(feature = "vendor_rpi")] Bcm2835StatsOutput = BCM2835_STATS_OUTPUT, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.2.0/properties.rs b/libcamera/versioned_files/0.2.0/properties.rs index 9a92ef3..f154a8d 100644 --- a/libcamera/versioned_files/0.2.0/properties.rs +++ b/libcamera/versioned_files/0.2.0/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -720,6 +720,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.3.0/controls.rs b/libcamera/versioned_files/0.3.0/controls.rs index ba61089..5799377 100644 --- a/libcamera/versioned_files/0.3.0/controls.rs +++ b/libcamera/versioned_files/0.3.0/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -446,6 +446,20 @@ pub enum ControlId { #[cfg(feature = "vendor_rpi")] Bcm2835StatsOutput = BCM2835_STATS_OUTPUT, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.3.0/properties.rs b/libcamera/versioned_files/0.3.0/properties.rs index 9a92ef3..f154a8d 100644 --- a/libcamera/versioned_files/0.3.0/properties.rs +++ b/libcamera/versioned_files/0.3.0/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -720,6 +720,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.3.1/controls.rs b/libcamera/versioned_files/0.3.1/controls.rs index fc265fd..057240c 100644 --- a/libcamera/versioned_files/0.3.1/controls.rs +++ b/libcamera/versioned_files/0.3.1/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -450,6 +450,20 @@ pub enum ControlId { #[cfg(feature = "vendor_rpi")] Bcm2835StatsOutput = BCM2835_STATS_OUTPUT, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.3.1/properties.rs b/libcamera/versioned_files/0.3.1/properties.rs index 9a92ef3..f154a8d 100644 --- a/libcamera/versioned_files/0.3.1/properties.rs +++ b/libcamera/versioned_files/0.3.1/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -720,6 +720,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] diff --git a/libcamera/versioned_files/0.3.2/controls.rs b/libcamera/versioned_files/0.3.2/controls.rs index 57bafd2..589b9b2 100644 --- a/libcamera/versioned_files/0.3.2/controls.rs +++ b/libcamera/versioned_files/0.3.2/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -528,6 +528,20 @@ pub enum ControlId { #[cfg(feature = "vendor_rpi")] Bcm2835StatsOutput = BCM2835_STATS_OUTPUT, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AE. 
/// /// \sa ExposureTime AnalogueGain diff --git a/libcamera/versioned_files/0.3.2/properties.rs b/libcamera/versioned_files/0.3.2/properties.rs index 9a92ef3..f154a8d 100644 --- a/libcamera/versioned_files/0.3.2/properties.rs +++ b/libcamera/versioned_files/0.3.2/properties.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -720,6 +720,20 @@ pub enum PropertyId { #[cfg(feature = "vendor_draft")] ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, } +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Camera mounting location #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] #[repr(i32)] From ad95922f31fc8d3962f289216478419ab19bcf45 Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Sun, 8 Dec 2024 00:55:33 -0800 Subject: [PATCH 04/14] Forgot values method for ControlInfo, now added --- libcamera-sys/c_api/controls.cpp | 8 ++++++++ libcamera-sys/c_api/controls.h | 1 + libcamera/src/control.rs | 31 ++++++++++++++++++++++++++++++- 3 files changed, 39 insertions(+), 1 deletion(-) diff --git a/libcamera-sys/c_api/controls.cpp b/libcamera-sys/c_api/controls.cpp index bd3cf08..5776796 100644 --- a/libcamera-sys/c_api/controls.cpp +++ b/libcamera-sys/c_api/controls.cpp @@ -150,6 +150,14 @@ const libcamera_control_value_t *libcamera_control_info_def(libcamera_control_in return &val->def(); } +const libcamera_control_value_t* libcamera_control_info_values(const libcamera_control_info_t* info, size_t* size) +{ + if (!info || !size) return nullptr; + const std::vector& values = info->values(); + *size = 
values.size(); + return reinterpret_cast(values.data()); +} + bool libcamera_control_id_map_add(libcamera_control_id_map_t *idmap, unsigned int key, const libcamera_control_id_t *control_id) { if (!idmap || !control_id) diff --git a/libcamera-sys/c_api/controls.h b/libcamera-sys/c_api/controls.h index 8cfe6ae..348763e 100644 --- a/libcamera-sys/c_api/controls.h +++ b/libcamera-sys/c_api/controls.h @@ -96,6 +96,7 @@ void libcamera_control_value_set(libcamera_control_value_t *val, enum libcamera_ const libcamera_control_value_t *libcamera_control_info_max(libcamera_control_info_t *val); const libcamera_control_value_t *libcamera_control_info_min(libcamera_control_info_t *val); const libcamera_control_value_t *libcamera_control_info_def(libcamera_control_info_t *val); +const libcamera_control_value_t* libcamera_control_info_values(const libcamera_control_info_t* info, size_t* size); // --- libcamera_control_id_map --- bool libcamera_control_id_map_add(libcamera_control_id_map_t *idmap, unsigned int key, const libcamera_control_id_t *control_id); diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index a637341..2645901 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -1,4 +1,4 @@ -use std::{marker::PhantomData, ptr::NonNull}; +use std::{marker::PhantomData, ptr::NonNull, slice}; use libcamera_sys::*; use thiserror::Error; @@ -77,6 +77,35 @@ impl ControlInfo { .unwrap() } } + + pub fn values(&self) -> Vec { + unsafe { + let mut size: usize = 0; + let values_ptr = libcamera_control_info_values(self.ptr(), &mut size as *mut usize); + + if values_ptr.is_null() || size == 0 { + return Vec::new(); + } + + let raw_slice = slice::from_raw_parts(values_ptr, size); + + let mut control_values = Vec::with_capacity(size); + for raw_val in raw_slice { + let val_ptr = NonNull::new(raw_val as *const libcamera_control_value_t as *mut libcamera_control_value_t) + .expect("Received a null pointer in raw_slice"); + + match ControlValue::read(val_ptr) { 
+ Ok(control_val) => control_values.push(control_val), + Err(e) => { + eprintln!("Failed to read ControlValue: {:?}", e); + } + } + } + + control_values + } + } + } #[repr(transparent)] From c052b1d9622fa9cea6f6d8f44754da90fdd9426e Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Sun, 8 Dec 2024 01:21:08 -0800 Subject: [PATCH 05/14] fixed issue with ControlInfo.values() not properly decoding into Rust Vec --- libcamera-sys/c_api/controls.cpp | 4 ++++ libcamera-sys/c_api/controls.h | 3 +-- libcamera/src/control.rs | 36 +++++++++++++++++++++----------- 3 files changed, 29 insertions(+), 14 deletions(-) diff --git a/libcamera-sys/c_api/controls.cpp b/libcamera-sys/c_api/controls.cpp index 5776796..1a6fb0c 100644 --- a/libcamera-sys/c_api/controls.cpp +++ b/libcamera-sys/c_api/controls.cpp @@ -140,6 +140,10 @@ void libcamera_control_value_set(libcamera_control_value_t *val, enum libcamera_ memcpy(storage.data(), data, storage.size()); } +size_t libcamera_control_value_size() { + return sizeof(libcamera::ControlValue); +} + const libcamera_control_value_t *libcamera_control_info_max(libcamera_control_info_t *val){ return &val->max(); } diff --git a/libcamera-sys/c_api/controls.h b/libcamera-sys/c_api/controls.h index 348763e..a5390f9 100644 --- a/libcamera-sys/c_api/controls.h +++ b/libcamera-sys/c_api/controls.h @@ -91,13 +91,12 @@ bool libcamera_control_value_is_array(const libcamera_control_value_t *val); size_t libcamera_control_value_num_elements(const libcamera_control_value_t *val); const void *libcamera_control_value_get(const libcamera_control_value_t *val); void libcamera_control_value_set(libcamera_control_value_t *val, enum libcamera_control_type type, const void *data, bool is_array, size_t num_elements); - +size_t libcamera_control_value_size(); // --- libcamera_control_info_t --- const libcamera_control_value_t *libcamera_control_info_max(libcamera_control_info_t *val); const libcamera_control_value_t 
*libcamera_control_info_min(libcamera_control_info_t *val); const libcamera_control_value_t *libcamera_control_info_def(libcamera_control_info_t *val); const libcamera_control_value_t* libcamera_control_info_values(const libcamera_control_info_t* info, size_t* size); - // --- libcamera_control_id_map --- bool libcamera_control_id_map_add(libcamera_control_id_map_t *idmap, unsigned int key, const libcamera_control_id_t *control_id); const libcamera_control_id_t *libcamera_control_id_map_get(libcamera_control_id_map_t *idmap, unsigned int key); diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index 2645901..3162d6c 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -1,4 +1,4 @@ -use std::{marker::PhantomData, ptr::NonNull, slice}; +use std::{marker::PhantomData, ptr::NonNull}; use libcamera_sys::*; use thiserror::Error; @@ -82,26 +82,38 @@ impl ControlInfo { unsafe { let mut size: usize = 0; let values_ptr = libcamera_control_info_values(self.ptr(), &mut size as *mut usize); - + if values_ptr.is_null() || size == 0 { return Vec::new(); } - - let raw_slice = slice::from_raw_parts(values_ptr, size); - + + // Determine the size of libcamera_control_value_t + let control_value_size = libcamera_control_value_size(); + println!("libcamera::ControlValue size: {}", control_value_size); + + // Cast the pointer to *const u8 for byte-wise pointer arithmetic + let base_ptr = values_ptr as *const u8; + let mut control_values = Vec::with_capacity(size); - for raw_val in raw_slice { - let val_ptr = NonNull::new(raw_val as *const libcamera_control_value_t as *mut libcamera_control_value_t) - .expect("Received a null pointer in raw_slice"); - - match ControlValue::read(val_ptr) { + for i in 0..size { + // Calculate the pointer to the i-th ControlValue + let offset = i * control_value_size; + let val_ptr = base_ptr.add(offset) as *const libcamera_control_value_t; + + if val_ptr.is_null() { + eprintln!("ControlValue at index {} is null", i); + 
continue; + } + + // Read and convert the ControlValue + match ControlValue::read(NonNull::new(val_ptr.cast_mut()).unwrap()) { Ok(control_val) => control_values.push(control_val), Err(e) => { - eprintln!("Failed to read ControlValue: {:?}", e); + eprintln!("Failed to read ControlValue at index {}: {:?}", i, e); } } } - + control_values } } From 9295c9685d926e2d5da8d67001e4a5b56a865f4b Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Mon, 9 Dec 2024 18:44:31 -0800 Subject: [PATCH 06/14] Improved error handling --- libcamera/src/control.rs | 37 +++++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index 3162d6c..a231824 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -82,29 +82,29 @@ impl ControlInfo { unsafe { let mut size: usize = 0; let values_ptr = libcamera_control_info_values(self.ptr(), &mut size as *mut usize); - + if values_ptr.is_null() || size == 0 { return Vec::new(); } - + // Determine the size of libcamera_control_value_t let control_value_size = libcamera_control_value_size(); println!("libcamera::ControlValue size: {}", control_value_size); - + // Cast the pointer to *const u8 for byte-wise pointer arithmetic let base_ptr = values_ptr as *const u8; - + let mut control_values = Vec::with_capacity(size); for i in 0..size { // Calculate the pointer to the i-th ControlValue let offset = i * control_value_size; let val_ptr = base_ptr.add(offset) as *const libcamera_control_value_t; - + if val_ptr.is_null() { eprintln!("ControlValue at index {} is null", i); continue; } - + // Read and convert the ControlValue match ControlValue::read(NonNull::new(val_ptr.cast_mut()).unwrap()) { Ok(control_val) => control_values.push(control_val), @@ -113,11 +113,10 @@ impl ControlInfo { } } } - + control_values } } - } #[repr(transparent)] @@ -134,11 +133,13 @@ impl ControlInfoMap { &self.0 as *const 
libcamera_control_info_map_t } - pub fn at(&self, key: u32) -> &ControlInfo { + pub fn at(&self, key: u32) -> Result<&ControlInfo, ControlError> { unsafe { - let ptr = libcamera_control_info_map_at(self.ptr().cast_mut(), key); - println!("ptr {:?}", ptr); - ControlInfo::from_ptr(NonNull::new(ptr.cast_mut()).unwrap()) + let ptr = NonNull::new(libcamera_control_info_map_at(self.ptr().cast_mut(), key).cast_mut()); + match ptr { + Some(ptr) => Ok(ControlInfo::from_ptr(ptr)), + None => Err(ControlError::NotFound(key)), + } } } @@ -146,11 +147,15 @@ impl ControlInfoMap { unsafe { libcamera_control_info_map_count(self.ptr().cast_mut(), key) } } - pub fn find(&self, key: u32) -> &ControlInfo { + pub fn find(&self, key: u32) -> Result<&ControlInfo, ControlError> { unsafe { - ControlInfo::from_ptr( - NonNull::new(libcamera_control_info_map_find(self.ptr().cast_mut(), key).cast_mut()).unwrap(), - ) + + let ptr = NonNull::new(libcamera_control_info_map_find(self.ptr().cast_mut(), key).cast_mut()); + + match ptr { + Some(ptr) => Ok(ControlInfo::from_ptr(ptr)), + None => Err(ControlError::NotFound(key)), + } } } From 200d8bf1589a96d74eca51eede39461f76662f8f Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Mon, 9 Dec 2024 18:44:50 -0800 Subject: [PATCH 07/14] rust fmt --- libcamera/src/control.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index a231824..d8b719c 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -149,7 +149,6 @@ impl ControlInfoMap { pub fn find(&self, key: u32) -> Result<&ControlInfo, ControlError> { unsafe { - let ptr = NonNull::new(libcamera_control_info_map_find(self.ptr().cast_mut(), key).cast_mut()); match ptr { From 1a26183b80b09f61bd086564bb1b46024ed91236 Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Sat, 12 Apr 2025 17:11:54 -0700 Subject: [PATCH 08/14] yes --- 
libcamera-meta/src/bin/generate_from_git.rs | 3 +- libcamera-meta/src/lib.rs | 2 + libcamera-sys/Cargo.toml | 2 +- libcamera-sys/c_api/controls.h | 1 + libcamera/Cargo.toml | 6 +- libcamera/build.rs | 24 +- libcamera/src/camera.rs | 2 +- libcamera/src/camera_manager.rs | 42 +- libcamera/src/control.rs | 25 +- libcamera/src/control_value.rs | 12 +- libcamera/src/geometry.rs | 10 + libcamera/versioned_files/0.0.1/controls.rs | 2 +- libcamera/versioned_files/0.0.1/properties.rs | 2 +- libcamera/versioned_files/0.0.2/controls.rs | 2 +- libcamera/versioned_files/0.0.2/properties.rs | 2 +- libcamera/versioned_files/0.0.3/controls.rs | 2 +- libcamera/versioned_files/0.0.3/properties.rs | 2 +- libcamera/versioned_files/0.0.4/controls.rs | 2 +- libcamera/versioned_files/0.0.4/properties.rs | 2 +- libcamera/versioned_files/0.0.5/controls.rs | 2 +- libcamera/versioned_files/0.0.5/properties.rs | 2 +- libcamera/versioned_files/0.1.0/controls.rs | 2 +- libcamera/versioned_files/0.1.0/properties.rs | 2 +- libcamera/versioned_files/0.2.0/controls.rs | 2 +- libcamera/versioned_files/0.2.0/properties.rs | 2 +- libcamera/versioned_files/0.3.0/controls.rs | 2 +- libcamera/versioned_files/0.3.0/properties.rs | 2 +- libcamera/versioned_files/0.3.1/controls.rs | 2 +- libcamera/versioned_files/0.3.1/properties.rs | 2 +- libcamera/versioned_files/0.3.2/controls.rs | 2 +- libcamera/versioned_files/0.3.2/properties.rs | 2 +- .../0.4.0/control_ids_core.yaml | 1052 ++++++ .../0.4.0/control_ids_debug.yaml | 6 + .../0.4.0/control_ids_draft.yaml | 327 ++ .../0.4.0/control_ids_rpi.yaml | 61 + libcamera/versioned_files/0.4.0/controls.rs | 3319 +++++++++++++++++ libcamera/versioned_files/0.4.0/properties.rs | 1777 +++++++++ .../0.4.0/property_ids_core.yaml | 704 ++++ .../0.4.0/property_ids_draft.yaml | 39 + 39 files changed, 7423 insertions(+), 31 deletions(-) create mode 100644 libcamera/versioned_files/0.4.0/control_ids_core.yaml create mode 100644 
libcamera/versioned_files/0.4.0/control_ids_debug.yaml create mode 100644 libcamera/versioned_files/0.4.0/control_ids_draft.yaml create mode 100644 libcamera/versioned_files/0.4.0/control_ids_rpi.yaml create mode 100644 libcamera/versioned_files/0.4.0/controls.rs create mode 100644 libcamera/versioned_files/0.4.0/properties.rs create mode 100644 libcamera/versioned_files/0.4.0/property_ids_core.yaml create mode 100644 libcamera/versioned_files/0.4.0/property_ids_draft.yaml diff --git a/libcamera-meta/src/bin/generate_from_git.rs b/libcamera-meta/src/bin/generate_from_git.rs index 5b013c7..3f0cea7 100644 --- a/libcamera-meta/src/bin/generate_from_git.rs +++ b/libcamera-meta/src/bin/generate_from_git.rs @@ -243,6 +243,7 @@ mod generate_rust { ControlType::String => "String", ControlType::Rectangle => "Rectangle", ControlType::Size => "Size", + ControlType::Point => "Point", }; match size { @@ -457,7 +458,7 @@ mod generate_rust { use crate::control::{{Control, Property, ControlEntry, DynControlEntry}}; use crate::control_value::{{ControlValue, ControlValueError}}; #[allow(unused_imports)] - use crate::geometry::{{Rectangle, Size}}; + use crate::geometry::{{Rectangle, Size, Point}}; #[allow(unused_imports)] use libcamera_sys::*; "#; diff --git a/libcamera-meta/src/lib.rs b/libcamera-meta/src/lib.rs index 0565813..b746575 100644 --- a/libcamera-meta/src/lib.rs +++ b/libcamera-meta/src/lib.rs @@ -10,6 +10,7 @@ pub enum ControlType { String, Rectangle, Size, + Point, } impl TryFrom<&str> for ControlType { @@ -25,6 +26,7 @@ impl TryFrom<&str> for ControlType { "string" => Ok(ControlType::String), "Rectangle" => Ok(ControlType::Rectangle), "Size" => Ok(ControlType::Size), + "Point" => Ok(ControlType::Point), _ => Err(format!("Unknown control type {}", value)), } } diff --git a/libcamera-sys/Cargo.toml b/libcamera-sys/Cargo.toml index 883da76..efef19c 100644 --- a/libcamera-sys/Cargo.toml +++ b/libcamera-sys/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcamera-sys" 
-version = "0.3.0" +version = "0.4.0" edition = "2021" description = "Low-level unsafe bindings to libcamera" documentation = "https://docs.rs/libcamera-sys" diff --git a/libcamera-sys/c_api/controls.h b/libcamera-sys/c_api/controls.h index a5390f9..53b0746 100644 --- a/libcamera-sys/c_api/controls.h +++ b/libcamera-sys/c_api/controls.h @@ -54,6 +54,7 @@ enum libcamera_control_type { LIBCAMERA_CONTROL_TYPE_STRING, LIBCAMERA_CONTROL_TYPE_RECTANGLE, LIBCAMERA_CONTROL_TYPE_SIZE, + LIBCAMERA_CONTROL_TYPE_POINT, }; // --- libcamera_control_id --- diff --git a/libcamera/Cargo.toml b/libcamera/Cargo.toml index de1f407..c9a31f0 100644 --- a/libcamera/Cargo.toml +++ b/libcamera/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcamera" -version = "0.3.0" +version = "0.4.0" edition = "2021" description = "Safe Rust bindings for libcamera" documentation = "https://docs.rs/libcamera" @@ -25,11 +25,11 @@ vendor_rpi = [] bitflags = "2.0.0-rc.2" drm-fourcc = "2.2" libc = "0.2" -libcamera-sys = { path = "../libcamera-sys", version = "0.3.0", default-features = false } +libcamera-sys = { path = "../libcamera-sys", version = "0.4.0", default-features = false } num_enum = "0.6.1" smallvec = "1.10" thiserror = "1.0" [build-dependencies] -# libcamera-sys = { path = "../libcamera-sys", version = "0.3.0" } +pkg-config = "0.3" semver = "1.0.22" diff --git a/libcamera/build.rs b/libcamera/build.rs index 77d623a..273bfad 100644 --- a/libcamera/build.rs +++ b/libcamera/build.rs @@ -3,10 +3,32 @@ use std::{ path::{Path, PathBuf}, }; +use core::panic; use semver::{Comparator, Op, Version}; fn main() { - let libcamera_version = Version::new(0, 3, 2); + + let libcamera = match pkg_config::probe_library("libcamera") { + Ok(lib) => Ok(lib), + Err(e) => { + // Older libcamera versions use camera name instead of libcamera, try that instead + match pkg_config::probe_library("camera") { + Ok(lib) => Ok(lib), + // Return original error + Err(_) => Err(e), + } + } + } + .unwrap(); + + let libcamera_version 
= match Version::parse(&libcamera.version) { + Ok(v) => v, + Err(e) => { + panic!("bad version from pkgconfig, {e:?}") + } + }; + + let versioned_files = Path::new("versioned_files"); let mut candidates = std::fs::read_dir(versioned_files) diff --git a/libcamera/src/camera.rs b/libcamera/src/camera.rs index f862be8..6e02ec7 100644 --- a/libcamera/src/camera.rs +++ b/libcamera/src/camera.rs @@ -178,7 +178,7 @@ impl<'d> Camera<'d> { } /// Acquires exclusive rights to the camera, which allows changing configuration and capturing. - pub fn acquire(&self) -> io::Result> { + pub fn acquire(&self) -> io::Result> { let ret = unsafe { libcamera_camera_acquire(self.ptr.as_ptr()) }; if ret < 0 { Err(io::Error::from_raw_os_error(ret)) diff --git a/libcamera/src/camera_manager.rs b/libcamera/src/camera_manager.rs index 5bbe5b3..c982efd 100644 --- a/libcamera/src/camera_manager.rs +++ b/libcamera/src/camera_manager.rs @@ -31,10 +31,17 @@ impl CameraManager { } /// Enumerates cameras within the system. - pub fn cameras(&self) -> CameraList<'_> { + pub fn cameras<'a>(&self) -> CameraList<'a> { unsafe { CameraList::from_ptr(NonNull::new(libcamera_camera_manager_cameras(self.ptr.as_ptr())).unwrap()) } } + pub fn stop(&self) { + unsafe { + libcamera_camera_manager_stop(self.ptr.as_ptr()); + // libcamera_camera_manager_destroy(self.ptr.as_ptr()); + } + } + /// Set the log level. /// /// # Parameters @@ -86,10 +93,15 @@ impl<'d> CameraList<'d> { /// Returns camera at a given index. /// /// Returns [None] if index is out of range of available cameras. - pub fn get(&self, index: usize) -> Option> { + pub fn get(&self, index: usize) -> Option> { let cam_ptr = unsafe { libcamera_camera_list_get(self.ptr.as_ptr(), index as _) }; NonNull::new(cam_ptr).map(|p| unsafe { Camera::from_ptr(p) }) } + + /// Returns an iterator over the cameras in the list. 
+ pub fn iter(&'d self) -> CameraListIter<'d> { + CameraListIter { list: self, index: 0 } + } } impl<'d> Drop for CameraList<'d> { @@ -99,3 +111,29 @@ impl<'d> Drop for CameraList<'d> { } } } + +pub struct CameraListIter<'d> { + list: &'d CameraList<'d>, + index: usize, +} + +impl<'d> Iterator for CameraListIter<'d> { + type Item = Camera<'d>; + + fn next(&mut self) -> Option { + if self.index < self.list.len() { + let camera = self.list.get(self.index); + self.index += 1; + camera + } else { + None + } + } + + fn size_hint(&self) -> (usize, Option) { + let len = self.list.len().saturating_sub(self.index); + (len, Some(len)) + } +} + +impl<'d> ExactSizeIterator for CameraListIter<'d> {} diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index d8b719c..bd7b9b8 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -89,7 +89,6 @@ impl ControlInfo { // Determine the size of libcamera_control_value_t let control_value_size = libcamera_control_value_size(); - println!("libcamera::ControlValue size: {}", control_value_size); // Cast the pointer to *const u8 for byte-wise pointer arithmetic let base_ptr = values_ptr as *const u8; @@ -224,6 +223,29 @@ impl ControlList { Ok(()) } + + /// Sets control value. + /// + /// This can fail if control is not supported by the camera, but due to libcamera API limitations an error will not + /// be returned. Use [ControlList::get] if you need to ensure that value was set. 
+ pub fn set_raw(&mut self, id: u32, val: ControlValue) -> Result<(), ControlError> { + unsafe { + let val_ptr = NonNull::new(libcamera_control_value_create()).unwrap(); + val.write(val_ptr); + libcamera_control_list_set(self.ptr().cast_mut(), id as _, val_ptr.as_ptr()); + libcamera_control_value_destroy(val_ptr.as_ptr()); + } + + Ok(()) + } + + pub fn get_raw(&mut self, id: u32) -> Result { + let val_ptr = NonNull::new(unsafe { libcamera_control_list_get(self.ptr().cast_mut(), id as _).cast_mut() }) + .ok_or(ControlError::NotFound(id))?; + + let val = unsafe { ControlValue::read(val_ptr) }?; + Ok(val) + } } impl<'d> IntoIterator for &'d ControlList { @@ -276,6 +298,7 @@ impl PropertyList { .ok_or(ControlError::NotFound(C::ID))?; let val = unsafe { ControlValue::read(val_ptr) }?; + Ok(C::try_from(val)?) } diff --git a/libcamera/src/control_value.rs b/libcamera/src/control_value.rs index 5e627f2..a87fb68 100644 --- a/libcamera/src/control_value.rs +++ b/libcamera/src/control_value.rs @@ -4,7 +4,7 @@ use libcamera_sys::*; use smallvec::{smallvec, SmallVec}; use thiserror::Error; -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Point, Rectangle, Size}; #[derive(Error, Debug)] pub enum ControlValueError { @@ -34,6 +34,7 @@ pub enum ControlValue { String(String), Rectangle(SmallVec<[Rectangle; 1]>), Size(SmallVec<[Size; 1]>), + Point(SmallVec<[Point; 1]>), } macro_rules! impl_control_value { @@ -77,6 +78,7 @@ impl_control_value!(ControlValue::Int64, i64); impl_control_value!(ControlValue::Float, f32); impl_control_value!(ControlValue::Rectangle, Rectangle); impl_control_value!(ControlValue::Size, Size); +impl_control_value!(ControlValue::Point, Point); macro_rules! 
impl_control_value_vec { ($p:path, $type:ty) => { @@ -110,6 +112,7 @@ impl_control_value_vec!(ControlValue::Int64, i64); impl_control_value_vec!(ControlValue::Float, f32); impl_control_value_vec!(ControlValue::Rectangle, Rectangle); impl_control_value_vec!(ControlValue::Size, Size); +impl_control_value_vec!(ControlValue::Point, Point); macro_rules! impl_control_value_array { ($p:path, $type:ty) => { @@ -184,6 +187,7 @@ impl_control_value_array!(ControlValue::Int64, i64); impl_control_value_array!(ControlValue::Float, f32); impl_control_value_array!(ControlValue::Rectangle, Rectangle); impl_control_value_array!(ControlValue::Size, Size); +impl_control_value_array!(ControlValue::Point, Point); impl From for ControlValue { fn from(val: String) -> Self { @@ -248,6 +252,10 @@ impl ControlValue { let slice = core::slice::from_raw_parts(data as *const libcamera_size_t, num_elements); Ok(Self::Size(SmallVec::from_iter(slice.iter().map(|r| Size::from(*r))))) } + LIBCAMERA_CONTROL_TYPE_POINT => { + let slice = core::slice::from_raw_parts(data as *const libcamera_point_t, num_elements); + Ok(Self::Point(SmallVec::from_iter(slice.iter().map(|r| Point::from(*r))))) + } _ => Err(ControlValueError::UnknownType(ty)), } } @@ -263,6 +271,7 @@ impl ControlValue { ControlValue::String(v) => (v.as_ptr().cast(), v.len()), ControlValue::Rectangle(v) => (v.as_ptr().cast(), v.len()), ControlValue::Size(v) => (v.as_ptr().cast(), v.len()), + ControlValue::Point(v) => (v.as_ptr().cast(), v.len()), }; let ty = self.ty(); @@ -287,6 +296,7 @@ impl ControlValue { ControlValue::String(_) => LIBCAMERA_CONTROL_TYPE_STRING, ControlValue::Rectangle(_) => LIBCAMERA_CONTROL_TYPE_RECTANGLE, ControlValue::Size(_) => LIBCAMERA_CONTROL_TYPE_SIZE, + ControlValue::Point(_) => LIBCAMERA_CONTROL_TYPE_POINT, } } } diff --git a/libcamera/src/geometry.rs b/libcamera/src/geometry.rs index 46fd6aa..c09ac27 100644 --- a/libcamera/src/geometry.rs +++ b/libcamera/src/geometry.rs @@ -13,6 +13,16 @@ impl From for Point { 
} } +impl From for libcamera_point_t { + fn from(p: Point) -> Self { + Self { + x: p.x, + y: p.y, + } + } +} + + /// Represents `libcamera::Size` #[derive(Debug, Clone, Copy)] pub struct Size { diff --git a/libcamera/versioned_files/0.0.1/controls.rs b/libcamera/versioned_files/0.0.1/controls.rs index 636bdda..cacab77 100644 --- a/libcamera/versioned_files/0.0.1/controls.rs +++ b/libcamera/versioned_files/0.0.1/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.1/properties.rs b/libcamera/versioned_files/0.0.1/properties.rs index b191233..29549a3 100644 --- a/libcamera/versioned_files/0.0.1/properties.rs +++ b/libcamera/versioned_files/0.0.1/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.2/controls.rs b/libcamera/versioned_files/0.0.2/controls.rs index 636bdda..cacab77 100644 --- a/libcamera/versioned_files/0.0.2/controls.rs +++ b/libcamera/versioned_files/0.0.2/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] 
-use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.2/properties.rs b/libcamera/versioned_files/0.0.2/properties.rs index b191233..29549a3 100644 --- a/libcamera/versioned_files/0.0.2/properties.rs +++ b/libcamera/versioned_files/0.0.2/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.3/controls.rs b/libcamera/versioned_files/0.0.3/controls.rs index e74bd1c..0166d6b 100644 --- a/libcamera/versioned_files/0.0.3/controls.rs +++ b/libcamera/versioned_files/0.0.3/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.3/properties.rs b/libcamera/versioned_files/0.0.3/properties.rs index b191233..29549a3 100644 --- a/libcamera/versioned_files/0.0.3/properties.rs +++ b/libcamera/versioned_files/0.0.3/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, 
ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.4/controls.rs b/libcamera/versioned_files/0.0.4/controls.rs index e74bd1c..0166d6b 100644 --- a/libcamera/versioned_files/0.0.4/controls.rs +++ b/libcamera/versioned_files/0.0.4/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.4/properties.rs b/libcamera/versioned_files/0.0.4/properties.rs index b191233..29549a3 100644 --- a/libcamera/versioned_files/0.0.4/properties.rs +++ b/libcamera/versioned_files/0.0.4/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.5/controls.rs b/libcamera/versioned_files/0.0.5/controls.rs index e74bd1c..0166d6b 100644 --- a/libcamera/versioned_files/0.0.5/controls.rs +++ b/libcamera/versioned_files/0.0.5/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use 
crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.5/properties.rs b/libcamera/versioned_files/0.0.5/properties.rs index b191233..29549a3 100644 --- a/libcamera/versioned_files/0.0.5/properties.rs +++ b/libcamera/versioned_files/0.0.5/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.1.0/controls.rs b/libcamera/versioned_files/0.1.0/controls.rs index 4edce11..e08c6d3 100644 --- a/libcamera/versioned_files/0.1.0/controls.rs +++ b/libcamera/versioned_files/0.1.0/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.1.0/properties.rs b/libcamera/versioned_files/0.1.0/properties.rs index d6c9368..e77121b 100644 --- a/libcamera/versioned_files/0.1.0/properties.rs +++ b/libcamera/versioned_files/0.1.0/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, 
ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.2.0/controls.rs b/libcamera/versioned_files/0.2.0/controls.rs index ae6daf1..baf2c21 100644 --- a/libcamera/versioned_files/0.2.0/controls.rs +++ b/libcamera/versioned_files/0.2.0/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.2.0/properties.rs b/libcamera/versioned_files/0.2.0/properties.rs index f154a8d..7548b75 100644 --- a/libcamera/versioned_files/0.2.0/properties.rs +++ b/libcamera/versioned_files/0.2.0/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.0/controls.rs b/libcamera/versioned_files/0.3.0/controls.rs index 5799377..c22ff95 100644 --- a/libcamera/versioned_files/0.3.0/controls.rs +++ b/libcamera/versioned_files/0.3.0/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use 
crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.0/properties.rs b/libcamera/versioned_files/0.3.0/properties.rs index f154a8d..7548b75 100644 --- a/libcamera/versioned_files/0.3.0/properties.rs +++ b/libcamera/versioned_files/0.3.0/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.1/controls.rs b/libcamera/versioned_files/0.3.1/controls.rs index 057240c..123aa98 100644 --- a/libcamera/versioned_files/0.3.1/controls.rs +++ b/libcamera/versioned_files/0.3.1/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.1/properties.rs b/libcamera/versioned_files/0.3.1/properties.rs index f154a8d..7548b75 100644 --- a/libcamera/versioned_files/0.3.1/properties.rs +++ b/libcamera/versioned_files/0.3.1/properties.rs @@ -4,7 +4,7 @@ use 
num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.2/controls.rs b/libcamera/versioned_files/0.3.2/controls.rs index 589b9b2..cba15f1 100644 --- a/libcamera/versioned_files/0.3.2/controls.rs +++ b/libcamera/versioned_files/0.3.2/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.2/properties.rs b/libcamera/versioned_files/0.3.2/properties.rs index f154a8d..7548b75 100644 --- a/libcamera/versioned_files/0.3.2/properties.rs +++ b/libcamera/versioned_files/0.3.2/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size}; +use crate::geometry::{Rectangle, Size, Point}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.4.0/control_ids_core.yaml b/libcamera/versioned_files/0.4.0/control_ids_core.yaml new file mode 100644 index 0000000..1dfaee0 --- /dev/null +++ 
b/libcamera/versioned_files/0.4.0/control_ids_core.yaml @@ -0,0 +1,1052 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +# Unless otherwise stated, all controls are bi-directional, i.e. they can be +# set through Request::controls() and returned out through Request::metadata(). +vendor: libcamera +controls: + - AeEnable: + type: bool + direction: inout + description: | + Enable or disable the AE. + + \sa ExposureTime AnalogueGain + + - AeLocked: + type: bool + direction: out + description: | + Report the lock status of a running AE algorithm. + + If the AE algorithm is locked the value shall be set to true, if it's + converging it shall be set to false. If the AE algorithm is not + running the control shall not be present in the metadata control list. + + \sa AeEnable + + # AeMeteringMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AeMeteringMode: + type: int32_t + direction: inout + description: | + Specify a metering mode for the AE algorithm to use. + + The metering modes determine which parts of the image are used to + determine the scene brightness. Metering modes may be platform specific + and not all metering modes may be supported. + enum: + - name: MeteringCentreWeighted + value: 0 + description: Centre-weighted metering mode. + - name: MeteringSpot + value: 1 + description: Spot metering mode. + - name: MeteringMatrix + value: 2 + description: Matrix metering mode. + - name: MeteringCustom + value: 3 + description: Custom metering mode. + + # AeConstraintMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AeConstraintMode: + type: int32_t + direction: inout + description: | + Specify a constraint mode for the AE algorithm to use. + + The constraint modes determine how the measured scene brightness is + adjusted to reach the desired target exposure. 
Constraint modes may be + platform specific, and not all constraint modes may be supported. + enum: + - name: ConstraintNormal + value: 0 + description: | + Default constraint mode. + + This mode aims to balance the exposure of different parts of the + image so as to reach a reasonable average level. However, highlights + in the image may appear over-exposed and lowlights may appear + under-exposed. + - name: ConstraintHighlight + value: 1 + description: | + Highlight constraint mode. + + This mode adjusts the exposure levels in order to try and avoid + over-exposing the brightest parts (highlights) of an image. + Other non-highlight parts of the image may appear under-exposed. + - name: ConstraintShadows + value: 2 + description: | + Shadows constraint mode. + + This mode adjusts the exposure levels in order to try and avoid + under-exposing the dark parts (shadows) of an image. Other normally + exposed parts of the image may appear over-exposed. + - name: ConstraintCustom + value: 3 + description: | + Custom constraint mode. + + # AeExposureMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AeExposureMode: + type: int32_t + direction: inout + description: | + Specify an exposure mode for the AE algorithm to use. + + The exposure modes specify how the desired total exposure is divided + between the exposure time and the sensor's analogue gain. They are + platform specific, and not all exposure modes may be supported. + enum: + - name: ExposureNormal + value: 0 + description: Default exposure mode. + - name: ExposureShort + value: 1 + description: Exposure mode allowing only short exposure times. + - name: ExposureLong + value: 2 + description: Exposure mode allowing long exposure times. + - name: ExposureCustom + value: 3 + description: Custom exposure mode. + + - ExposureValue: + type: float + direction: inout + description: | + Specify an Exposure Value (EV) parameter. 
+ + The EV parameter will only be applied if the AE algorithm is currently + enabled. + + By convention EV adjusts the exposure as log2. For example + EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment + of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. + + \sa AeEnable + + - ExposureTime: + type: int32_t + direction: inout + description: | + Exposure time for the frame applied in the sensor device. + + This value is specified in micro-seconds. + + Setting this value means that it is now fixed and the AE algorithm may + not change it. Setting it back to zero returns it to the control of the + AE algorithm. + + \sa AnalogueGain AeEnable + + \todo Document the interactions between AeEnable and setting a fixed + value for this control. Consider interactions with other AE features, + such as aperture and aperture/shutter priority mode, and decide if + control of which features should be automatically adjusted shouldn't + better be handled through a separate AE mode control. + + - AnalogueGain: + type: float + direction: inout + description: | + Analogue gain value applied in the sensor device. + + The value of the control specifies the gain multiplier applied to all + colour channels. This value cannot be lower than 1.0. + + Setting this value means that it is now fixed and the AE algorithm may + not change it. Setting it back to zero returns it to the control of the + AE algorithm. + + \sa ExposureTime AeEnable + + \todo Document the interactions between AeEnable and setting a fixed + value for this control. Consider interactions with other AE features, + such as aperture and aperture/shutter priority mode, and decide if + control of which features should be automatically adjusted shouldn't + better be handled through a separate AE mode control. + + - AeFlickerMode: + type: int32_t + direction: inout + description: | + Set the flicker avoidance mode for AGC/AEC. 
+ + The flicker mode determines whether, and how, the AGC/AEC algorithm + attempts to hide flicker effects caused by the duty cycle of artificial + lighting. + + Although implementation dependent, many algorithms for "flicker + avoidance" work by restricting this exposure time to integer multiples + of the cycle period, wherever possible. + + Implementations may not support all of the flicker modes listed below. + + By default the system will start in FlickerAuto mode if this is + supported, otherwise the flicker mode will be set to FlickerOff. + + enum: + - name: FlickerOff + value: 0 + description: | + No flicker avoidance is performed. + - name: FlickerManual + value: 1 + description: | + Manual flicker avoidance. + + Suppress flicker effects caused by lighting running with a period + specified by the AeFlickerPeriod control. + \sa AeFlickerPeriod + - name: FlickerAuto + value: 2 + description: | + Automatic flicker period detection and avoidance. + + The system will automatically determine the most likely value of + flicker period, and avoid flicker of this frequency. Once flicker + is being corrected, it is implementation dependent whether the + system is still able to detect a change in the flicker period. + \sa AeFlickerDetected + + - AeFlickerPeriod: + type: int32_t + direction: inout + description: | + Manual flicker period in microseconds. + + This value sets the current flicker period to avoid. It is used when + AeFlickerMode is set to FlickerManual. + + To cancel 50Hz mains flicker, this should be set to 10000 (corresponding + to 100Hz), or 8333 (120Hz) for 60Hz mains. + + Setting the mode to FlickerManual when no AeFlickerPeriod has ever been + set means that no flicker cancellation occurs (until the value of this + control is updated). + + Switching to modes other than FlickerManual has no effect on the + value of the AeFlickerPeriod control. 
+ + \sa AeFlickerMode + + - AeFlickerDetected: + type: int32_t + direction: out + description: | + Flicker period detected in microseconds. + + The value reported here indicates the currently detected flicker + period, or zero if no flicker at all is detected. + + When AeFlickerMode is set to FlickerAuto, there may be a period during + which the value reported here remains zero. Once a non-zero value is + reported, then this is the flicker period that has been detected and is + now being cancelled. + + In the case of 50Hz mains flicker, the value would be 10000 + (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. + + It is implementation dependent whether the system can continue to detect + flicker of different periods when another frequency is already being + cancelled. + + \sa AeFlickerMode + + - Brightness: + type: float + direction: inout + description: | + Specify a fixed brightness parameter. + + Positive values (up to 1.0) produce brighter images; negative values + (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. + + - Contrast: + type: float + direction: inout + description: | + Specify a fixed contrast parameter. + + Normal contrast is given by the value 1.0; larger values produce images + with more contrast. + + - Lux: + type: float + direction: out + description: | + Report an estimate of the current illuminance level in lux. + + The Lux control can only be returned in metadata. + + - AwbEnable: + type: bool + direction: inout + description: | + Enable or disable the AWB. + + When AWB is enabled, the algorithm estimates the colour temperature of + the scene and computes colour gains and the colour correction matrix + automatically. The computed colour temperature, gains and correction + matrix are reported in metadata. The corresponding controls are ignored + if set in a request. 
+ + When AWB is disabled, the colour temperature, gains and correction + matrix are not updated automatically and can be set manually in + requests. + + \sa ColourCorrectionMatrix + \sa ColourGains + \sa ColourTemperature + + # AwbMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AwbMode: + type: int32_t + direction: inout + description: | + Specify the range of illuminants to use for the AWB algorithm. + + The modes supported are platform specific, and not all modes may be + supported. + enum: + - name: AwbAuto + value: 0 + description: Search over the whole colour temperature range. + - name: AwbIncandescent + value: 1 + description: Incandescent AWB lamp mode. + - name: AwbTungsten + value: 2 + description: Tungsten AWB lamp mode. + - name: AwbFluorescent + value: 3 + description: Fluorescent AWB lamp mode. + - name: AwbIndoor + value: 4 + description: Indoor AWB lighting mode. + - name: AwbDaylight + value: 5 + description: Daylight AWB lighting mode. + - name: AwbCloudy + value: 6 + description: Cloudy AWB lighting mode. + - name: AwbCustom + value: 7 + description: Custom AWB mode. + + - AwbLocked: + type: bool + direction: out + description: | + Report the lock status of a running AWB algorithm. + + If the AWB algorithm is locked the value shall be set to true, if it's + converging it shall be set to false. If the AWB algorithm is not + running the control shall not be present in the metadata control list. + + \sa AwbEnable + + - ColourGains: + type: float + direction: inout + description: | + Pair of gain values for the Red and Blue colour channels, in that + order. + + ColourGains can only be applied in a Request when the AWB is disabled. + If ColourGains is set in a request but ColourTemperature is not, the + implementation shall calculate and set the ColourTemperature based on + the ColourGains. 
+ + \sa AwbEnable + \sa ColourTemperature + size: [2] + + - ColourTemperature: + type: int32_t + direction: out + description: | + ColourTemperature of the frame, in kelvin. + + ColourTemperature can only be applied in a Request when the AWB is + disabled. + + If ColourTemperature is set in a request but ColourGains is not, the + implementation shall calculate and set the ColourGains based on the + given ColourTemperature. If ColourTemperature is set (either directly, + or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, + the ColourCorrectionMatrix is updated based on the ColourTemperature. + + The ColourTemperature used to process the frame is reported in metadata. + + \sa AwbEnable + \sa ColourCorrectionMatrix + \sa ColourGains + + - Saturation: + type: float + direction: inout + description: | + Specify a fixed saturation parameter. + + Normal saturation is given by the value 1.0; larger values produce more + saturated colours; 0.0 produces a greyscale image. + + - SensorBlackLevels: + type: int32_t + direction: out + description: | + Reports the sensor black levels used for processing a frame. + + The values are in the order R, Gr, Gb, B. They are returned as numbers + out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The + SensorBlackLevels control can only be returned in metadata. + size: [4] + + - Sharpness: + type: float + direction: inout + description: | + Intensity of the sharpening applied to the image. + + A value of 0.0 means no sharpening. The minimum value means + minimal sharpening, and shall be 0.0 unless the camera can't + disable sharpening completely. The default value shall give a + "reasonable" level of sharpening, suitable for most use cases. + The maximum value may apply extremely high levels of sharpening, + higher than anyone could reasonably want. Negative values are + not allowed. Note also that sharpening is not applied to raw + streams. 
+ + - FocusFoM: + type: int32_t + direction: out + description: | + Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. + + A larger FocusFoM value indicates a more in-focus frame. This singular + value may be based on a combination of statistics gathered from + multiple focus regions within an image. The number of focus regions and + method of combination is platform dependent. In this respect, it is not + necessarily aimed at providing a way to implement a focus algorithm by + the application, rather an indication of how in-focus a frame is. + + - ColourCorrectionMatrix: + type: float + direction: inout + description: | + The 3x3 matrix that converts camera RGB to sRGB within the imaging + pipeline. + + This should describe the matrix that is used after pixels have been + white-balanced, but before any gamma transformation. The 3x3 matrix is + stored in conventional reading order in an array of 9 floating point + values. + + ColourCorrectionMatrix can only be applied in a Request when the AWB is + disabled. + + \sa AwbEnable + \sa ColourTemperature + size: [3,3] + + - ScalerCrop: + type: Rectangle + direction: inout + description: | + Sets the image portion that will be scaled to form the whole of + the final output image. + + The (x,y) location of this rectangle is relative to the + PixelArrayActiveAreas that is being used. The units remain native + sensor pixels, even if the sensor is being used in a binning or + skipping mode. + + This control is only present when the pipeline supports scaling. Its + maximum valid value is given by the properties::ScalerCropMaximum + property, and the two can be used to implement digital zoom. + + - DigitalGain: + type: float + direction: inout + description: | + Digital gain value applied during the processing steps applied + to the image as captured from the sensor. + + The global digital gain factor is applied to all the colour channels + of the RAW image. 
Different pipeline models are free to + specify how the global gain factor applies to each separate + channel. + + If an imaging pipeline applies digital gain in distinct + processing steps, this value indicates their total sum. + Pipelines are free to decide how to adjust each processing + step to respect the received gain factor and shall report + their total value in the request metadata. + + - FrameDuration: + type: int64_t + direction: out + description: | + The instantaneous frame duration from start of frame exposure to start + of next exposure, expressed in microseconds. + + This control is meant to be returned in metadata. + + - FrameDurationLimits: + type: int64_t + direction: inout + description: | + The minimum and maximum (in that order) frame duration, expressed in + microseconds. + + When provided by applications, the control specifies the sensor frame + duration interval the pipeline has to use. This limits the largest + exposure time the sensor can use. For example, if a maximum frame + duration of 33ms is requested (corresponding to 30 frames per second), + the sensor will not be able to raise the exposure time above 33ms. + A fixed frame duration is achieved by setting the minimum and maximum + values to be the same. Setting both values to 0 reverts to using the + camera defaults. + + The maximum frame duration provides the absolute limit to the exposure + time computed by the AE algorithm and it overrides any exposure mode + setting specified with controls::AeExposureMode. Similarly, when a + manual exposure time is set through controls::ExposureTime, it also + gets clipped to the limits set by this control. When reported in + metadata, the control expresses the minimum and maximum frame durations + used after being clipped to the sensor provided frame duration limits. 
+ + \sa AeExposureMode + \sa ExposureTime + + \todo Define how to calculate the capture frame rate by + defining controls to report additional delays introduced by + the capture pipeline or post-processing stages (ie JPEG + conversion, frame scaling). + + \todo Provide an explicit definition of default control values, for + this and all other controls. + + size: [2] + + - SensorTemperature: + type: float + direction: out + description: | + Temperature measure from the camera sensor in Celsius. + + This value is typically obtained by a thermal sensor present on-die or + in the camera module. The range of reported temperatures is device + dependent. + + The SensorTemperature control will only be returned in metadata if a + thermal sensor is present. + + - SensorTimestamp: + type: int64_t + direction: out + description: | + The time when the first row of the image sensor active array is exposed. + + The timestamp, expressed in nanoseconds, represents a monotonically + increasing counter since the system boot time, as defined by the + Linux-specific CLOCK_BOOTTIME clock id. + + The SensorTimestamp control can only be returned in metadata. + + \todo Define how the sensor timestamp has to be used in the reprocessing + use case. + + - AfMode: + type: int32_t + direction: inout + description: | + The mode of the AF (autofocus) algorithm. + + An implementation may choose not to implement all the modes. + + enum: + - name: AfModeManual + value: 0 + description: | + The AF algorithm is in manual mode. + + In this mode it will never perform any action nor move the lens of + its own accord, but an application can specify the desired lens + position using the LensPosition control. The AfState will always + report AfStateIdle. + + If the camera is started in AfModeManual, it will move the focus + lens to the position specified by the LensPosition control. + + This mode is the recommended default value for the AfMode control. 
+ External cameras (as reported by the Location property set to + CameraLocationExternal) may use a different default value. + - name: AfModeAuto + value: 1 + description: | + The AF algorithm is in auto mode. + + In this mode the algorithm will never move the lens or change state + unless the AfTrigger control is used. The AfTrigger control can be + used to initiate a focus scan, the results of which will be + reported by AfState. + + If the autofocus algorithm is moved from AfModeAuto to another mode + while a scan is in progress, the scan is cancelled immediately, + without waiting for the scan to finish. + + When first entering this mode the AfState will report AfStateIdle. + When a trigger control is sent, AfState will report AfStateScanning + for a period before spontaneously changing to AfStateFocused or + AfStateFailed, depending on the outcome of the scan. It will remain + in this state until another scan is initiated by the AfTrigger + control. If a scan is cancelled (without changing to another mode), + AfState will return to AfStateIdle. + - name: AfModeContinuous + value: 2 + description: | + The AF algorithm is in continuous mode. + + In this mode the lens can re-start a scan spontaneously at any + moment, without any user intervention. The AfState still reports + whether the algorithm is currently scanning or not, though the + application has no ability to initiate or cancel scans, nor to move + the lens for itself. + + However, applications can pause the AF algorithm from continuously + scanning by using the AfPause control. This allows video or still + images to be captured whilst guaranteeing that the focus is fixed. + + When set to AfModeContinuous, the system will immediately initiate a + scan so AfState will report AfStateScanning, and will settle on one + of AfStateFocused or AfStateFailed, depending on the scan result. + + - AfRange: + type: int32_t + direction: inout + description: | + The range of focus distances that is scanned. 
+ + An implementation may choose not to implement all the options here. + enum: + - name: AfRangeNormal + value: 0 + description: | + A wide range of focus distances is scanned. + + Scanned distances cover all the way from infinity down to close + distances, though depending on the implementation, possibly not + including the very closest macro positions. + - name: AfRangeMacro + value: 1 + description: | + Only close distances are scanned. + - name: AfRangeFull + value: 2 + description: | + The full range of focus distances is scanned. + + This range is similar to AfRangeNormal but includes the very + closest macro positions. + + - AfSpeed: + type: int32_t + direction: inout + description: | + Determine whether the AF is to move the lens as quickly as possible or + more steadily. + + For example, during video recording it may be desirable not to move the + lens too abruptly, but when in a preview mode (waiting for a still + capture) it may be helpful to move the lens as quickly as is reasonably + possible. + enum: + - name: AfSpeedNormal + value: 0 + description: Move the lens at its usual speed. + - name: AfSpeedFast + value: 1 + description: Move the lens more quickly. + + - AfMetering: + type: int32_t + direction: inout + description: | + The parts of the image used by the AF algorithm to measure focus. + enum: + - name: AfMeteringAuto + value: 0 + description: | + Let the AF algorithm decide for itself where it will measure focus. + - name: AfMeteringWindows + value: 1 + description: | + Use the rectangles defined by the AfWindows control to measure focus. + + If no windows are specified the behaviour is platform dependent. + + - AfWindows: + type: Rectangle + direction: inout + description: | + The focus windows used by the AF algorithm when AfMetering is set to + AfMeteringWindows. + + The units used are pixels within the rectangle returned by the + ScalerCropMaximum property. 
+ + In order to be activated, a rectangle must be programmed with non-zero + width and height. Internally, these rectangles are intersected with the + ScalerCropMaximum rectangle. If the window becomes empty after this + operation, then the window is ignored. If all the windows end up being + ignored, then the behaviour is platform dependent. + + On platforms that support the ScalerCrop control (for implementing + digital zoom, for example), no automatic recalculation or adjustment of + AF windows is performed internally if the ScalerCrop is changed. If any + window lies outside the output image after the scaler crop has been + applied, it is up to the application to recalculate them. + + The details of how the windows are used are platform dependent. We note + that when there is more than one AF window, a typical implementation + might find the optimal focus position for each one and finally select + the window where the focal distance for the objects shown in that part + of the image are closest to the camera. + + size: [n] + + - AfTrigger: + type: int32_t + direction: in + description: | + Start an autofocus scan. + + This control starts an autofocus scan when AfMode is set to AfModeAuto, + and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It + can also be used to terminate a scan early. + + enum: + - name: AfTriggerStart + value: 0 + description: | + Start an AF scan. + + Setting the control to AfTriggerStart is ignored if a scan is in + progress. + - name: AfTriggerCancel + value: 1 + description: | + Cancel an AF scan. + + This does not cause the lens to move anywhere else. Ignored if no + scan is in progress. + + - AfPause: + type: int32_t + direction: in + description: | + Pause lens movements when in continuous autofocus mode. + + This control has no effect except when in continuous autofocus mode + (AfModeContinuous). It can be used to pause any lens movements while + (for example) images are captured. 
The algorithm remains inactive + until it is instructed to resume. + + enum: + - name: AfPauseImmediate + value: 0 + description: | + Pause the continuous autofocus algorithm immediately. + + The autofocus algorithm is paused whether or not any kind of scan + is underway. AfPauseState will subsequently report + AfPauseStatePaused. AfState may report any of AfStateScanning, + AfStateFocused or AfStateFailed, depending on the algorithm's state + when it received this control. + - name: AfPauseDeferred + value: 1 + description: | + Pause the continuous autofocus algorithm at the end of the scan. + + This is similar to AfPauseImmediate, and if the AfState is + currently reporting AfStateFocused or AfStateFailed it will remain + in that state and AfPauseState will report AfPauseStatePaused. + + However, if the algorithm is scanning (AfStateScanning), + AfPauseState will report AfPauseStatePausing until the scan is + finished, at which point AfState will report one of AfStateFocused + or AfStateFailed, and AfPauseState will change to + AfPauseStatePaused. + + - name: AfPauseResume + value: 2 + description: | + Resume continuous autofocus operation. + + The algorithm starts again from exactly where it left off, and + AfPauseState will report AfPauseStateRunning. + + - LensPosition: + type: float + direction: inout + description: | + Set and report the focus lens position. + + This control instructs the lens to move to a particular position and + also reports back the position of the lens for each frame. + + The LensPosition control is ignored unless the AfMode is set to + AfModeManual, though the value is reported back unconditionally in all + modes. + + This value, which is generally a non-integer, is the reciprocal of the + focal distance in metres, also known as dioptres. That is, to set a + focal distance D, the lens position LP is given by + + \f$LP = \frac{1\mathrm{m}}{D}\f$ + + For example: + + - 0 moves the lens to infinity. 
+ - 0.5 moves the lens to focus on objects 2m away. + - 2 moves the lens to focus on objects 50cm away. + - And larger values will focus the lens closer. + + The default value of the control should indicate a good general + position for the lens, often corresponding to the hyperfocal distance + (the closest position for which objects at infinity are still + acceptably sharp). The minimum will often be zero (meaning infinity), + and the maximum value defines the closest focus position. + + \todo Define a property to report the Hyperfocal distance of calibrated + lenses. + + - AfState: + type: int32_t + direction: out + description: | + The current state of the AF algorithm. + + This control reports the current state of the AF algorithm in + conjunction with the reported AfMode value and (in continuous AF mode) + the AfPauseState value. The possible state changes are described below, + though we note the following state transitions that occur when the + AfMode is changed. + + If the AfMode is set to AfModeManual, then the AfState will always + report AfStateIdle (even if the lens is subsequently moved). Changing + to the AfModeManual state does not initiate any lens movement. + + If the AfMode is set to AfModeAuto then the AfState will report + AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent + together then AfState will omit AfStateIdle and move straight to + AfStateScanning (and start a scan). + + If the AfMode is set to AfModeContinuous then the AfState will + initially report AfStateScanning. + + enum: + - name: AfStateIdle + value: 0 + description: | + The AF algorithm is in manual mode (AfModeManual) or in auto mode + (AfModeAuto) and a scan has not yet been triggered, or an + in-progress scan was cancelled. + - name: AfStateScanning + value: 1 + description: | + The AF algorithm is in auto mode (AfModeAuto), and a scan has been + started using the AfTrigger control. 
+ + The scan can be cancelled by sending AfTriggerCancel at which point + the algorithm will either move back to AfStateIdle or, if the scan + actually completes before the cancel request is processed, to one + of AfStateFocused or AfStateFailed. + + Alternatively the AF algorithm could be in continuous mode + (AfModeContinuous) at which point it may enter this state + spontaneously whenever it determines that a rescan is needed. + - name: AfStateFocused + value: 2 + description: | + The AF algorithm is in auto (AfModeAuto) or continuous + (AfModeContinuous) mode and a scan has completed with the result + that the algorithm believes the image is now in focus. + - name: AfStateFailed + value: 3 + description: | + The AF algorithm is in auto (AfModeAuto) or continuous + (AfModeContinuous) mode and a scan has completed with the result + that the algorithm did not find a good focus position. + + - AfPauseState: + type: int32_t + direction: out + description: | + Report whether the autofocus is currently running, paused or pausing. + + This control is only applicable in continuous (AfModeContinuous) mode, + and reports whether the algorithm is currently running, paused or + pausing (that is, will pause as soon as any in-progress scan + completes). + + Any change to AfMode will cause AfPauseStateRunning to be reported. + + enum: + - name: AfPauseStateRunning + value: 0 + description: | + Continuous AF is running and the algorithm may restart a scan + spontaneously. + - name: AfPauseStatePausing + value: 1 + description: | + Continuous AF has been sent an AfPauseDeferred control, and will + pause as soon as any in-progress scan completes. + + When the scan completes, the AfPauseState control will report + AfPauseStatePaused. No new scans will be start spontaneously until + the AfPauseResume control is sent. + - name: AfPauseStatePaused + value: 2 + description: | + Continuous AF is paused. 
+ + No further state changes or lens movements will occur until the + AfPauseResume control is sent. + + - HdrMode: + type: int32_t + direction: inout + description: | + Set the mode to be used for High Dynamic Range (HDR) imaging. + + HDR techniques typically include multiple exposure, image fusion and + tone mapping techniques to improve the dynamic range of the resulting + images. + + When using an HDR mode, images are captured with different sets of AGC + settings called HDR channels. Channels indicate in particular the type + of exposure (short, medium or long) used to capture the raw image, + before fusion. Each HDR image is tagged with the corresponding channel + using the HdrChannel control. + + \sa HdrChannel + + enum: + - name: HdrModeOff + value: 0 + description: | + HDR is disabled. + + Metadata for this frame will not include the HdrChannel control. + - name: HdrModeMultiExposureUnmerged + value: 1 + description: | + Multiple exposures will be generated in an alternating fashion. + + The multiple exposures will not be merged together and will be + returned to the application as they are. Each image will be tagged + with the correct HDR channel, indicating what kind of exposure it + is. The tag should be the same as in the HdrModeMultiExposure case. + + The expectation is that an application using this mode would merge + the frames to create HDR images for itself if it requires them. + - name: HdrModeMultiExposure + value: 2 + description: | + Multiple exposures will be generated and merged to create HDR + images. + + Each image will be tagged with the HDR channel (long, medium or + short) that arrived and which caused this image to be output. + + Systems that use two channels for HDR will return images tagged + alternately as the short and long channel. Systems that use three + channels for HDR will cycle through the short, medium and long + channel before repeating. 
+ - name: HdrModeSingleExposure + value: 3 + description: | + Multiple frames all at a single exposure will be used to create HDR + images. + + These images should be reported as all corresponding to the HDR + short channel. + - name: HdrModeNight + value: 4 + description: | + Multiple frames will be combined to produce "night mode" images. + + It is up to the implementation exactly which HDR channels it uses, + and the images will all be tagged accordingly with the correct HDR + channel information. + + - HdrChannel: + type: int32_t + direction: out + description: | + The HDR channel used to capture the frame. + + This value is reported back to the application so that it can discover + whether this capture corresponds to the short or long exposure image + (or any other image used by the HDR procedure). An application can + monitor the HDR channel to discover when the differently exposed images + have arrived. + + This metadata is only available when an HDR mode has been enabled. + + \sa HdrMode + + enum: + - name: HdrChannelNone + value: 0 + description: | + This image does not correspond to any of the captures used to create + an HDR image. + - name: HdrChannelShort + value: 1 + description: | + This is a short exposure image. + - name: HdrChannelMedium + value: 2 + description: | + This is a medium exposure image. + - name: HdrChannelLong + value: 3 + description: | + This is a long exposure image. + + - Gamma: + type: float + direction: inout + description: | + Specify a fixed gamma value. + + The default gamma value must be 2.2 which closely mimics sRGB gamma. + Note that this is camera gamma, so it is applied as 1.0/gamma. + + - DebugMetadataEnable: + type: bool + direction: inout + description: | + Enable or disable the debug metadata. + +... 
diff --git a/libcamera/versioned_files/0.4.0/control_ids_debug.yaml b/libcamera/versioned_files/0.4.0/control_ids_debug.yaml new file mode 100644 index 0000000..7975327 --- /dev/null +++ b/libcamera/versioned_files/0.4.0/control_ids_debug.yaml @@ -0,0 +1,6 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +%YAML 1.1 +--- +vendor: debug +controls: [] diff --git a/libcamera/versioned_files/0.4.0/control_ids_draft.yaml b/libcamera/versioned_files/0.4.0/control_ids_draft.yaml new file mode 100644 index 0000000..87e4e02 --- /dev/null +++ b/libcamera/versioned_files/0.4.0/control_ids_draft.yaml @@ -0,0 +1,327 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +# Unless otherwise stated, all controls are bi-directional, i.e. they can be +# set through Request::controls() and returned out through Request::metadata(). +vendor: draft +controls: + - AePrecaptureTrigger: + type: int32_t + direction: inout + description: | + Control for AE metering trigger. Currently identical to + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. + + Whether the camera device will trigger a precapture metering sequence + when it processes this request. + enum: + - name: AePrecaptureTriggerIdle + value: 0 + description: The trigger is idle. + - name: AePrecaptureTriggerStart + value: 1 + description: The pre-capture AE metering is started by the camera. + - name: AePrecaptureTriggerCancel + value: 2 + description: | + The camera will cancel any active or completed metering sequence. + The AE algorithm is reset to its initial state. + + - NoiseReductionMode: + type: int32_t + direction: inout + description: | + Control to select the noise reduction algorithm mode. Currently + identical to ANDROID_NOISE_REDUCTION_MODE. + + Mode of operation for the noise reduction algorithm. 
+ enum: + - name: NoiseReductionModeOff + value: 0 + description: No noise reduction is applied + - name: NoiseReductionModeFast + value: 1 + description: | + Noise reduction is applied without reducing the frame rate. + - name: NoiseReductionModeHighQuality + value: 2 + description: | + High quality noise reduction at the expense of frame rate. + - name: NoiseReductionModeMinimal + value: 3 + description: | + Minimal noise reduction is applied without reducing the frame rate. + - name: NoiseReductionModeZSL + value: 4 + description: | + Noise reduction is applied at different levels to different streams. + + - ColorCorrectionAberrationMode: + type: int32_t + direction: inout + description: | + Control to select the color correction aberration mode. Currently + identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. + + Mode of operation for the chromatic aberration correction algorithm. + enum: + - name: ColorCorrectionAberrationOff + value: 0 + description: No aberration correction is applied. + - name: ColorCorrectionAberrationFast + value: 1 + description: Aberration correction will not slow down the frame rate. + - name: ColorCorrectionAberrationHighQuality + value: 2 + description: | + High quality aberration correction which might reduce the frame + rate. + + - AeState: + type: int32_t + direction: out + description: | + Control to report the current AE algorithm state. Currently identical to + ANDROID_CONTROL_AE_STATE. + + Current state of the AE algorithm. + enum: + - name: AeStateInactive + value: 0 + description: The AE algorithm is inactive. + - name: AeStateSearching + value: 1 + description: The AE algorithm has not converged yet. + - name: AeStateConverged + value: 2 + description: The AE algorithm has converged. + - name: AeStateLocked + value: 3 + description: The AE algorithm is locked. 
+ - name: AeStateFlashRequired + value: 4 + description: The AE algorithm would need a flash for good results + - name: AeStatePrecapture + value: 5 + description: | + The AE algorithm has started a pre-capture metering session. + \sa AePrecaptureTrigger + + - AwbState: + type: int32_t + direction: out + description: | + Control to report the current AWB algorithm state. Currently identical + to ANDROID_CONTROL_AWB_STATE. + + Current state of the AWB algorithm. + enum: + - name: AwbStateInactive + value: 0 + description: The AWB algorithm is inactive. + - name: AwbStateSearching + value: 1 + description: The AWB algorithm has not converged yet. + - name: AwbConverged + value: 2 + description: The AWB algorithm has converged. + - name: AwbLocked + value: 3 + description: The AWB algorithm is locked. + + - SensorRollingShutterSkew: + type: int64_t + direction: out + description: | + Control to report the time between the start of exposure of the first + row and the start of exposure of the last row. Currently identical to + ANDROID_SENSOR_ROLLING_SHUTTER_SKEW + + - LensShadingMapMode: + type: int32_t + direction: inout + description: | + Control to report if the lens shading map is available. Currently + identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. + enum: + - name: LensShadingMapModeOff + value: 0 + description: No lens shading map mode is available. + - name: LensShadingMapModeOn + value: 1 + description: The lens shading map mode is available. + + - PipelineDepth: + type: int32_t + direction: out + description: | + Specifies the number of pipeline stages the frame went through from when + it was exposed to when the final completed result was available to the + framework. Always less than or equal to PipelineMaxDepth. Currently + identical to ANDROID_REQUEST_PIPELINE_DEPTH. + + The typical value for this control is 3 as a frame is first exposed, + captured and then processed in a single pass through the ISP. 
Any + additional processing step performed after the ISP pass (in example face + detection, additional format conversions etc) count as an additional + pipeline stage. + + - MaxLatency: + type: int32_t + direction: out + description: | + The maximum number of frames that can occur after a request (different + than the previous) has been submitted, and before the result's state + becomes synchronized. A value of -1 indicates unknown latency, and 0 + indicates per-frame control. Currently identical to + ANDROID_SYNC_MAX_LATENCY. + + - TestPatternMode: + type: int32_t + direction: inout + description: | + Control to select the test pattern mode. Currently identical to + ANDROID_SENSOR_TEST_PATTERN_MODE. + enum: + - name: TestPatternModeOff + value: 0 + description: | + No test pattern mode is used. The camera device returns frames from + the image sensor. + - name: TestPatternModeSolidColor + value: 1 + description: | + Each pixel in [R, G_even, G_odd, B] is replaced by its respective + color channel provided in test pattern data. + \todo Add control for test pattern data. + - name: TestPatternModeColorBars + value: 2 + description: | + All pixel data is replaced with an 8-bar color pattern. The vertical + bars (left-to-right) are as follows; white, yellow, cyan, green, + magenta, red, blue and black. Each bar should take up 1/8 of the + sensor pixel array width. When this is not possible, the bar size + should be rounded down to the nearest integer and the pattern can + repeat on the right side. Each bar's height must always take up the + full sensor pixel array height. + - name: TestPatternModeColorBarsFadeToGray + value: 3 + description: | + The test pattern is similar to TestPatternModeColorBars, + except that each bar should start at its specified color at the top + and fade to gray at the bottom. Furthermore each bar is further + subdevided into a left and right half. 
The left half should have a + smooth gradient, and the right half should have a quantized + gradient. In particular, the right half's should consist of blocks + of the same color for 1/16th active sensor pixel array width. The + least significant bits in the quantized gradient should be copied + from the most significant bits of the smooth gradient. The height of + each bar should always be a multiple of 128. When this is not the + case, the pattern should repeat at the bottom of the image. + - name: TestPatternModePn9 + value: 4 + description: | + All pixel data is replaced by a pseudo-random sequence generated + from a PN9 512-bit sequence (typically implemented in hardware with + a linear feedback shift register). The generator should be reset at + the beginning of each frame, and thus each subsequent raw frame with + this test pattern should be exactly the same as the last. + - name: TestPatternModeCustom1 + value: 256 + description: | + The first custom test pattern. All custom patterns that are + available only on this camera device are at least this numeric + value. All of the custom test patterns will be static (that is the + raw image must not vary from frame to frame). + + - FaceDetectMode: + type: int32_t + direction: inout + description: | + Control to select the face detection mode used by the pipeline. + + Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. + + \sa FaceDetectFaceRectangles + \sa FaceDetectFaceScores + \sa FaceDetectFaceLandmarks + \sa FaceDetectFaceIds + + enum: + - name: FaceDetectModeOff + value: 0 + description: | + Pipeline doesn't perform face detection and doesn't report any + control related to face detection. + - name: FaceDetectModeSimple + value: 1 + description: | + Pipeline performs face detection and reports the + FaceDetectFaceRectangles and FaceDetectFaceScores controls for each + detected face. FaceDetectFaceLandmarks and FaceDetectFaceIds are + optional. 
+ - name: FaceDetectModeFull + value: 2 + description: | + Pipeline performs face detection and reports all the controls + related to face detection including FaceDetectFaceRectangles, + FaceDetectFaceScores, FaceDetectFaceLandmarks, and + FaceDeteceFaceIds for each detected face. + + - FaceDetectFaceRectangles: + type: Rectangle + direction: out + description: | + Boundary rectangles of the detected faces. The number of values is + the number of detected faces. + + The FaceDetectFaceRectangles control can only be returned in metadata. + + Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. + size: [n] + + - FaceDetectFaceScores: + type: uint8_t + direction: out + description: | + Confidence score of each of the detected faces. The range of score is + [0, 100]. The number of values should be the number of faces reported + in FaceDetectFaceRectangles. + + The FaceDetectFaceScores control can only be returned in metadata. + + Currently identical to ANDROID_STATISTICS_FACE_SCORES. + size: [n] + + - FaceDetectFaceLandmarks: + type: Point + direction: out + description: | + Array of human face landmark coordinates in format [..., left_eye_i, + right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. The + number of values should be 3 * the number of faces reported in + FaceDetectFaceRectangles. + + The FaceDetectFaceLandmarks control can only be returned in metadata. + + Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. + size: [n] + + - FaceDetectFaceIds: + type: int32_t + direction: out + description: | + Each detected face is given a unique ID that is valid for as long as the + face is visible to the camera device. A face that leaves the field of + view and later returns may be assigned a new ID. The number of values + should be the number of faces reported in FaceDetectFaceRectangles. + + The FaceDetectFaceIds control can only be returned in metadata. + + Currently identical to ANDROID_STATISTICS_FACE_IDS. + size: [n] + +... 
diff --git a/libcamera/versioned_files/0.4.0/control_ids_rpi.yaml b/libcamera/versioned_files/0.4.0/control_ids_rpi.yaml new file mode 100644 index 0000000..7524c5d --- /dev/null +++ b/libcamera/versioned_files/0.4.0/control_ids_rpi.yaml @@ -0,0 +1,61 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2023, Raspberry Pi Ltd +# +%YAML 1.1 +--- +# Raspberry Pi (VC4 and PiSP) specific vendor controls +vendor: rpi +controls: + - StatsOutputEnable: + type: bool + direction: inout + description: | + Toggles the Raspberry Pi IPA to output the hardware generated statistics. + + When this control is set to true, the IPA outputs a binary dump of the + hardware generated statistics through the Request metadata in the + Bcm2835StatsOutput control. + + \sa Bcm2835StatsOutput + + - Bcm2835StatsOutput: + type: uint8_t + size: [n] + direction: out + description: | + Span of the BCM2835 ISP generated statistics for the current frame. + + This is sent in the Request metadata if the StatsOutputEnable is set to + true. The statistics struct definition can be found in + include/linux/bcm2835-isp.h. + + \sa StatsOutputEnable + + - ScalerCrops: + type: Rectangle + size: [n] + direction: out + description: | + An array of rectangles, where each singular value has identical + functionality to the ScalerCrop control. This control allows the + Raspberry Pi pipeline handler to control individual scaler crops per + output stream. + + The order of rectangles passed into the control must match the order of + streams configured by the application. The pipeline handler will only + configure crop retangles up-to the number of output streams configured. + All subsequent rectangles passed into this control are ignored by the + pipeline handler. + + If both rpi::ScalerCrops and ScalerCrop controls are present in a + ControlList, the latter is discarded, and crops are obtained from this + control. 
+ + Note that using different crop rectangles for each output stream with + this control is only applicable on the Pi5/PiSP platform. This control + should also be considered temporary/draft and will be replaced with + official libcamera API support for per-stream controls in the future. + + \sa ScalerCrop +... diff --git a/libcamera/versioned_files/0.4.0/controls.rs b/libcamera/versioned_files/0.4.0/controls.rs new file mode 100644 index 0000000..7bd18cb --- /dev/null +++ b/libcamera/versioned_files/0.4.0/controls.rs @@ -0,0 +1,3319 @@ +use std::{ffi::CStr, ops::{Deref, DerefMut}}; +use num_enum::{IntoPrimitive, TryFromPrimitive}; +#[allow(unused_imports)] +use crate::control::{Control, Property, ControlEntry, DynControlEntry}; +use crate::control_value::{ControlValue, ControlValueError}; +#[allow(unused_imports)] +use crate::geometry::{Rectangle, Size, Point}; +#[allow(unused_imports)] +use libcamera_sys::*; +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(u32)] +pub enum ControlId { + /// Enable or disable the AE. + /// + /// \sa ExposureTime AnalogueGain + AeEnable = AE_ENABLE, + /// Report the lock status of a running AE algorithm. + /// + /// If the AE algorithm is locked the value shall be set to true, if it's + /// converging it shall be set to false. If the AE algorithm is not + /// running the control shall not be present in the metadata control list. + /// + /// \sa AeEnable + AeLocked = AE_LOCKED, + /// Specify a metering mode for the AE algorithm to use. + /// + /// The metering modes determine which parts of the image are used to + /// determine the scene brightness. Metering modes may be platform specific + /// and not all metering modes may be supported. + AeMeteringMode = AE_METERING_MODE, + /// Specify a constraint mode for the AE algorithm to use. + /// + /// The constraint modes determine how the measured scene brightness is + /// adjusted to reach the desired target exposure. 
Constraint modes may be + /// platform specific, and not all constraint modes may be supported. + AeConstraintMode = AE_CONSTRAINT_MODE, + /// Specify an exposure mode for the AE algorithm to use. + /// + /// The exposure modes specify how the desired total exposure is divided + /// between the exposure time and the sensor's analogue gain. They are + /// platform specific, and not all exposure modes may be supported. + AeExposureMode = AE_EXPOSURE_MODE, + /// Specify an Exposure Value (EV) parameter. + /// + /// The EV parameter will only be applied if the AE algorithm is currently + /// enabled. + /// + /// By convention EV adjusts the exposure as log2. For example + /// EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment + /// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. + /// + /// \sa AeEnable + ExposureValue = EXPOSURE_VALUE, + /// Exposure time for the frame applied in the sensor device. + /// + /// This value is specified in micro-seconds. + /// + /// Setting this value means that it is now fixed and the AE algorithm may + /// not change it. Setting it back to zero returns it to the control of the + /// AE algorithm. + /// + /// \sa AnalogueGain AeEnable + /// + /// \todo Document the interactions between AeEnable and setting a fixed + /// value for this control. Consider interactions with other AE features, + /// such as aperture and aperture/shutter priority mode, and decide if + /// control of which features should be automatically adjusted shouldn't + /// better be handled through a separate AE mode control. + ExposureTime = EXPOSURE_TIME, + /// Analogue gain value applied in the sensor device. + /// + /// The value of the control specifies the gain multiplier applied to all + /// colour channels. This value cannot be lower than 1.0. + /// + /// Setting this value means that it is now fixed and the AE algorithm may + /// not change it. Setting it back to zero returns it to the control of the + /// AE algorithm. 
+ /// + /// \sa ExposureTime AeEnable + /// + /// \todo Document the interactions between AeEnable and setting a fixed + /// value for this control. Consider interactions with other AE features, + /// such as aperture and aperture/shutter priority mode, and decide if + /// control of which features should be automatically adjusted shouldn't + /// better be handled through a separate AE mode control. + AnalogueGain = ANALOGUE_GAIN, + /// Set the flicker avoidance mode for AGC/AEC. + /// + /// The flicker mode determines whether, and how, the AGC/AEC algorithm + /// attempts to hide flicker effects caused by the duty cycle of artificial + /// lighting. + /// + /// Although implementation dependent, many algorithms for "flicker + /// avoidance" work by restricting this exposure time to integer multiples + /// of the cycle period, wherever possible. + /// + /// Implementations may not support all of the flicker modes listed below. + /// + /// By default the system will start in FlickerAuto mode if this is + /// supported, otherwise the flicker mode will be set to FlickerOff. + AeFlickerMode = AE_FLICKER_MODE, + /// Manual flicker period in microseconds. + /// + /// This value sets the current flicker period to avoid. It is used when + /// AeFlickerMode is set to FlickerManual. + /// + /// To cancel 50Hz mains flicker, this should be set to 10000 (corresponding + /// to 100Hz), or 8333 (120Hz) for 60Hz mains. + /// + /// Setting the mode to FlickerManual when no AeFlickerPeriod has ever been + /// set means that no flicker cancellation occurs (until the value of this + /// control is updated). + /// + /// Switching to modes other than FlickerManual has no effect on the + /// value of the AeFlickerPeriod control. + /// + /// \sa AeFlickerMode + AeFlickerPeriod = AE_FLICKER_PERIOD, + /// Flicker period detected in microseconds. + /// + /// The value reported here indicates the currently detected flicker + /// period, or zero if no flicker at all is detected. 
+ /// + /// When AeFlickerMode is set to FlickerAuto, there may be a period during + /// which the value reported here remains zero. Once a non-zero value is + /// reported, then this is the flicker period that has been detected and is + /// now being cancelled. + /// + /// In the case of 50Hz mains flicker, the value would be 10000 + /// (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. + /// + /// It is implementation dependent whether the system can continue to detect + /// flicker of different periods when another frequency is already being + /// cancelled. + /// + /// \sa AeFlickerMode + AeFlickerDetected = AE_FLICKER_DETECTED, + /// Specify a fixed brightness parameter. + /// + /// Positive values (up to 1.0) produce brighter images; negative values + /// (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. + Brightness = BRIGHTNESS, + /// Specify a fixed contrast parameter. + /// + /// Normal contrast is given by the value 1.0; larger values produce images + /// with more contrast. + Contrast = CONTRAST, + /// Report an estimate of the current illuminance level in lux. + /// + /// The Lux control can only be returned in metadata. + Lux = LUX, + /// Enable or disable the AWB. + /// + /// When AWB is enabled, the algorithm estimates the colour temperature of + /// the scene and computes colour gains and the colour correction matrix + /// automatically. The computed colour temperature, gains and correction + /// matrix are reported in metadata. The corresponding controls are ignored + /// if set in a request. + /// + /// When AWB is disabled, the colour temperature, gains and correction + /// matrix are not updated automatically and can be set manually in + /// requests. + /// + /// \sa ColourCorrectionMatrix + /// \sa ColourGains + /// \sa ColourTemperature + AwbEnable = AWB_ENABLE, + /// Specify the range of illuminants to use for the AWB algorithm. 
+ /// + /// The modes supported are platform specific, and not all modes may be + /// supported. + AwbMode = AWB_MODE, + /// Report the lock status of a running AWB algorithm. + /// + /// If the AWB algorithm is locked the value shall be set to true, if it's + /// converging it shall be set to false. If the AWB algorithm is not + /// running the control shall not be present in the metadata control list. + /// + /// \sa AwbEnable + AwbLocked = AWB_LOCKED, + /// Pair of gain values for the Red and Blue colour channels, in that + /// order. + /// + /// ColourGains can only be applied in a Request when the AWB is disabled. + /// If ColourGains is set in a request but ColourTemperature is not, the + /// implementation shall calculate and set the ColourTemperature based on + /// the ColourGains. + /// + /// \sa AwbEnable + /// \sa ColourTemperature + ColourGains = COLOUR_GAINS, + /// ColourTemperature of the frame, in kelvin. + /// + /// ColourTemperature can only be applied in a Request when the AWB is + /// disabled. + /// + /// If ColourTemperature is set in a request but ColourGains is not, the + /// implementation shall calculate and set the ColourGains based on the + /// given ColourTemperature. If ColourTemperature is set (either directly, + /// or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, + /// the ColourCorrectionMatrix is updated based on the ColourTemperature. + /// + /// The ColourTemperature used to process the frame is reported in metadata. + /// + /// \sa AwbEnable + /// \sa ColourCorrectionMatrix + /// \sa ColourGains + ColourTemperature = COLOUR_TEMPERATURE, + /// Specify a fixed saturation parameter. + /// + /// Normal saturation is given by the value 1.0; larger values produce more + /// saturated colours; 0.0 produces a greyscale image. + Saturation = SATURATION, + /// Reports the sensor black levels used for processing a frame. + /// + /// The values are in the order R, Gr, Gb, B. 
They are returned as numbers + /// out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The + /// SensorBlackLevels control can only be returned in metadata. + SensorBlackLevels = SENSOR_BLACK_LEVELS, + /// Intensity of the sharpening applied to the image. + /// + /// A value of 0.0 means no sharpening. The minimum value means + /// minimal sharpening, and shall be 0.0 unless the camera can't + /// disable sharpening completely. The default value shall give a + /// "reasonable" level of sharpening, suitable for most use cases. + /// The maximum value may apply extremely high levels of sharpening, + /// higher than anyone could reasonably want. Negative values are + /// not allowed. Note also that sharpening is not applied to raw + /// streams. + Sharpness = SHARPNESS, + /// Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. + /// + /// A larger FocusFoM value indicates a more in-focus frame. This singular + /// value may be based on a combination of statistics gathered from + /// multiple focus regions within an image. The number of focus regions and + /// method of combination is platform dependent. In this respect, it is not + /// necessarily aimed at providing a way to implement a focus algorithm by + /// the application, rather an indication of how in-focus a frame is. + FocusFoM = FOCUS_FO_M, + /// The 3x3 matrix that converts camera RGB to sRGB within the imaging + /// pipeline. + /// + /// This should describe the matrix that is used after pixels have been + /// white-balanced, but before any gamma transformation. The 3x3 matrix is + /// stored in conventional reading order in an array of 9 floating point + /// values. + /// + /// ColourCorrectionMatrix can only be applied in a Request when the AWB is + /// disabled. + /// + /// \sa AwbEnable + /// \sa ColourTemperature + ColourCorrectionMatrix = COLOUR_CORRECTION_MATRIX, + /// Sets the image portion that will be scaled to form the whole of + /// the final output image. 
+ /// + /// The (x,y) location of this rectangle is relative to the + /// PixelArrayActiveAreas that is being used. The units remain native + /// sensor pixels, even if the sensor is being used in a binning or + /// skipping mode. + /// + /// This control is only present when the pipeline supports scaling. Its + /// maximum valid value is given by the properties::ScalerCropMaximum + /// property, and the two can be used to implement digital zoom. + ScalerCrop = SCALER_CROP, + /// Digital gain value applied during the processing steps applied + /// to the image as captured from the sensor. + /// + /// The global digital gain factor is applied to all the colour channels + /// of the RAW image. Different pipeline models are free to + /// specify how the global gain factor applies to each separate + /// channel. + /// + /// If an imaging pipeline applies digital gain in distinct + /// processing steps, this value indicates their total sum. + /// Pipelines are free to decide how to adjust each processing + /// step to respect the received gain factor and shall report + /// their total value in the request metadata. + DigitalGain = DIGITAL_GAIN, + /// The instantaneous frame duration from start of frame exposure to start + /// of next exposure, expressed in microseconds. + /// + /// This control is meant to be returned in metadata. + FrameDuration = FRAME_DURATION, + /// The minimum and maximum (in that order) frame duration, expressed in + /// microseconds. + /// + /// When provided by applications, the control specifies the sensor frame + /// duration interval the pipeline has to use. This limits the largest + /// exposure time the sensor can use. For example, if a maximum frame + /// duration of 33ms is requested (corresponding to 30 frames per second), + /// the sensor will not be able to raise the exposure time above 33ms. + /// A fixed frame duration is achieved by setting the minimum and maximum + /// values to be the same. 
Setting both values to 0 reverts to using the + /// camera defaults. + /// + /// The maximum frame duration provides the absolute limit to the exposure + /// time computed by the AE algorithm and it overrides any exposure mode + /// setting specified with controls::AeExposureMode. Similarly, when a + /// manual exposure time is set through controls::ExposureTime, it also + /// gets clipped to the limits set by this control. When reported in + /// metadata, the control expresses the minimum and maximum frame durations + /// used after being clipped to the sensor provided frame duration limits. + /// + /// \sa AeExposureMode + /// \sa ExposureTime + /// + /// \todo Define how to calculate the capture frame rate by + /// defining controls to report additional delays introduced by + /// the capture pipeline or post-processing stages (ie JPEG + /// conversion, frame scaling). + /// + /// \todo Provide an explicit definition of default control values, for + /// this and all other controls. + FrameDurationLimits = FRAME_DURATION_LIMITS, + /// Temperature measure from the camera sensor in Celsius. + /// + /// This value is typically obtained by a thermal sensor present on-die or + /// in the camera module. The range of reported temperatures is device + /// dependent. + /// + /// The SensorTemperature control will only be returned in metadata if a + /// thermal sensor is present. + SensorTemperature = SENSOR_TEMPERATURE, + /// The time when the first row of the image sensor active array is exposed. + /// + /// The timestamp, expressed in nanoseconds, represents a monotonically + /// increasing counter since the system boot time, as defined by the + /// Linux-specific CLOCK_BOOTTIME clock id. + /// + /// The SensorTimestamp control can only be returned in metadata. + /// + /// \todo Define how the sensor timestamp has to be used in the reprocessing + /// use case. + SensorTimestamp = SENSOR_TIMESTAMP, + /// The mode of the AF (autofocus) algorithm. 
+ /// + /// An implementation may choose not to implement all the modes. + AfMode = AF_MODE, + /// The range of focus distances that is scanned. + /// + /// An implementation may choose not to implement all the options here. + AfRange = AF_RANGE, + /// Determine whether the AF is to move the lens as quickly as possible or + /// more steadily. + /// + /// For example, during video recording it may be desirable not to move the + /// lens too abruptly, but when in a preview mode (waiting for a still + /// capture) it may be helpful to move the lens as quickly as is reasonably + /// possible. + AfSpeed = AF_SPEED, + /// The parts of the image used by the AF algorithm to measure focus. + AfMetering = AF_METERING, + /// The focus windows used by the AF algorithm when AfMetering is set to + /// AfMeteringWindows. + /// + /// The units used are pixels within the rectangle returned by the + /// ScalerCropMaximum property. + /// + /// In order to be activated, a rectangle must be programmed with non-zero + /// width and height. Internally, these rectangles are intersected with the + /// ScalerCropMaximum rectangle. If the window becomes empty after this + /// operation, then the window is ignored. If all the windows end up being + /// ignored, then the behaviour is platform dependent. + /// + /// On platforms that support the ScalerCrop control (for implementing + /// digital zoom, for example), no automatic recalculation or adjustment of + /// AF windows is performed internally if the ScalerCrop is changed. If any + /// window lies outside the output image after the scaler crop has been + /// applied, it is up to the application to recalculate them. + /// + /// The details of how the windows are used are platform dependent. 
We note + /// that when there is more than one AF window, a typical implementation + /// might find the optimal focus position for each one and finally select + /// the window where the focal distance for the objects shown in that part + /// of the image are closest to the camera. + AfWindows = AF_WINDOWS, + /// Start an autofocus scan. + /// + /// This control starts an autofocus scan when AfMode is set to AfModeAuto, + /// and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It + /// can also be used to terminate a scan early. + AfTrigger = AF_TRIGGER, + /// Pause lens movements when in continuous autofocus mode. + /// + /// This control has no effect except when in continuous autofocus mode + /// (AfModeContinuous). It can be used to pause any lens movements while + /// (for example) images are captured. The algorithm remains inactive + /// until it is instructed to resume. + AfPause = AF_PAUSE, + /// Set and report the focus lens position. + /// + /// This control instructs the lens to move to a particular position and + /// also reports back the position of the lens for each frame. + /// + /// The LensPosition control is ignored unless the AfMode is set to + /// AfModeManual, though the value is reported back unconditionally in all + /// modes. + /// + /// This value, which is generally a non-integer, is the reciprocal of the + /// focal distance in metres, also known as dioptres. That is, to set a + /// focal distance D, the lens position LP is given by + /// + /// \f$LP = \frac{1\mathrm{m}}{D}\f$ + /// + /// For example: + /// + /// - 0 moves the lens to infinity. + /// - 0.5 moves the lens to focus on objects 2m away. + /// - 2 moves the lens to focus on objects 50cm away. + /// - And larger values will focus the lens closer. 
+ /// + /// The default value of the control should indicate a good general + /// position for the lens, often corresponding to the hyperfocal distance + /// (the closest position for which objects at infinity are still + /// acceptably sharp). The minimum will often be zero (meaning infinity), + /// and the maximum value defines the closest focus position. + /// + /// \todo Define a property to report the Hyperfocal distance of calibrated + /// lenses. + LensPosition = LENS_POSITION, + /// The current state of the AF algorithm. + /// + /// This control reports the current state of the AF algorithm in + /// conjunction with the reported AfMode value and (in continuous AF mode) + /// the AfPauseState value. The possible state changes are described below, + /// though we note the following state transitions that occur when the + /// AfMode is changed. + /// + /// If the AfMode is set to AfModeManual, then the AfState will always + /// report AfStateIdle (even if the lens is subsequently moved). Changing + /// to the AfModeManual state does not initiate any lens movement. + /// + /// If the AfMode is set to AfModeAuto then the AfState will report + /// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent + /// together then AfState will omit AfStateIdle and move straight to + /// AfStateScanning (and start a scan). + /// + /// If the AfMode is set to AfModeContinuous then the AfState will + /// initially report AfStateScanning. + AfState = AF_STATE, + /// Report whether the autofocus is currently running, paused or pausing. + /// + /// This control is only applicable in continuous (AfModeContinuous) mode, + /// and reports whether the algorithm is currently running, paused or + /// pausing (that is, will pause as soon as any in-progress scan + /// completes). + /// + /// Any change to AfMode will cause AfPauseStateRunning to be reported. + AfPauseState = AF_PAUSE_STATE, + /// Set the mode to be used for High Dynamic Range (HDR) imaging. 
+ /// + /// HDR techniques typically include multiple exposure, image fusion and + /// tone mapping techniques to improve the dynamic range of the resulting + /// images. + /// + /// When using an HDR mode, images are captured with different sets of AGC + /// settings called HDR channels. Channels indicate in particular the type + /// of exposure (short, medium or long) used to capture the raw image, + /// before fusion. Each HDR image is tagged with the corresponding channel + /// using the HdrChannel control. + /// + /// \sa HdrChannel + HdrMode = HDR_MODE, + /// The HDR channel used to capture the frame. + /// + /// This value is reported back to the application so that it can discover + /// whether this capture corresponds to the short or long exposure image + /// (or any other image used by the HDR procedure). An application can + /// monitor the HDR channel to discover when the differently exposed images + /// have arrived. + /// + /// This metadata is only available when an HDR mode has been enabled. + /// + /// \sa HdrMode + HdrChannel = HDR_CHANNEL, + /// Specify a fixed gamma value. + /// + /// The default gamma value must be 2.2 which closely mimics sRGB gamma. + /// Note that this is camera gamma, so it is applied as 1.0/gamma. + Gamma = GAMMA, + /// Enable or disable the debug metadata. + DebugMetadataEnable = DEBUG_METADATA_ENABLE, + /// Control for AE metering trigger. Currently identical to + /// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. + /// + /// Whether the camera device will trigger a precapture metering sequence + /// when it processes this request. + #[cfg(feature = "vendor_draft")] + AePrecaptureTrigger = AE_PRECAPTURE_TRIGGER, + /// Control to select the noise reduction algorithm mode. Currently + /// identical to ANDROID_NOISE_REDUCTION_MODE. + /// + /// Mode of operation for the noise reduction algorithm. 
+ #[cfg(feature = "vendor_draft")] + NoiseReductionMode = NOISE_REDUCTION_MODE, + /// Control to select the color correction aberration mode. Currently + /// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. + /// + /// Mode of operation for the chromatic aberration correction algorithm. + #[cfg(feature = "vendor_draft")] + ColorCorrectionAberrationMode = COLOR_CORRECTION_ABERRATION_MODE, + /// Control to report the current AE algorithm state. Currently identical to + /// ANDROID_CONTROL_AE_STATE. + /// + /// Current state of the AE algorithm. + #[cfg(feature = "vendor_draft")] + AeState = AE_STATE, + /// Control to report the current AWB algorithm state. Currently identical + /// to ANDROID_CONTROL_AWB_STATE. + /// + /// Current state of the AWB algorithm. + #[cfg(feature = "vendor_draft")] + AwbState = AWB_STATE, + /// Control to report the time between the start of exposure of the first + /// row and the start of exposure of the last row. Currently identical to + /// ANDROID_SENSOR_ROLLING_SHUTTER_SKEW + #[cfg(feature = "vendor_draft")] + SensorRollingShutterSkew = SENSOR_ROLLING_SHUTTER_SKEW, + /// Control to report if the lens shading map is available. Currently + /// identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. + #[cfg(feature = "vendor_draft")] + LensShadingMapMode = LENS_SHADING_MAP_MODE, + /// Specifies the number of pipeline stages the frame went through from when + /// it was exposed to when the final completed result was available to the + /// framework. Always less than or equal to PipelineMaxDepth. Currently + /// identical to ANDROID_REQUEST_PIPELINE_DEPTH. + /// + /// The typical value for this control is 3 as a frame is first exposed, + /// captured and then processed in a single pass through the ISP. Any + /// additional processing step performed after the ISP pass (in example face + /// detection, additional format conversions etc) count as an additional + /// pipeline stage. 
+ #[cfg(feature = "vendor_draft")] + PipelineDepth = PIPELINE_DEPTH, + /// The maximum number of frames that can occur after a request (different + /// than the previous) has been submitted, and before the result's state + /// becomes synchronized. A value of -1 indicates unknown latency, and 0 + /// indicates per-frame control. Currently identical to + /// ANDROID_SYNC_MAX_LATENCY. + #[cfg(feature = "vendor_draft")] + MaxLatency = MAX_LATENCY, + /// Control to select the test pattern mode. Currently identical to + /// ANDROID_SENSOR_TEST_PATTERN_MODE. + #[cfg(feature = "vendor_draft")] + TestPatternMode = TEST_PATTERN_MODE, + /// Control to select the face detection mode used by the pipeline. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. + /// + /// \sa FaceDetectFaceRectangles + /// \sa FaceDetectFaceScores + /// \sa FaceDetectFaceLandmarks + /// \sa FaceDetectFaceIds + #[cfg(feature = "vendor_draft")] + FaceDetectMode = FACE_DETECT_MODE, + /// Boundary rectangles of the detected faces. The number of values is + /// the number of detected faces. + /// + /// The FaceDetectFaceRectangles control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. + #[cfg(feature = "vendor_draft")] + FaceDetectFaceRectangles = FACE_DETECT_FACE_RECTANGLES, + /// Confidence score of each of the detected faces. The range of score is + /// [0, 100]. The number of values should be the number of faces reported + /// in FaceDetectFaceRectangles. + /// + /// The FaceDetectFaceScores control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_SCORES. + #[cfg(feature = "vendor_draft")] + FaceDetectFaceScores = FACE_DETECT_FACE_SCORES, + /// Array of human face landmark coordinates in format [..., left_eye_i, + /// right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. 
The + /// number of values should be 3 * the number of faces reported in + /// FaceDetectFaceRectangles. + /// + /// The FaceDetectFaceLandmarks control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. + #[cfg(feature = "vendor_draft")] + FaceDetectFaceLandmarks = FACE_DETECT_FACE_LANDMARKS, + /// Each detected face is given a unique ID that is valid for as long as the + /// face is visible to the camera device. A face that leaves the field of + /// view and later returns may be assigned a new ID. The number of values + /// should be the number of faces reported in FaceDetectFaceRectangles. + /// + /// The FaceDetectFaceIds control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_IDS. + #[cfg(feature = "vendor_draft")] + FaceDetectFaceIds = FACE_DETECT_FACE_IDS, + /// Toggles the Raspberry Pi IPA to output the hardware generated statistics. + /// + /// When this control is set to true, the IPA outputs a binary dump of the + /// hardware generated statistics through the Request metadata in the + /// Bcm2835StatsOutput control. + /// + /// \sa Bcm2835StatsOutput + #[cfg(feature = "vendor_rpi")] + StatsOutputEnable = STATS_OUTPUT_ENABLE, + /// Span of the BCM2835 ISP generated statistics for the current frame. + /// + /// This is sent in the Request metadata if the StatsOutputEnable is set to + /// true. The statistics struct definition can be found in + /// include/linux/bcm2835-isp.h. + /// + /// \sa StatsOutputEnable + #[cfg(feature = "vendor_rpi")] + Bcm2835StatsOutput = BCM2835_STATS_OUTPUT, + /// An array of rectangles, where each singular value has identical + /// functionality to the ScalerCrop control. This control allows the + /// Raspberry Pi pipeline handler to control individual scaler crops per + /// output stream. + /// + /// The order of rectangles passed into the control must match the order of + /// streams configured by the application. 
The pipeline handler will only + /// configure crop rectangles up to the number of output streams configured. + /// All subsequent rectangles passed into this control are ignored by the + /// pipeline handler. + /// + /// If both rpi::ScalerCrops and ScalerCrop controls are present in a + /// ControlList, the latter is discarded, and crops are obtained from this + /// control. + /// + /// Note that using different crop rectangles for each output stream with + /// this control is only applicable on the Pi5/PiSP platform. This control + /// should also be considered temporary/draft and will be replaced with + /// official libcamera API support for per-stream controls in the future. + /// + /// \sa ScalerCrop + #[cfg(feature = "vendor_rpi")] + ScalerCrops = SCALER_CROPS, +} +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} +/// Enable or disable the AE. +/// +/// \sa ExposureTime AnalogueGain +#[derive(Debug, Clone)] +pub struct AeEnable(pub bool); +impl Deref for AeEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AeEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom<ControlValue> for AeEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result<Self, Self::Error> { + Ok(Self(<bool>::try_from(value)?)) + } +} +impl From<AeEnable> for ControlValue { + fn from(val: AeEnable) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AeEnable { + const ID: u32 = ControlId::AeEnable as _; +} +impl Control for AeEnable {} +/// Report the lock status of a running AE algorithm. +/// +/// If the AE algorithm is locked the value shall be set to true, if it's +/// converging it shall be set to false. 
If the AE algorithm is not +/// running the control shall not be present in the metadata control list. +/// +/// \sa AeEnable +#[derive(Debug, Clone)] +pub struct AeLocked(pub bool); +impl Deref for AeLocked { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AeLocked { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AeLocked { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AeLocked) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AeLocked { + const ID: u32 = ControlId::AeLocked as _; +} +impl Control for AeLocked {} +/// Specify a metering mode for the AE algorithm to use. +/// +/// The metering modes determine which parts of the image are used to +/// determine the scene brightness. Metering modes may be platform specific +/// and not all metering modes may be supported. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeMeteringMode { + /// Centre-weighted metering mode. + MeteringCentreWeighted = 0, + /// Spot metering mode. + MeteringSpot = 1, + /// Matrix metering mode. + MeteringMatrix = 2, + /// Custom metering mode. + MeteringCustom = 3, +} +impl TryFrom for AeMeteringMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AeMeteringMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AeMeteringMode { + const ID: u32 = ControlId::AeMeteringMode as _; +} +impl Control for AeMeteringMode {} +/// Specify a constraint mode for the AE algorithm to use. 
+/// +/// The constraint modes determine how the measured scene brightness is +/// adjusted to reach the desired target exposure. Constraint modes may be +/// platform specific, and not all constraint modes may be supported. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeConstraintMode { + /// Default constraint mode. + /// + /// This mode aims to balance the exposure of different parts of the + /// image so as to reach a reasonable average level. However, highlights + /// in the image may appear over-exposed and lowlights may appear + /// under-exposed. + ConstraintNormal = 0, + /// Highlight constraint mode. + /// + /// This mode adjusts the exposure levels in order to try and avoid + /// over-exposing the brightest parts (highlights) of an image. + /// Other non-highlight parts of the image may appear under-exposed. + ConstraintHighlight = 1, + /// Shadows constraint mode. + /// + /// This mode adjusts the exposure levels in order to try and avoid + /// under-exposing the dark parts (shadows) of an image. Other normally + /// exposed parts of the image may appear over-exposed. + ConstraintShadows = 2, + /// Custom constraint mode. + ConstraintCustom = 3, +} +impl TryFrom for AeConstraintMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AeConstraintMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AeConstraintMode { + const ID: u32 = ControlId::AeConstraintMode as _; +} +impl Control for AeConstraintMode {} +/// Specify an exposure mode for the AE algorithm to use. +/// +/// The exposure modes specify how the desired total exposure is divided +/// between the exposure time and the sensor's analogue gain. They are +/// platform specific, and not all exposure modes may be supported. 
+#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeExposureMode { + /// Default exposure mode. + ExposureNormal = 0, + /// Exposure mode allowing only short exposure times. + ExposureShort = 1, + /// Exposure mode allowing long exposure times. + ExposureLong = 2, + /// Custom exposure mode. + ExposureCustom = 3, +} +impl TryFrom for AeExposureMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AeExposureMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AeExposureMode { + const ID: u32 = ControlId::AeExposureMode as _; +} +impl Control for AeExposureMode {} +/// Specify an Exposure Value (EV) parameter. +/// +/// The EV parameter will only be applied if the AE algorithm is currently +/// enabled. +/// +/// By convention EV adjusts the exposure as log2. For example +/// EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment +/// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. +/// +/// \sa AeEnable +#[derive(Debug, Clone)] +pub struct ExposureValue(pub f32); +impl Deref for ExposureValue { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ExposureValue { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ExposureValue { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ExposureValue) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ExposureValue { + const ID: u32 = ControlId::ExposureValue as _; +} +impl Control for ExposureValue {} +/// Exposure time for the frame applied in the sensor device. +/// +/// This value is specified in micro-seconds. 
+/// +/// Setting this value means that it is now fixed and the AE algorithm may +/// not change it. Setting it back to zero returns it to the control of the +/// AE algorithm. +/// +/// \sa AnalogueGain AeEnable +/// +/// \todo Document the interactions between AeEnable and setting a fixed +/// value for this control. Consider interactions with other AE features, +/// such as aperture and aperture/shutter priority mode, and decide if +/// control of which features should be automatically adjusted shouldn't +/// better be handled through a separate AE mode control. +#[derive(Debug, Clone)] +pub struct ExposureTime(pub i32); +impl Deref for ExposureTime { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ExposureTime { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ExposureTime { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ExposureTime) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ExposureTime { + const ID: u32 = ControlId::ExposureTime as _; +} +impl Control for ExposureTime {} +/// Analogue gain value applied in the sensor device. +/// +/// The value of the control specifies the gain multiplier applied to all +/// colour channels. This value cannot be lower than 1.0. +/// +/// Setting this value means that it is now fixed and the AE algorithm may +/// not change it. Setting it back to zero returns it to the control of the +/// AE algorithm. +/// +/// \sa ExposureTime AeEnable +/// +/// \todo Document the interactions between AeEnable and setting a fixed +/// value for this control. Consider interactions with other AE features, +/// such as aperture and aperture/shutter priority mode, and decide if +/// control of which features should be automatically adjusted shouldn't +/// better be handled through a separate AE mode control. 
+#[derive(Debug, Clone)] +pub struct AnalogueGain(pub f32); +impl Deref for AnalogueGain { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AnalogueGain { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AnalogueGain { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AnalogueGain) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AnalogueGain { + const ID: u32 = ControlId::AnalogueGain as _; +} +impl Control for AnalogueGain {} +/// Set the flicker avoidance mode for AGC/AEC. +/// +/// The flicker mode determines whether, and how, the AGC/AEC algorithm +/// attempts to hide flicker effects caused by the duty cycle of artificial +/// lighting. +/// +/// Although implementation dependent, many algorithms for "flicker +/// avoidance" work by restricting this exposure time to integer multiples +/// of the cycle period, wherever possible. +/// +/// Implementations may not support all of the flicker modes listed below. +/// +/// By default the system will start in FlickerAuto mode if this is +/// supported, otherwise the flicker mode will be set to FlickerOff. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeFlickerMode { + /// No flicker avoidance is performed. + FlickerOff = 0, + /// Manual flicker avoidance. + /// + /// Suppress flicker effects caused by lighting running with a period + /// specified by the AeFlickerPeriod control. + /// \sa AeFlickerPeriod + FlickerManual = 1, + /// Automatic flicker period detection and avoidance. + /// + /// The system will automatically determine the most likely value of + /// flicker period, and avoid flicker of this frequency. 
Once flicker + /// is being corrected, it is implementation dependent whether the + /// system is still able to detect a change in the flicker period. + /// \sa AeFlickerDetected + FlickerAuto = 2, +} +impl TryFrom for AeFlickerMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AeFlickerMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AeFlickerMode { + const ID: u32 = ControlId::AeFlickerMode as _; +} +impl Control for AeFlickerMode {} +/// Manual flicker period in microseconds. +/// +/// This value sets the current flicker period to avoid. It is used when +/// AeFlickerMode is set to FlickerManual. +/// +/// To cancel 50Hz mains flicker, this should be set to 10000 (corresponding +/// to 100Hz), or 8333 (120Hz) for 60Hz mains. +/// +/// Setting the mode to FlickerManual when no AeFlickerPeriod has ever been +/// set means that no flicker cancellation occurs (until the value of this +/// control is updated). +/// +/// Switching to modes other than FlickerManual has no effect on the +/// value of the AeFlickerPeriod control. 
+/// +/// \sa AeFlickerMode +#[derive(Debug, Clone)] +pub struct AeFlickerPeriod(pub i32); +impl Deref for AeFlickerPeriod { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AeFlickerPeriod { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AeFlickerPeriod { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AeFlickerPeriod) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AeFlickerPeriod { + const ID: u32 = ControlId::AeFlickerPeriod as _; +} +impl Control for AeFlickerPeriod {} +/// Flicker period detected in microseconds. +/// +/// The value reported here indicates the currently detected flicker +/// period, or zero if no flicker at all is detected. +/// +/// When AeFlickerMode is set to FlickerAuto, there may be a period during +/// which the value reported here remains zero. Once a non-zero value is +/// reported, then this is the flicker period that has been detected and is +/// now being cancelled. +/// +/// In the case of 50Hz mains flicker, the value would be 10000 +/// (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. +/// +/// It is implementation dependent whether the system can continue to detect +/// flicker of different periods when another frequency is already being +/// cancelled. 
+/// +/// \sa AeFlickerMode +#[derive(Debug, Clone)] +pub struct AeFlickerDetected(pub i32); +impl Deref for AeFlickerDetected { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AeFlickerDetected { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AeFlickerDetected { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AeFlickerDetected) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AeFlickerDetected { + const ID: u32 = ControlId::AeFlickerDetected as _; +} +impl Control for AeFlickerDetected {} +/// Specify a fixed brightness parameter. +/// +/// Positive values (up to 1.0) produce brighter images; negative values +/// (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. +#[derive(Debug, Clone)] +pub struct Brightness(pub f32); +impl Deref for Brightness { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Brightness { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Brightness { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Brightness) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Brightness { + const ID: u32 = ControlId::Brightness as _; +} +impl Control for Brightness {} +/// Specify a fixed contrast parameter. +/// +/// Normal contrast is given by the value 1.0; larger values produce images +/// with more contrast. 
+#[derive(Debug, Clone)] +pub struct Contrast(pub f32); +impl Deref for Contrast { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Contrast { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Contrast { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Contrast) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Contrast { + const ID: u32 = ControlId::Contrast as _; +} +impl Control for Contrast {} +/// Report an estimate of the current illuminance level in lux. +/// +/// The Lux control can only be returned in metadata. +#[derive(Debug, Clone)] +pub struct Lux(pub f32); +impl Deref for Lux { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Lux { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Lux { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Lux) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Lux { + const ID: u32 = ControlId::Lux as _; +} +impl Control for Lux {} +/// Enable or disable the AWB. +/// +/// When AWB is enabled, the algorithm estimates the colour temperature of +/// the scene and computes colour gains and the colour correction matrix +/// automatically. The computed colour temperature, gains and correction +/// matrix are reported in metadata. The corresponding controls are ignored +/// if set in a request. +/// +/// When AWB is disabled, the colour temperature, gains and correction +/// matrix are not updated automatically and can be set manually in +/// requests. 
+/// +/// \sa ColourCorrectionMatrix +/// \sa ColourGains +/// \sa ColourTemperature +#[derive(Debug, Clone)] +pub struct AwbEnable(pub bool); +impl Deref for AwbEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AwbEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AwbEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AwbEnable) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AwbEnable { + const ID: u32 = ControlId::AwbEnable as _; +} +impl Control for AwbEnable {} +/// Specify the range of illuminants to use for the AWB algorithm. +/// +/// The modes supported are platform specific, and not all modes may be +/// supported. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AwbMode { + /// Search over the whole colour temperature range. + AwbAuto = 0, + /// Incandescent AWB lamp mode. + AwbIncandescent = 1, + /// Tungsten AWB lamp mode. + AwbTungsten = 2, + /// Fluorescent AWB lamp mode. + AwbFluorescent = 3, + /// Indoor AWB lighting mode. + AwbIndoor = 4, + /// Daylight AWB lighting mode. + AwbDaylight = 5, + /// Cloudy AWB lighting mode. + AwbCloudy = 6, + /// Custom AWB mode. + AwbCustom = 7, +} +impl TryFrom for AwbMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AwbMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AwbMode { + const ID: u32 = ControlId::AwbMode as _; +} +impl Control for AwbMode {} +/// Report the lock status of a running AWB algorithm. 
+/// +/// If the AWB algorithm is locked the value shall be set to true, if it's +/// converging it shall be set to false. If the AWB algorithm is not +/// running the control shall not be present in the metadata control list. +/// +/// \sa AwbEnable +#[derive(Debug, Clone)] +pub struct AwbLocked(pub bool); +impl Deref for AwbLocked { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AwbLocked { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AwbLocked { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AwbLocked) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AwbLocked { + const ID: u32 = ControlId::AwbLocked as _; +} +impl Control for AwbLocked {} +/// Pair of gain values for the Red and Blue colour channels, in that +/// order. +/// +/// ColourGains can only be applied in a Request when the AWB is disabled. +/// If ColourGains is set in a request but ColourTemperature is not, the +/// implementation shall calculate and set the ColourTemperature based on +/// the ColourGains. +/// +/// \sa AwbEnable +/// \sa ColourTemperature +#[derive(Debug, Clone)] +pub struct ColourGains(pub [f32; 2]); +impl Deref for ColourGains { + type Target = [f32; 2]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ColourGains { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ColourGains { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[f32; 2]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ColourGains) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ColourGains { + const ID: u32 = ControlId::ColourGains as _; +} +impl Control for ColourGains {} +/// ColourTemperature of the frame, in kelvin. 
+/// +/// ColourTemperature can only be applied in a Request when the AWB is +/// disabled. +/// +/// If ColourTemperature is set in a request but ColourGains is not, the +/// implementation shall calculate and set the ColourGains based on the +/// given ColourTemperature. If ColourTemperature is set (either directly, +/// or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, +/// the ColourCorrectionMatrix is updated based on the ColourTemperature. +/// +/// The ColourTemperature used to process the frame is reported in metadata. +/// +/// \sa AwbEnable +/// \sa ColourCorrectionMatrix +/// \sa ColourGains +#[derive(Debug, Clone)] +pub struct ColourTemperature(pub i32); +impl Deref for ColourTemperature { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ColourTemperature { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ColourTemperature { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ColourTemperature) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ColourTemperature { + const ID: u32 = ControlId::ColourTemperature as _; +} +impl Control for ColourTemperature {} +/// Specify a fixed saturation parameter. +/// +/// Normal saturation is given by the value 1.0; larger values produce more +/// saturated colours; 0.0 produces a greyscale image. 
+#[derive(Debug, Clone)] +pub struct Saturation(pub f32); +impl Deref for Saturation { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Saturation { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Saturation { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Saturation) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Saturation { + const ID: u32 = ControlId::Saturation as _; +} +impl Control for Saturation {} +/// Reports the sensor black levels used for processing a frame. +/// +/// The values are in the order R, Gr, Gb, B. They are returned as numbers +/// out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The +/// SensorBlackLevels control can only be returned in metadata. +#[derive(Debug, Clone)] +pub struct SensorBlackLevels(pub [i32; 4]); +impl Deref for SensorBlackLevels { + type Target = [i32; 4]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorBlackLevels { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorBlackLevels { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[i32; 4]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorBlackLevels) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorBlackLevels { + const ID: u32 = ControlId::SensorBlackLevels as _; +} +impl Control for SensorBlackLevels {} +/// Intensity of the sharpening applied to the image. +/// +/// A value of 0.0 means no sharpening. The minimum value means +/// minimal sharpening, and shall be 0.0 unless the camera can't +/// disable sharpening completely. The default value shall give a +/// "reasonable" level of sharpening, suitable for most use cases. 
+/// The maximum value may apply extremely high levels of sharpening, +/// higher than anyone could reasonably want. Negative values are +/// not allowed. Note also that sharpening is not applied to raw +/// streams. +#[derive(Debug, Clone)] +pub struct Sharpness(pub f32); +impl Deref for Sharpness { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Sharpness { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Sharpness { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Sharpness) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Sharpness { + const ID: u32 = ControlId::Sharpness as _; +} +impl Control for Sharpness {} +/// Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. +/// +/// A larger FocusFoM value indicates a more in-focus frame. This singular +/// value may be based on a combination of statistics gathered from +/// multiple focus regions within an image. The number of focus regions and +/// method of combination is platform dependent. In this respect, it is not +/// necessarily aimed at providing a way to implement a focus algorithm by +/// the application, rather an indication of how in-focus a frame is. 
+#[derive(Debug, Clone)] +pub struct FocusFoM(pub i32); +impl Deref for FocusFoM { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for FocusFoM { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for FocusFoM { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: FocusFoM) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for FocusFoM { + const ID: u32 = ControlId::FocusFoM as _; +} +impl Control for FocusFoM {} +/// The 3x3 matrix that converts camera RGB to sRGB within the imaging +/// pipeline. +/// +/// This should describe the matrix that is used after pixels have been +/// white-balanced, but before any gamma transformation. The 3x3 matrix is +/// stored in conventional reading order in an array of 9 floating point +/// values. +/// +/// ColourCorrectionMatrix can only be applied in a Request when the AWB is +/// disabled. +/// +/// \sa AwbEnable +/// \sa ColourTemperature +#[derive(Debug, Clone)] +pub struct ColourCorrectionMatrix(pub [[f32; 3]; 3]); +impl Deref for ColourCorrectionMatrix { + type Target = [[f32; 3]; 3]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ColourCorrectionMatrix { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ColourCorrectionMatrix { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[[f32; 3]; 3]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ColourCorrectionMatrix) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ColourCorrectionMatrix { + const ID: u32 = ControlId::ColourCorrectionMatrix as _; +} +impl Control for ColourCorrectionMatrix {} +/// Sets the image portion that will be scaled to form the whole of +/// the final output image. 
+/// +/// The (x,y) location of this rectangle is relative to the +/// PixelArrayActiveAreas that is being used. The units remain native +/// sensor pixels, even if the sensor is being used in a binning or +/// skipping mode. +/// +/// This control is only present when the pipeline supports scaling. Its +/// maximum valid value is given by the properties::ScalerCropMaximum +/// property, and the two can be used to implement digital zoom. +#[derive(Debug, Clone)] +pub struct ScalerCrop(pub Rectangle); +impl Deref for ScalerCrop { + type Target = Rectangle; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ScalerCrop { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ScalerCrop { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ScalerCrop) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ScalerCrop { + const ID: u32 = ControlId::ScalerCrop as _; +} +impl Control for ScalerCrop {} +/// Digital gain value applied during the processing steps applied +/// to the image as captured from the sensor. +/// +/// The global digital gain factor is applied to all the colour channels +/// of the RAW image. Different pipeline models are free to +/// specify how the global gain factor applies to each separate +/// channel. +/// +/// If an imaging pipeline applies digital gain in distinct +/// processing steps, this value indicates their total sum. +/// Pipelines are free to decide how to adjust each processing +/// step to respect the received gain factor and shall report +/// their total value in the request metadata. 
+#[derive(Debug, Clone)] +pub struct DigitalGain(pub f32); +impl Deref for DigitalGain { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for DigitalGain { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for DigitalGain { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: DigitalGain) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for DigitalGain { + const ID: u32 = ControlId::DigitalGain as _; +} +impl Control for DigitalGain {} +/// The instantaneous frame duration from start of frame exposure to start +/// of next exposure, expressed in microseconds. +/// +/// This control is meant to be returned in metadata. +#[derive(Debug, Clone)] +pub struct FrameDuration(pub i64); +impl Deref for FrameDuration { + type Target = i64; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for FrameDuration { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for FrameDuration { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: FrameDuration) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for FrameDuration { + const ID: u32 = ControlId::FrameDuration as _; +} +impl Control for FrameDuration {} +/// The minimum and maximum (in that order) frame duration, expressed in +/// microseconds. +/// +/// When provided by applications, the control specifies the sensor frame +/// duration interval the pipeline has to use. This limits the largest +/// exposure time the sensor can use. For example, if a maximum frame +/// duration of 33ms is requested (corresponding to 30 frames per second), +/// the sensor will not be able to raise the exposure time above 33ms. 
+/// A fixed frame duration is achieved by setting the minimum and maximum +/// values to be the same. Setting both values to 0 reverts to using the +/// camera defaults. +/// +/// The maximum frame duration provides the absolute limit to the exposure +/// time computed by the AE algorithm and it overrides any exposure mode +/// setting specified with controls::AeExposureMode. Similarly, when a +/// manual exposure time is set through controls::ExposureTime, it also +/// gets clipped to the limits set by this control. When reported in +/// metadata, the control expresses the minimum and maximum frame durations +/// used after being clipped to the sensor provided frame duration limits. +/// +/// \sa AeExposureMode +/// \sa ExposureTime +/// +/// \todo Define how to calculate the capture frame rate by +/// defining controls to report additional delays introduced by +/// the capture pipeline or post-processing stages (ie JPEG +/// conversion, frame scaling). +/// +/// \todo Provide an explicit definition of default control values, for +/// this and all other controls. +#[derive(Debug, Clone)] +pub struct FrameDurationLimits(pub [i64; 2]); +impl Deref for FrameDurationLimits { + type Target = [i64; 2]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for FrameDurationLimits { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for FrameDurationLimits { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[i64; 2]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: FrameDurationLimits) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for FrameDurationLimits { + const ID: u32 = ControlId::FrameDurationLimits as _; +} +impl Control for FrameDurationLimits {} +/// Temperature measure from the camera sensor in Celsius. +/// +/// This value is typically obtained by a thermal sensor present on-die or +/// in the camera module. 
The range of reported temperatures is device +/// dependent. +/// +/// The SensorTemperature control will only be returned in metadata if a +/// thermal sensor is present. +#[derive(Debug, Clone)] +pub struct SensorTemperature(pub f32); +impl Deref for SensorTemperature { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorTemperature { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorTemperature { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorTemperature) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorTemperature { + const ID: u32 = ControlId::SensorTemperature as _; +} +impl Control for SensorTemperature {} +/// The time when the first row of the image sensor active array is exposed. +/// +/// The timestamp, expressed in nanoseconds, represents a monotonically +/// increasing counter since the system boot time, as defined by the +/// Linux-specific CLOCK_BOOTTIME clock id. +/// +/// The SensorTimestamp control can only be returned in metadata. +/// +/// \todo Define how the sensor timestamp has to be used in the reprocessing +/// use case. 
+#[derive(Debug, Clone)] +pub struct SensorTimestamp(pub i64); +impl Deref for SensorTimestamp { + type Target = i64; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorTimestamp { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorTimestamp { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorTimestamp) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorTimestamp { + const ID: u32 = ControlId::SensorTimestamp as _; +} +impl Control for SensorTimestamp {} +/// The mode of the AF (autofocus) algorithm. +/// +/// An implementation may choose not to implement all the modes. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfMode { + /// The AF algorithm is in manual mode. + /// + /// In this mode it will never perform any action nor move the lens of + /// its own accord, but an application can specify the desired lens + /// position using the LensPosition control. The AfState will always + /// report AfStateIdle. + /// + /// If the camera is started in AfModeManual, it will move the focus + /// lens to the position specified by the LensPosition control. + /// + /// This mode is the recommended default value for the AfMode control. + /// External cameras (as reported by the Location property set to + /// CameraLocationExternal) may use a different default value. + Manual = 0, + /// The AF algorithm is in auto mode. + /// + /// In this mode the algorithm will never move the lens or change state + /// unless the AfTrigger control is used. The AfTrigger control can be + /// used to initiate a focus scan, the results of which will be + /// reported by AfState. 
+ /// + /// If the autofocus algorithm is moved from AfModeAuto to another mode + /// while a scan is in progress, the scan is cancelled immediately, + /// without waiting for the scan to finish. + /// + /// When first entering this mode the AfState will report AfStateIdle. + /// When a trigger control is sent, AfState will report AfStateScanning + /// for a period before spontaneously changing to AfStateFocused or + /// AfStateFailed, depending on the outcome of the scan. It will remain + /// in this state until another scan is initiated by the AfTrigger + /// control. If a scan is cancelled (without changing to another mode), + /// AfState will return to AfStateIdle. + Auto = 1, + /// The AF algorithm is in continuous mode. + /// + /// In this mode the lens can re-start a scan spontaneously at any + /// moment, without any user intervention. The AfState still reports + /// whether the algorithm is currently scanning or not, though the + /// application has no ability to initiate or cancel scans, nor to move + /// the lens for itself. + /// + /// However, applications can pause the AF algorithm from continuously + /// scanning by using the AfPause control. This allows video or still + /// images to be captured whilst guaranteeing that the focus is fixed. + /// + /// When set to AfModeContinuous, the system will immediately initiate a + /// scan so AfState will report AfStateScanning, and will settle on one + /// of AfStateFocused or AfStateFailed, depending on the scan result. + Continuous = 2, +} +impl TryFrom for AfMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfMode { + const ID: u32 = ControlId::AfMode as _; +} +impl Control for AfMode {} +/// The range of focus distances that is scanned. +/// +/// An implementation may choose not to implement all the options here. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfRange { + /// A wide range of focus distances is scanned. + /// + /// Scanned distances cover all the way from infinity down to close + /// distances, though depending on the implementation, possibly not + /// including the very closest macro positions. + Normal = 0, + /// Only close distances are scanned. + Macro = 1, + /// The full range of focus distances is scanned. + /// + /// This range is similar to AfRangeNormal but includes the very + /// closest macro positions. + Full = 2, +} +impl TryFrom for AfRange { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfRange) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfRange { + const ID: u32 = ControlId::AfRange as _; +} +impl Control for AfRange {} +/// Determine whether the AF is to move the lens as quickly as possible or +/// more steadily. +/// +/// For example, during video recording it may be desirable not to move the +/// lens too abruptly, but when in a preview mode (waiting for a still +/// capture) it may be helpful to move the lens as quickly as is reasonably +/// possible. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfSpeed { + /// Move the lens at its usual speed. + Normal = 0, + /// Move the lens more quickly. 
+ Fast = 1, +} +impl TryFrom for AfSpeed { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfSpeed) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfSpeed { + const ID: u32 = ControlId::AfSpeed as _; +} +impl Control for AfSpeed {} +/// The parts of the image used by the AF algorithm to measure focus. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfMetering { + /// Let the AF algorithm decide for itself where it will measure focus. + Auto = 0, + /// Use the rectangles defined by the AfWindows control to measure focus. + /// + /// If no windows are specified the behaviour is platform dependent. + Windows = 1, +} +impl TryFrom for AfMetering { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfMetering) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfMetering { + const ID: u32 = ControlId::AfMetering as _; +} +impl Control for AfMetering {} +/// The focus windows used by the AF algorithm when AfMetering is set to +/// AfMeteringWindows. +/// +/// The units used are pixels within the rectangle returned by the +/// ScalerCropMaximum property. +/// +/// In order to be activated, a rectangle must be programmed with non-zero +/// width and height. Internally, these rectangles are intersected with the +/// ScalerCropMaximum rectangle. If the window becomes empty after this +/// operation, then the window is ignored. If all the windows end up being +/// ignored, then the behaviour is platform dependent. 
+/// +/// On platforms that support the ScalerCrop control (for implementing +/// digital zoom, for example), no automatic recalculation or adjustment of +/// AF windows is performed internally if the ScalerCrop is changed. If any +/// window lies outside the output image after the scaler crop has been +/// applied, it is up to the application to recalculate them. +/// +/// The details of how the windows are used are platform dependent. We note +/// that when there is more than one AF window, a typical implementation +/// might find the optimal focus position for each one and finally select +/// the window where the focal distance for the objects shown in that part +/// of the image are closest to the camera. +#[derive(Debug, Clone)] +pub struct AfWindows(pub Vec); +impl Deref for AfWindows { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AfWindows { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AfWindows { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AfWindows) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AfWindows { + const ID: u32 = ControlId::AfWindows as _; +} +impl Control for AfWindows {} +/// Start an autofocus scan. +/// +/// This control starts an autofocus scan when AfMode is set to AfModeAuto, +/// and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It +/// can also be used to terminate a scan early. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfTrigger { + /// Start an AF scan. + /// + /// Setting the control to AfTriggerStart is ignored if a scan is in + /// progress. + Start = 0, + /// Cancel an AF scan. + /// + /// This does not cause the lens to move anywhere else. Ignored if no + /// scan is in progress. 
+ Cancel = 1, +} +impl TryFrom for AfTrigger { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfTrigger) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfTrigger { + const ID: u32 = ControlId::AfTrigger as _; +} +impl Control for AfTrigger {} +/// Pause lens movements when in continuous autofocus mode. +/// +/// This control has no effect except when in continuous autofocus mode +/// (AfModeContinuous). It can be used to pause any lens movements while +/// (for example) images are captured. The algorithm remains inactive +/// until it is instructed to resume. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfPause { + /// Pause the continuous autofocus algorithm immediately. + /// + /// The autofocus algorithm is paused whether or not any kind of scan + /// is underway. AfPauseState will subsequently report + /// AfPauseStatePaused. AfState may report any of AfStateScanning, + /// AfStateFocused or AfStateFailed, depending on the algorithm's state + /// when it received this control. + Immediate = 0, + /// Pause the continuous autofocus algorithm at the end of the scan. + /// + /// This is similar to AfPauseImmediate, and if the AfState is + /// currently reporting AfStateFocused or AfStateFailed it will remain + /// in that state and AfPauseState will report AfPauseStatePaused. + /// + /// However, if the algorithm is scanning (AfStateScanning), + /// AfPauseState will report AfPauseStatePausing until the scan is + /// finished, at which point AfState will report one of AfStateFocused + /// or AfStateFailed, and AfPauseState will change to + /// AfPauseStatePaused. + Deferred = 1, + /// Resume continuous autofocus operation. 
+ /// + /// The algorithm starts again from exactly where it left off, and + /// AfPauseState will report AfPauseStateRunning. + Resume = 2, +} +impl TryFrom for AfPause { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfPause) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfPause { + const ID: u32 = ControlId::AfPause as _; +} +impl Control for AfPause {} +/// Set and report the focus lens position. +/// +/// This control instructs the lens to move to a particular position and +/// also reports back the position of the lens for each frame. +/// +/// The LensPosition control is ignored unless the AfMode is set to +/// AfModeManual, though the value is reported back unconditionally in all +/// modes. +/// +/// This value, which is generally a non-integer, is the reciprocal of the +/// focal distance in metres, also known as dioptres. That is, to set a +/// focal distance D, the lens position LP is given by +/// +/// \f$LP = \frac{1\mathrm{m}}{D}\f$ +/// +/// For example: +/// +/// - 0 moves the lens to infinity. +/// - 0.5 moves the lens to focus on objects 2m away. +/// - 2 moves the lens to focus on objects 50cm away. +/// - And larger values will focus the lens closer. +/// +/// The default value of the control should indicate a good general +/// position for the lens, often corresponding to the hyperfocal distance +/// (the closest position for which objects at infinity are still +/// acceptably sharp). The minimum will often be zero (meaning infinity), +/// and the maximum value defines the closest focus position. +/// +/// \todo Define a property to report the Hyperfocal distance of calibrated +/// lenses. 
+#[derive(Debug, Clone)] +pub struct LensPosition(pub f32); +impl Deref for LensPosition { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for LensPosition { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for LensPosition { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: LensPosition) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for LensPosition { + const ID: u32 = ControlId::LensPosition as _; +} +impl Control for LensPosition {} +/// The current state of the AF algorithm. +/// +/// This control reports the current state of the AF algorithm in +/// conjunction with the reported AfMode value and (in continuous AF mode) +/// the AfPauseState value. The possible state changes are described below, +/// though we note the following state transitions that occur when the +/// AfMode is changed. +/// +/// If the AfMode is set to AfModeManual, then the AfState will always +/// report AfStateIdle (even if the lens is subsequently moved). Changing +/// to the AfModeManual state does not initiate any lens movement. +/// +/// If the AfMode is set to AfModeAuto then the AfState will report +/// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent +/// together then AfState will omit AfStateIdle and move straight to +/// AfStateScanning (and start a scan). +/// +/// If the AfMode is set to AfModeContinuous then the AfState will +/// initially report AfStateScanning. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfState { + /// The AF algorithm is in manual mode (AfModeManual) or in auto mode + /// (AfModeAuto) and a scan has not yet been triggered, or an + /// in-progress scan was cancelled. 
+ Idle = 0, + /// The AF algorithm is in auto mode (AfModeAuto), and a scan has been + /// started using the AfTrigger control. + /// + /// The scan can be cancelled by sending AfTriggerCancel at which point + /// the algorithm will either move back to AfStateIdle or, if the scan + /// actually completes before the cancel request is processed, to one + /// of AfStateFocused or AfStateFailed. + /// + /// Alternatively the AF algorithm could be in continuous mode + /// (AfModeContinuous) at which point it may enter this state + /// spontaneously whenever it determines that a rescan is needed. + Scanning = 1, + /// The AF algorithm is in auto (AfModeAuto) or continuous + /// (AfModeContinuous) mode and a scan has completed with the result + /// that the algorithm believes the image is now in focus. + Focused = 2, + /// The AF algorithm is in auto (AfModeAuto) or continuous + /// (AfModeContinuous) mode and a scan has completed with the result + /// that the algorithm did not find a good focus position. + Failed = 3, +} +impl TryFrom for AfState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfState) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfState { + const ID: u32 = ControlId::AfState as _; +} +impl Control for AfState {} +/// Report whether the autofocus is currently running, paused or pausing. +/// +/// This control is only applicable in continuous (AfModeContinuous) mode, +/// and reports whether the algorithm is currently running, paused or +/// pausing (that is, will pause as soon as any in-progress scan +/// completes). +/// +/// Any change to AfMode will cause AfPauseStateRunning to be reported. 
+#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfPauseState { + /// Continuous AF is running and the algorithm may restart a scan + /// spontaneously. + Running = 0, + /// Continuous AF has been sent an AfPauseDeferred control, and will + /// pause as soon as any in-progress scan completes. + /// + /// When the scan completes, the AfPauseState control will report + /// AfPauseStatePaused. No new scans will be start spontaneously until + /// the AfPauseResume control is sent. + Pausing = 1, + /// Continuous AF is paused. + /// + /// No further state changes or lens movements will occur until the + /// AfPauseResume control is sent. + Paused = 2, +} +impl TryFrom for AfPauseState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfPauseState) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfPauseState { + const ID: u32 = ControlId::AfPauseState as _; +} +impl Control for AfPauseState {} +/// Set the mode to be used for High Dynamic Range (HDR) imaging. +/// +/// HDR techniques typically include multiple exposure, image fusion and +/// tone mapping techniques to improve the dynamic range of the resulting +/// images. +/// +/// When using an HDR mode, images are captured with different sets of AGC +/// settings called HDR channels. Channels indicate in particular the type +/// of exposure (short, medium or long) used to capture the raw image, +/// before fusion. Each HDR image is tagged with the corresponding channel +/// using the HdrChannel control. +/// +/// \sa HdrChannel +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum HdrMode { + /// HDR is disabled. + /// + /// Metadata for this frame will not include the HdrChannel control. 
+ Off = 0, + /// Multiple exposures will be generated in an alternating fashion. + /// + /// The multiple exposures will not be merged together and will be + /// returned to the application as they are. Each image will be tagged + /// with the correct HDR channel, indicating what kind of exposure it + /// is. The tag should be the same as in the HdrModeMultiExposure case. + /// + /// The expectation is that an application using this mode would merge + /// the frames to create HDR images for itself if it requires them. + MultiExposureUnmerged = 1, + /// Multiple exposures will be generated and merged to create HDR + /// images. + /// + /// Each image will be tagged with the HDR channel (long, medium or + /// short) that arrived and which caused this image to be output. + /// + /// Systems that use two channels for HDR will return images tagged + /// alternately as the short and long channel. Systems that use three + /// channels for HDR will cycle through the short, medium and long + /// channel before repeating. + MultiExposure = 2, + /// Multiple frames all at a single exposure will be used to create HDR + /// images. + /// + /// These images should be reported as all corresponding to the HDR + /// short channel. + SingleExposure = 3, + /// Multiple frames will be combined to produce "night mode" images. + /// + /// It is up to the implementation exactly which HDR channels it uses, + /// and the images will all be tagged accordingly with the correct HDR + /// channel information. + Night = 4, +} +impl TryFrom for HdrMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: HdrMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for HdrMode { + const ID: u32 = ControlId::HdrMode as _; +} +impl Control for HdrMode {} +/// The HDR channel used to capture the frame. +/// +/// This value is reported back to the application so that it can discover +/// whether this capture corresponds to the short or long exposure image +/// (or any other image used by the HDR procedure). An application can +/// monitor the HDR channel to discover when the differently exposed images +/// have arrived. +/// +/// This metadata is only available when an HDR mode has been enabled. +/// +/// \sa HdrMode +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum HdrChannel { + /// This image does not correspond to any of the captures used to create + /// an HDR image. + None = 0, + /// This is a short exposure image. + Short = 1, + /// This is a medium exposure image. + Medium = 2, + /// This is a long exposure image. + Long = 3, +} +impl TryFrom for HdrChannel { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: HdrChannel) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for HdrChannel { + const ID: u32 = ControlId::HdrChannel as _; +} +impl Control for HdrChannel {} +/// Specify a fixed gamma value. +/// +/// The default gamma value must be 2.2 which closely mimics sRGB gamma. +/// Note that this is camera gamma, so it is applied as 1.0/gamma. 
+#[derive(Debug, Clone)] +pub struct Gamma(pub f32); +impl Deref for Gamma { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Gamma { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Gamma { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Gamma) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Gamma { + const ID: u32 = ControlId::Gamma as _; +} +impl Control for Gamma {} +/// Enable or disable the debug metadata. +#[derive(Debug, Clone)] +pub struct DebugMetadataEnable(pub bool); +impl Deref for DebugMetadataEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for DebugMetadataEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for DebugMetadataEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: DebugMetadataEnable) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for DebugMetadataEnable { + const ID: u32 = ControlId::DebugMetadataEnable as _; +} +impl Control for DebugMetadataEnable {} +/// Control for AE metering trigger. Currently identical to +/// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. +/// +/// Whether the camera device will trigger a precapture metering sequence +/// when it processes this request. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AePrecaptureTrigger { + /// The trigger is idle. + Idle = 0, + /// The pre-capture AE metering is started by the camera. + Start = 1, + /// The camera will cancel any active or completed metering sequence. + /// The AE algorithm is reset to its initial state. 
+ Cancel = 2, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for AePrecaptureTrigger { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: AePrecaptureTrigger) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for AePrecaptureTrigger { + const ID: u32 = ControlId::AePrecaptureTrigger as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for AePrecaptureTrigger {} +/// Control to select the noise reduction algorithm mode. Currently +/// identical to ANDROID_NOISE_REDUCTION_MODE. +/// +/// Mode of operation for the noise reduction algorithm. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum NoiseReductionMode { + /// No noise reduction is applied + Off = 0, + /// Noise reduction is applied without reducing the frame rate. + Fast = 1, + /// High quality noise reduction at the expense of frame rate. + HighQuality = 2, + /// Minimal noise reduction is applied without reducing the frame rate. + Minimal = 3, + /// Noise reduction is applied at different levels to different streams. + ZSL = 4, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for NoiseReductionMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: NoiseReductionMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for NoiseReductionMode { + const ID: u32 = ControlId::NoiseReductionMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for NoiseReductionMode {} +/// Control to select the color correction aberration mode. Currently +/// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. +/// +/// Mode of operation for the chromatic aberration correction algorithm. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum ColorCorrectionAberrationMode { + /// No aberration correction is applied. + ColorCorrectionAberrationOff = 0, + /// Aberration correction will not slow down the frame rate. + ColorCorrectionAberrationFast = 1, + /// High quality aberration correction which might reduce the frame + /// rate. + ColorCorrectionAberrationHighQuality = 2, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for ColorCorrectionAberrationMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: ColorCorrectionAberrationMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for ColorCorrectionAberrationMode { + const ID: u32 = ControlId::ColorCorrectionAberrationMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for ColorCorrectionAberrationMode {} +/// Control to report the current AE algorithm state. Currently identical to +/// ANDROID_CONTROL_AE_STATE. +/// +/// Current state of the AE algorithm. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeState { + /// The AE algorithm is inactive. + Inactive = 0, + /// The AE algorithm has not converged yet. + Searching = 1, + /// The AE algorithm has converged. + Converged = 2, + /// The AE algorithm is locked. + Locked = 3, + /// The AE algorithm would need a flash for good results + FlashRequired = 4, + /// The AE algorithm has started a pre-capture metering session. + /// \sa AePrecaptureTrigger + Precapture = 5, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for AeState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: AeState) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for AeState { + const ID: u32 = ControlId::AeState as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for AeState {} +/// Control to report the current AWB algorithm state. Currently identical +/// to ANDROID_CONTROL_AWB_STATE. +/// +/// Current state of the AWB algorithm. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AwbState { + /// The AWB algorithm is inactive. + Inactive = 0, + /// The AWB algorithm has not converged yet. + Searching = 1, + /// The AWB algorithm has converged. + AwbConverged = 2, + /// The AWB algorithm is locked. + AwbLocked = 3, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for AwbState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: AwbState) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for AwbState { + const ID: u32 = ControlId::AwbState as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for AwbState {} +/// Control to report the time between the start of exposure of the first +/// row and the start of exposure of the last row. Currently identical to +/// ANDROID_SENSOR_ROLLING_SHUTTER_SKEW +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct SensorRollingShutterSkew(pub i64); +#[cfg(feature = "vendor_draft")] +impl Deref for SensorRollingShutterSkew { + type Target = i64; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for SensorRollingShutterSkew { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for SensorRollingShutterSkew { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: SensorRollingShutterSkew) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for SensorRollingShutterSkew { + const ID: u32 = ControlId::SensorRollingShutterSkew as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for SensorRollingShutterSkew {} +/// Control to report if the lens shading map is available. Currently +/// identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum LensShadingMapMode { + /// No lens shading map mode is available. + Off = 0, + /// The lens shading map mode is available. 
+ On = 1, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for LensShadingMapMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: LensShadingMapMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for LensShadingMapMode { + const ID: u32 = ControlId::LensShadingMapMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for LensShadingMapMode {} +/// Specifies the number of pipeline stages the frame went through from when +/// it was exposed to when the final completed result was available to the +/// framework. Always less than or equal to PipelineMaxDepth. Currently +/// identical to ANDROID_REQUEST_PIPELINE_DEPTH. +/// +/// The typical value for this control is 3 as a frame is first exposed, +/// captured and then processed in a single pass through the ISP. Any +/// additional processing step performed after the ISP pass (in example face +/// detection, additional format conversions etc) count as an additional +/// pipeline stage. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct PipelineDepth(pub i32); +#[cfg(feature = "vendor_draft")] +impl Deref for PipelineDepth { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for PipelineDepth { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for PipelineDepth { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: PipelineDepth) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for PipelineDepth { + const ID: u32 = ControlId::PipelineDepth as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for PipelineDepth {} +/// The maximum number of frames that can occur after a request (different +/// than the previous) has been submitted, and before the result's state +/// becomes synchronized. A value of -1 indicates unknown latency, and 0 +/// indicates per-frame control. Currently identical to +/// ANDROID_SYNC_MAX_LATENCY. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct MaxLatency(pub i32); +#[cfg(feature = "vendor_draft")] +impl Deref for MaxLatency { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for MaxLatency { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for MaxLatency { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: MaxLatency) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for MaxLatency { + const ID: u32 = ControlId::MaxLatency as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for MaxLatency {} +/// Control to select the test pattern mode. Currently identical to +/// ANDROID_SENSOR_TEST_PATTERN_MODE. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum TestPatternMode { + /// No test pattern mode is used. The camera device returns frames from + /// the image sensor. + Off = 0, + /// Each pixel in [R, G_even, G_odd, B] is replaced by its respective + /// color channel provided in test pattern data. + /// \todo Add control for test pattern data. + SolidColor = 1, + /// All pixel data is replaced with an 8-bar color pattern. The vertical + /// bars (left-to-right) are as follows; white, yellow, cyan, green, + /// magenta, red, blue and black. Each bar should take up 1/8 of the + /// sensor pixel array width. When this is not possible, the bar size + /// should be rounded down to the nearest integer and the pattern can + /// repeat on the right side. Each bar's height must always take up the + /// full sensor pixel array height. 
+ ColorBars = 2, + /// The test pattern is similar to TestPatternModeColorBars, + /// except that each bar should start at its specified color at the top + /// and fade to gray at the bottom. Furthermore each bar is further + /// subdevided into a left and right half. The left half should have a + /// smooth gradient, and the right half should have a quantized + /// gradient. In particular, the right half's should consist of blocks + /// of the same color for 1/16th active sensor pixel array width. The + /// least significant bits in the quantized gradient should be copied + /// from the most significant bits of the smooth gradient. The height of + /// each bar should always be a multiple of 128. When this is not the + /// case, the pattern should repeat at the bottom of the image. + ColorBarsFadeToGray = 3, + /// All pixel data is replaced by a pseudo-random sequence generated + /// from a PN9 512-bit sequence (typically implemented in hardware with + /// a linear feedback shift register). The generator should be reset at + /// the beginning of each frame, and thus each subsequent raw frame with + /// this test pattern should be exactly the same as the last. + Pn9 = 4, + /// The first custom test pattern. All custom patterns that are + /// available only on this camera device are at least this numeric + /// value. All of the custom test patterns will be static (that is the + /// raw image must not vary from frame to frame). + Custom1 = 256, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for TestPatternMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: TestPatternMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for TestPatternMode { + const ID: u32 = ControlId::TestPatternMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for TestPatternMode {} +/// Control to select the face detection mode used by the pipeline. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. +/// +/// \sa FaceDetectFaceRectangles +/// \sa FaceDetectFaceScores +/// \sa FaceDetectFaceLandmarks +/// \sa FaceDetectFaceIds +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum FaceDetectMode { + /// Pipeline doesn't perform face detection and doesn't report any + /// control related to face detection. + Off = 0, + /// Pipeline performs face detection and reports the + /// FaceDetectFaceRectangles and FaceDetectFaceScores controls for each + /// detected face. FaceDetectFaceLandmarks and FaceDetectFaceIds are + /// optional. + Simple = 1, + /// Pipeline performs face detection and reports all the controls + /// related to face detection including FaceDetectFaceRectangles, + /// FaceDetectFaceScores, FaceDetectFaceLandmarks, and + /// FaceDeteceFaceIds for each detected face. + Full = 2, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectMode { + const ID: u32 = ControlId::FaceDetectMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectMode {} +/// Boundary rectangles of the detected faces. The number of values is +/// the number of detected faces. +/// +/// The FaceDetectFaceRectangles control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceRectangles(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceRectangles { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceRectangles { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceRectangles { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceRectangles) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceRectangles { + const ID: u32 = ControlId::FaceDetectFaceRectangles as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceRectangles {} +/// Confidence score of each of the detected faces. The range of score is +/// [0, 100]. The number of values should be the number of faces reported +/// in FaceDetectFaceRectangles. +/// +/// The FaceDetectFaceScores control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_SCORES. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceScores(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceScores { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceScores { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceScores { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceScores) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceScores { + const ID: u32 = ControlId::FaceDetectFaceScores as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceScores {} +/// Array of human face landmark coordinates in format [..., left_eye_i, +/// right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. The +/// number of values should be 3 * the number of faces reported in +/// FaceDetectFaceRectangles. +/// +/// The FaceDetectFaceLandmarks control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceLandmarks(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceLandmarks { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceLandmarks { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceLandmarks { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceLandmarks) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceLandmarks { + const ID: u32 = ControlId::FaceDetectFaceLandmarks as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceLandmarks {} +/// Each detected face is given a unique ID that is valid for as long as the +/// face is visible to the camera device. A face that leaves the field of +/// view and later returns may be assigned a new ID. The number of values +/// should be the number of faces reported in FaceDetectFaceRectangles. +/// +/// The FaceDetectFaceIds control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_IDS. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceIds(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceIds { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceIds { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceIds { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceIds) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceIds { + const ID: u32 = ControlId::FaceDetectFaceIds as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceIds {} +/// Toggles the Raspberry Pi IPA to output the hardware generated statistics. +/// +/// When this control is set to true, the IPA outputs a binary dump of the +/// hardware generated statistics through the Request metadata in the +/// Bcm2835StatsOutput control. 
+/// +/// \sa Bcm2835StatsOutput +#[cfg(feature = "vendor_rpi")] +#[derive(Debug, Clone)] +pub struct StatsOutputEnable(pub bool); +#[cfg(feature = "vendor_rpi")] +impl Deref for StatsOutputEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl DerefMut for StatsOutputEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl TryFrom for StatsOutputEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_rpi")] +impl From for ControlValue { + fn from(val: StatsOutputEnable) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_rpi")] +impl ControlEntry for StatsOutputEnable { + const ID: u32 = ControlId::StatsOutputEnable as _; +} +#[cfg(feature = "vendor_rpi")] +impl Control for StatsOutputEnable {} +/// Span of the BCM2835 ISP generated statistics for the current frame. +/// +/// This is sent in the Request metadata if the StatsOutputEnable is set to +/// true. The statistics struct definition can be found in +/// include/linux/bcm2835-isp.h. 
+/// +/// \sa StatsOutputEnable +#[cfg(feature = "vendor_rpi")] +#[derive(Debug, Clone)] +pub struct Bcm2835StatsOutput(pub Vec); +#[cfg(feature = "vendor_rpi")] +impl Deref for Bcm2835StatsOutput { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl DerefMut for Bcm2835StatsOutput { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl TryFrom for Bcm2835StatsOutput { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_rpi")] +impl From for ControlValue { + fn from(val: Bcm2835StatsOutput) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_rpi")] +impl ControlEntry for Bcm2835StatsOutput { + const ID: u32 = ControlId::Bcm2835StatsOutput as _; +} +#[cfg(feature = "vendor_rpi")] +impl Control for Bcm2835StatsOutput {} +/// An array of rectangles, where each singular value has identical +/// functionality to the ScalerCrop control. This control allows the +/// Raspberry Pi pipeline handler to control individual scaler crops per +/// output stream. +/// +/// The order of rectangles passed into the control must match the order of +/// streams configured by the application. The pipeline handler will only +/// configure crop retangles up-to the number of output streams configured. +/// All subsequent rectangles passed into this control are ignored by the +/// pipeline handler. +/// +/// If both rpi::ScalerCrops and ScalerCrop controls are present in a +/// ControlList, the latter is discarded, and crops are obtained from this +/// control. +/// +/// Note that using different crop rectangles for each output stream with +/// this control is only applicable on the Pi5/PiSP platform. This control +/// should also be considered temporary/draft and will be replaced with +/// official libcamera API support for per-stream controls in the future. 
+/// +/// \sa ScalerCrop +#[cfg(feature = "vendor_rpi")] +#[derive(Debug, Clone)] +pub struct ScalerCrops(pub Vec); +#[cfg(feature = "vendor_rpi")] +impl Deref for ScalerCrops { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl DerefMut for ScalerCrops { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl TryFrom for ScalerCrops { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_rpi")] +impl From for ControlValue { + fn from(val: ScalerCrops) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_rpi")] +impl ControlEntry for ScalerCrops { + const ID: u32 = ControlId::ScalerCrops as _; +} +#[cfg(feature = "vendor_rpi")] +impl Control for ScalerCrops {} +pub fn make_dyn( + id: ControlId, + val: ControlValue, +) -> Result, ControlValueError> { + match id { + ControlId::AeEnable => Ok(Box::new(AeEnable::try_from(val)?)), + ControlId::AeLocked => Ok(Box::new(AeLocked::try_from(val)?)), + ControlId::AeMeteringMode => Ok(Box::new(AeMeteringMode::try_from(val)?)), + ControlId::AeConstraintMode => Ok(Box::new(AeConstraintMode::try_from(val)?)), + ControlId::AeExposureMode => Ok(Box::new(AeExposureMode::try_from(val)?)), + ControlId::ExposureValue => Ok(Box::new(ExposureValue::try_from(val)?)), + ControlId::ExposureTime => Ok(Box::new(ExposureTime::try_from(val)?)), + ControlId::AnalogueGain => Ok(Box::new(AnalogueGain::try_from(val)?)), + ControlId::AeFlickerMode => Ok(Box::new(AeFlickerMode::try_from(val)?)), + ControlId::AeFlickerPeriod => Ok(Box::new(AeFlickerPeriod::try_from(val)?)), + ControlId::AeFlickerDetected => Ok(Box::new(AeFlickerDetected::try_from(val)?)), + ControlId::Brightness => Ok(Box::new(Brightness::try_from(val)?)), + ControlId::Contrast => Ok(Box::new(Contrast::try_from(val)?)), + ControlId::Lux => 
Ok(Box::new(Lux::try_from(val)?)), + ControlId::AwbEnable => Ok(Box::new(AwbEnable::try_from(val)?)), + ControlId::AwbMode => Ok(Box::new(AwbMode::try_from(val)?)), + ControlId::AwbLocked => Ok(Box::new(AwbLocked::try_from(val)?)), + ControlId::ColourGains => Ok(Box::new(ColourGains::try_from(val)?)), + ControlId::ColourTemperature => Ok(Box::new(ColourTemperature::try_from(val)?)), + ControlId::Saturation => Ok(Box::new(Saturation::try_from(val)?)), + ControlId::SensorBlackLevels => Ok(Box::new(SensorBlackLevels::try_from(val)?)), + ControlId::Sharpness => Ok(Box::new(Sharpness::try_from(val)?)), + ControlId::FocusFoM => Ok(Box::new(FocusFoM::try_from(val)?)), + ControlId::ColourCorrectionMatrix => { + Ok(Box::new(ColourCorrectionMatrix::try_from(val)?)) + } + ControlId::ScalerCrop => Ok(Box::new(ScalerCrop::try_from(val)?)), + ControlId::DigitalGain => Ok(Box::new(DigitalGain::try_from(val)?)), + ControlId::FrameDuration => Ok(Box::new(FrameDuration::try_from(val)?)), + ControlId::FrameDurationLimits => { + Ok(Box::new(FrameDurationLimits::try_from(val)?)) + } + ControlId::SensorTemperature => Ok(Box::new(SensorTemperature::try_from(val)?)), + ControlId::SensorTimestamp => Ok(Box::new(SensorTimestamp::try_from(val)?)), + ControlId::AfMode => Ok(Box::new(AfMode::try_from(val)?)), + ControlId::AfRange => Ok(Box::new(AfRange::try_from(val)?)), + ControlId::AfSpeed => Ok(Box::new(AfSpeed::try_from(val)?)), + ControlId::AfMetering => Ok(Box::new(AfMetering::try_from(val)?)), + ControlId::AfWindows => Ok(Box::new(AfWindows::try_from(val)?)), + ControlId::AfTrigger => Ok(Box::new(AfTrigger::try_from(val)?)), + ControlId::AfPause => Ok(Box::new(AfPause::try_from(val)?)), + ControlId::LensPosition => Ok(Box::new(LensPosition::try_from(val)?)), + ControlId::AfState => Ok(Box::new(AfState::try_from(val)?)), + ControlId::AfPauseState => Ok(Box::new(AfPauseState::try_from(val)?)), + ControlId::HdrMode => Ok(Box::new(HdrMode::try_from(val)?)), + ControlId::HdrChannel => 
Ok(Box::new(HdrChannel::try_from(val)?)), + ControlId::Gamma => Ok(Box::new(Gamma::try_from(val)?)), + ControlId::DebugMetadataEnable => { + Ok(Box::new(DebugMetadataEnable::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::AePrecaptureTrigger => { + Ok(Box::new(AePrecaptureTrigger::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::NoiseReductionMode => Ok(Box::new(NoiseReductionMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::ColorCorrectionAberrationMode => { + Ok(Box::new(ColorCorrectionAberrationMode::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::AeState => Ok(Box::new(AeState::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::AwbState => Ok(Box::new(AwbState::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::SensorRollingShutterSkew => { + Ok(Box::new(SensorRollingShutterSkew::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::LensShadingMapMode => Ok(Box::new(LensShadingMapMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::PipelineDepth => Ok(Box::new(PipelineDepth::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::MaxLatency => Ok(Box::new(MaxLatency::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::TestPatternMode => Ok(Box::new(TestPatternMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectMode => Ok(Box::new(FaceDetectMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceRectangles => { + Ok(Box::new(FaceDetectFaceRectangles::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceScores => { + Ok(Box::new(FaceDetectFaceScores::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceLandmarks => { + Ok(Box::new(FaceDetectFaceLandmarks::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceIds => 
Ok(Box::new(FaceDetectFaceIds::try_from(val)?)), + #[cfg(feature = "vendor_rpi")] + ControlId::StatsOutputEnable => Ok(Box::new(StatsOutputEnable::try_from(val)?)), + #[cfg(feature = "vendor_rpi")] + ControlId::Bcm2835StatsOutput => Ok(Box::new(Bcm2835StatsOutput::try_from(val)?)), + #[cfg(feature = "vendor_rpi")] + ControlId::ScalerCrops => Ok(Box::new(ScalerCrops::try_from(val)?)), + } +} diff --git a/libcamera/versioned_files/0.4.0/properties.rs b/libcamera/versioned_files/0.4.0/properties.rs new file mode 100644 index 0000000..7548b75 --- /dev/null +++ b/libcamera/versioned_files/0.4.0/properties.rs @@ -0,0 +1,1777 @@ +use std::{ffi::CStr, ops::{Deref, DerefMut}}; +use num_enum::{IntoPrimitive, TryFromPrimitive}; +#[allow(unused_imports)] +use crate::control::{Control, Property, ControlEntry, DynControlEntry}; +use crate::control_value::{ControlValue, ControlValueError}; +#[allow(unused_imports)] +use crate::geometry::{Rectangle, Size, Point}; +#[allow(unused_imports)] +use libcamera_sys::*; +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(u32)] +pub enum PropertyId { + /// Camera mounting location + Location = LOCATION, + /// The camera physical mounting rotation. It is expressed as the angular + /// difference in degrees between two reference systems, one relative to the + /// camera module, and one defined on the external world scene to be + /// captured when projected on the image sensor pixel array. + /// + /// A camera sensor has a 2-dimensional reference system 'Rc' defined by + /// its pixel array read-out order. The origin is set to the first pixel + /// being read out, the X-axis points along the column read-out direction + /// towards the last columns, and the Y-axis along the row read-out + /// direction towards the last row. 
+ /// + /// A typical example for a sensor with a 2592x1944 pixel array matrix + /// observed from the front is + /// + /// ```text + /// 2591 X-axis 0 + /// <------------------------+ 0 + /// .......... ... ..........! + /// .......... ... ..........! Y-axis + /// ... ! + /// .......... ... ..........! + /// .......... ... ..........! 1943 + /// V + /// ``` + /// + /// + /// The external world scene reference system 'Rs' is a 2-dimensional + /// reference system on the focal plane of the camera module. The origin is + /// placed on the top-left corner of the visible scene, the X-axis points + /// towards the right, and the Y-axis points towards the bottom of the + /// scene. The top, bottom, left and right directions are intentionally not + /// defined and depend on the environment in which the camera is used. + /// + /// A typical example of a (very common) picture of a shark swimming from + /// left to right, as seen from the camera, is + /// + /// ```text + /// 0 X-axis + /// 0 +-------------------------------------> + /// ! + /// ! + /// ! + /// ! |\____)\___ + /// ! ) _____ __`< + /// ! |/ )/ + /// ! + /// ! + /// ! + /// V + /// Y-axis + /// ``` + /// + /// With the reference system 'Rs' placed on the camera focal plane. + /// + /// ```text + /// ¸.·˙! + /// ¸.·˙ ! + /// _ ¸.·˙ ! + /// +-/ \-+¸.·˙ ! + /// | (o) | ! Camera focal plane + /// +-----+˙·.¸ ! + /// ˙·.¸ ! + /// ˙·.¸ ! + /// ˙·.¸! + /// ``` + /// + /// When projected on the sensor's pixel array, the image and the associated + /// reference system 'Rs' are typically (but not always) inverted, due to + /// the camera module's lens optical inversion effect. + /// + /// Assuming the above represented scene of the swimming shark, the lens + /// inversion projects the scene and its reference system onto the sensor + /// pixel array, seen from the front of the camera sensor, as follow + /// + /// ```text + /// Y-axis + /// ^ + /// ! + /// ! + /// ! + /// ! |\_____)\__ + /// ! ) ____ ___.< + /// ! 
|/ )/ + /// ! + /// ! + /// ! + /// 0 +-------------------------------------> + /// 0 X-axis + /// ``` + /// + /// Note the shark being upside-down. + /// + /// The resulting projected reference system is named 'Rp'. + /// + /// The camera rotation property is then defined as the angular difference + /// in the counter-clockwise direction between the camera reference system + /// 'Rc' and the projected scene reference system 'Rp'. It is expressed in + /// degrees as a number in the range [0, 360[. + /// + /// Examples + /// + /// 0 degrees camera rotation + /// + /// + /// ```text + /// Y-Rp + /// ^ + /// Y-Rc ! + /// ^ ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// 0 +-------------------------------------> + /// 0 X-Rc + /// ``` + /// + /// + /// ```text + /// X-Rc 0 + /// <------------------------------------+ 0 + /// X-Rp 0 ! + /// <------------------------------------+ 0 ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! V + /// ! Y-Rc + /// V + /// Y-Rp + /// ``` + /// + /// 90 degrees camera rotation + /// + /// ```text + /// 0 Y-Rc + /// 0 +--------------------> + /// ! Y-Rp + /// ! ^ + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// ! + /// ! + /// ! + /// ! + /// V + /// X-Rc + /// ``` + /// + /// 180 degrees camera rotation + /// + /// ```text + /// 0 + /// <------------------------------------+ 0 + /// X-Rc ! + /// Y-Rp ! + /// ^ ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! V + /// ! Y-Rc + /// 0 +-------------------------------------> + /// 0 X-Rp + /// ``` + /// + /// 270 degrees camera rotation + /// + /// ```text + /// 0 Y-Rc + /// 0 +--------------------> + /// ! 0 + /// ! <-----------------------------------+ 0 + /// ! X-Rp ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! 
! + /// ! ! + /// ! V + /// ! Y-Rp + /// ! + /// ! + /// ! + /// ! + /// V + /// X-Rc + /// ``` + /// + /// + /// Example one - Webcam + /// + /// A camera module installed on the user facing part of a laptop screen + /// casing used for video calls. The captured images are meant to be + /// displayed in landscape mode (width > height) on the laptop screen. + /// + /// The camera is typically mounted upside-down to compensate the lens + /// optical inversion effect. + /// + /// ```text + /// Y-Rp + /// Y-Rc ^ + /// ^ ! + /// ! ! + /// ! ! |\_____)\__ + /// ! ! ) ____ ___.< + /// ! ! |/ )/ + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// 0 +-------------------------------------> + /// 0 X-Rc + /// ``` + /// + /// The two reference systems are aligned, the resulting camera rotation is + /// 0 degrees, no rotation correction needs to be applied to the resulting + /// image once captured to memory buffers to correctly display it to users. + /// + /// ```text + /// +--------------------------------------+ + /// ! ! + /// ! ! + /// ! ! + /// ! |\____)\___ ! + /// ! ) _____ __`< ! + /// ! |/ )/ ! + /// ! ! + /// ! ! + /// ! ! + /// +--------------------------------------+ + /// ``` + /// + /// If the camera sensor is not mounted upside-down to compensate for the + /// lens optical inversion, the two reference systems will not be aligned, + /// with 'Rp' being rotated 180 degrees relatively to 'Rc'. + /// + /// + /// ```text + /// X-Rc 0 + /// <------------------------------------+ 0 + /// ! + /// Y-Rp ! + /// ^ ! + /// ! ! + /// ! |\_____)\__ ! + /// ! ) ____ ___.< ! + /// ! |/ )/ ! + /// ! ! + /// ! ! + /// ! V + /// ! Y-Rc + /// 0 +-------------------------------------> + /// 0 X-Rp + /// ``` + /// + /// The image once captured to memory will then be rotated by 180 degrees + /// + /// ```text + /// +--------------------------------------+ + /// ! ! + /// ! ! + /// ! ! + /// ! __/(_____/| ! + /// ! >.___ ____ ( ! 
+ /// ! \( \| ! + /// ! ! + /// ! ! + /// ! ! + /// +--------------------------------------+ + /// ``` + /// + /// A software rotation correction of 180 degrees should be applied to + /// correctly display the image. + /// + /// ```text + /// +--------------------------------------+ + /// ! ! + /// ! ! + /// ! ! + /// ! |\____)\___ ! + /// ! ) _____ __`< ! + /// ! |/ )/ ! + /// ! ! + /// ! ! + /// ! ! + /// +--------------------------------------+ + /// ``` + /// + /// Example two - Phone camera + /// + /// A camera installed on the back side of a mobile device facing away from + /// the user. The captured images are meant to be displayed in portrait mode + /// (height > width) to match the device screen orientation and the device + /// usage orientation used when taking the picture. + /// + /// The camera sensor is typically mounted with its pixel array longer side + /// aligned to the device longer side, upside-down mounted to compensate for + /// the lens optical inversion effect. + /// + /// ```text + /// 0 Y-Rc + /// 0 +--------------------> + /// ! Y-Rp + /// ! ^ + /// ! ! + /// ! ! + /// ! ! + /// ! ! |\_____)\__ + /// ! ! ) ____ ___.< + /// ! ! |/ )/ + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// ! + /// ! + /// ! + /// ! + /// V + /// X-Rc + /// ``` + /// + /// The two reference systems are not aligned and the 'Rp' reference + /// system is rotated by 90 degrees in the counter-clockwise direction + /// relatively to the 'Rc' reference system. + /// + /// The image once captured to memory will be rotated. + /// + /// ```text + /// +-------------------------------------+ + /// | _ _ | + /// | \ / | + /// | | | | + /// | | | | + /// | | > | + /// | < | | + /// | | | | + /// | . 
| + /// | V | + /// +-------------------------------------+ + /// ``` + /// + /// A correction of 90 degrees in counter-clockwise direction has to be + /// applied to correctly display the image in portrait mode on the device + /// screen. + /// + /// ```text + /// +--------------------+ + /// | | + /// | | + /// | | + /// | | + /// | | + /// | | + /// | |\____)\___ | + /// | ) _____ __`< | + /// | |/ )/ | + /// | | + /// | | + /// | | + /// | | + /// | | + /// +--------------------+ + Rotation = ROTATION, + /// The model name shall to the extent possible describe the sensor. For + /// most devices this is the model name of the sensor. While for some + /// devices the sensor model is unavailable as the sensor or the entire + /// camera is part of a larger unit and exposed as a black-box to the + /// system. In such cases the model name of the smallest device that + /// contains the camera sensor shall be used. + /// + /// The model name is not meant to be a camera name displayed to the + /// end-user, but may be combined with other camera information to create a + /// camera name. + /// + /// The model name is not guaranteed to be unique in the system nor is + /// it guaranteed to be stable or have any other properties required to make + /// it a good candidate to be used as a permanent identifier of a camera. + /// + /// The model name shall describe the camera in a human readable format and + /// shall be encoded in ASCII. + /// + /// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. + Model = MODEL, + /// The pixel unit cell physical size, in nanometers. + /// + /// The UnitCellSize properties defines the horizontal and vertical sizes of + /// a single pixel unit, including its active and non-active parts. In + /// other words, it expresses the horizontal and vertical distance between + /// the top-left corners of adjacent pixels. 
+ /// + /// The property can be used to calculate the physical size of the sensor's + /// pixel array area and for calibration purposes. + UnitCellSize = UNIT_CELL_SIZE, + /// The camera sensor pixel array readable area vertical and horizontal + /// sizes, in pixels. + /// + /// The PixelArraySize property defines the size in pixel units of the + /// readable part of full pixel array matrix, including optical black + /// pixels used for calibration, pixels which are not considered valid for + /// capture and active pixels containing valid image data. + /// + /// The property describes the maximum size of the raw data captured by the + /// camera, which might not correspond to the physical size of the sensor + /// pixel array matrix, as some portions of the physical pixel array matrix + /// are not accessible and cannot be transmitted out. + /// + /// For example, let's consider a pixel array matrix assembled as follows + /// + /// ```text + /// +--------------------------------------------------+ + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// ... ... ... ... ... + /// ``` + /// + /// ```text + /// ... ... ... ... ... 
+ /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// +--------------------------------------------------+ + /// ``` + /// + /// starting with two lines of non-readable pixels (x), followed by N lines + /// of readable data (D) surrounded by two columns of non-readable pixels on + /// each side, and ending with two more lines of non-readable pixels. Only + /// the readable portion is transmitted to the receiving side, defining the + /// sizes of the largest possible buffer of raw data that can be presented + /// to applications. + /// + /// ```text + /// PixelArraySize.width + /// /----------------------------------------------/ + /// +----------------------------------------------+ / + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height + /// ... ... ... ... ... + /// ... ... ... ... ... + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// +----------------------------------------------+ / + /// ``` + /// + /// This defines a rectangle whose top-left corner is placed in position (0, + /// 0) and whose vertical and horizontal sizes are defined by this property. + /// All other rectangles that describe portions of the pixel array, such as + /// the optical black pixels rectangles and active pixel areas, are defined + /// relatively to this rectangle. 
+ /// + /// All the coordinates are expressed relative to the default sensor readout + /// direction, without any transformation (such as horizontal and vertical + /// flipping) applied. When mapping them to the raw pixel buffer, + /// applications shall take any configured transformation into account. + /// + /// \todo Rename this property to Size once we will have property + /// ```text + /// categories (i.e. Properties::PixelArray::Size) + PixelArraySize = PIXEL_ARRAY_SIZE, + /// The pixel array region(s) which contain optical black pixels + /// considered valid for calibration purposes. + /// + /// This property describes the position and size of optical black pixel + /// regions in the raw data buffer as stored in memory, which might differ + /// from their actual physical location in the pixel array matrix. + /// + /// It is important to note, in fact, that camera sensors might + /// automatically reorder or skip portions of their pixels array matrix when + /// transmitting data to the receiver. For instance, a sensor may merge the + /// top and bottom optical black rectangles into a single rectangle, + /// transmitted at the beginning of the frame. + /// + /// The pixel array contains several areas with different purposes, + /// interleaved by lines and columns which are said not to be valid for + /// capturing purposes. Invalid lines and columns are defined as invalid as + /// they could be positioned too close to the chip margins or to the optical + /// black shielding placed on top of optical black pixels. 
+ /// + /// ```text + /// PixelArraySize.width + /// /----------------------------------------------/ + /// x1 x2 + /// +--o---------------------------------------o---+ / + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + /// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height + /// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + /// ... ... ... ... ... + /// ... ... ... ... ... + /// y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// +----------------------------------------------+ / + /// ``` + /// + /// The readable pixel array matrix is composed by + /// 2 invalid lines (I) + /// 4 lines of valid optical black pixels (O) + /// 2 invalid lines (I) + /// n lines of valid pixel data (P) + /// 2 invalid lines (I) + /// + /// And the position of the optical black pixel rectangles is defined by + /// + /// ```text + /// PixelArrayOpticalBlackRectangles = { + /// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, + /// { x1, y3, 2, y4 - y3 + 1 }, + /// { x2, y3, 2, y4 - y3 + 1 }, + /// }; + /// ``` + /// + /// If the camera, when capturing the full pixel array matrix, automatically + /// skips the invalid lines and columns, producing the following data + /// buffer, when captured to memory + /// + /// ```text + /// PixelArraySize.width + /// /----------------------------------------------/ + /// x1 + /// +--------------------------------------------o-+ / + /// 
|OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height + /// ... ... ... ... ... | + /// ... ... ... ... ... | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// +----------------------------------------------+ / + /// ``` + /// + /// then the invalid lines and columns should not be reported as part of the + /// PixelArraySize property in first place. + /// + /// In this case, the position of the black pixel rectangles will be + /// + /// ```text + /// PixelArrayOpticalBlackRectangles = { + /// { 0, 0, y1 + 1, PixelArraySize[0] }, + /// { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, + /// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, + /// }; + /// ``` + /// + /// \todo Rename this property to Size once we will have property + /// ```text + /// categories (i.e. Properties::PixelArray::OpticalBlackRectangles) + PixelArrayOpticalBlackRectangles = PIXEL_ARRAY_OPTICAL_BLACK_RECTANGLES, + /// The PixelArrayActiveAreas property defines the (possibly multiple and + /// overlapping) portions of the camera sensor readable pixel matrix + /// which are considered valid for image acquisition purposes. + /// + /// This property describes an arbitrary number of overlapping rectangles, + /// with each rectangle representing the maximum image size that the camera + /// sensor can produce for a particular aspect ratio. They are defined + /// relatively to the PixelArraySize rectangle. + /// + /// When multiple rectangles are reported, they shall be ordered from the + /// tallest to the shortest. 
+ /// + /// Example 1 + /// A camera sensor which only produces images in the 4:3 image resolution + /// will report a single PixelArrayActiveAreas rectangle, from which all + /// other image formats are obtained by either cropping the field-of-view + /// and/or applying pixel sub-sampling techniques such as pixel skipping or + /// binning. + /// + /// ```text + /// PixelArraySize.width + /// /----------------/ + /// x1 x2 + /// (0,0)-> +-o------------o-+ / + /// y1 o +------------+ | | + /// | |////////////| | | + /// | |////////////| | | PixelArraySize.height + /// | |////////////| | | + /// y2 o +------------+ | | + /// +----------------+ / + /// ``` + /// + /// The property reports a single rectangle + /// + /// ```text + /// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) + /// ``` + /// + /// Example 2 + /// A camera sensor which can produce images in different native + /// resolutions will report several overlapping rectangles, one for each + /// natively supported resolution. + /// + /// ```text + /// PixelArraySize.width + /// /------------------/ + /// x1 x2 x3 x4 + /// (0,0)-> +o---o------o---o+ / + /// y1 o +------+ | | + /// | |//////| | | + /// y2 o+---+------+---+| | + /// ||///|//////|///|| | PixelArraySize.height + /// y3 o+---+------+---+| | + /// | |//////| | | + /// y4 o +------+ | | + /// +----+------+----+ / + /// ``` + /// + /// The property reports two rectangles + /// + /// ```text + /// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), + /// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) + /// ``` + /// + /// The first rectangle describes the maximum field-of-view of all image + /// formats in the 4:3 resolutions, while the second one describes the + /// maximum field of view for all image formats in the 16:9 resolutions. + /// + /// Multiple rectangles shall only be reported when the sensor can't capture + /// the pixels in the corner regions. 
If all the pixels in the (x1,y1) - + /// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall + /// contains the single rectangle (x1,y1) - (x4,y4). + /// + /// \todo Rename this property to ActiveAreas once we will have property + /// ```text + /// categories (i.e. Properties::PixelArray::ActiveAreas) + PixelArrayActiveAreas = PIXEL_ARRAY_ACTIVE_AREAS, + /// The maximum valid rectangle for the controls::ScalerCrop control. This + /// reflects the minimum mandatory cropping applied in the camera sensor and + /// the rest of the pipeline. Just as the ScalerCrop control, it defines a + /// rectangle taken from the sensor's active pixel array. + /// + /// This property is valid only after the camera has been successfully + /// configured and its value may change whenever a new configuration is + /// applied. + /// + /// \todo Turn this property into a "maximum control value" for the + /// ScalerCrop control once "dynamic" controls have been implemented. + ScalerCropMaximum = SCALER_CROP_MAXIMUM, + /// The relative sensitivity of the chosen sensor mode. + /// + /// Some sensors have readout modes with different sensitivities. For example, + /// a binned camera mode might, with the same exposure and gains, produce + /// twice the signal level of the full resolution readout. This would be + /// signalled by the binned mode, when it is chosen, indicating a value here + /// that is twice that of the full resolution mode. This value will be valid + /// after the configure method has returned successfully. + SensorSensitivity = SENSOR_SENSITIVITY, + /// A list of integer values of type dev_t denoting the major and minor + /// device numbers of the underlying devices used in the operation of this + /// camera. + /// + /// Different cameras may report identical devices. + SystemDevices = SYSTEM_DEVICES, + /// The arrangement of color filters on sensor; represents the colors in the + /// top-left 2x2 section of the sensor, in reading order. 
Currently
+    /// identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT.
+    #[cfg(feature = "vendor_draft")]
+    ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT,
+}
+impl PropertyId {
+    fn id(&self) -> u32 {
+        *self as u32
+    }
+    pub fn name(&self) -> String {
+        unsafe {
+            let c_str = libcamera_property_name_by_id(self.id());
+            if c_str.is_null() {
+                return "".into();
+            }
+            CStr::from_ptr(c_str).to_str().unwrap().into()
+        }
+    }
+}
+/// Camera mounting location
+#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)]
+#[repr(i32)]
+pub enum Location {
+    /// The camera is mounted on the front side of the device, facing the
+    /// user
+    CameraFront = 0,
+    /// The camera is mounted on the back side of the device, facing away
+    /// from the user
+    CameraBack = 1,
+    /// The camera is attached to the device in a way that allows it to
+    /// be moved freely
+    CameraExternal = 2,
+}
+impl TryFrom<ControlValue> for Location {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Self::try_from(i32::try_from(value.clone())?)
+            .map_err(|_| ControlValueError::UnknownVariant(value))
+    }
+}
+impl From<Location> for ControlValue {
+    fn from(val: Location) -> Self {
+        ControlValue::from(<i32>::from(val))
+    }
+}
+impl ControlEntry for Location {
+    const ID: u32 = PropertyId::Location as _;
+}
+impl Property for Location {}
+/// The camera physical mounting rotation. It is expressed as the angular
+/// difference in degrees between two reference systems, one relative to the
+/// camera module, and one defined on the external world scene to be
+/// captured when projected on the image sensor pixel array.
+///
+/// A camera sensor has a 2-dimensional reference system 'Rc' defined by
+/// its pixel array read-out order. The origin is set to the first pixel
+/// being read out, the X-axis points along the column read-out direction
+/// towards the last columns, and the Y-axis along the row read-out
+/// direction towards the last row.
+/// +/// A typical example for a sensor with a 2592x1944 pixel array matrix +/// observed from the front is +/// +/// ```text +/// 2591 X-axis 0 +/// <------------------------+ 0 +/// .......... ... ..........! +/// .......... ... ..........! Y-axis +/// ... ! +/// .......... ... ..........! +/// .......... ... ..........! 1943 +/// V +/// ``` +/// +/// +/// The external world scene reference system 'Rs' is a 2-dimensional +/// reference system on the focal plane of the camera module. The origin is +/// placed on the top-left corner of the visible scene, the X-axis points +/// towards the right, and the Y-axis points towards the bottom of the +/// scene. The top, bottom, left and right directions are intentionally not +/// defined and depend on the environment in which the camera is used. +/// +/// A typical example of a (very common) picture of a shark swimming from +/// left to right, as seen from the camera, is +/// +/// ```text +/// 0 X-axis +/// 0 +-------------------------------------> +/// ! +/// ! +/// ! +/// ! |\____)\___ +/// ! ) _____ __`< +/// ! |/ )/ +/// ! +/// ! +/// ! +/// V +/// Y-axis +/// ``` +/// +/// With the reference system 'Rs' placed on the camera focal plane. +/// +/// ```text +/// ¸.·˙! +/// ¸.·˙ ! +/// _ ¸.·˙ ! +/// +-/ \-+¸.·˙ ! +/// | (o) | ! Camera focal plane +/// +-----+˙·.¸ ! +/// ˙·.¸ ! +/// ˙·.¸ ! +/// ˙·.¸! +/// ``` +/// +/// When projected on the sensor's pixel array, the image and the associated +/// reference system 'Rs' are typically (but not always) inverted, due to +/// the camera module's lens optical inversion effect. +/// +/// Assuming the above represented scene of the swimming shark, the lens +/// inversion projects the scene and its reference system onto the sensor +/// pixel array, seen from the front of the camera sensor, as follow +/// +/// ```text +/// Y-axis +/// ^ +/// ! +/// ! +/// ! +/// ! |\_____)\__ +/// ! ) ____ ___.< +/// ! |/ )/ +/// ! +/// ! +/// ! 
+/// 0 +-------------------------------------> +/// 0 X-axis +/// ``` +/// +/// Note the shark being upside-down. +/// +/// The resulting projected reference system is named 'Rp'. +/// +/// The camera rotation property is then defined as the angular difference +/// in the counter-clockwise direction between the camera reference system +/// 'Rc' and the projected scene reference system 'Rp'. It is expressed in +/// degrees as a number in the range [0, 360[. +/// +/// Examples +/// +/// 0 degrees camera rotation +/// +/// +/// ```text +/// Y-Rp +/// ^ +/// Y-Rc ! +/// ^ ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// 0 +-------------------------------------> +/// 0 X-Rc +/// ``` +/// +/// +/// ```text +/// X-Rc 0 +/// <------------------------------------+ 0 +/// X-Rp 0 ! +/// <------------------------------------+ 0 ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rc +/// V +/// Y-Rp +/// ``` +/// +/// 90 degrees camera rotation +/// +/// ```text +/// 0 Y-Rc +/// 0 +--------------------> +/// ! Y-Rp +/// ! ^ +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// ! +/// ! +/// ! +/// ! +/// V +/// X-Rc +/// ``` +/// +/// 180 degrees camera rotation +/// +/// ```text +/// 0 +/// <------------------------------------+ 0 +/// X-Rc ! +/// Y-Rp ! +/// ^ ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rc +/// 0 +-------------------------------------> +/// 0 X-Rp +/// ``` +/// +/// 270 degrees camera rotation +/// +/// ```text +/// 0 Y-Rc +/// 0 +--------------------> +/// ! 0 +/// ! <-----------------------------------+ 0 +/// ! X-Rp ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rp +/// ! +/// ! +/// ! +/// ! 
+/// V +/// X-Rc +/// ``` +/// +/// +/// Example one - Webcam +/// +/// A camera module installed on the user facing part of a laptop screen +/// casing used for video calls. The captured images are meant to be +/// displayed in landscape mode (width > height) on the laptop screen. +/// +/// The camera is typically mounted upside-down to compensate the lens +/// optical inversion effect. +/// +/// ```text +/// Y-Rp +/// Y-Rc ^ +/// ^ ! +/// ! ! +/// ! ! |\_____)\__ +/// ! ! ) ____ ___.< +/// ! ! |/ )/ +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// 0 +-------------------------------------> +/// 0 X-Rc +/// ``` +/// +/// The two reference systems are aligned, the resulting camera rotation is +/// 0 degrees, no rotation correction needs to be applied to the resulting +/// image once captured to memory buffers to correctly display it to users. +/// +/// ```text +/// +--------------------------------------+ +/// ! ! +/// ! ! +/// ! ! +/// ! |\____)\___ ! +/// ! ) _____ __`< ! +/// ! |/ )/ ! +/// ! ! +/// ! ! +/// ! ! +/// +--------------------------------------+ +/// ``` +/// +/// If the camera sensor is not mounted upside-down to compensate for the +/// lens optical inversion, the two reference systems will not be aligned, +/// with 'Rp' being rotated 180 degrees relatively to 'Rc'. +/// +/// +/// ```text +/// X-Rc 0 +/// <------------------------------------+ 0 +/// ! +/// Y-Rp ! +/// ^ ! +/// ! ! +/// ! |\_____)\__ ! +/// ! ) ____ ___.< ! +/// ! |/ )/ ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rc +/// 0 +-------------------------------------> +/// 0 X-Rp +/// ``` +/// +/// The image once captured to memory will then be rotated by 180 degrees +/// +/// ```text +/// +--------------------------------------+ +/// ! ! +/// ! ! +/// ! ! +/// ! __/(_____/| ! +/// ! >.___ ____ ( ! +/// ! \( \| ! +/// ! ! +/// ! ! +/// ! ! 
+/// +--------------------------------------+ +/// ``` +/// +/// A software rotation correction of 180 degrees should be applied to +/// correctly display the image. +/// +/// ```text +/// +--------------------------------------+ +/// ! ! +/// ! ! +/// ! ! +/// ! |\____)\___ ! +/// ! ) _____ __`< ! +/// ! |/ )/ ! +/// ! ! +/// ! ! +/// ! ! +/// +--------------------------------------+ +/// ``` +/// +/// Example two - Phone camera +/// +/// A camera installed on the back side of a mobile device facing away from +/// the user. The captured images are meant to be displayed in portrait mode +/// (height > width) to match the device screen orientation and the device +/// usage orientation used when taking the picture. +/// +/// The camera sensor is typically mounted with its pixel array longer side +/// aligned to the device longer side, upside-down mounted to compensate for +/// the lens optical inversion effect. +/// +/// ```text +/// 0 Y-Rc +/// 0 +--------------------> +/// ! Y-Rp +/// ! ^ +/// ! ! +/// ! ! +/// ! ! +/// ! ! |\_____)\__ +/// ! ! ) ____ ___.< +/// ! ! |/ )/ +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// ! +/// ! +/// ! +/// ! +/// V +/// X-Rc +/// ``` +/// +/// The two reference systems are not aligned and the 'Rp' reference +/// system is rotated by 90 degrees in the counter-clockwise direction +/// relatively to the 'Rc' reference system. +/// +/// The image once captured to memory will be rotated. +/// +/// ```text +/// +-------------------------------------+ +/// | _ _ | +/// | \ / | +/// | | | | +/// | | | | +/// | | > | +/// | < | | +/// | | | | +/// | . | +/// | V | +/// +-------------------------------------+ +/// ``` +/// +/// A correction of 90 degrees in counter-clockwise direction has to be +/// applied to correctly display the image in portrait mode on the device +/// screen. 
+///
+/// ```text
+/// +--------------------+
+/// |                    |
+/// |                    |
+/// |                    |
+/// |                    |
+/// |                    |
+/// |                    |
+/// |    |\____)\___     |
+/// |    ) _____  __`<   |
+/// |    |/     )/       |
+/// |                    |
+/// |                    |
+/// |                    |
+/// |                    |
+/// |                    |
+/// +--------------------+
+#[derive(Debug, Clone)]
+pub struct Rotation(pub i32);
+impl Deref for Rotation {
+    type Target = i32;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+impl DerefMut for Rotation {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+impl TryFrom<ControlValue> for Rotation {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Ok(Self(<i32>::try_from(value)?))
+    }
+}
+impl From<Rotation> for ControlValue {
+    fn from(val: Rotation) -> Self {
+        ControlValue::from(val.0)
+    }
+}
+impl ControlEntry for Rotation {
+    const ID: u32 = PropertyId::Rotation as _;
+}
+impl Property for Rotation {}
+/// The model name shall to the extent possible describe the sensor. For
+/// most devices this is the model name of the sensor. While for some
+/// devices the sensor model is unavailable as the sensor or the entire
+/// camera is part of a larger unit and exposed as a black-box to the
+/// system. In such cases the model name of the smallest device that
+/// contains the camera sensor shall be used.
+///
+/// The model name is not meant to be a camera name displayed to the
+/// end-user, but may be combined with other camera information to create a
+/// camera name.
+///
+/// The model name is not guaranteed to be unique in the system nor is
+/// it guaranteed to be stable or have any other properties required to make
+/// it a good candidate to be used as a permanent identifier of a camera.
+///
+/// The model name shall describe the camera in a human readable format and
+/// shall be encoded in ASCII.
+///
+/// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'.
+#[derive(Debug, Clone)]
+pub struct Model(pub String);
+impl Deref for Model {
+    type Target = String;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+impl DerefMut for Model {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+impl TryFrom<ControlValue> for Model {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Ok(Self(<String>::try_from(value)?))
+    }
+}
+impl From<Model> for ControlValue {
+    fn from(val: Model) -> Self {
+        ControlValue::from(val.0)
+    }
+}
+impl ControlEntry for Model {
+    const ID: u32 = PropertyId::Model as _;
+}
+impl Property for Model {}
+/// The pixel unit cell physical size, in nanometers.
+///
+/// The UnitCellSize properties defines the horizontal and vertical sizes of
+/// a single pixel unit, including its active and non-active parts. In
+/// other words, it expresses the horizontal and vertical distance between
+/// the top-left corners of adjacent pixels.
+///
+/// The property can be used to calculate the physical size of the sensor's
+/// pixel array area and for calibration purposes.
+#[derive(Debug, Clone)]
+pub struct UnitCellSize(pub Size);
+impl Deref for UnitCellSize {
+    type Target = Size;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+impl DerefMut for UnitCellSize {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+impl TryFrom<ControlValue> for UnitCellSize {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Ok(Self(<Size>::try_from(value)?))
+    }
+}
+impl From<UnitCellSize> for ControlValue {
+    fn from(val: UnitCellSize) -> Self {
+        ControlValue::from(val.0)
+    }
+}
+impl ControlEntry for UnitCellSize {
+    const ID: u32 = PropertyId::UnitCellSize as _;
+}
+impl Property for UnitCellSize {}
+/// The camera sensor pixel array readable area vertical and horizontal
+/// sizes, in pixels.
+/// +/// The PixelArraySize property defines the size in pixel units of the +/// readable part of full pixel array matrix, including optical black +/// pixels used for calibration, pixels which are not considered valid for +/// capture and active pixels containing valid image data. +/// +/// The property describes the maximum size of the raw data captured by the +/// camera, which might not correspond to the physical size of the sensor +/// pixel array matrix, as some portions of the physical pixel array matrix +/// are not accessible and cannot be transmitted out. +/// +/// For example, let's consider a pixel array matrix assembled as follows +/// +/// ```text +/// +--------------------------------------------------+ +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// ... ... ... ... ... +/// ``` +/// +/// ```text +/// ... ... ... ... ... +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// +--------------------------------------------------+ +/// ``` +/// +/// starting with two lines of non-readable pixels (x), followed by N lines +/// of readable data (D) surrounded by two columns of non-readable pixels on +/// each side, and ending with two more lines of non-readable pixels. Only +/// the readable portion is transmitted to the receiving side, defining the +/// sizes of the largest possible buffer of raw data that can be presented +/// to applications. 
+/// +/// ```text +/// PixelArraySize.width +/// /----------------------------------------------/ +/// +----------------------------------------------+ / +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height +/// ... ... ... ... ... +/// ... ... ... ... ... +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// +----------------------------------------------+ / +/// ``` +/// +/// This defines a rectangle whose top-left corner is placed in position (0, +/// 0) and whose vertical and horizontal sizes are defined by this property. +/// All other rectangles that describe portions of the pixel array, such as +/// the optical black pixels rectangles and active pixel areas, are defined +/// relatively to this rectangle. +/// +/// All the coordinates are expressed relative to the default sensor readout +/// direction, without any transformation (such as horizontal and vertical +/// flipping) applied. When mapping them to the raw pixel buffer, +/// applications shall take any configured transformation into account. +/// +/// \todo Rename this property to Size once we will have property +/// ```text +/// categories (i.e. 
Properties::PixelArray::Size)
+#[derive(Debug, Clone)]
+pub struct PixelArraySize(pub Size);
+impl Deref for PixelArraySize {
+    type Target = Size;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+impl DerefMut for PixelArraySize {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+impl TryFrom<ControlValue> for PixelArraySize {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Ok(Self(<Size>::try_from(value)?))
+    }
+}
+impl From<PixelArraySize> for ControlValue {
+    fn from(val: PixelArraySize) -> Self {
+        ControlValue::from(val.0)
+    }
+}
+impl ControlEntry for PixelArraySize {
+    const ID: u32 = PropertyId::PixelArraySize as _;
+}
+impl Property for PixelArraySize {}
+/// The pixel array region(s) which contain optical black pixels
+/// considered valid for calibration purposes.
+///
+/// This property describes the position and size of optical black pixel
+/// regions in the raw data buffer as stored in memory, which might differ
+/// from their actual physical location in the pixel array matrix.
+///
+/// It is important to note, in fact, that camera sensors might
+/// automatically reorder or skip portions of their pixels array matrix when
+/// transmitting data to the receiver. For instance, a sensor may merge the
+/// top and bottom optical black rectangles into a single rectangle,
+/// transmitted at the beginning of the frame.
+///
+/// The pixel array contains several areas with different purposes,
+/// interleaved by lines and columns which are said not to be valid for
+/// capturing purposes. Invalid lines and columns are defined as invalid as
+/// they could be positioned too close to the chip margins or to the optical
+/// black shielding placed on top of optical black pixels.
+/// +/// ```text +/// PixelArraySize.width +/// /----------------------------------------------/ +/// x1 x2 +/// +--o---------------------------------------o---+ / +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | +/// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height +/// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | +/// ... ... ... ... ... +/// ... ... ... ... ... +/// y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// +----------------------------------------------+ / +/// ``` +/// +/// The readable pixel array matrix is composed by +/// 2 invalid lines (I) +/// 4 lines of valid optical black pixels (O) +/// 2 invalid lines (I) +/// n lines of valid pixel data (P) +/// 2 invalid lines (I) +/// +/// And the position of the optical black pixel rectangles is defined by +/// +/// ```text +/// PixelArrayOpticalBlackRectangles = { +/// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, +/// { x1, y3, 2, y4 - y3 + 1 }, +/// { x2, y3, 2, y4 - y3 + 1 }, +/// }; +/// ``` +/// +/// If the camera, when capturing the full pixel array matrix, automatically +/// skips the invalid lines and columns, producing the following data +/// buffer, when captured to memory +/// +/// ```text +/// PixelArraySize.width +/// /----------------------------------------------/ +/// x1 +/// +--------------------------------------------o-+ / +/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// 
|OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height +/// ... ... ... ... ... | +/// ... ... ... ... ... | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// +----------------------------------------------+ / +/// ``` +/// +/// then the invalid lines and columns should not be reported as part of the +/// PixelArraySize property in first place. +/// +/// In this case, the position of the black pixel rectangles will be +/// +/// ```text +/// PixelArrayOpticalBlackRectangles = { +/// { 0, 0, y1 + 1, PixelArraySize[0] }, +/// { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, +/// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, +/// }; +/// ``` +/// +/// \todo Rename this property to Size once we will have property +/// ```text +/// categories (i.e. 
Properties::PixelArray::OpticalBlackRectangles)
+#[derive(Debug, Clone)]
+pub struct PixelArrayOpticalBlackRectangles(pub Vec<Rectangle>);
+impl Deref for PixelArrayOpticalBlackRectangles {
+    type Target = Vec<Rectangle>;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+impl DerefMut for PixelArrayOpticalBlackRectangles {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+impl TryFrom<ControlValue> for PixelArrayOpticalBlackRectangles {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Ok(Self(<Vec<Rectangle>>::try_from(value)?))
+    }
+}
+impl From<PixelArrayOpticalBlackRectangles> for ControlValue {
+    fn from(val: PixelArrayOpticalBlackRectangles) -> Self {
+        ControlValue::from(val.0)
+    }
+}
+impl ControlEntry for PixelArrayOpticalBlackRectangles {
+    const ID: u32 = PropertyId::PixelArrayOpticalBlackRectangles as _;
+}
+impl Property for PixelArrayOpticalBlackRectangles {}
+/// The PixelArrayActiveAreas property defines the (possibly multiple and
+/// overlapping) portions of the camera sensor readable pixel matrix
+/// which are considered valid for image acquisition purposes.
+///
+/// This property describes an arbitrary number of overlapping rectangles,
+/// with each rectangle representing the maximum image size that the camera
+/// sensor can produce for a particular aspect ratio. They are defined
+/// relatively to the PixelArraySize rectangle.
+///
+/// When multiple rectangles are reported, they shall be ordered from the
+/// tallest to the shortest.
+///
+/// Example 1
+/// A camera sensor which only produces images in the 4:3 image resolution
+/// will report a single PixelArrayActiveAreas rectangle, from which all
+/// other image formats are obtained by either cropping the field-of-view
+/// and/or applying pixel sub-sampling techniques such as pixel skipping or
+/// binning.
+/// +/// ```text +/// PixelArraySize.width +/// /----------------/ +/// x1 x2 +/// (0,0)-> +-o------------o-+ / +/// y1 o +------------+ | | +/// | |////////////| | | +/// | |////////////| | | PixelArraySize.height +/// | |////////////| | | +/// y2 o +------------+ | | +/// +----------------+ / +/// ``` +/// +/// The property reports a single rectangle +/// +/// ```text +/// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) +/// ``` +/// +/// Example 2 +/// A camera sensor which can produce images in different native +/// resolutions will report several overlapping rectangles, one for each +/// natively supported resolution. +/// +/// ```text +/// PixelArraySize.width +/// /------------------/ +/// x1 x2 x3 x4 +/// (0,0)-> +o---o------o---o+ / +/// y1 o +------+ | | +/// | |//////| | | +/// y2 o+---+------+---+| | +/// ||///|//////|///|| | PixelArraySize.height +/// y3 o+---+------+---+| | +/// | |//////| | | +/// y4 o +------+ | | +/// +----+------+----+ / +/// ``` +/// +/// The property reports two rectangles +/// +/// ```text +/// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), +/// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) +/// ``` +/// +/// The first rectangle describes the maximum field-of-view of all image +/// formats in the 4:3 resolutions, while the second one describes the +/// maximum field of view for all image formats in the 16:9 resolutions. +/// +/// Multiple rectangles shall only be reported when the sensor can't capture +/// the pixels in the corner regions. If all the pixels in the (x1,y1) - +/// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall +/// contains the single rectangle (x1,y1) - (x4,y4). +/// +/// \todo Rename this property to ActiveAreas once we will have property +/// ```text +/// categories (i.e. 
Properties::PixelArray::ActiveAreas) +#[derive(Debug, Clone)] +pub struct PixelArrayActiveAreas(pub Vec); +impl Deref for PixelArrayActiveAreas { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for PixelArrayActiveAreas { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for PixelArrayActiveAreas { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: PixelArrayActiveAreas) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for PixelArrayActiveAreas { + const ID: u32 = PropertyId::PixelArrayActiveAreas as _; +} +impl Property for PixelArrayActiveAreas {} +/// The maximum valid rectangle for the controls::ScalerCrop control. This +/// reflects the minimum mandatory cropping applied in the camera sensor and +/// the rest of the pipeline. Just as the ScalerCrop control, it defines a +/// rectangle taken from the sensor's active pixel array. +/// +/// This property is valid only after the camera has been successfully +/// configured and its value may change whenever a new configuration is +/// applied. +/// +/// \todo Turn this property into a "maximum control value" for the +/// ScalerCrop control once "dynamic" controls have been implemented. 
+#[derive(Debug, Clone)] +pub struct ScalerCropMaximum(pub Rectangle); +impl Deref for ScalerCropMaximum { + type Target = Rectangle; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ScalerCropMaximum { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ScalerCropMaximum { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ScalerCropMaximum) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ScalerCropMaximum { + const ID: u32 = PropertyId::ScalerCropMaximum as _; +} +impl Property for ScalerCropMaximum {} +/// The relative sensitivity of the chosen sensor mode. +/// +/// Some sensors have readout modes with different sensitivities. For example, +/// a binned camera mode might, with the same exposure and gains, produce +/// twice the signal level of the full resolution readout. This would be +/// signalled by the binned mode, when it is chosen, indicating a value here +/// that is twice that of the full resolution mode. This value will be valid +/// after the configure method has returned successfully. 
+#[derive(Debug, Clone)] +pub struct SensorSensitivity(pub f32); +impl Deref for SensorSensitivity { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorSensitivity { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorSensitivity { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorSensitivity) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorSensitivity { + const ID: u32 = PropertyId::SensorSensitivity as _; +} +impl Property for SensorSensitivity {} +/// A list of integer values of type dev_t denoting the major and minor +/// device numbers of the underlying devices used in the operation of this +/// camera. +/// +/// Different cameras may report identical devices. +#[derive(Debug, Clone)] +pub struct SystemDevices(pub Vec); +impl Deref for SystemDevices { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SystemDevices { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SystemDevices { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SystemDevices) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SystemDevices { + const ID: u32 = PropertyId::SystemDevices as _; +} +impl Property for SystemDevices {} +/// The arrangement of color filters on sensor; represents the colors in the +/// top-left 2x2 section of the sensor, in reading order. Currently +/// identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum ColorFilterArrangement { + /// RGGB Bayer pattern + RGGB = 0, + /// GRBG Bayer pattern + GRBG = 1, + /// GBRG Bayer pattern + GBRG = 2, + /// BGGR Bayer pattern + BGGR = 3, + /// Sensor is not Bayer; output has 3 16-bit values for each pixel, + /// instead of just 1 16-bit value per pixel. + RGB = 4, + /// Sensor is not Bayer; output consists of a single colour channel. + MONO = 5, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for ColorFilterArrangement { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: ColorFilterArrangement) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for ColorFilterArrangement { + const ID: u32 = PropertyId::ColorFilterArrangement as _; +} +#[cfg(feature = "vendor_draft")] +impl Property for ColorFilterArrangement {} +pub fn make_dyn( + id: PropertyId, + val: ControlValue, +) -> Result, ControlValueError> { + match id { + PropertyId::Location => Ok(Box::new(Location::try_from(val)?)), + PropertyId::Rotation => Ok(Box::new(Rotation::try_from(val)?)), + PropertyId::Model => Ok(Box::new(Model::try_from(val)?)), + PropertyId::UnitCellSize => Ok(Box::new(UnitCellSize::try_from(val)?)), + PropertyId::PixelArraySize => Ok(Box::new(PixelArraySize::try_from(val)?)), + PropertyId::PixelArrayOpticalBlackRectangles => { + Ok(Box::new(PixelArrayOpticalBlackRectangles::try_from(val)?)) + } + PropertyId::PixelArrayActiveAreas => { + Ok(Box::new(PixelArrayActiveAreas::try_from(val)?)) + } + PropertyId::ScalerCropMaximum => Ok(Box::new(ScalerCropMaximum::try_from(val)?)), + PropertyId::SensorSensitivity => 
Ok(Box::new(SensorSensitivity::try_from(val)?)), + PropertyId::SystemDevices => Ok(Box::new(SystemDevices::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + PropertyId::ColorFilterArrangement => { + Ok(Box::new(ColorFilterArrangement::try_from(val)?)) + } + } +} diff --git a/libcamera/versioned_files/0.4.0/property_ids_core.yaml b/libcamera/versioned_files/0.4.0/property_ids_core.yaml new file mode 100644 index 0000000..834454a --- /dev/null +++ b/libcamera/versioned_files/0.4.0/property_ids_core.yaml @@ -0,0 +1,704 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +vendor: libcamera +controls: + - Location: + type: int32_t + description: | + Camera mounting location + enum: + - name: CameraLocationFront + value: 0 + description: | + The camera is mounted on the front side of the device, facing the + user + - name: CameraLocationBack + value: 1 + description: | + The camera is mounted on the back side of the device, facing away + from the user + - name: CameraLocationExternal + value: 2 + description: | + The camera is attached to the device in a way that allows it to + be moved freely + + - Rotation: + type: int32_t + description: | + The camera physical mounting rotation. It is expressed as the angular + difference in degrees between two reference systems, one relative to the + camera module, and one defined on the external world scene to be + captured when projected on the image sensor pixel array. + + A camera sensor has a 2-dimensional reference system 'Rc' defined by + its pixel array read-out order. The origin is set to the first pixel + being read out, the X-axis points along the column read-out direction + towards the last columns, and the Y-axis along the row read-out + direction towards the last row. + + A typical example for a sensor with a 2592x1944 pixel array matrix + observed from the front is + + 2591 X-axis 0 + <------------------------+ 0 + .......... ... ..........! + .......... ... 
..........! Y-axis + ... ! + .......... ... ..........! + .......... ... ..........! 1943 + V + + + The external world scene reference system 'Rs' is a 2-dimensional + reference system on the focal plane of the camera module. The origin is + placed on the top-left corner of the visible scene, the X-axis points + towards the right, and the Y-axis points towards the bottom of the + scene. The top, bottom, left and right directions are intentionally not + defined and depend on the environment in which the camera is used. + + A typical example of a (very common) picture of a shark swimming from + left to right, as seen from the camera, is + + 0 X-axis + 0 +-------------------------------------> + ! + ! + ! + ! |\____)\___ + ! ) _____ __`< + ! |/ )/ + ! + ! + ! + V + Y-axis + + With the reference system 'Rs' placed on the camera focal plane. + + ¸.·˙! + ¸.·˙ ! + _ ¸.·˙ ! + +-/ \-+¸.·˙ ! + | (o) | ! Camera focal plane + +-----+˙·.¸ ! + ˙·.¸ ! + ˙·.¸ ! + ˙·.¸! + + When projected on the sensor's pixel array, the image and the associated + reference system 'Rs' are typically (but not always) inverted, due to + the camera module's lens optical inversion effect. + + Assuming the above represented scene of the swimming shark, the lens + inversion projects the scene and its reference system onto the sensor + pixel array, seen from the front of the camera sensor, as follow + + Y-axis + ^ + ! + ! + ! + ! |\_____)\__ + ! ) ____ ___.< + ! |/ )/ + ! + ! + ! + 0 +-------------------------------------> + 0 X-axis + + Note the shark being upside-down. + + The resulting projected reference system is named 'Rp'. + + The camera rotation property is then defined as the angular difference + in the counter-clockwise direction between the camera reference system + 'Rc' and the projected scene reference system 'Rp'. It is expressed in + degrees as a number in the range [0, 360[. + + Examples + + 0 degrees camera rotation + + + Y-Rp + ^ + Y-Rc ! + ^ ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! 
+ ! 0 +-------------------------------------> + ! 0 X-Rp + 0 +-------------------------------------> + 0 X-Rc + + + X-Rc 0 + <------------------------------------+ 0 + X-Rp 0 ! + <------------------------------------+ 0 ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! V + ! Y-Rc + V + Y-Rp + + 90 degrees camera rotation + + 0 Y-Rc + 0 +--------------------> + ! Y-Rp + ! ^ + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! 0 +-------------------------------------> + ! 0 X-Rp + ! + ! + ! + ! + V + X-Rc + + 180 degrees camera rotation + + 0 + <------------------------------------+ 0 + X-Rc ! + Y-Rp ! + ^ ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! V + ! Y-Rc + 0 +-------------------------------------> + 0 X-Rp + + 270 degrees camera rotation + + 0 Y-Rc + 0 +--------------------> + ! 0 + ! <-----------------------------------+ 0 + ! X-Rp ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! V + ! Y-Rp + ! + ! + ! + ! + V + X-Rc + + + Example one - Webcam + + A camera module installed on the user facing part of a laptop screen + casing used for video calls. The captured images are meant to be + displayed in landscape mode (width > height) on the laptop screen. + + The camera is typically mounted upside-down to compensate the lens + optical inversion effect. + + Y-Rp + Y-Rc ^ + ^ ! + ! ! + ! ! |\_____)\__ + ! ! ) ____ ___.< + ! ! |/ )/ + ! ! + ! ! + ! ! + ! 0 +-------------------------------------> + ! 0 X-Rp + 0 +-------------------------------------> + 0 X-Rc + + The two reference systems are aligned, the resulting camera rotation is + 0 degrees, no rotation correction needs to be applied to the resulting + image once captured to memory buffers to correctly display it to users. + + +--------------------------------------+ + ! ! + ! ! + ! ! + ! |\____)\___ ! + ! ) _____ __`< ! + ! |/ )/ ! + ! ! + ! ! + ! ! 
+ +--------------------------------------+ + + If the camera sensor is not mounted upside-down to compensate for the + lens optical inversion, the two reference systems will not be aligned, + with 'Rp' being rotated 180 degrees relatively to 'Rc'. + + + X-Rc 0 + <------------------------------------+ 0 + ! + Y-Rp ! + ^ ! + ! ! + ! |\_____)\__ ! + ! ) ____ ___.< ! + ! |/ )/ ! + ! ! + ! ! + ! V + ! Y-Rc + 0 +-------------------------------------> + 0 X-Rp + + The image once captured to memory will then be rotated by 180 degrees + + +--------------------------------------+ + ! ! + ! ! + ! ! + ! __/(_____/| ! + ! >.___ ____ ( ! + ! \( \| ! + ! ! + ! ! + ! ! + +--------------------------------------+ + + A software rotation correction of 180 degrees should be applied to + correctly display the image. + + +--------------------------------------+ + ! ! + ! ! + ! ! + ! |\____)\___ ! + ! ) _____ __`< ! + ! |/ )/ ! + ! ! + ! ! + ! ! + +--------------------------------------+ + + Example two - Phone camera + + A camera installed on the back side of a mobile device facing away from + the user. The captured images are meant to be displayed in portrait mode + (height > width) to match the device screen orientation and the device + usage orientation used when taking the picture. + + The camera sensor is typically mounted with its pixel array longer side + aligned to the device longer side, upside-down mounted to compensate for + the lens optical inversion effect. + + 0 Y-Rc + 0 +--------------------> + ! Y-Rp + ! ^ + ! ! + ! ! + ! ! + ! ! |\_____)\__ + ! ! ) ____ ___.< + ! ! |/ )/ + ! ! + ! ! + ! ! + ! 0 +-------------------------------------> + ! 0 X-Rp + ! + ! + ! + ! + V + X-Rc + + The two reference systems are not aligned and the 'Rp' reference + system is rotated by 90 degrees in the counter-clockwise direction + relatively to the 'Rc' reference system. + + The image once captured to memory will be rotated. 
+ + +-------------------------------------+ + | _ _ | + | \ / | + | | | | + | | | | + | | > | + | < | | + | | | | + | . | + | V | + +-------------------------------------+ + + A correction of 90 degrees in counter-clockwise direction has to be + applied to correctly display the image in portrait mode on the device + screen. + + +--------------------+ + | | + | | + | | + | | + | | + | | + | |\____)\___ | + | ) _____ __`< | + | |/ )/ | + | | + | | + | | + | | + | | + +--------------------+ + + - Model: + type: string + description: | + The model name shall to the extent possible describe the sensor. For + most devices this is the model name of the sensor. While for some + devices the sensor model is unavailable as the sensor or the entire + camera is part of a larger unit and exposed as a black-box to the + system. In such cases the model name of the smallest device that + contains the camera sensor shall be used. + + The model name is not meant to be a camera name displayed to the + end-user, but may be combined with other camera information to create a + camera name. + + The model name is not guaranteed to be unique in the system nor is + it guaranteed to be stable or have any other properties required to make + it a good candidate to be used as a permanent identifier of a camera. + + The model name shall describe the camera in a human readable format and + shall be encoded in ASCII. + + Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. + + - UnitCellSize: + type: Size + description: | + The pixel unit cell physical size, in nanometers. + + The UnitCellSize properties defines the horizontal and vertical sizes of + a single pixel unit, including its active and non-active parts. In + other words, it expresses the horizontal and vertical distance between + the top-left corners of adjacent pixels. + + The property can be used to calculate the physical size of the sensor's + pixel array area and for calibration purposes. 
+ + - PixelArraySize: + type: Size + description: | + The camera sensor pixel array readable area vertical and horizontal + sizes, in pixels. + + The PixelArraySize property defines the size in pixel units of the + readable part of full pixel array matrix, including optical black + pixels used for calibration, pixels which are not considered valid for + capture and active pixels containing valid image data. + + The property describes the maximum size of the raw data captured by the + camera, which might not correspond to the physical size of the sensor + pixel array matrix, as some portions of the physical pixel array matrix + are not accessible and cannot be transmitted out. + + For example, let's consider a pixel array matrix assembled as follows + + +--------------------------------------------------+ + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + ... ... ... ... ... + + ... ... ... ... ... + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + +--------------------------------------------------+ + + starting with two lines of non-readable pixels (x), followed by N lines + of readable data (D) surrounded by two columns of non-readable pixels on + each side, and ending with two more lines of non-readable pixels. Only + the readable portion is transmitted to the receiving side, defining the + sizes of the largest possible buffer of raw data that can be presented + to applications. 
+ + PixelArraySize.width + /----------------------------------------------/ + +----------------------------------------------+ / + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height + ... ... ... ... ... + ... ... ... ... ... + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + +----------------------------------------------+ / + + This defines a rectangle whose top-left corner is placed in position (0, + 0) and whose vertical and horizontal sizes are defined by this property. + All other rectangles that describe portions of the pixel array, such as + the optical black pixels rectangles and active pixel areas, are defined + relatively to this rectangle. + + All the coordinates are expressed relative to the default sensor readout + direction, without any transformation (such as horizontal and vertical + flipping) applied. When mapping them to the raw pixel buffer, + applications shall take any configured transformation into account. + + \todo Rename this property to Size once we will have property + categories (i.e. Properties::PixelArray::Size) + + - PixelArrayOpticalBlackRectangles: + type: Rectangle + size: [n] + description: | + The pixel array region(s) which contain optical black pixels + considered valid for calibration purposes. + + This property describes the position and size of optical black pixel + regions in the raw data buffer as stored in memory, which might differ + from their actual physical location in the pixel array matrix. + + It is important to note, in fact, that camera sensors might + automatically reorder or skip portions of their pixels array matrix when + transmitting data to the receiver. 
For instance, a sensor may merge the + top and bottom optical black rectangles into a single rectangle, + transmitted at the beginning of the frame. + + The pixel array contains several areas with different purposes, + interleaved by lines and columns which are said not to be valid for + capturing purposes. Invalid lines and columns are defined as invalid as + they could be positioned too close to the chip margins or to the optical + black shielding placed on top of optical black pixels. + + PixelArraySize.width + /----------------------------------------------/ + x1 x2 + +--o---------------------------------------o---+ / + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height + |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + ... ... ... ... ... + ... ... ... ... ... 
+ y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + +----------------------------------------------+ / + + The readable pixel array matrix is composed by + 2 invalid lines (I) + 4 lines of valid optical black pixels (O) + 2 invalid lines (I) + n lines of valid pixel data (P) + 2 invalid lines (I) + + And the position of the optical black pixel rectangles is defined by + + PixelArrayOpticalBlackRectangles = { + { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, + { x1, y3, 2, y4 - y3 + 1 }, + { x2, y3, 2, y4 - y3 + 1 }, + }; + + If the camera, when capturing the full pixel array matrix, automatically + skips the invalid lines and columns, producing the following data + buffer, when captured to memory + + PixelArraySize.width + /----------------------------------------------/ + x1 + +--------------------------------------------o-+ / + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height + ... ... ... ... ... | + ... ... ... ... ... | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + +----------------------------------------------+ / + + then the invalid lines and columns should not be reported as part of the + PixelArraySize property in first place. + + In this case, the position of the black pixel rectangles will be + + PixelArrayOpticalBlackRectangles = { + { 0, 0, y1 + 1, PixelArraySize[0] }, + { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, + { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, + }; + + \todo Rename this property to Size once we will have property + categories (i.e. 
Properties::PixelArray::OpticalBlackRectangles) + + - PixelArrayActiveAreas: + type: Rectangle + size: [n] + description: | + The PixelArrayActiveAreas property defines the (possibly multiple and + overlapping) portions of the camera sensor readable pixel matrix + which are considered valid for image acquisition purposes. + + This property describes an arbitrary number of overlapping rectangles, + with each rectangle representing the maximum image size that the camera + sensor can produce for a particular aspect ratio. They are defined + relatively to the PixelArraySize rectangle. + + When multiple rectangles are reported, they shall be ordered from the + tallest to the shortest. + + Example 1 + A camera sensor which only produces images in the 4:3 image resolution + will report a single PixelArrayActiveAreas rectangle, from which all + other image formats are obtained by either cropping the field-of-view + and/or applying pixel sub-sampling techniques such as pixel skipping or + binning. + + PixelArraySize.width + /----------------/ + x1 x2 + (0,0)-> +-o------------o-+ / + y1 o +------------+ | | + | |////////////| | | + | |////////////| | | PixelArraySize.height + | |////////////| | | + y2 o +------------+ | | + +----------------+ / + + The property reports a single rectangle + + PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) + + Example 2 + A camera sensor which can produce images in different native + resolutions will report several overlapping rectangles, one for each + natively supported resolution. 
+ + PixelArraySize.width + /------------------/ + x1 x2 x3 x4 + (0,0)-> +o---o------o---o+ / + y1 o +------+ | | + | |//////| | | + y2 o+---+------+---+| | + ||///|//////|///|| | PixelArraySize.height + y3 o+---+------+---+| | + | |//////| | | + y4 o +------+ | | + +----+------+----+ / + + The property reports two rectangles + + PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), + (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) + + The first rectangle describes the maximum field-of-view of all image + formats in the 4:3 resolutions, while the second one describes the + maximum field of view for all image formats in the 16:9 resolutions. + + Multiple rectangles shall only be reported when the sensor can't capture + the pixels in the corner regions. If all the pixels in the (x1,y1) - + (x4,y4) area can be captured, the PixelArrayActiveAreas property shall + contains the single rectangle (x1,y1) - (x4,y4). + + \todo Rename this property to ActiveAreas once we will have property + categories (i.e. Properties::PixelArray::ActiveAreas) + + - ScalerCropMaximum: + type: Rectangle + description: | + The maximum valid rectangle for the controls::ScalerCrop control. This + reflects the minimum mandatory cropping applied in the camera sensor and + the rest of the pipeline. Just as the ScalerCrop control, it defines a + rectangle taken from the sensor's active pixel array. + + This property is valid only after the camera has been successfully + configured and its value may change whenever a new configuration is + applied. + + \todo Turn this property into a "maximum control value" for the + ScalerCrop control once "dynamic" controls have been implemented. + + - SensorSensitivity: + type: float + description: | + The relative sensitivity of the chosen sensor mode. + + Some sensors have readout modes with different sensitivities. For example, + a binned camera mode might, with the same exposure and gains, produce + twice the signal level of the full resolution readout. 
This would be + signalled by the binned mode, when it is chosen, indicating a value here + that is twice that of the full resolution mode. This value will be valid + after the configure method has returned successfully. + + - SystemDevices: + type: int64_t + size: [n] + description: | + A list of integer values of type dev_t denoting the major and minor + device numbers of the underlying devices used in the operation of this + camera. + + Different cameras may report identical devices. + +... diff --git a/libcamera/versioned_files/0.4.0/property_ids_draft.yaml b/libcamera/versioned_files/0.4.0/property_ids_draft.yaml new file mode 100644 index 0000000..62f0e24 --- /dev/null +++ b/libcamera/versioned_files/0.4.0/property_ids_draft.yaml @@ -0,0 +1,39 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +vendor: draft +controls: + - ColorFilterArrangement: + type: int32_t + vendor: draft + description: | + The arrangement of color filters on sensor; represents the colors in the + top-left 2x2 section of the sensor, in reading order. Currently + identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. + enum: + - name: RGGB + value: 0 + description: RGGB Bayer pattern + - name: GRBG + value: 1 + description: GRBG Bayer pattern + - name: GBRG + value: 2 + description: GBRG Bayer pattern + - name: BGGR + value: 3 + description: BGGR Bayer pattern + - name: RGB + value: 4 + description: | + Sensor is not Bayer; output has 3 16-bit values for each pixel, + instead of just 1 16-bit value per pixel. + - name: MONO + value: 5 + description: | + Sensor is not Bayer; output consists of a single colour channel. + +... 
From 46188bbd5100af020aa514202389a7a05b8d07db Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Fri, 30 May 2025 20:29:23 -0700 Subject: [PATCH 09/14] fixes for merging into main --- libcamera-meta/src/bin/generate_from_git.rs | 3 +- libcamera-meta/src/lib.rs | 2 - libcamera-sys/Cargo.toml | 2 +- libcamera-sys/c_api/controls.h | 1 - libcamera/Cargo.toml | 6 +- libcamera/build.rs | 24 +- libcamera/src/control_value.rs | 12 +- libcamera/src/geometry.rs | 10 - libcamera/versioned_files/0.0.1/controls.rs | 2 +- libcamera/versioned_files/0.0.1/properties.rs | 2 +- libcamera/versioned_files/0.0.2/controls.rs | 2 +- libcamera/versioned_files/0.0.2/properties.rs | 2 +- libcamera/versioned_files/0.0.3/controls.rs | 2 +- libcamera/versioned_files/0.0.3/properties.rs | 2 +- libcamera/versioned_files/0.0.4/controls.rs | 2 +- libcamera/versioned_files/0.0.4/properties.rs | 2 +- libcamera/versioned_files/0.0.5/controls.rs | 2 +- libcamera/versioned_files/0.0.5/properties.rs | 2 +- libcamera/versioned_files/0.1.0/controls.rs | 2 +- libcamera/versioned_files/0.1.0/properties.rs | 2 +- libcamera/versioned_files/0.2.0/controls.rs | 2 +- libcamera/versioned_files/0.2.0/properties.rs | 2 +- libcamera/versioned_files/0.3.0/controls.rs | 2 +- libcamera/versioned_files/0.3.0/properties.rs | 2 +- libcamera/versioned_files/0.3.1/controls.rs | 2 +- libcamera/versioned_files/0.3.1/properties.rs | 2 +- libcamera/versioned_files/0.3.2/controls.rs | 2 +- libcamera/versioned_files/0.3.2/properties.rs | 2 +- .../0.4.0/control_ids_core.yaml | 1052 ------ .../0.4.0/control_ids_debug.yaml | 6 - .../0.4.0/control_ids_draft.yaml | 327 -- .../0.4.0/control_ids_rpi.yaml | 61 - libcamera/versioned_files/0.4.0/controls.rs | 3319 ----------------- libcamera/versioned_files/0.4.0/properties.rs | 1777 --------- .../0.4.0/property_ids_core.yaml | 704 ---- .../0.4.0/property_ids_draft.yaml | 39 - 36 files changed, 27 insertions(+), 7358 deletions(-) delete mode 100644 
libcamera/versioned_files/0.4.0/control_ids_core.yaml delete mode 100644 libcamera/versioned_files/0.4.0/control_ids_debug.yaml delete mode 100644 libcamera/versioned_files/0.4.0/control_ids_draft.yaml delete mode 100644 libcamera/versioned_files/0.4.0/control_ids_rpi.yaml delete mode 100644 libcamera/versioned_files/0.4.0/controls.rs delete mode 100644 libcamera/versioned_files/0.4.0/properties.rs delete mode 100644 libcamera/versioned_files/0.4.0/property_ids_core.yaml delete mode 100644 libcamera/versioned_files/0.4.0/property_ids_draft.yaml diff --git a/libcamera-meta/src/bin/generate_from_git.rs b/libcamera-meta/src/bin/generate_from_git.rs index 3f0cea7..5b013c7 100644 --- a/libcamera-meta/src/bin/generate_from_git.rs +++ b/libcamera-meta/src/bin/generate_from_git.rs @@ -243,7 +243,6 @@ mod generate_rust { ControlType::String => "String", ControlType::Rectangle => "Rectangle", ControlType::Size => "Size", - ControlType::Point => "Point", }; match size { @@ -458,7 +457,7 @@ mod generate_rust { use crate::control::{{Control, Property, ControlEntry, DynControlEntry}}; use crate::control_value::{{ControlValue, ControlValueError}}; #[allow(unused_imports)] - use crate::geometry::{{Rectangle, Size, Point}}; + use crate::geometry::{{Rectangle, Size}}; #[allow(unused_imports)] use libcamera_sys::*; "#; diff --git a/libcamera-meta/src/lib.rs b/libcamera-meta/src/lib.rs index b746575..0565813 100644 --- a/libcamera-meta/src/lib.rs +++ b/libcamera-meta/src/lib.rs @@ -10,7 +10,6 @@ pub enum ControlType { String, Rectangle, Size, - Point, } impl TryFrom<&str> for ControlType { @@ -26,7 +25,6 @@ impl TryFrom<&str> for ControlType { "string" => Ok(ControlType::String), "Rectangle" => Ok(ControlType::Rectangle), "Size" => Ok(ControlType::Size), - "Point" => Ok(ControlType::Point), _ => Err(format!("Unknown control type {}", value)), } } diff --git a/libcamera-sys/Cargo.toml b/libcamera-sys/Cargo.toml index efef19c..883da76 100644 --- a/libcamera-sys/Cargo.toml +++ 
b/libcamera-sys/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcamera-sys" -version = "0.4.0" +version = "0.3.0" edition = "2021" description = "Low-level unsafe bindings to libcamera" documentation = "https://docs.rs/libcamera-sys" diff --git a/libcamera-sys/c_api/controls.h b/libcamera-sys/c_api/controls.h index 53b0746..a5390f9 100644 --- a/libcamera-sys/c_api/controls.h +++ b/libcamera-sys/c_api/controls.h @@ -54,7 +54,6 @@ enum libcamera_control_type { LIBCAMERA_CONTROL_TYPE_STRING, LIBCAMERA_CONTROL_TYPE_RECTANGLE, LIBCAMERA_CONTROL_TYPE_SIZE, - LIBCAMERA_CONTROL_TYPE_POINT, }; // --- libcamera_control_id --- diff --git a/libcamera/Cargo.toml b/libcamera/Cargo.toml index c9a31f0..de1f407 100644 --- a/libcamera/Cargo.toml +++ b/libcamera/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "libcamera" -version = "0.4.0" +version = "0.3.0" edition = "2021" description = "Safe Rust bindings for libcamera" documentation = "https://docs.rs/libcamera" @@ -25,11 +25,11 @@ vendor_rpi = [] bitflags = "2.0.0-rc.2" drm-fourcc = "2.2" libc = "0.2" -libcamera-sys = { path = "../libcamera-sys", version = "0.4.0", default-features = false } +libcamera-sys = { path = "../libcamera-sys", version = "0.3.0", default-features = false } num_enum = "0.6.1" smallvec = "1.10" thiserror = "1.0" [build-dependencies] -pkg-config = "0.3" +# libcamera-sys = { path = "../libcamera-sys", version = "0.3.0" } semver = "1.0.22" diff --git a/libcamera/build.rs b/libcamera/build.rs index 273bfad..77d623a 100644 --- a/libcamera/build.rs +++ b/libcamera/build.rs @@ -3,32 +3,10 @@ use std::{ path::{Path, PathBuf}, }; -use core::panic; use semver::{Comparator, Op, Version}; fn main() { - - let libcamera = match pkg_config::probe_library("libcamera") { - Ok(lib) => Ok(lib), - Err(e) => { - // Older libcamera versions use camera name instead of libcamera, try that instead - match pkg_config::probe_library("camera") { - Ok(lib) => Ok(lib), - // Return original error - Err(_) => Err(e), - } - } - } - 
.unwrap(); - - let libcamera_version = match Version::parse(&libcamera.version) { - Ok(v) => v, - Err(e) => { - panic!("bad version from pkgconfig, {e:?}") - } - }; - - + let libcamera_version = Version::new(0, 3, 2); let versioned_files = Path::new("versioned_files"); let mut candidates = std::fs::read_dir(versioned_files) diff --git a/libcamera/src/control_value.rs b/libcamera/src/control_value.rs index a87fb68..5e627f2 100644 --- a/libcamera/src/control_value.rs +++ b/libcamera/src/control_value.rs @@ -4,7 +4,7 @@ use libcamera_sys::*; use smallvec::{smallvec, SmallVec}; use thiserror::Error; -use crate::geometry::{Point, Rectangle, Size}; +use crate::geometry::{Rectangle, Size}; #[derive(Error, Debug)] pub enum ControlValueError { @@ -34,7 +34,6 @@ pub enum ControlValue { String(String), Rectangle(SmallVec<[Rectangle; 1]>), Size(SmallVec<[Size; 1]>), - Point(SmallVec<[Point; 1]>), } macro_rules! impl_control_value { @@ -78,7 +77,6 @@ impl_control_value!(ControlValue::Int64, i64); impl_control_value!(ControlValue::Float, f32); impl_control_value!(ControlValue::Rectangle, Rectangle); impl_control_value!(ControlValue::Size, Size); -impl_control_value!(ControlValue::Point, Point); macro_rules! impl_control_value_vec { ($p:path, $type:ty) => { @@ -112,7 +110,6 @@ impl_control_value_vec!(ControlValue::Int64, i64); impl_control_value_vec!(ControlValue::Float, f32); impl_control_value_vec!(ControlValue::Rectangle, Rectangle); impl_control_value_vec!(ControlValue::Size, Size); -impl_control_value_vec!(ControlValue::Point, Point); macro_rules! 
impl_control_value_array { ($p:path, $type:ty) => { @@ -187,7 +184,6 @@ impl_control_value_array!(ControlValue::Int64, i64); impl_control_value_array!(ControlValue::Float, f32); impl_control_value_array!(ControlValue::Rectangle, Rectangle); impl_control_value_array!(ControlValue::Size, Size); -impl_control_value_array!(ControlValue::Point, Point); impl From for ControlValue { fn from(val: String) -> Self { @@ -252,10 +248,6 @@ impl ControlValue { let slice = core::slice::from_raw_parts(data as *const libcamera_size_t, num_elements); Ok(Self::Size(SmallVec::from_iter(slice.iter().map(|r| Size::from(*r))))) } - LIBCAMERA_CONTROL_TYPE_POINT => { - let slice = core::slice::from_raw_parts(data as *const libcamera_point_t, num_elements); - Ok(Self::Point(SmallVec::from_iter(slice.iter().map(|r| Point::from(*r))))) - } _ => Err(ControlValueError::UnknownType(ty)), } } @@ -271,7 +263,6 @@ impl ControlValue { ControlValue::String(v) => (v.as_ptr().cast(), v.len()), ControlValue::Rectangle(v) => (v.as_ptr().cast(), v.len()), ControlValue::Size(v) => (v.as_ptr().cast(), v.len()), - ControlValue::Point(v) => (v.as_ptr().cast(), v.len()), }; let ty = self.ty(); @@ -296,7 +287,6 @@ impl ControlValue { ControlValue::String(_) => LIBCAMERA_CONTROL_TYPE_STRING, ControlValue::Rectangle(_) => LIBCAMERA_CONTROL_TYPE_RECTANGLE, ControlValue::Size(_) => LIBCAMERA_CONTROL_TYPE_SIZE, - ControlValue::Point(_) => LIBCAMERA_CONTROL_TYPE_POINT, } } } diff --git a/libcamera/src/geometry.rs b/libcamera/src/geometry.rs index c09ac27..46fd6aa 100644 --- a/libcamera/src/geometry.rs +++ b/libcamera/src/geometry.rs @@ -13,16 +13,6 @@ impl From for Point { } } -impl From for libcamera_point_t { - fn from(p: Point) -> Self { - Self { - x: p.x, - y: p.y, - } - } -} - - /// Represents `libcamera::Size` #[derive(Debug, Clone, Copy)] pub struct Size { diff --git a/libcamera/versioned_files/0.0.1/controls.rs b/libcamera/versioned_files/0.0.1/controls.rs index cacab77..636bdda 100644 --- 
a/libcamera/versioned_files/0.0.1/controls.rs +++ b/libcamera/versioned_files/0.0.1/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.1/properties.rs b/libcamera/versioned_files/0.0.1/properties.rs index 29549a3..b191233 100644 --- a/libcamera/versioned_files/0.0.1/properties.rs +++ b/libcamera/versioned_files/0.0.1/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.2/controls.rs b/libcamera/versioned_files/0.0.2/controls.rs index cacab77..636bdda 100644 --- a/libcamera/versioned_files/0.0.2/controls.rs +++ b/libcamera/versioned_files/0.0.2/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.2/properties.rs b/libcamera/versioned_files/0.0.2/properties.rs index 
29549a3..b191233 100644 --- a/libcamera/versioned_files/0.0.2/properties.rs +++ b/libcamera/versioned_files/0.0.2/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.3/controls.rs b/libcamera/versioned_files/0.0.3/controls.rs index 0166d6b..e74bd1c 100644 --- a/libcamera/versioned_files/0.0.3/controls.rs +++ b/libcamera/versioned_files/0.0.3/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.3/properties.rs b/libcamera/versioned_files/0.0.3/properties.rs index 29549a3..b191233 100644 --- a/libcamera/versioned_files/0.0.3/properties.rs +++ b/libcamera/versioned_files/0.0.3/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.4/controls.rs 
b/libcamera/versioned_files/0.0.4/controls.rs index 0166d6b..e74bd1c 100644 --- a/libcamera/versioned_files/0.0.4/controls.rs +++ b/libcamera/versioned_files/0.0.4/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.4/properties.rs b/libcamera/versioned_files/0.0.4/properties.rs index 29549a3..b191233 100644 --- a/libcamera/versioned_files/0.0.4/properties.rs +++ b/libcamera/versioned_files/0.0.4/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.0.5/controls.rs b/libcamera/versioned_files/0.0.5/controls.rs index 0166d6b..e74bd1c 100644 --- a/libcamera/versioned_files/0.0.5/controls.rs +++ b/libcamera/versioned_files/0.0.5/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git 
a/libcamera/versioned_files/0.0.5/properties.rs b/libcamera/versioned_files/0.0.5/properties.rs index 29549a3..b191233 100644 --- a/libcamera/versioned_files/0.0.5/properties.rs +++ b/libcamera/versioned_files/0.0.5/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.1.0/controls.rs b/libcamera/versioned_files/0.1.0/controls.rs index e08c6d3..4edce11 100644 --- a/libcamera/versioned_files/0.1.0/controls.rs +++ b/libcamera/versioned_files/0.1.0/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.1.0/properties.rs b/libcamera/versioned_files/0.1.0/properties.rs index e77121b..d6c9368 100644 --- a/libcamera/versioned_files/0.1.0/properties.rs +++ b/libcamera/versioned_files/0.1.0/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, 
IntoPrimitive)] diff --git a/libcamera/versioned_files/0.2.0/controls.rs b/libcamera/versioned_files/0.2.0/controls.rs index baf2c21..ae6daf1 100644 --- a/libcamera/versioned_files/0.2.0/controls.rs +++ b/libcamera/versioned_files/0.2.0/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.2.0/properties.rs b/libcamera/versioned_files/0.2.0/properties.rs index 7548b75..f154a8d 100644 --- a/libcamera/versioned_files/0.2.0/properties.rs +++ b/libcamera/versioned_files/0.2.0/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.0/controls.rs b/libcamera/versioned_files/0.3.0/controls.rs index c22ff95..5799377 100644 --- a/libcamera/versioned_files/0.3.0/controls.rs +++ b/libcamera/versioned_files/0.3.0/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, 
TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.0/properties.rs b/libcamera/versioned_files/0.3.0/properties.rs index 7548b75..f154a8d 100644 --- a/libcamera/versioned_files/0.3.0/properties.rs +++ b/libcamera/versioned_files/0.3.0/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.1/controls.rs b/libcamera/versioned_files/0.3.1/controls.rs index 123aa98..057240c 100644 --- a/libcamera/versioned_files/0.3.1/controls.rs +++ b/libcamera/versioned_files/0.3.1/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.1/properties.rs b/libcamera/versioned_files/0.3.1/properties.rs index 7548b75..f154a8d 100644 --- a/libcamera/versioned_files/0.3.1/properties.rs +++ b/libcamera/versioned_files/0.3.1/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, 
Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.2/controls.rs b/libcamera/versioned_files/0.3.2/controls.rs index cba15f1..589b9b2 100644 --- a/libcamera/versioned_files/0.3.2/controls.rs +++ b/libcamera/versioned_files/0.3.2/controls.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.3.2/properties.rs b/libcamera/versioned_files/0.3.2/properties.rs index 7548b75..f154a8d 100644 --- a/libcamera/versioned_files/0.3.2/properties.rs +++ b/libcamera/versioned_files/0.3.2/properties.rs @@ -4,7 +4,7 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use crate::control::{Control, Property, ControlEntry, DynControlEntry}; use crate::control_value::{ControlValue, ControlValueError}; #[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; +use crate::geometry::{Rectangle, Size}; #[allow(unused_imports)] use libcamera_sys::*; #[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] diff --git a/libcamera/versioned_files/0.4.0/control_ids_core.yaml b/libcamera/versioned_files/0.4.0/control_ids_core.yaml deleted file mode 100644 index 1dfaee0..0000000 --- a/libcamera/versioned_files/0.4.0/control_ids_core.yaml +++ /dev/null @@ -1,1052 +0,0 @@ -# SPDX-License-Identifier: LGPL-2.1-or-later -# -# Copyright (C) 2019, Google Inc. -# -%YAML 1.1 ---- -# Unless otherwise stated, all controls are bi-directional, i.e. they can be -# set through Request::controls() and returned out through Request::metadata(). 
-vendor: libcamera -controls: - - AeEnable: - type: bool - direction: inout - description: | - Enable or disable the AE. - - \sa ExposureTime AnalogueGain - - - AeLocked: - type: bool - direction: out - description: | - Report the lock status of a running AE algorithm. - - If the AE algorithm is locked the value shall be set to true, if it's - converging it shall be set to false. If the AE algorithm is not - running the control shall not be present in the metadata control list. - - \sa AeEnable - - # AeMeteringMode needs further attention: - # - Auto-generate max enum value. - # - Better handling of custom types. - - AeMeteringMode: - type: int32_t - direction: inout - description: | - Specify a metering mode for the AE algorithm to use. - - The metering modes determine which parts of the image are used to - determine the scene brightness. Metering modes may be platform specific - and not all metering modes may be supported. - enum: - - name: MeteringCentreWeighted - value: 0 - description: Centre-weighted metering mode. - - name: MeteringSpot - value: 1 - description: Spot metering mode. - - name: MeteringMatrix - value: 2 - description: Matrix metering mode. - - name: MeteringCustom - value: 3 - description: Custom metering mode. - - # AeConstraintMode needs further attention: - # - Auto-generate max enum value. - # - Better handling of custom types. - - AeConstraintMode: - type: int32_t - direction: inout - description: | - Specify a constraint mode for the AE algorithm to use. - - The constraint modes determine how the measured scene brightness is - adjusted to reach the desired target exposure. Constraint modes may be - platform specific, and not all constraint modes may be supported. - enum: - - name: ConstraintNormal - value: 0 - description: | - Default constraint mode. - - This mode aims to balance the exposure of different parts of the - image so as to reach a reasonable average level. 
However, highlights - in the image may appear over-exposed and lowlights may appear - under-exposed. - - name: ConstraintHighlight - value: 1 - description: | - Highlight constraint mode. - - This mode adjusts the exposure levels in order to try and avoid - over-exposing the brightest parts (highlights) of an image. - Other non-highlight parts of the image may appear under-exposed. - - name: ConstraintShadows - value: 2 - description: | - Shadows constraint mode. - - This mode adjusts the exposure levels in order to try and avoid - under-exposing the dark parts (shadows) of an image. Other normally - exposed parts of the image may appear over-exposed. - - name: ConstraintCustom - value: 3 - description: | - Custom constraint mode. - - # AeExposureMode needs further attention: - # - Auto-generate max enum value. - # - Better handling of custom types. - - AeExposureMode: - type: int32_t - direction: inout - description: | - Specify an exposure mode for the AE algorithm to use. - - The exposure modes specify how the desired total exposure is divided - between the exposure time and the sensor's analogue gain. They are - platform specific, and not all exposure modes may be supported. - enum: - - name: ExposureNormal - value: 0 - description: Default exposure mode. - - name: ExposureShort - value: 1 - description: Exposure mode allowing only short exposure times. - - name: ExposureLong - value: 2 - description: Exposure mode allowing long exposure times. - - name: ExposureCustom - value: 3 - description: Custom exposure mode. - - - ExposureValue: - type: float - direction: inout - description: | - Specify an Exposure Value (EV) parameter. - - The EV parameter will only be applied if the AE algorithm is currently - enabled. - - By convention EV adjusts the exposure as log2. For example - EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment - of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. 
- - \sa AeEnable - - - ExposureTime: - type: int32_t - direction: inout - description: | - Exposure time for the frame applied in the sensor device. - - This value is specified in micro-seconds. - - Setting this value means that it is now fixed and the AE algorithm may - not change it. Setting it back to zero returns it to the control of the - AE algorithm. - - \sa AnalogueGain AeEnable - - \todo Document the interactions between AeEnable and setting a fixed - value for this control. Consider interactions with other AE features, - such as aperture and aperture/shutter priority mode, and decide if - control of which features should be automatically adjusted shouldn't - better be handled through a separate AE mode control. - - - AnalogueGain: - type: float - direction: inout - description: | - Analogue gain value applied in the sensor device. - - The value of the control specifies the gain multiplier applied to all - colour channels. This value cannot be lower than 1.0. - - Setting this value means that it is now fixed and the AE algorithm may - not change it. Setting it back to zero returns it to the control of the - AE algorithm. - - \sa ExposureTime AeEnable - - \todo Document the interactions between AeEnable and setting a fixed - value for this control. Consider interactions with other AE features, - such as aperture and aperture/shutter priority mode, and decide if - control of which features should be automatically adjusted shouldn't - better be handled through a separate AE mode control. - - - AeFlickerMode: - type: int32_t - direction: inout - description: | - Set the flicker avoidance mode for AGC/AEC. - - The flicker mode determines whether, and how, the AGC/AEC algorithm - attempts to hide flicker effects caused by the duty cycle of artificial - lighting. - - Although implementation dependent, many algorithms for "flicker - avoidance" work by restricting this exposure time to integer multiples - of the cycle period, wherever possible. 
- - Implementations may not support all of the flicker modes listed below. - - By default the system will start in FlickerAuto mode if this is - supported, otherwise the flicker mode will be set to FlickerOff. - - enum: - - name: FlickerOff - value: 0 - description: | - No flicker avoidance is performed. - - name: FlickerManual - value: 1 - description: | - Manual flicker avoidance. - - Suppress flicker effects caused by lighting running with a period - specified by the AeFlickerPeriod control. - \sa AeFlickerPeriod - - name: FlickerAuto - value: 2 - description: | - Automatic flicker period detection and avoidance. - - The system will automatically determine the most likely value of - flicker period, and avoid flicker of this frequency. Once flicker - is being corrected, it is implementation dependent whether the - system is still able to detect a change in the flicker period. - \sa AeFlickerDetected - - - AeFlickerPeriod: - type: int32_t - direction: inout - description: | - Manual flicker period in microseconds. - - This value sets the current flicker period to avoid. It is used when - AeFlickerMode is set to FlickerManual. - - To cancel 50Hz mains flicker, this should be set to 10000 (corresponding - to 100Hz), or 8333 (120Hz) for 60Hz mains. - - Setting the mode to FlickerManual when no AeFlickerPeriod has ever been - set means that no flicker cancellation occurs (until the value of this - control is updated). - - Switching to modes other than FlickerManual has no effect on the - value of the AeFlickerPeriod control. - - \sa AeFlickerMode - - - AeFlickerDetected: - type: int32_t - direction: out - description: | - Flicker period detected in microseconds. - - The value reported here indicates the currently detected flicker - period, or zero if no flicker at all is detected. - - When AeFlickerMode is set to FlickerAuto, there may be a period during - which the value reported here remains zero. 
Once a non-zero value is - reported, then this is the flicker period that has been detected and is - now being cancelled. - - In the case of 50Hz mains flicker, the value would be 10000 - (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. - - It is implementation dependent whether the system can continue to detect - flicker of different periods when another frequency is already being - cancelled. - - \sa AeFlickerMode - - - Brightness: - type: float - direction: inout - description: | - Specify a fixed brightness parameter. - - Positive values (up to 1.0) produce brighter images; negative values - (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. - - - Contrast: - type: float - direction: inout - description: | - Specify a fixed contrast parameter. - - Normal contrast is given by the value 1.0; larger values produce images - with more contrast. - - - Lux: - type: float - direction: out - description: | - Report an estimate of the current illuminance level in lux. - - The Lux control can only be returned in metadata. - - - AwbEnable: - type: bool - direction: inout - description: | - Enable or disable the AWB. - - When AWB is enabled, the algorithm estimates the colour temperature of - the scene and computes colour gains and the colour correction matrix - automatically. The computed colour temperature, gains and correction - matrix are reported in metadata. The corresponding controls are ignored - if set in a request. - - When AWB is disabled, the colour temperature, gains and correction - matrix are not updated automatically and can be set manually in - requests. - - \sa ColourCorrectionMatrix - \sa ColourGains - \sa ColourTemperature - - # AwbMode needs further attention: - # - Auto-generate max enum value. - # - Better handling of custom types. - - AwbMode: - type: int32_t - direction: inout - description: | - Specify the range of illuminants to use for the AWB algorithm. 
- - The modes supported are platform specific, and not all modes may be - supported. - enum: - - name: AwbAuto - value: 0 - description: Search over the whole colour temperature range. - - name: AwbIncandescent - value: 1 - description: Incandescent AWB lamp mode. - - name: AwbTungsten - value: 2 - description: Tungsten AWB lamp mode. - - name: AwbFluorescent - value: 3 - description: Fluorescent AWB lamp mode. - - name: AwbIndoor - value: 4 - description: Indoor AWB lighting mode. - - name: AwbDaylight - value: 5 - description: Daylight AWB lighting mode. - - name: AwbCloudy - value: 6 - description: Cloudy AWB lighting mode. - - name: AwbCustom - value: 7 - description: Custom AWB mode. - - - AwbLocked: - type: bool - direction: out - description: | - Report the lock status of a running AWB algorithm. - - If the AWB algorithm is locked the value shall be set to true, if it's - converging it shall be set to false. If the AWB algorithm is not - running the control shall not be present in the metadata control list. - - \sa AwbEnable - - - ColourGains: - type: float - direction: inout - description: | - Pair of gain values for the Red and Blue colour channels, in that - order. - - ColourGains can only be applied in a Request when the AWB is disabled. - If ColourGains is set in a request but ColourTemperature is not, the - implementation shall calculate and set the ColourTemperature based on - the ColourGains. - - \sa AwbEnable - \sa ColourTemperature - size: [2] - - - ColourTemperature: - type: int32_t - direction: out - description: | - ColourTemperature of the frame, in kelvin. - - ColourTemperature can only be applied in a Request when the AWB is - disabled. - - If ColourTemperature is set in a request but ColourGains is not, the - implementation shall calculate and set the ColourGains based on the - given ColourTemperature. 
If ColourTemperature is set (either directly, - or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, - the ColourCorrectionMatrix is updated based on the ColourTemperature. - - The ColourTemperature used to process the frame is reported in metadata. - - \sa AwbEnable - \sa ColourCorrectionMatrix - \sa ColourGains - - - Saturation: - type: float - direction: inout - description: | - Specify a fixed saturation parameter. - - Normal saturation is given by the value 1.0; larger values produce more - saturated colours; 0.0 produces a greyscale image. - - - SensorBlackLevels: - type: int32_t - direction: out - description: | - Reports the sensor black levels used for processing a frame. - - The values are in the order R, Gr, Gb, B. They are returned as numbers - out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The - SensorBlackLevels control can only be returned in metadata. - size: [4] - - - Sharpness: - type: float - direction: inout - description: | - Intensity of the sharpening applied to the image. - - A value of 0.0 means no sharpening. The minimum value means - minimal sharpening, and shall be 0.0 unless the camera can't - disable sharpening completely. The default value shall give a - "reasonable" level of sharpening, suitable for most use cases. - The maximum value may apply extremely high levels of sharpening, - higher than anyone could reasonably want. Negative values are - not allowed. Note also that sharpening is not applied to raw - streams. - - - FocusFoM: - type: int32_t - direction: out - description: | - Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. - - A larger FocusFoM value indicates a more in-focus frame. This singular - value may be based on a combination of statistics gathered from - multiple focus regions within an image. The number of focus regions and - method of combination is platform dependent. 
In this respect, it is not - necessarily aimed at providing a way to implement a focus algorithm by - the application, rather an indication of how in-focus a frame is. - - - ColourCorrectionMatrix: - type: float - direction: inout - description: | - The 3x3 matrix that converts camera RGB to sRGB within the imaging - pipeline. - - This should describe the matrix that is used after pixels have been - white-balanced, but before any gamma transformation. The 3x3 matrix is - stored in conventional reading order in an array of 9 floating point - values. - - ColourCorrectionMatrix can only be applied in a Request when the AWB is - disabled. - - \sa AwbEnable - \sa ColourTemperature - size: [3,3] - - - ScalerCrop: - type: Rectangle - direction: inout - description: | - Sets the image portion that will be scaled to form the whole of - the final output image. - - The (x,y) location of this rectangle is relative to the - PixelArrayActiveAreas that is being used. The units remain native - sensor pixels, even if the sensor is being used in a binning or - skipping mode. - - This control is only present when the pipeline supports scaling. Its - maximum valid value is given by the properties::ScalerCropMaximum - property, and the two can be used to implement digital zoom. - - - DigitalGain: - type: float - direction: inout - description: | - Digital gain value applied during the processing steps applied - to the image as captured from the sensor. - - The global digital gain factor is applied to all the colour channels - of the RAW image. Different pipeline models are free to - specify how the global gain factor applies to each separate - channel. - - If an imaging pipeline applies digital gain in distinct - processing steps, this value indicates their total sum. - Pipelines are free to decide how to adjust each processing - step to respect the received gain factor and shall report - their total value in the request metadata. 
- - - FrameDuration: - type: int64_t - direction: out - description: | - The instantaneous frame duration from start of frame exposure to start - of next exposure, expressed in microseconds. - - This control is meant to be returned in metadata. - - - FrameDurationLimits: - type: int64_t - direction: inout - description: | - The minimum and maximum (in that order) frame duration, expressed in - microseconds. - - When provided by applications, the control specifies the sensor frame - duration interval the pipeline has to use. This limits the largest - exposure time the sensor can use. For example, if a maximum frame - duration of 33ms is requested (corresponding to 30 frames per second), - the sensor will not be able to raise the exposure time above 33ms. - A fixed frame duration is achieved by setting the minimum and maximum - values to be the same. Setting both values to 0 reverts to using the - camera defaults. - - The maximum frame duration provides the absolute limit to the exposure - time computed by the AE algorithm and it overrides any exposure mode - setting specified with controls::AeExposureMode. Similarly, when a - manual exposure time is set through controls::ExposureTime, it also - gets clipped to the limits set by this control. When reported in - metadata, the control expresses the minimum and maximum frame durations - used after being clipped to the sensor provided frame duration limits. - - \sa AeExposureMode - \sa ExposureTime - - \todo Define how to calculate the capture frame rate by - defining controls to report additional delays introduced by - the capture pipeline or post-processing stages (ie JPEG - conversion, frame scaling). - - \todo Provide an explicit definition of default control values, for - this and all other controls. - - size: [2] - - - SensorTemperature: - type: float - direction: out - description: | - Temperature measure from the camera sensor in Celsius. 
- - This value is typically obtained by a thermal sensor present on-die or - in the camera module. The range of reported temperatures is device - dependent. - - The SensorTemperature control will only be returned in metadata if a - thermal sensor is present. - - - SensorTimestamp: - type: int64_t - direction: out - description: | - The time when the first row of the image sensor active array is exposed. - - The timestamp, expressed in nanoseconds, represents a monotonically - increasing counter since the system boot time, as defined by the - Linux-specific CLOCK_BOOTTIME clock id. - - The SensorTimestamp control can only be returned in metadata. - - \todo Define how the sensor timestamp has to be used in the reprocessing - use case. - - - AfMode: - type: int32_t - direction: inout - description: | - The mode of the AF (autofocus) algorithm. - - An implementation may choose not to implement all the modes. - - enum: - - name: AfModeManual - value: 0 - description: | - The AF algorithm is in manual mode. - - In this mode it will never perform any action nor move the lens of - its own accord, but an application can specify the desired lens - position using the LensPosition control. The AfState will always - report AfStateIdle. - - If the camera is started in AfModeManual, it will move the focus - lens to the position specified by the LensPosition control. - - This mode is the recommended default value for the AfMode control. - External cameras (as reported by the Location property set to - CameraLocationExternal) may use a different default value. - - name: AfModeAuto - value: 1 - description: | - The AF algorithm is in auto mode. - - In this mode the algorithm will never move the lens or change state - unless the AfTrigger control is used. The AfTrigger control can be - used to initiate a focus scan, the results of which will be - reported by AfState. 
- - If the autofocus algorithm is moved from AfModeAuto to another mode - while a scan is in progress, the scan is cancelled immediately, - without waiting for the scan to finish. - - When first entering this mode the AfState will report AfStateIdle. - When a trigger control is sent, AfState will report AfStateScanning - for a period before spontaneously changing to AfStateFocused or - AfStateFailed, depending on the outcome of the scan. It will remain - in this state until another scan is initiated by the AfTrigger - control. If a scan is cancelled (without changing to another mode), - AfState will return to AfStateIdle. - - name: AfModeContinuous - value: 2 - description: | - The AF algorithm is in continuous mode. - - In this mode the lens can re-start a scan spontaneously at any - moment, without any user intervention. The AfState still reports - whether the algorithm is currently scanning or not, though the - application has no ability to initiate or cancel scans, nor to move - the lens for itself. - - However, applications can pause the AF algorithm from continuously - scanning by using the AfPause control. This allows video or still - images to be captured whilst guaranteeing that the focus is fixed. - - When set to AfModeContinuous, the system will immediately initiate a - scan so AfState will report AfStateScanning, and will settle on one - of AfStateFocused or AfStateFailed, depending on the scan result. - - - AfRange: - type: int32_t - direction: inout - description: | - The range of focus distances that is scanned. - - An implementation may choose not to implement all the options here. - enum: - - name: AfRangeNormal - value: 0 - description: | - A wide range of focus distances is scanned. - - Scanned distances cover all the way from infinity down to close - distances, though depending on the implementation, possibly not - including the very closest macro positions. - - name: AfRangeMacro - value: 1 - description: | - Only close distances are scanned. 
- - name: AfRangeFull - value: 2 - description: | - The full range of focus distances is scanned. - - This range is similar to AfRangeNormal but includes the very - closest macro positions. - - - AfSpeed: - type: int32_t - direction: inout - description: | - Determine whether the AF is to move the lens as quickly as possible or - more steadily. - - For example, during video recording it may be desirable not to move the - lens too abruptly, but when in a preview mode (waiting for a still - capture) it may be helpful to move the lens as quickly as is reasonably - possible. - enum: - - name: AfSpeedNormal - value: 0 - description: Move the lens at its usual speed. - - name: AfSpeedFast - value: 1 - description: Move the lens more quickly. - - - AfMetering: - type: int32_t - direction: inout - description: | - The parts of the image used by the AF algorithm to measure focus. - enum: - - name: AfMeteringAuto - value: 0 - description: | - Let the AF algorithm decide for itself where it will measure focus. - - name: AfMeteringWindows - value: 1 - description: | - Use the rectangles defined by the AfWindows control to measure focus. - - If no windows are specified the behaviour is platform dependent. - - - AfWindows: - type: Rectangle - direction: inout - description: | - The focus windows used by the AF algorithm when AfMetering is set to - AfMeteringWindows. - - The units used are pixels within the rectangle returned by the - ScalerCropMaximum property. - - In order to be activated, a rectangle must be programmed with non-zero - width and height. Internally, these rectangles are intersected with the - ScalerCropMaximum rectangle. If the window becomes empty after this - operation, then the window is ignored. If all the windows end up being - ignored, then the behaviour is platform dependent. 
- - On platforms that support the ScalerCrop control (for implementing - digital zoom, for example), no automatic recalculation or adjustment of - AF windows is performed internally if the ScalerCrop is changed. If any - window lies outside the output image after the scaler crop has been - applied, it is up to the application to recalculate them. - - The details of how the windows are used are platform dependent. We note - that when there is more than one AF window, a typical implementation - might find the optimal focus position for each one and finally select - the window where the focal distance for the objects shown in that part - of the image are closest to the camera. - - size: [n] - - - AfTrigger: - type: int32_t - direction: in - description: | - Start an autofocus scan. - - This control starts an autofocus scan when AfMode is set to AfModeAuto, - and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It - can also be used to terminate a scan early. - - enum: - - name: AfTriggerStart - value: 0 - description: | - Start an AF scan. - - Setting the control to AfTriggerStart is ignored if a scan is in - progress. - - name: AfTriggerCancel - value: 1 - description: | - Cancel an AF scan. - - This does not cause the lens to move anywhere else. Ignored if no - scan is in progress. - - - AfPause: - type: int32_t - direction: in - description: | - Pause lens movements when in continuous autofocus mode. - - This control has no effect except when in continuous autofocus mode - (AfModeContinuous). It can be used to pause any lens movements while - (for example) images are captured. The algorithm remains inactive - until it is instructed to resume. - - enum: - - name: AfPauseImmediate - value: 0 - description: | - Pause the continuous autofocus algorithm immediately. - - The autofocus algorithm is paused whether or not any kind of scan - is underway. AfPauseState will subsequently report - AfPauseStatePaused. 
AfState may report any of AfStateScanning, - AfStateFocused or AfStateFailed, depending on the algorithm's state - when it received this control. - - name: AfPauseDeferred - value: 1 - description: | - Pause the continuous autofocus algorithm at the end of the scan. - - This is similar to AfPauseImmediate, and if the AfState is - currently reporting AfStateFocused or AfStateFailed it will remain - in that state and AfPauseState will report AfPauseStatePaused. - - However, if the algorithm is scanning (AfStateScanning), - AfPauseState will report AfPauseStatePausing until the scan is - finished, at which point AfState will report one of AfStateFocused - or AfStateFailed, and AfPauseState will change to - AfPauseStatePaused. - - - name: AfPauseResume - value: 2 - description: | - Resume continuous autofocus operation. - - The algorithm starts again from exactly where it left off, and - AfPauseState will report AfPauseStateRunning. - - - LensPosition: - type: float - direction: inout - description: | - Set and report the focus lens position. - - This control instructs the lens to move to a particular position and - also reports back the position of the lens for each frame. - - The LensPosition control is ignored unless the AfMode is set to - AfModeManual, though the value is reported back unconditionally in all - modes. - - This value, which is generally a non-integer, is the reciprocal of the - focal distance in metres, also known as dioptres. That is, to set a - focal distance D, the lens position LP is given by - - \f$LP = \frac{1\mathrm{m}}{D}\f$ - - For example: - - - 0 moves the lens to infinity. - - 0.5 moves the lens to focus on objects 2m away. - - 2 moves the lens to focus on objects 50cm away. - - And larger values will focus the lens closer. 
- - The default value of the control should indicate a good general - position for the lens, often corresponding to the hyperfocal distance - (the closest position for which objects at infinity are still - acceptably sharp). The minimum will often be zero (meaning infinity), - and the maximum value defines the closest focus position. - - \todo Define a property to report the Hyperfocal distance of calibrated - lenses. - - - AfState: - type: int32_t - direction: out - description: | - The current state of the AF algorithm. - - This control reports the current state of the AF algorithm in - conjunction with the reported AfMode value and (in continuous AF mode) - the AfPauseState value. The possible state changes are described below, - though we note the following state transitions that occur when the - AfMode is changed. - - If the AfMode is set to AfModeManual, then the AfState will always - report AfStateIdle (even if the lens is subsequently moved). Changing - to the AfModeManual state does not initiate any lens movement. - - If the AfMode is set to AfModeAuto then the AfState will report - AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent - together then AfState will omit AfStateIdle and move straight to - AfStateScanning (and start a scan). - - If the AfMode is set to AfModeContinuous then the AfState will - initially report AfStateScanning. - - enum: - - name: AfStateIdle - value: 0 - description: | - The AF algorithm is in manual mode (AfModeManual) or in auto mode - (AfModeAuto) and a scan has not yet been triggered, or an - in-progress scan was cancelled. - - name: AfStateScanning - value: 1 - description: | - The AF algorithm is in auto mode (AfModeAuto), and a scan has been - started using the AfTrigger control. 
- - The scan can be cancelled by sending AfTriggerCancel at which point - the algorithm will either move back to AfStateIdle or, if the scan - actually completes before the cancel request is processed, to one - of AfStateFocused or AfStateFailed. - - Alternatively the AF algorithm could be in continuous mode - (AfModeContinuous) at which point it may enter this state - spontaneously whenever it determines that a rescan is needed. - - name: AfStateFocused - value: 2 - description: | - The AF algorithm is in auto (AfModeAuto) or continuous - (AfModeContinuous) mode and a scan has completed with the result - that the algorithm believes the image is now in focus. - - name: AfStateFailed - value: 3 - description: | - The AF algorithm is in auto (AfModeAuto) or continuous - (AfModeContinuous) mode and a scan has completed with the result - that the algorithm did not find a good focus position. - - - AfPauseState: - type: int32_t - direction: out - description: | - Report whether the autofocus is currently running, paused or pausing. - - This control is only applicable in continuous (AfModeContinuous) mode, - and reports whether the algorithm is currently running, paused or - pausing (that is, will pause as soon as any in-progress scan - completes). - - Any change to AfMode will cause AfPauseStateRunning to be reported. - - enum: - - name: AfPauseStateRunning - value: 0 - description: | - Continuous AF is running and the algorithm may restart a scan - spontaneously. - - name: AfPauseStatePausing - value: 1 - description: | - Continuous AF has been sent an AfPauseDeferred control, and will - pause as soon as any in-progress scan completes. - - When the scan completes, the AfPauseState control will report - AfPauseStatePaused. No new scans will be start spontaneously until - the AfPauseResume control is sent. - - name: AfPauseStatePaused - value: 2 - description: | - Continuous AF is paused. 
- - No further state changes or lens movements will occur until the - AfPauseResume control is sent. - - - HdrMode: - type: int32_t - direction: inout - description: | - Set the mode to be used for High Dynamic Range (HDR) imaging. - - HDR techniques typically include multiple exposure, image fusion and - tone mapping techniques to improve the dynamic range of the resulting - images. - - When using an HDR mode, images are captured with different sets of AGC - settings called HDR channels. Channels indicate in particular the type - of exposure (short, medium or long) used to capture the raw image, - before fusion. Each HDR image is tagged with the corresponding channel - using the HdrChannel control. - - \sa HdrChannel - - enum: - - name: HdrModeOff - value: 0 - description: | - HDR is disabled. - - Metadata for this frame will not include the HdrChannel control. - - name: HdrModeMultiExposureUnmerged - value: 1 - description: | - Multiple exposures will be generated in an alternating fashion. - - The multiple exposures will not be merged together and will be - returned to the application as they are. Each image will be tagged - with the correct HDR channel, indicating what kind of exposure it - is. The tag should be the same as in the HdrModeMultiExposure case. - - The expectation is that an application using this mode would merge - the frames to create HDR images for itself if it requires them. - - name: HdrModeMultiExposure - value: 2 - description: | - Multiple exposures will be generated and merged to create HDR - images. - - Each image will be tagged with the HDR channel (long, medium or - short) that arrived and which caused this image to be output. - - Systems that use two channels for HDR will return images tagged - alternately as the short and long channel. Systems that use three - channels for HDR will cycle through the short, medium and long - channel before repeating. 
- - name: HdrModeSingleExposure - value: 3 - description: | - Multiple frames all at a single exposure will be used to create HDR - images. - - These images should be reported as all corresponding to the HDR - short channel. - - name: HdrModeNight - value: 4 - description: | - Multiple frames will be combined to produce "night mode" images. - - It is up to the implementation exactly which HDR channels it uses, - and the images will all be tagged accordingly with the correct HDR - channel information. - - - HdrChannel: - type: int32_t - direction: out - description: | - The HDR channel used to capture the frame. - - This value is reported back to the application so that it can discover - whether this capture corresponds to the short or long exposure image - (or any other image used by the HDR procedure). An application can - monitor the HDR channel to discover when the differently exposed images - have arrived. - - This metadata is only available when an HDR mode has been enabled. - - \sa HdrMode - - enum: - - name: HdrChannelNone - value: 0 - description: | - This image does not correspond to any of the captures used to create - an HDR image. - - name: HdrChannelShort - value: 1 - description: | - This is a short exposure image. - - name: HdrChannelMedium - value: 2 - description: | - This is a medium exposure image. - - name: HdrChannelLong - value: 3 - description: | - This is a long exposure image. - - - Gamma: - type: float - direction: inout - description: | - Specify a fixed gamma value. - - The default gamma value must be 2.2 which closely mimics sRGB gamma. - Note that this is camera gamma, so it is applied as 1.0/gamma. - - - DebugMetadataEnable: - type: bool - direction: inout - description: | - Enable or disable the debug metadata. - -... 
diff --git a/libcamera/versioned_files/0.4.0/control_ids_debug.yaml b/libcamera/versioned_files/0.4.0/control_ids_debug.yaml deleted file mode 100644 index 7975327..0000000 --- a/libcamera/versioned_files/0.4.0/control_ids_debug.yaml +++ /dev/null @@ -1,6 +0,0 @@ -# SPDX-License-Identifier: LGPL-2.1-or-later -# -%YAML 1.1 ---- -vendor: debug -controls: [] diff --git a/libcamera/versioned_files/0.4.0/control_ids_draft.yaml b/libcamera/versioned_files/0.4.0/control_ids_draft.yaml deleted file mode 100644 index 87e4e02..0000000 --- a/libcamera/versioned_files/0.4.0/control_ids_draft.yaml +++ /dev/null @@ -1,327 +0,0 @@ -# SPDX-License-Identifier: LGPL-2.1-or-later -# -# Copyright (C) 2019, Google Inc. -# -%YAML 1.1 ---- -# Unless otherwise stated, all controls are bi-directional, i.e. they can be -# set through Request::controls() and returned out through Request::metadata(). -vendor: draft -controls: - - AePrecaptureTrigger: - type: int32_t - direction: inout - description: | - Control for AE metering trigger. Currently identical to - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. - - Whether the camera device will trigger a precapture metering sequence - when it processes this request. - enum: - - name: AePrecaptureTriggerIdle - value: 0 - description: The trigger is idle. - - name: AePrecaptureTriggerStart - value: 1 - description: The pre-capture AE metering is started by the camera. - - name: AePrecaptureTriggerCancel - value: 2 - description: | - The camera will cancel any active or completed metering sequence. - The AE algorithm is reset to its initial state. - - - NoiseReductionMode: - type: int32_t - direction: inout - description: | - Control to select the noise reduction algorithm mode. Currently - identical to ANDROID_NOISE_REDUCTION_MODE. - - Mode of operation for the noise reduction algorithm. 
- enum: - - name: NoiseReductionModeOff - value: 0 - description: No noise reduction is applied - - name: NoiseReductionModeFast - value: 1 - description: | - Noise reduction is applied without reducing the frame rate. - - name: NoiseReductionModeHighQuality - value: 2 - description: | - High quality noise reduction at the expense of frame rate. - - name: NoiseReductionModeMinimal - value: 3 - description: | - Minimal noise reduction is applied without reducing the frame rate. - - name: NoiseReductionModeZSL - value: 4 - description: | - Noise reduction is applied at different levels to different streams. - - - ColorCorrectionAberrationMode: - type: int32_t - direction: inout - description: | - Control to select the color correction aberration mode. Currently - identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. - - Mode of operation for the chromatic aberration correction algorithm. - enum: - - name: ColorCorrectionAberrationOff - value: 0 - description: No aberration correction is applied. - - name: ColorCorrectionAberrationFast - value: 1 - description: Aberration correction will not slow down the frame rate. - - name: ColorCorrectionAberrationHighQuality - value: 2 - description: | - High quality aberration correction which might reduce the frame - rate. - - - AeState: - type: int32_t - direction: out - description: | - Control to report the current AE algorithm state. Currently identical to - ANDROID_CONTROL_AE_STATE. - - Current state of the AE algorithm. - enum: - - name: AeStateInactive - value: 0 - description: The AE algorithm is inactive. - - name: AeStateSearching - value: 1 - description: The AE algorithm has not converged yet. - - name: AeStateConverged - value: 2 - description: The AE algorithm has converged. - - name: AeStateLocked - value: 3 - description: The AE algorithm is locked. 
- - name: AeStateFlashRequired - value: 4 - description: The AE algorithm would need a flash for good results - - name: AeStatePrecapture - value: 5 - description: | - The AE algorithm has started a pre-capture metering session. - \sa AePrecaptureTrigger - - - AwbState: - type: int32_t - direction: out - description: | - Control to report the current AWB algorithm state. Currently identical - to ANDROID_CONTROL_AWB_STATE. - - Current state of the AWB algorithm. - enum: - - name: AwbStateInactive - value: 0 - description: The AWB algorithm is inactive. - - name: AwbStateSearching - value: 1 - description: The AWB algorithm has not converged yet. - - name: AwbConverged - value: 2 - description: The AWB algorithm has converged. - - name: AwbLocked - value: 3 - description: The AWB algorithm is locked. - - - SensorRollingShutterSkew: - type: int64_t - direction: out - description: | - Control to report the time between the start of exposure of the first - row and the start of exposure of the last row. Currently identical to - ANDROID_SENSOR_ROLLING_SHUTTER_SKEW - - - LensShadingMapMode: - type: int32_t - direction: inout - description: | - Control to report if the lens shading map is available. Currently - identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. - enum: - - name: LensShadingMapModeOff - value: 0 - description: No lens shading map mode is available. - - name: LensShadingMapModeOn - value: 1 - description: The lens shading map mode is available. - - - PipelineDepth: - type: int32_t - direction: out - description: | - Specifies the number of pipeline stages the frame went through from when - it was exposed to when the final completed result was available to the - framework. Always less than or equal to PipelineMaxDepth. Currently - identical to ANDROID_REQUEST_PIPELINE_DEPTH. - - The typical value for this control is 3 as a frame is first exposed, - captured and then processed in a single pass through the ISP. 
Any - additional processing step performed after the ISP pass (in example face - detection, additional format conversions etc) count as an additional - pipeline stage. - - - MaxLatency: - type: int32_t - direction: out - description: | - The maximum number of frames that can occur after a request (different - than the previous) has been submitted, and before the result's state - becomes synchronized. A value of -1 indicates unknown latency, and 0 - indicates per-frame control. Currently identical to - ANDROID_SYNC_MAX_LATENCY. - - - TestPatternMode: - type: int32_t - direction: inout - description: | - Control to select the test pattern mode. Currently identical to - ANDROID_SENSOR_TEST_PATTERN_MODE. - enum: - - name: TestPatternModeOff - value: 0 - description: | - No test pattern mode is used. The camera device returns frames from - the image sensor. - - name: TestPatternModeSolidColor - value: 1 - description: | - Each pixel in [R, G_even, G_odd, B] is replaced by its respective - color channel provided in test pattern data. - \todo Add control for test pattern data. - - name: TestPatternModeColorBars - value: 2 - description: | - All pixel data is replaced with an 8-bar color pattern. The vertical - bars (left-to-right) are as follows; white, yellow, cyan, green, - magenta, red, blue and black. Each bar should take up 1/8 of the - sensor pixel array width. When this is not possible, the bar size - should be rounded down to the nearest integer and the pattern can - repeat on the right side. Each bar's height must always take up the - full sensor pixel array height. - - name: TestPatternModeColorBarsFadeToGray - value: 3 - description: | - The test pattern is similar to TestPatternModeColorBars, - except that each bar should start at its specified color at the top - and fade to gray at the bottom. Furthermore each bar is further - subdevided into a left and right half. 
The left half should have a - smooth gradient, and the right half should have a quantized - gradient. In particular, the right half's should consist of blocks - of the same color for 1/16th active sensor pixel array width. The - least significant bits in the quantized gradient should be copied - from the most significant bits of the smooth gradient. The height of - each bar should always be a multiple of 128. When this is not the - case, the pattern should repeat at the bottom of the image. - - name: TestPatternModePn9 - value: 4 - description: | - All pixel data is replaced by a pseudo-random sequence generated - from a PN9 512-bit sequence (typically implemented in hardware with - a linear feedback shift register). The generator should be reset at - the beginning of each frame, and thus each subsequent raw frame with - this test pattern should be exactly the same as the last. - - name: TestPatternModeCustom1 - value: 256 - description: | - The first custom test pattern. All custom patterns that are - available only on this camera device are at least this numeric - value. All of the custom test patterns will be static (that is the - raw image must not vary from frame to frame). - - - FaceDetectMode: - type: int32_t - direction: inout - description: | - Control to select the face detection mode used by the pipeline. - - Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. - - \sa FaceDetectFaceRectangles - \sa FaceDetectFaceScores - \sa FaceDetectFaceLandmarks - \sa FaceDetectFaceIds - - enum: - - name: FaceDetectModeOff - value: 0 - description: | - Pipeline doesn't perform face detection and doesn't report any - control related to face detection. - - name: FaceDetectModeSimple - value: 1 - description: | - Pipeline performs face detection and reports the - FaceDetectFaceRectangles and FaceDetectFaceScores controls for each - detected face. FaceDetectFaceLandmarks and FaceDetectFaceIds are - optional. 
- - name: FaceDetectModeFull - value: 2 - description: | - Pipeline performs face detection and reports all the controls - related to face detection including FaceDetectFaceRectangles, - FaceDetectFaceScores, FaceDetectFaceLandmarks, and - FaceDeteceFaceIds for each detected face. - - - FaceDetectFaceRectangles: - type: Rectangle - direction: out - description: | - Boundary rectangles of the detected faces. The number of values is - the number of detected faces. - - The FaceDetectFaceRectangles control can only be returned in metadata. - - Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. - size: [n] - - - FaceDetectFaceScores: - type: uint8_t - direction: out - description: | - Confidence score of each of the detected faces. The range of score is - [0, 100]. The number of values should be the number of faces reported - in FaceDetectFaceRectangles. - - The FaceDetectFaceScores control can only be returned in metadata. - - Currently identical to ANDROID_STATISTICS_FACE_SCORES. - size: [n] - - - FaceDetectFaceLandmarks: - type: Point - direction: out - description: | - Array of human face landmark coordinates in format [..., left_eye_i, - right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. The - number of values should be 3 * the number of faces reported in - FaceDetectFaceRectangles. - - The FaceDetectFaceLandmarks control can only be returned in metadata. - - Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. - size: [n] - - - FaceDetectFaceIds: - type: int32_t - direction: out - description: | - Each detected face is given a unique ID that is valid for as long as the - face is visible to the camera device. A face that leaves the field of - view and later returns may be assigned a new ID. The number of values - should be the number of faces reported in FaceDetectFaceRectangles. - - The FaceDetectFaceIds control can only be returned in metadata. - - Currently identical to ANDROID_STATISTICS_FACE_IDS. - size: [n] - -... 
diff --git a/libcamera/versioned_files/0.4.0/control_ids_rpi.yaml b/libcamera/versioned_files/0.4.0/control_ids_rpi.yaml deleted file mode 100644 index 7524c5d..0000000 --- a/libcamera/versioned_files/0.4.0/control_ids_rpi.yaml +++ /dev/null @@ -1,61 +0,0 @@ -# SPDX-License-Identifier: LGPL-2.1-or-later -# -# Copyright (C) 2023, Raspberry Pi Ltd -# -%YAML 1.1 ---- -# Raspberry Pi (VC4 and PiSP) specific vendor controls -vendor: rpi -controls: - - StatsOutputEnable: - type: bool - direction: inout - description: | - Toggles the Raspberry Pi IPA to output the hardware generated statistics. - - When this control is set to true, the IPA outputs a binary dump of the - hardware generated statistics through the Request metadata in the - Bcm2835StatsOutput control. - - \sa Bcm2835StatsOutput - - - Bcm2835StatsOutput: - type: uint8_t - size: [n] - direction: out - description: | - Span of the BCM2835 ISP generated statistics for the current frame. - - This is sent in the Request metadata if the StatsOutputEnable is set to - true. The statistics struct definition can be found in - include/linux/bcm2835-isp.h. - - \sa StatsOutputEnable - - - ScalerCrops: - type: Rectangle - size: [n] - direction: out - description: | - An array of rectangles, where each singular value has identical - functionality to the ScalerCrop control. This control allows the - Raspberry Pi pipeline handler to control individual scaler crops per - output stream. - - The order of rectangles passed into the control must match the order of - streams configured by the application. The pipeline handler will only - configure crop retangles up-to the number of output streams configured. - All subsequent rectangles passed into this control are ignored by the - pipeline handler. - - If both rpi::ScalerCrops and ScalerCrop controls are present in a - ControlList, the latter is discarded, and crops are obtained from this - control. 
- - Note that using different crop rectangles for each output stream with - this control is only applicable on the Pi5/PiSP platform. This control - should also be considered temporary/draft and will be replaced with - official libcamera API support for per-stream controls in the future. - - \sa ScalerCrop -... diff --git a/libcamera/versioned_files/0.4.0/controls.rs b/libcamera/versioned_files/0.4.0/controls.rs deleted file mode 100644 index 7bd18cb..0000000 --- a/libcamera/versioned_files/0.4.0/controls.rs +++ /dev/null @@ -1,3319 +0,0 @@ -use std::{ffi::CStr, ops::{Deref, DerefMut}}; -use num_enum::{IntoPrimitive, TryFromPrimitive}; -#[allow(unused_imports)] -use crate::control::{Control, Property, ControlEntry, DynControlEntry}; -use crate::control_value::{ControlValue, ControlValueError}; -#[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; -#[allow(unused_imports)] -use libcamera_sys::*; -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(u32)] -pub enum ControlId { - /// Enable or disable the AE. - /// - /// \sa ExposureTime AnalogueGain - AeEnable = AE_ENABLE, - /// Report the lock status of a running AE algorithm. - /// - /// If the AE algorithm is locked the value shall be set to true, if it's - /// converging it shall be set to false. If the AE algorithm is not - /// running the control shall not be present in the metadata control list. - /// - /// \sa AeEnable - AeLocked = AE_LOCKED, - /// Specify a metering mode for the AE algorithm to use. - /// - /// The metering modes determine which parts of the image are used to - /// determine the scene brightness. Metering modes may be platform specific - /// and not all metering modes may be supported. - AeMeteringMode = AE_METERING_MODE, - /// Specify a constraint mode for the AE algorithm to use. - /// - /// The constraint modes determine how the measured scene brightness is - /// adjusted to reach the desired target exposure. 
Constraint modes may be - /// platform specific, and not all constraint modes may be supported. - AeConstraintMode = AE_CONSTRAINT_MODE, - /// Specify an exposure mode for the AE algorithm to use. - /// - /// The exposure modes specify how the desired total exposure is divided - /// between the exposure time and the sensor's analogue gain. They are - /// platform specific, and not all exposure modes may be supported. - AeExposureMode = AE_EXPOSURE_MODE, - /// Specify an Exposure Value (EV) parameter. - /// - /// The EV parameter will only be applied if the AE algorithm is currently - /// enabled. - /// - /// By convention EV adjusts the exposure as log2. For example - /// EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment - /// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. - /// - /// \sa AeEnable - ExposureValue = EXPOSURE_VALUE, - /// Exposure time for the frame applied in the sensor device. - /// - /// This value is specified in micro-seconds. - /// - /// Setting this value means that it is now fixed and the AE algorithm may - /// not change it. Setting it back to zero returns it to the control of the - /// AE algorithm. - /// - /// \sa AnalogueGain AeEnable - /// - /// \todo Document the interactions between AeEnable and setting a fixed - /// value for this control. Consider interactions with other AE features, - /// such as aperture and aperture/shutter priority mode, and decide if - /// control of which features should be automatically adjusted shouldn't - /// better be handled through a separate AE mode control. - ExposureTime = EXPOSURE_TIME, - /// Analogue gain value applied in the sensor device. - /// - /// The value of the control specifies the gain multiplier applied to all - /// colour channels. This value cannot be lower than 1.0. - /// - /// Setting this value means that it is now fixed and the AE algorithm may - /// not change it. Setting it back to zero returns it to the control of the - /// AE algorithm. 
- /// - /// \sa ExposureTime AeEnable - /// - /// \todo Document the interactions between AeEnable and setting a fixed - /// value for this control. Consider interactions with other AE features, - /// such as aperture and aperture/shutter priority mode, and decide if - /// control of which features should be automatically adjusted shouldn't - /// better be handled through a separate AE mode control. - AnalogueGain = ANALOGUE_GAIN, - /// Set the flicker avoidance mode for AGC/AEC. - /// - /// The flicker mode determines whether, and how, the AGC/AEC algorithm - /// attempts to hide flicker effects caused by the duty cycle of artificial - /// lighting. - /// - /// Although implementation dependent, many algorithms for "flicker - /// avoidance" work by restricting this exposure time to integer multiples - /// of the cycle period, wherever possible. - /// - /// Implementations may not support all of the flicker modes listed below. - /// - /// By default the system will start in FlickerAuto mode if this is - /// supported, otherwise the flicker mode will be set to FlickerOff. - AeFlickerMode = AE_FLICKER_MODE, - /// Manual flicker period in microseconds. - /// - /// This value sets the current flicker period to avoid. It is used when - /// AeFlickerMode is set to FlickerManual. - /// - /// To cancel 50Hz mains flicker, this should be set to 10000 (corresponding - /// to 100Hz), or 8333 (120Hz) for 60Hz mains. - /// - /// Setting the mode to FlickerManual when no AeFlickerPeriod has ever been - /// set means that no flicker cancellation occurs (until the value of this - /// control is updated). - /// - /// Switching to modes other than FlickerManual has no effect on the - /// value of the AeFlickerPeriod control. - /// - /// \sa AeFlickerMode - AeFlickerPeriod = AE_FLICKER_PERIOD, - /// Flicker period detected in microseconds. - /// - /// The value reported here indicates the currently detected flicker - /// period, or zero if no flicker at all is detected. 
- /// - /// When AeFlickerMode is set to FlickerAuto, there may be a period during - /// which the value reported here remains zero. Once a non-zero value is - /// reported, then this is the flicker period that has been detected and is - /// now being cancelled. - /// - /// In the case of 50Hz mains flicker, the value would be 10000 - /// (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. - /// - /// It is implementation dependent whether the system can continue to detect - /// flicker of different periods when another frequency is already being - /// cancelled. - /// - /// \sa AeFlickerMode - AeFlickerDetected = AE_FLICKER_DETECTED, - /// Specify a fixed brightness parameter. - /// - /// Positive values (up to 1.0) produce brighter images; negative values - /// (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. - Brightness = BRIGHTNESS, - /// Specify a fixed contrast parameter. - /// - /// Normal contrast is given by the value 1.0; larger values produce images - /// with more contrast. - Contrast = CONTRAST, - /// Report an estimate of the current illuminance level in lux. - /// - /// The Lux control can only be returned in metadata. - Lux = LUX, - /// Enable or disable the AWB. - /// - /// When AWB is enabled, the algorithm estimates the colour temperature of - /// the scene and computes colour gains and the colour correction matrix - /// automatically. The computed colour temperature, gains and correction - /// matrix are reported in metadata. The corresponding controls are ignored - /// if set in a request. - /// - /// When AWB is disabled, the colour temperature, gains and correction - /// matrix are not updated automatically and can be set manually in - /// requests. - /// - /// \sa ColourCorrectionMatrix - /// \sa ColourGains - /// \sa ColourTemperature - AwbEnable = AWB_ENABLE, - /// Specify the range of illuminants to use for the AWB algorithm. 
- /// - /// The modes supported are platform specific, and not all modes may be - /// supported. - AwbMode = AWB_MODE, - /// Report the lock status of a running AWB algorithm. - /// - /// If the AWB algorithm is locked the value shall be set to true, if it's - /// converging it shall be set to false. If the AWB algorithm is not - /// running the control shall not be present in the metadata control list. - /// - /// \sa AwbEnable - AwbLocked = AWB_LOCKED, - /// Pair of gain values for the Red and Blue colour channels, in that - /// order. - /// - /// ColourGains can only be applied in a Request when the AWB is disabled. - /// If ColourGains is set in a request but ColourTemperature is not, the - /// implementation shall calculate and set the ColourTemperature based on - /// the ColourGains. - /// - /// \sa AwbEnable - /// \sa ColourTemperature - ColourGains = COLOUR_GAINS, - /// ColourTemperature of the frame, in kelvin. - /// - /// ColourTemperature can only be applied in a Request when the AWB is - /// disabled. - /// - /// If ColourTemperature is set in a request but ColourGains is not, the - /// implementation shall calculate and set the ColourGains based on the - /// given ColourTemperature. If ColourTemperature is set (either directly, - /// or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, - /// the ColourCorrectionMatrix is updated based on the ColourTemperature. - /// - /// The ColourTemperature used to process the frame is reported in metadata. - /// - /// \sa AwbEnable - /// \sa ColourCorrectionMatrix - /// \sa ColourGains - ColourTemperature = COLOUR_TEMPERATURE, - /// Specify a fixed saturation parameter. - /// - /// Normal saturation is given by the value 1.0; larger values produce more - /// saturated colours; 0.0 produces a greyscale image. - Saturation = SATURATION, - /// Reports the sensor black levels used for processing a frame. - /// - /// The values are in the order R, Gr, Gb, B. 
They are returned as numbers - /// out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The - /// SensorBlackLevels control can only be returned in metadata. - SensorBlackLevels = SENSOR_BLACK_LEVELS, - /// Intensity of the sharpening applied to the image. - /// - /// A value of 0.0 means no sharpening. The minimum value means - /// minimal sharpening, and shall be 0.0 unless the camera can't - /// disable sharpening completely. The default value shall give a - /// "reasonable" level of sharpening, suitable for most use cases. - /// The maximum value may apply extremely high levels of sharpening, - /// higher than anyone could reasonably want. Negative values are - /// not allowed. Note also that sharpening is not applied to raw - /// streams. - Sharpness = SHARPNESS, - /// Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. - /// - /// A larger FocusFoM value indicates a more in-focus frame. This singular - /// value may be based on a combination of statistics gathered from - /// multiple focus regions within an image. The number of focus regions and - /// method of combination is platform dependent. In this respect, it is not - /// necessarily aimed at providing a way to implement a focus algorithm by - /// the application, rather an indication of how in-focus a frame is. - FocusFoM = FOCUS_FO_M, - /// The 3x3 matrix that converts camera RGB to sRGB within the imaging - /// pipeline. - /// - /// This should describe the matrix that is used after pixels have been - /// white-balanced, but before any gamma transformation. The 3x3 matrix is - /// stored in conventional reading order in an array of 9 floating point - /// values. - /// - /// ColourCorrectionMatrix can only be applied in a Request when the AWB is - /// disabled. - /// - /// \sa AwbEnable - /// \sa ColourTemperature - ColourCorrectionMatrix = COLOUR_CORRECTION_MATRIX, - /// Sets the image portion that will be scaled to form the whole of - /// the final output image. 
- /// - /// The (x,y) location of this rectangle is relative to the - /// PixelArrayActiveAreas that is being used. The units remain native - /// sensor pixels, even if the sensor is being used in a binning or - /// skipping mode. - /// - /// This control is only present when the pipeline supports scaling. Its - /// maximum valid value is given by the properties::ScalerCropMaximum - /// property, and the two can be used to implement digital zoom. - ScalerCrop = SCALER_CROP, - /// Digital gain value applied during the processing steps applied - /// to the image as captured from the sensor. - /// - /// The global digital gain factor is applied to all the colour channels - /// of the RAW image. Different pipeline models are free to - /// specify how the global gain factor applies to each separate - /// channel. - /// - /// If an imaging pipeline applies digital gain in distinct - /// processing steps, this value indicates their total sum. - /// Pipelines are free to decide how to adjust each processing - /// step to respect the received gain factor and shall report - /// their total value in the request metadata. - DigitalGain = DIGITAL_GAIN, - /// The instantaneous frame duration from start of frame exposure to start - /// of next exposure, expressed in microseconds. - /// - /// This control is meant to be returned in metadata. - FrameDuration = FRAME_DURATION, - /// The minimum and maximum (in that order) frame duration, expressed in - /// microseconds. - /// - /// When provided by applications, the control specifies the sensor frame - /// duration interval the pipeline has to use. This limits the largest - /// exposure time the sensor can use. For example, if a maximum frame - /// duration of 33ms is requested (corresponding to 30 frames per second), - /// the sensor will not be able to raise the exposure time above 33ms. - /// A fixed frame duration is achieved by setting the minimum and maximum - /// values to be the same. 
Setting both values to 0 reverts to using the - /// camera defaults. - /// - /// The maximum frame duration provides the absolute limit to the exposure - /// time computed by the AE algorithm and it overrides any exposure mode - /// setting specified with controls::AeExposureMode. Similarly, when a - /// manual exposure time is set through controls::ExposureTime, it also - /// gets clipped to the limits set by this control. When reported in - /// metadata, the control expresses the minimum and maximum frame durations - /// used after being clipped to the sensor provided frame duration limits. - /// - /// \sa AeExposureMode - /// \sa ExposureTime - /// - /// \todo Define how to calculate the capture frame rate by - /// defining controls to report additional delays introduced by - /// the capture pipeline or post-processing stages (ie JPEG - /// conversion, frame scaling). - /// - /// \todo Provide an explicit definition of default control values, for - /// this and all other controls. - FrameDurationLimits = FRAME_DURATION_LIMITS, - /// Temperature measure from the camera sensor in Celsius. - /// - /// This value is typically obtained by a thermal sensor present on-die or - /// in the camera module. The range of reported temperatures is device - /// dependent. - /// - /// The SensorTemperature control will only be returned in metadata if a - /// thermal sensor is present. - SensorTemperature = SENSOR_TEMPERATURE, - /// The time when the first row of the image sensor active array is exposed. - /// - /// The timestamp, expressed in nanoseconds, represents a monotonically - /// increasing counter since the system boot time, as defined by the - /// Linux-specific CLOCK_BOOTTIME clock id. - /// - /// The SensorTimestamp control can only be returned in metadata. - /// - /// \todo Define how the sensor timestamp has to be used in the reprocessing - /// use case. - SensorTimestamp = SENSOR_TIMESTAMP, - /// The mode of the AF (autofocus) algorithm. 
- /// - /// An implementation may choose not to implement all the modes. - AfMode = AF_MODE, - /// The range of focus distances that is scanned. - /// - /// An implementation may choose not to implement all the options here. - AfRange = AF_RANGE, - /// Determine whether the AF is to move the lens as quickly as possible or - /// more steadily. - /// - /// For example, during video recording it may be desirable not to move the - /// lens too abruptly, but when in a preview mode (waiting for a still - /// capture) it may be helpful to move the lens as quickly as is reasonably - /// possible. - AfSpeed = AF_SPEED, - /// The parts of the image used by the AF algorithm to measure focus. - AfMetering = AF_METERING, - /// The focus windows used by the AF algorithm when AfMetering is set to - /// AfMeteringWindows. - /// - /// The units used are pixels within the rectangle returned by the - /// ScalerCropMaximum property. - /// - /// In order to be activated, a rectangle must be programmed with non-zero - /// width and height. Internally, these rectangles are intersected with the - /// ScalerCropMaximum rectangle. If the window becomes empty after this - /// operation, then the window is ignored. If all the windows end up being - /// ignored, then the behaviour is platform dependent. - /// - /// On platforms that support the ScalerCrop control (for implementing - /// digital zoom, for example), no automatic recalculation or adjustment of - /// AF windows is performed internally if the ScalerCrop is changed. If any - /// window lies outside the output image after the scaler crop has been - /// applied, it is up to the application to recalculate them. - /// - /// The details of how the windows are used are platform dependent. 
We note - /// that when there is more than one AF window, a typical implementation - /// might find the optimal focus position for each one and finally select - /// the window where the focal distance for the objects shown in that part - /// of the image are closest to the camera. - AfWindows = AF_WINDOWS, - /// Start an autofocus scan. - /// - /// This control starts an autofocus scan when AfMode is set to AfModeAuto, - /// and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It - /// can also be used to terminate a scan early. - AfTrigger = AF_TRIGGER, - /// Pause lens movements when in continuous autofocus mode. - /// - /// This control has no effect except when in continuous autofocus mode - /// (AfModeContinuous). It can be used to pause any lens movements while - /// (for example) images are captured. The algorithm remains inactive - /// until it is instructed to resume. - AfPause = AF_PAUSE, - /// Set and report the focus lens position. - /// - /// This control instructs the lens to move to a particular position and - /// also reports back the position of the lens for each frame. - /// - /// The LensPosition control is ignored unless the AfMode is set to - /// AfModeManual, though the value is reported back unconditionally in all - /// modes. - /// - /// This value, which is generally a non-integer, is the reciprocal of the - /// focal distance in metres, also known as dioptres. That is, to set a - /// focal distance D, the lens position LP is given by - /// - /// \f$LP = \frac{1\mathrm{m}}{D}\f$ - /// - /// For example: - /// - /// - 0 moves the lens to infinity. - /// - 0.5 moves the lens to focus on objects 2m away. - /// - 2 moves the lens to focus on objects 50cm away. - /// - And larger values will focus the lens closer. 
- /// - /// The default value of the control should indicate a good general - /// position for the lens, often corresponding to the hyperfocal distance - /// (the closest position for which objects at infinity are still - /// acceptably sharp). The minimum will often be zero (meaning infinity), - /// and the maximum value defines the closest focus position. - /// - /// \todo Define a property to report the Hyperfocal distance of calibrated - /// lenses. - LensPosition = LENS_POSITION, - /// The current state of the AF algorithm. - /// - /// This control reports the current state of the AF algorithm in - /// conjunction with the reported AfMode value and (in continuous AF mode) - /// the AfPauseState value. The possible state changes are described below, - /// though we note the following state transitions that occur when the - /// AfMode is changed. - /// - /// If the AfMode is set to AfModeManual, then the AfState will always - /// report AfStateIdle (even if the lens is subsequently moved). Changing - /// to the AfModeManual state does not initiate any lens movement. - /// - /// If the AfMode is set to AfModeAuto then the AfState will report - /// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent - /// together then AfState will omit AfStateIdle and move straight to - /// AfStateScanning (and start a scan). - /// - /// If the AfMode is set to AfModeContinuous then the AfState will - /// initially report AfStateScanning. - AfState = AF_STATE, - /// Report whether the autofocus is currently running, paused or pausing. - /// - /// This control is only applicable in continuous (AfModeContinuous) mode, - /// and reports whether the algorithm is currently running, paused or - /// pausing (that is, will pause as soon as any in-progress scan - /// completes). - /// - /// Any change to AfMode will cause AfPauseStateRunning to be reported. - AfPauseState = AF_PAUSE_STATE, - /// Set the mode to be used for High Dynamic Range (HDR) imaging. 
- /// - /// HDR techniques typically include multiple exposure, image fusion and - /// tone mapping techniques to improve the dynamic range of the resulting - /// images. - /// - /// When using an HDR mode, images are captured with different sets of AGC - /// settings called HDR channels. Channels indicate in particular the type - /// of exposure (short, medium or long) used to capture the raw image, - /// before fusion. Each HDR image is tagged with the corresponding channel - /// using the HdrChannel control. - /// - /// \sa HdrChannel - HdrMode = HDR_MODE, - /// The HDR channel used to capture the frame. - /// - /// This value is reported back to the application so that it can discover - /// whether this capture corresponds to the short or long exposure image - /// (or any other image used by the HDR procedure). An application can - /// monitor the HDR channel to discover when the differently exposed images - /// have arrived. - /// - /// This metadata is only available when an HDR mode has been enabled. - /// - /// \sa HdrMode - HdrChannel = HDR_CHANNEL, - /// Specify a fixed gamma value. - /// - /// The default gamma value must be 2.2 which closely mimics sRGB gamma. - /// Note that this is camera gamma, so it is applied as 1.0/gamma. - Gamma = GAMMA, - /// Enable or disable the debug metadata. - DebugMetadataEnable = DEBUG_METADATA_ENABLE, - /// Control for AE metering trigger. Currently identical to - /// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. - /// - /// Whether the camera device will trigger a precapture metering sequence - /// when it processes this request. - #[cfg(feature = "vendor_draft")] - AePrecaptureTrigger = AE_PRECAPTURE_TRIGGER, - /// Control to select the noise reduction algorithm mode. Currently - /// identical to ANDROID_NOISE_REDUCTION_MODE. - /// - /// Mode of operation for the noise reduction algorithm. 
- #[cfg(feature = "vendor_draft")] - NoiseReductionMode = NOISE_REDUCTION_MODE, - /// Control to select the color correction aberration mode. Currently - /// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. - /// - /// Mode of operation for the chromatic aberration correction algorithm. - #[cfg(feature = "vendor_draft")] - ColorCorrectionAberrationMode = COLOR_CORRECTION_ABERRATION_MODE, - /// Control to report the current AE algorithm state. Currently identical to - /// ANDROID_CONTROL_AE_STATE. - /// - /// Current state of the AE algorithm. - #[cfg(feature = "vendor_draft")] - AeState = AE_STATE, - /// Control to report the current AWB algorithm state. Currently identical - /// to ANDROID_CONTROL_AWB_STATE. - /// - /// Current state of the AWB algorithm. - #[cfg(feature = "vendor_draft")] - AwbState = AWB_STATE, - /// Control to report the time between the start of exposure of the first - /// row and the start of exposure of the last row. Currently identical to - /// ANDROID_SENSOR_ROLLING_SHUTTER_SKEW - #[cfg(feature = "vendor_draft")] - SensorRollingShutterSkew = SENSOR_ROLLING_SHUTTER_SKEW, - /// Control to report if the lens shading map is available. Currently - /// identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. - #[cfg(feature = "vendor_draft")] - LensShadingMapMode = LENS_SHADING_MAP_MODE, - /// Specifies the number of pipeline stages the frame went through from when - /// it was exposed to when the final completed result was available to the - /// framework. Always less than or equal to PipelineMaxDepth. Currently - /// identical to ANDROID_REQUEST_PIPELINE_DEPTH. - /// - /// The typical value for this control is 3 as a frame is first exposed, - /// captured and then processed in a single pass through the ISP. Any - /// additional processing step performed after the ISP pass (in example face - /// detection, additional format conversions etc) count as an additional - /// pipeline stage. 
- #[cfg(feature = "vendor_draft")] - PipelineDepth = PIPELINE_DEPTH, - /// The maximum number of frames that can occur after a request (different - /// than the previous) has been submitted, and before the result's state - /// becomes synchronized. A value of -1 indicates unknown latency, and 0 - /// indicates per-frame control. Currently identical to - /// ANDROID_SYNC_MAX_LATENCY. - #[cfg(feature = "vendor_draft")] - MaxLatency = MAX_LATENCY, - /// Control to select the test pattern mode. Currently identical to - /// ANDROID_SENSOR_TEST_PATTERN_MODE. - #[cfg(feature = "vendor_draft")] - TestPatternMode = TEST_PATTERN_MODE, - /// Control to select the face detection mode used by the pipeline. - /// - /// Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. - /// - /// \sa FaceDetectFaceRectangles - /// \sa FaceDetectFaceScores - /// \sa FaceDetectFaceLandmarks - /// \sa FaceDetectFaceIds - #[cfg(feature = "vendor_draft")] - FaceDetectMode = FACE_DETECT_MODE, - /// Boundary rectangles of the detected faces. The number of values is - /// the number of detected faces. - /// - /// The FaceDetectFaceRectangles control can only be returned in metadata. - /// - /// Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. - #[cfg(feature = "vendor_draft")] - FaceDetectFaceRectangles = FACE_DETECT_FACE_RECTANGLES, - /// Confidence score of each of the detected faces. The range of score is - /// [0, 100]. The number of values should be the number of faces reported - /// in FaceDetectFaceRectangles. - /// - /// The FaceDetectFaceScores control can only be returned in metadata. - /// - /// Currently identical to ANDROID_STATISTICS_FACE_SCORES. - #[cfg(feature = "vendor_draft")] - FaceDetectFaceScores = FACE_DETECT_FACE_SCORES, - /// Array of human face landmark coordinates in format [..., left_eye_i, - /// right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. 
The - /// number of values should be 3 * the number of faces reported in - /// FaceDetectFaceRectangles. - /// - /// The FaceDetectFaceLandmarks control can only be returned in metadata. - /// - /// Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. - #[cfg(feature = "vendor_draft")] - FaceDetectFaceLandmarks = FACE_DETECT_FACE_LANDMARKS, - /// Each detected face is given a unique ID that is valid for as long as the - /// face is visible to the camera device. A face that leaves the field of - /// view and later returns may be assigned a new ID. The number of values - /// should be the number of faces reported in FaceDetectFaceRectangles. - /// - /// The FaceDetectFaceIds control can only be returned in metadata. - /// - /// Currently identical to ANDROID_STATISTICS_FACE_IDS. - #[cfg(feature = "vendor_draft")] - FaceDetectFaceIds = FACE_DETECT_FACE_IDS, - /// Toggles the Raspberry Pi IPA to output the hardware generated statistics. - /// - /// When this control is set to true, the IPA outputs a binary dump of the - /// hardware generated statistics through the Request metadata in the - /// Bcm2835StatsOutput control. - /// - /// \sa Bcm2835StatsOutput - #[cfg(feature = "vendor_rpi")] - StatsOutputEnable = STATS_OUTPUT_ENABLE, - /// Span of the BCM2835 ISP generated statistics for the current frame. - /// - /// This is sent in the Request metadata if the StatsOutputEnable is set to - /// true. The statistics struct definition can be found in - /// include/linux/bcm2835-isp.h. - /// - /// \sa StatsOutputEnable - #[cfg(feature = "vendor_rpi")] - Bcm2835StatsOutput = BCM2835_STATS_OUTPUT, - /// An array of rectangles, where each singular value has identical - /// functionality to the ScalerCrop control. This control allows the - /// Raspberry Pi pipeline handler to control individual scaler crops per - /// output stream. - /// - /// The order of rectangles passed into the control must match the order of - /// streams configured by the application. 
The pipeline handler will only - /// configure crop retangles up-to the number of output streams configured. - /// All subsequent rectangles passed into this control are ignored by the - /// pipeline handler. - /// - /// If both rpi::ScalerCrops and ScalerCrop controls are present in a - /// ControlList, the latter is discarded, and crops are obtained from this - /// control. - /// - /// Note that using different crop rectangles for each output stream with - /// this control is only applicable on the Pi5/PiSP platform. This control - /// should also be considered temporary/draft and will be replaced with - /// official libcamera API support for per-stream controls in the future. - /// - /// \sa ScalerCrop - #[cfg(feature = "vendor_rpi")] - ScalerCrops = SCALER_CROPS, -} -impl ControlId { - fn id(&self) -> u32 { - *self as u32 - } - pub fn name(&self) -> String { - unsafe { - let c_str = libcamera_control_name_from_id(self.id()); - if c_str.is_null() { - return "".into(); - } - CStr::from_ptr(c_str).to_str().unwrap().into() - } - } -} -/// Enable or disable the AE. -/// -/// \sa ExposureTime AnalogueGain -#[derive(Debug, Clone)] -pub struct AeEnable(pub bool); -impl Deref for AeEnable { - type Target = bool; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AeEnable { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AeEnable { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AeEnable) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AeEnable { - const ID: u32 = ControlId::AeEnable as _; -} -impl Control for AeEnable {} -/// Report the lock status of a running AE algorithm. -/// -/// If the AE algorithm is locked the value shall be set to true, if it's -/// converging it shall be set to false. 
If the AE algorithm is not -/// running the control shall not be present in the metadata control list. -/// -/// \sa AeEnable -#[derive(Debug, Clone)] -pub struct AeLocked(pub bool); -impl Deref for AeLocked { - type Target = bool; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AeLocked { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AeLocked { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AeLocked) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AeLocked { - const ID: u32 = ControlId::AeLocked as _; -} -impl Control for AeLocked {} -/// Specify a metering mode for the AE algorithm to use. -/// -/// The metering modes determine which parts of the image are used to -/// determine the scene brightness. Metering modes may be platform specific -/// and not all metering modes may be supported. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AeMeteringMode { - /// Centre-weighted metering mode. - MeteringCentreWeighted = 0, - /// Spot metering mode. - MeteringSpot = 1, - /// Matrix metering mode. - MeteringMatrix = 2, - /// Custom metering mode. - MeteringCustom = 3, -} -impl TryFrom for AeMeteringMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AeMeteringMode) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AeMeteringMode { - const ID: u32 = ControlId::AeMeteringMode as _; -} -impl Control for AeMeteringMode {} -/// Specify a constraint mode for the AE algorithm to use. 
-/// -/// The constraint modes determine how the measured scene brightness is -/// adjusted to reach the desired target exposure. Constraint modes may be -/// platform specific, and not all constraint modes may be supported. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AeConstraintMode { - /// Default constraint mode. - /// - /// This mode aims to balance the exposure of different parts of the - /// image so as to reach a reasonable average level. However, highlights - /// in the image may appear over-exposed and lowlights may appear - /// under-exposed. - ConstraintNormal = 0, - /// Highlight constraint mode. - /// - /// This mode adjusts the exposure levels in order to try and avoid - /// over-exposing the brightest parts (highlights) of an image. - /// Other non-highlight parts of the image may appear under-exposed. - ConstraintHighlight = 1, - /// Shadows constraint mode. - /// - /// This mode adjusts the exposure levels in order to try and avoid - /// under-exposing the dark parts (shadows) of an image. Other normally - /// exposed parts of the image may appear over-exposed. - ConstraintShadows = 2, - /// Custom constraint mode. - ConstraintCustom = 3, -} -impl TryFrom for AeConstraintMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AeConstraintMode) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AeConstraintMode { - const ID: u32 = ControlId::AeConstraintMode as _; -} -impl Control for AeConstraintMode {} -/// Specify an exposure mode for the AE algorithm to use. -/// -/// The exposure modes specify how the desired total exposure is divided -/// between the exposure time and the sensor's analogue gain. They are -/// platform specific, and not all exposure modes may be supported. 
-#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AeExposureMode { - /// Default exposure mode. - ExposureNormal = 0, - /// Exposure mode allowing only short exposure times. - ExposureShort = 1, - /// Exposure mode allowing long exposure times. - ExposureLong = 2, - /// Custom exposure mode. - ExposureCustom = 3, -} -impl TryFrom for AeExposureMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AeExposureMode) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AeExposureMode { - const ID: u32 = ControlId::AeExposureMode as _; -} -impl Control for AeExposureMode {} -/// Specify an Exposure Value (EV) parameter. -/// -/// The EV parameter will only be applied if the AE algorithm is currently -/// enabled. -/// -/// By convention EV adjusts the exposure as log2. For example -/// EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment -/// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. -/// -/// \sa AeEnable -#[derive(Debug, Clone)] -pub struct ExposureValue(pub f32); -impl Deref for ExposureValue { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for ExposureValue { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for ExposureValue { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: ExposureValue) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for ExposureValue { - const ID: u32 = ControlId::ExposureValue as _; -} -impl Control for ExposureValue {} -/// Exposure time for the frame applied in the sensor device. -/// -/// This value is specified in micro-seconds. 
-/// -/// Setting this value means that it is now fixed and the AE algorithm may -/// not change it. Setting it back to zero returns it to the control of the -/// AE algorithm. -/// -/// \sa AnalogueGain AeEnable -/// -/// \todo Document the interactions between AeEnable and setting a fixed -/// value for this control. Consider interactions with other AE features, -/// such as aperture and aperture/shutter priority mode, and decide if -/// control of which features should be automatically adjusted shouldn't -/// better be handled through a separate AE mode control. -#[derive(Debug, Clone)] -pub struct ExposureTime(pub i32); -impl Deref for ExposureTime { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for ExposureTime { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for ExposureTime { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: ExposureTime) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for ExposureTime { - const ID: u32 = ControlId::ExposureTime as _; -} -impl Control for ExposureTime {} -/// Analogue gain value applied in the sensor device. -/// -/// The value of the control specifies the gain multiplier applied to all -/// colour channels. This value cannot be lower than 1.0. -/// -/// Setting this value means that it is now fixed and the AE algorithm may -/// not change it. Setting it back to zero returns it to the control of the -/// AE algorithm. -/// -/// \sa ExposureTime AeEnable -/// -/// \todo Document the interactions between AeEnable and setting a fixed -/// value for this control. Consider interactions with other AE features, -/// such as aperture and aperture/shutter priority mode, and decide if -/// control of which features should be automatically adjusted shouldn't -/// better be handled through a separate AE mode control. 
-#[derive(Debug, Clone)] -pub struct AnalogueGain(pub f32); -impl Deref for AnalogueGain { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AnalogueGain { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AnalogueGain { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AnalogueGain) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AnalogueGain { - const ID: u32 = ControlId::AnalogueGain as _; -} -impl Control for AnalogueGain {} -/// Set the flicker avoidance mode for AGC/AEC. -/// -/// The flicker mode determines whether, and how, the AGC/AEC algorithm -/// attempts to hide flicker effects caused by the duty cycle of artificial -/// lighting. -/// -/// Although implementation dependent, many algorithms for "flicker -/// avoidance" work by restricting this exposure time to integer multiples -/// of the cycle period, wherever possible. -/// -/// Implementations may not support all of the flicker modes listed below. -/// -/// By default the system will start in FlickerAuto mode if this is -/// supported, otherwise the flicker mode will be set to FlickerOff. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AeFlickerMode { - /// No flicker avoidance is performed. - FlickerOff = 0, - /// Manual flicker avoidance. - /// - /// Suppress flicker effects caused by lighting running with a period - /// specified by the AeFlickerPeriod control. - /// \sa AeFlickerPeriod - FlickerManual = 1, - /// Automatic flicker period detection and avoidance. - /// - /// The system will automatically determine the most likely value of - /// flicker period, and avoid flicker of this frequency. 
Once flicker - /// is being corrected, it is implementation dependent whether the - /// system is still able to detect a change in the flicker period. - /// \sa AeFlickerDetected - FlickerAuto = 2, -} -impl TryFrom for AeFlickerMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AeFlickerMode) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AeFlickerMode { - const ID: u32 = ControlId::AeFlickerMode as _; -} -impl Control for AeFlickerMode {} -/// Manual flicker period in microseconds. -/// -/// This value sets the current flicker period to avoid. It is used when -/// AeFlickerMode is set to FlickerManual. -/// -/// To cancel 50Hz mains flicker, this should be set to 10000 (corresponding -/// to 100Hz), or 8333 (120Hz) for 60Hz mains. -/// -/// Setting the mode to FlickerManual when no AeFlickerPeriod has ever been -/// set means that no flicker cancellation occurs (until the value of this -/// control is updated). -/// -/// Switching to modes other than FlickerManual has no effect on the -/// value of the AeFlickerPeriod control. 
-/// -/// \sa AeFlickerMode -#[derive(Debug, Clone)] -pub struct AeFlickerPeriod(pub i32); -impl Deref for AeFlickerPeriod { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AeFlickerPeriod { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AeFlickerPeriod { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AeFlickerPeriod) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AeFlickerPeriod { - const ID: u32 = ControlId::AeFlickerPeriod as _; -} -impl Control for AeFlickerPeriod {} -/// Flicker period detected in microseconds. -/// -/// The value reported here indicates the currently detected flicker -/// period, or zero if no flicker at all is detected. -/// -/// When AeFlickerMode is set to FlickerAuto, there may be a period during -/// which the value reported here remains zero. Once a non-zero value is -/// reported, then this is the flicker period that has been detected and is -/// now being cancelled. -/// -/// In the case of 50Hz mains flicker, the value would be 10000 -/// (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. -/// -/// It is implementation dependent whether the system can continue to detect -/// flicker of different periods when another frequency is already being -/// cancelled. 
-/// -/// \sa AeFlickerMode -#[derive(Debug, Clone)] -pub struct AeFlickerDetected(pub i32); -impl Deref for AeFlickerDetected { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AeFlickerDetected { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AeFlickerDetected { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AeFlickerDetected) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AeFlickerDetected { - const ID: u32 = ControlId::AeFlickerDetected as _; -} -impl Control for AeFlickerDetected {} -/// Specify a fixed brightness parameter. -/// -/// Positive values (up to 1.0) produce brighter images; negative values -/// (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. -#[derive(Debug, Clone)] -pub struct Brightness(pub f32); -impl Deref for Brightness { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Brightness { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Brightness { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Brightness) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Brightness { - const ID: u32 = ControlId::Brightness as _; -} -impl Control for Brightness {} -/// Specify a fixed contrast parameter. -/// -/// Normal contrast is given by the value 1.0; larger values produce images -/// with more contrast. 
-#[derive(Debug, Clone)] -pub struct Contrast(pub f32); -impl Deref for Contrast { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Contrast { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Contrast { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Contrast) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Contrast { - const ID: u32 = ControlId::Contrast as _; -} -impl Control for Contrast {} -/// Report an estimate of the current illuminance level in lux. -/// -/// The Lux control can only be returned in metadata. -#[derive(Debug, Clone)] -pub struct Lux(pub f32); -impl Deref for Lux { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Lux { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Lux { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Lux) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Lux { - const ID: u32 = ControlId::Lux as _; -} -impl Control for Lux {} -/// Enable or disable the AWB. -/// -/// When AWB is enabled, the algorithm estimates the colour temperature of -/// the scene and computes colour gains and the colour correction matrix -/// automatically. The computed colour temperature, gains and correction -/// matrix are reported in metadata. The corresponding controls are ignored -/// if set in a request. -/// -/// When AWB is disabled, the colour temperature, gains and correction -/// matrix are not updated automatically and can be set manually in -/// requests. 
-/// -/// \sa ColourCorrectionMatrix -/// \sa ColourGains -/// \sa ColourTemperature -#[derive(Debug, Clone)] -pub struct AwbEnable(pub bool); -impl Deref for AwbEnable { - type Target = bool; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AwbEnable { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AwbEnable { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AwbEnable) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AwbEnable { - const ID: u32 = ControlId::AwbEnable as _; -} -impl Control for AwbEnable {} -/// Specify the range of illuminants to use for the AWB algorithm. -/// -/// The modes supported are platform specific, and not all modes may be -/// supported. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AwbMode { - /// Search over the whole colour temperature range. - AwbAuto = 0, - /// Incandescent AWB lamp mode. - AwbIncandescent = 1, - /// Tungsten AWB lamp mode. - AwbTungsten = 2, - /// Fluorescent AWB lamp mode. - AwbFluorescent = 3, - /// Indoor AWB lighting mode. - AwbIndoor = 4, - /// Daylight AWB lighting mode. - AwbDaylight = 5, - /// Cloudy AWB lighting mode. - AwbCloudy = 6, - /// Custom AWB mode. - AwbCustom = 7, -} -impl TryFrom for AwbMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AwbMode) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AwbMode { - const ID: u32 = ControlId::AwbMode as _; -} -impl Control for AwbMode {} -/// Report the lock status of a running AWB algorithm. 
-/// -/// If the AWB algorithm is locked the value shall be set to true, if it's -/// converging it shall be set to false. If the AWB algorithm is not -/// running the control shall not be present in the metadata control list. -/// -/// \sa AwbEnable -#[derive(Debug, Clone)] -pub struct AwbLocked(pub bool); -impl Deref for AwbLocked { - type Target = bool; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AwbLocked { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AwbLocked { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AwbLocked) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AwbLocked { - const ID: u32 = ControlId::AwbLocked as _; -} -impl Control for AwbLocked {} -/// Pair of gain values for the Red and Blue colour channels, in that -/// order. -/// -/// ColourGains can only be applied in a Request when the AWB is disabled. -/// If ColourGains is set in a request but ColourTemperature is not, the -/// implementation shall calculate and set the ColourTemperature based on -/// the ColourGains. -/// -/// \sa AwbEnable -/// \sa ColourTemperature -#[derive(Debug, Clone)] -pub struct ColourGains(pub [f32; 2]); -impl Deref for ColourGains { - type Target = [f32; 2]; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for ColourGains { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for ColourGains { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(<[f32; 2]>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: ColourGains) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for ColourGains { - const ID: u32 = ControlId::ColourGains as _; -} -impl Control for ColourGains {} -/// ColourTemperature of the frame, in kelvin. 
-/// -/// ColourTemperature can only be applied in a Request when the AWB is -/// disabled. -/// -/// If ColourTemperature is set in a request but ColourGains is not, the -/// implementation shall calculate and set the ColourGains based on the -/// given ColourTemperature. If ColourTemperature is set (either directly, -/// or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, -/// the ColourCorrectionMatrix is updated based on the ColourTemperature. -/// -/// The ColourTemperature used to process the frame is reported in metadata. -/// -/// \sa AwbEnable -/// \sa ColourCorrectionMatrix -/// \sa ColourGains -#[derive(Debug, Clone)] -pub struct ColourTemperature(pub i32); -impl Deref for ColourTemperature { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for ColourTemperature { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for ColourTemperature { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: ColourTemperature) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for ColourTemperature { - const ID: u32 = ControlId::ColourTemperature as _; -} -impl Control for ColourTemperature {} -/// Specify a fixed saturation parameter. -/// -/// Normal saturation is given by the value 1.0; larger values produce more -/// saturated colours; 0.0 produces a greyscale image. 
-#[derive(Debug, Clone)] -pub struct Saturation(pub f32); -impl Deref for Saturation { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Saturation { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Saturation { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Saturation) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Saturation { - const ID: u32 = ControlId::Saturation as _; -} -impl Control for Saturation {} -/// Reports the sensor black levels used for processing a frame. -/// -/// The values are in the order R, Gr, Gb, B. They are returned as numbers -/// out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The -/// SensorBlackLevels control can only be returned in metadata. -#[derive(Debug, Clone)] -pub struct SensorBlackLevels(pub [i32; 4]); -impl Deref for SensorBlackLevels { - type Target = [i32; 4]; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for SensorBlackLevels { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for SensorBlackLevels { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(<[i32; 4]>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: SensorBlackLevels) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for SensorBlackLevels { - const ID: u32 = ControlId::SensorBlackLevels as _; -} -impl Control for SensorBlackLevels {} -/// Intensity of the sharpening applied to the image. -/// -/// A value of 0.0 means no sharpening. The minimum value means -/// minimal sharpening, and shall be 0.0 unless the camera can't -/// disable sharpening completely. The default value shall give a -/// "reasonable" level of sharpening, suitable for most use cases. 
-/// The maximum value may apply extremely high levels of sharpening, -/// higher than anyone could reasonably want. Negative values are -/// not allowed. Note also that sharpening is not applied to raw -/// streams. -#[derive(Debug, Clone)] -pub struct Sharpness(pub f32); -impl Deref for Sharpness { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Sharpness { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Sharpness { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Sharpness) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Sharpness { - const ID: u32 = ControlId::Sharpness as _; -} -impl Control for Sharpness {} -/// Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. -/// -/// A larger FocusFoM value indicates a more in-focus frame. This singular -/// value may be based on a combination of statistics gathered from -/// multiple focus regions within an image. The number of focus regions and -/// method of combination is platform dependent. In this respect, it is not -/// necessarily aimed at providing a way to implement a focus algorithm by -/// the application, rather an indication of how in-focus a frame is. 
-#[derive(Debug, Clone)] -pub struct FocusFoM(pub i32); -impl Deref for FocusFoM { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for FocusFoM { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for FocusFoM { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: FocusFoM) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for FocusFoM { - const ID: u32 = ControlId::FocusFoM as _; -} -impl Control for FocusFoM {} -/// The 3x3 matrix that converts camera RGB to sRGB within the imaging -/// pipeline. -/// -/// This should describe the matrix that is used after pixels have been -/// white-balanced, but before any gamma transformation. The 3x3 matrix is -/// stored in conventional reading order in an array of 9 floating point -/// values. -/// -/// ColourCorrectionMatrix can only be applied in a Request when the AWB is -/// disabled. -/// -/// \sa AwbEnable -/// \sa ColourTemperature -#[derive(Debug, Clone)] -pub struct ColourCorrectionMatrix(pub [[f32; 3]; 3]); -impl Deref for ColourCorrectionMatrix { - type Target = [[f32; 3]; 3]; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for ColourCorrectionMatrix { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for ColourCorrectionMatrix { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(<[[f32; 3]; 3]>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: ColourCorrectionMatrix) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for ColourCorrectionMatrix { - const ID: u32 = ControlId::ColourCorrectionMatrix as _; -} -impl Control for ColourCorrectionMatrix {} -/// Sets the image portion that will be scaled to form the whole of -/// the final output image. 
-/// -/// The (x,y) location of this rectangle is relative to the -/// PixelArrayActiveAreas that is being used. The units remain native -/// sensor pixels, even if the sensor is being used in a binning or -/// skipping mode. -/// -/// This control is only present when the pipeline supports scaling. Its -/// maximum valid value is given by the properties::ScalerCropMaximum -/// property, and the two can be used to implement digital zoom. -#[derive(Debug, Clone)] -pub struct ScalerCrop(pub Rectangle); -impl Deref for ScalerCrop { - type Target = Rectangle; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for ScalerCrop { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for ScalerCrop { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: ScalerCrop) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for ScalerCrop { - const ID: u32 = ControlId::ScalerCrop as _; -} -impl Control for ScalerCrop {} -/// Digital gain value applied during the processing steps applied -/// to the image as captured from the sensor. -/// -/// The global digital gain factor is applied to all the colour channels -/// of the RAW image. Different pipeline models are free to -/// specify how the global gain factor applies to each separate -/// channel. -/// -/// If an imaging pipeline applies digital gain in distinct -/// processing steps, this value indicates their total sum. -/// Pipelines are free to decide how to adjust each processing -/// step to respect the received gain factor and shall report -/// their total value in the request metadata. 
-#[derive(Debug, Clone)] -pub struct DigitalGain(pub f32); -impl Deref for DigitalGain { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for DigitalGain { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for DigitalGain { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: DigitalGain) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for DigitalGain { - const ID: u32 = ControlId::DigitalGain as _; -} -impl Control for DigitalGain {} -/// The instantaneous frame duration from start of frame exposure to start -/// of next exposure, expressed in microseconds. -/// -/// This control is meant to be returned in metadata. -#[derive(Debug, Clone)] -pub struct FrameDuration(pub i64); -impl Deref for FrameDuration { - type Target = i64; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for FrameDuration { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for FrameDuration { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: FrameDuration) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for FrameDuration { - const ID: u32 = ControlId::FrameDuration as _; -} -impl Control for FrameDuration {} -/// The minimum and maximum (in that order) frame duration, expressed in -/// microseconds. -/// -/// When provided by applications, the control specifies the sensor frame -/// duration interval the pipeline has to use. This limits the largest -/// exposure time the sensor can use. For example, if a maximum frame -/// duration of 33ms is requested (corresponding to 30 frames per second), -/// the sensor will not be able to raise the exposure time above 33ms. 
-/// A fixed frame duration is achieved by setting the minimum and maximum -/// values to be the same. Setting both values to 0 reverts to using the -/// camera defaults. -/// -/// The maximum frame duration provides the absolute limit to the exposure -/// time computed by the AE algorithm and it overrides any exposure mode -/// setting specified with controls::AeExposureMode. Similarly, when a -/// manual exposure time is set through controls::ExposureTime, it also -/// gets clipped to the limits set by this control. When reported in -/// metadata, the control expresses the minimum and maximum frame durations -/// used after being clipped to the sensor provided frame duration limits. -/// -/// \sa AeExposureMode -/// \sa ExposureTime -/// -/// \todo Define how to calculate the capture frame rate by -/// defining controls to report additional delays introduced by -/// the capture pipeline or post-processing stages (ie JPEG -/// conversion, frame scaling). -/// -/// \todo Provide an explicit definition of default control values, for -/// this and all other controls. -#[derive(Debug, Clone)] -pub struct FrameDurationLimits(pub [i64; 2]); -impl Deref for FrameDurationLimits { - type Target = [i64; 2]; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for FrameDurationLimits { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for FrameDurationLimits { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(<[i64; 2]>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: FrameDurationLimits) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for FrameDurationLimits { - const ID: u32 = ControlId::FrameDurationLimits as _; -} -impl Control for FrameDurationLimits {} -/// Temperature measure from the camera sensor in Celsius. -/// -/// This value is typically obtained by a thermal sensor present on-die or -/// in the camera module. 
The range of reported temperatures is device -/// dependent. -/// -/// The SensorTemperature control will only be returned in metadata if a -/// thermal sensor is present. -#[derive(Debug, Clone)] -pub struct SensorTemperature(pub f32); -impl Deref for SensorTemperature { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for SensorTemperature { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for SensorTemperature { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: SensorTemperature) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for SensorTemperature { - const ID: u32 = ControlId::SensorTemperature as _; -} -impl Control for SensorTemperature {} -/// The time when the first row of the image sensor active array is exposed. -/// -/// The timestamp, expressed in nanoseconds, represents a monotonically -/// increasing counter since the system boot time, as defined by the -/// Linux-specific CLOCK_BOOTTIME clock id. -/// -/// The SensorTimestamp control can only be returned in metadata. -/// -/// \todo Define how the sensor timestamp has to be used in the reprocessing -/// use case. 
-#[derive(Debug, Clone)] -pub struct SensorTimestamp(pub i64); -impl Deref for SensorTimestamp { - type Target = i64; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for SensorTimestamp { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for SensorTimestamp { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: SensorTimestamp) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for SensorTimestamp { - const ID: u32 = ControlId::SensorTimestamp as _; -} -impl Control for SensorTimestamp {} -/// The mode of the AF (autofocus) algorithm. -/// -/// An implementation may choose not to implement all the modes. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfMode { - /// The AF algorithm is in manual mode. - /// - /// In this mode it will never perform any action nor move the lens of - /// its own accord, but an application can specify the desired lens - /// position using the LensPosition control. The AfState will always - /// report AfStateIdle. - /// - /// If the camera is started in AfModeManual, it will move the focus - /// lens to the position specified by the LensPosition control. - /// - /// This mode is the recommended default value for the AfMode control. - /// External cameras (as reported by the Location property set to - /// CameraLocationExternal) may use a different default value. - Manual = 0, - /// The AF algorithm is in auto mode. - /// - /// In this mode the algorithm will never move the lens or change state - /// unless the AfTrigger control is used. The AfTrigger control can be - /// used to initiate a focus scan, the results of which will be - /// reported by AfState. 
- /// - /// If the autofocus algorithm is moved from AfModeAuto to another mode - /// while a scan is in progress, the scan is cancelled immediately, - /// without waiting for the scan to finish. - /// - /// When first entering this mode the AfState will report AfStateIdle. - /// When a trigger control is sent, AfState will report AfStateScanning - /// for a period before spontaneously changing to AfStateFocused or - /// AfStateFailed, depending on the outcome of the scan. It will remain - /// in this state until another scan is initiated by the AfTrigger - /// control. If a scan is cancelled (without changing to another mode), - /// AfState will return to AfStateIdle. - Auto = 1, - /// The AF algorithm is in continuous mode. - /// - /// In this mode the lens can re-start a scan spontaneously at any - /// moment, without any user intervention. The AfState still reports - /// whether the algorithm is currently scanning or not, though the - /// application has no ability to initiate or cancel scans, nor to move - /// the lens for itself. - /// - /// However, applications can pause the AF algorithm from continuously - /// scanning by using the AfPause control. This allows video or still - /// images to be captured whilst guaranteeing that the focus is fixed. - /// - /// When set to AfModeContinuous, the system will immediately initiate a - /// scan so AfState will report AfStateScanning, and will settle on one - /// of AfStateFocused or AfStateFailed, depending on the scan result. - Continuous = 2, -} -impl TryFrom for AfMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfMode) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfMode { - const ID: u32 = ControlId::AfMode as _; -} -impl Control for AfMode {} -/// The range of focus distances that is scanned. -/// -/// An implementation may choose not to implement all the options here. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfRange { - /// A wide range of focus distances is scanned. - /// - /// Scanned distances cover all the way from infinity down to close - /// distances, though depending on the implementation, possibly not - /// including the very closest macro positions. - Normal = 0, - /// Only close distances are scanned. - Macro = 1, - /// The full range of focus distances is scanned. - /// - /// This range is similar to AfRangeNormal but includes the very - /// closest macro positions. - Full = 2, -} -impl TryFrom for AfRange { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfRange) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfRange { - const ID: u32 = ControlId::AfRange as _; -} -impl Control for AfRange {} -/// Determine whether the AF is to move the lens as quickly as possible or -/// more steadily. -/// -/// For example, during video recording it may be desirable not to move the -/// lens too abruptly, but when in a preview mode (waiting for a still -/// capture) it may be helpful to move the lens as quickly as is reasonably -/// possible. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfSpeed { - /// Move the lens at its usual speed. - Normal = 0, - /// Move the lens more quickly. 
- Fast = 1, -} -impl TryFrom for AfSpeed { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfSpeed) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfSpeed { - const ID: u32 = ControlId::AfSpeed as _; -} -impl Control for AfSpeed {} -/// The parts of the image used by the AF algorithm to measure focus. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfMetering { - /// Let the AF algorithm decide for itself where it will measure focus. - Auto = 0, - /// Use the rectangles defined by the AfWindows control to measure focus. - /// - /// If no windows are specified the behaviour is platform dependent. - Windows = 1, -} -impl TryFrom for AfMetering { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfMetering) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfMetering { - const ID: u32 = ControlId::AfMetering as _; -} -impl Control for AfMetering {} -/// The focus windows used by the AF algorithm when AfMetering is set to -/// AfMeteringWindows. -/// -/// The units used are pixels within the rectangle returned by the -/// ScalerCropMaximum property. -/// -/// In order to be activated, a rectangle must be programmed with non-zero -/// width and height. Internally, these rectangles are intersected with the -/// ScalerCropMaximum rectangle. If the window becomes empty after this -/// operation, then the window is ignored. If all the windows end up being -/// ignored, then the behaviour is platform dependent. 
-/// -/// On platforms that support the ScalerCrop control (for implementing -/// digital zoom, for example), no automatic recalculation or adjustment of -/// AF windows is performed internally if the ScalerCrop is changed. If any -/// window lies outside the output image after the scaler crop has been -/// applied, it is up to the application to recalculate them. -/// -/// The details of how the windows are used are platform dependent. We note -/// that when there is more than one AF window, a typical implementation -/// might find the optimal focus position for each one and finally select -/// the window where the focal distance for the objects shown in that part -/// of the image are closest to the camera. -#[derive(Debug, Clone)] -pub struct AfWindows(pub Vec); -impl Deref for AfWindows { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for AfWindows { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for AfWindows { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: AfWindows) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for AfWindows { - const ID: u32 = ControlId::AfWindows as _; -} -impl Control for AfWindows {} -/// Start an autofocus scan. -/// -/// This control starts an autofocus scan when AfMode is set to AfModeAuto, -/// and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It -/// can also be used to terminate a scan early. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfTrigger { - /// Start an AF scan. - /// - /// Setting the control to AfTriggerStart is ignored if a scan is in - /// progress. - Start = 0, - /// Cancel an AF scan. - /// - /// This does not cause the lens to move anywhere else. Ignored if no - /// scan is in progress. 
- Cancel = 1, -} -impl TryFrom for AfTrigger { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfTrigger) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfTrigger { - const ID: u32 = ControlId::AfTrigger as _; -} -impl Control for AfTrigger {} -/// Pause lens movements when in continuous autofocus mode. -/// -/// This control has no effect except when in continuous autofocus mode -/// (AfModeContinuous). It can be used to pause any lens movements while -/// (for example) images are captured. The algorithm remains inactive -/// until it is instructed to resume. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfPause { - /// Pause the continuous autofocus algorithm immediately. - /// - /// The autofocus algorithm is paused whether or not any kind of scan - /// is underway. AfPauseState will subsequently report - /// AfPauseStatePaused. AfState may report any of AfStateScanning, - /// AfStateFocused or AfStateFailed, depending on the algorithm's state - /// when it received this control. - Immediate = 0, - /// Pause the continuous autofocus algorithm at the end of the scan. - /// - /// This is similar to AfPauseImmediate, and if the AfState is - /// currently reporting AfStateFocused or AfStateFailed it will remain - /// in that state and AfPauseState will report AfPauseStatePaused. - /// - /// However, if the algorithm is scanning (AfStateScanning), - /// AfPauseState will report AfPauseStatePausing until the scan is - /// finished, at which point AfState will report one of AfStateFocused - /// or AfStateFailed, and AfPauseState will change to - /// AfPauseStatePaused. - Deferred = 1, - /// Resume continuous autofocus operation. 
- /// - /// The algorithm starts again from exactly where it left off, and - /// AfPauseState will report AfPauseStateRunning. - Resume = 2, -} -impl TryFrom for AfPause { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfPause) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfPause { - const ID: u32 = ControlId::AfPause as _; -} -impl Control for AfPause {} -/// Set and report the focus lens position. -/// -/// This control instructs the lens to move to a particular position and -/// also reports back the position of the lens for each frame. -/// -/// The LensPosition control is ignored unless the AfMode is set to -/// AfModeManual, though the value is reported back unconditionally in all -/// modes. -/// -/// This value, which is generally a non-integer, is the reciprocal of the -/// focal distance in metres, also known as dioptres. That is, to set a -/// focal distance D, the lens position LP is given by -/// -/// \f$LP = \frac{1\mathrm{m}}{D}\f$ -/// -/// For example: -/// -/// - 0 moves the lens to infinity. -/// - 0.5 moves the lens to focus on objects 2m away. -/// - 2 moves the lens to focus on objects 50cm away. -/// - And larger values will focus the lens closer. -/// -/// The default value of the control should indicate a good general -/// position for the lens, often corresponding to the hyperfocal distance -/// (the closest position for which objects at infinity are still -/// acceptably sharp). The minimum will often be zero (meaning infinity), -/// and the maximum value defines the closest focus position. -/// -/// \todo Define a property to report the Hyperfocal distance of calibrated -/// lenses. 
-#[derive(Debug, Clone)] -pub struct LensPosition(pub f32); -impl Deref for LensPosition { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for LensPosition { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for LensPosition { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: LensPosition) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for LensPosition { - const ID: u32 = ControlId::LensPosition as _; -} -impl Control for LensPosition {} -/// The current state of the AF algorithm. -/// -/// This control reports the current state of the AF algorithm in -/// conjunction with the reported AfMode value and (in continuous AF mode) -/// the AfPauseState value. The possible state changes are described below, -/// though we note the following state transitions that occur when the -/// AfMode is changed. -/// -/// If the AfMode is set to AfModeManual, then the AfState will always -/// report AfStateIdle (even if the lens is subsequently moved). Changing -/// to the AfModeManual state does not initiate any lens movement. -/// -/// If the AfMode is set to AfModeAuto then the AfState will report -/// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent -/// together then AfState will omit AfStateIdle and move straight to -/// AfStateScanning (and start a scan). -/// -/// If the AfMode is set to AfModeContinuous then the AfState will -/// initially report AfStateScanning. -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfState { - /// The AF algorithm is in manual mode (AfModeManual) or in auto mode - /// (AfModeAuto) and a scan has not yet been triggered, or an - /// in-progress scan was cancelled. 
- Idle = 0, - /// The AF algorithm is in auto mode (AfModeAuto), and a scan has been - /// started using the AfTrigger control. - /// - /// The scan can be cancelled by sending AfTriggerCancel at which point - /// the algorithm will either move back to AfStateIdle or, if the scan - /// actually completes before the cancel request is processed, to one - /// of AfStateFocused or AfStateFailed. - /// - /// Alternatively the AF algorithm could be in continuous mode - /// (AfModeContinuous) at which point it may enter this state - /// spontaneously whenever it determines that a rescan is needed. - Scanning = 1, - /// The AF algorithm is in auto (AfModeAuto) or continuous - /// (AfModeContinuous) mode and a scan has completed with the result - /// that the algorithm believes the image is now in focus. - Focused = 2, - /// The AF algorithm is in auto (AfModeAuto) or continuous - /// (AfModeContinuous) mode and a scan has completed with the result - /// that the algorithm did not find a good focus position. - Failed = 3, -} -impl TryFrom for AfState { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfState) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfState { - const ID: u32 = ControlId::AfState as _; -} -impl Control for AfState {} -/// Report whether the autofocus is currently running, paused or pausing. -/// -/// This control is only applicable in continuous (AfModeContinuous) mode, -/// and reports whether the algorithm is currently running, paused or -/// pausing (that is, will pause as soon as any in-progress scan -/// completes). -/// -/// Any change to AfMode will cause AfPauseStateRunning to be reported. 
-#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AfPauseState { - /// Continuous AF is running and the algorithm may restart a scan - /// spontaneously. - Running = 0, - /// Continuous AF has been sent an AfPauseDeferred control, and will - /// pause as soon as any in-progress scan completes. - /// - /// When the scan completes, the AfPauseState control will report - /// AfPauseStatePaused. No new scans will be start spontaneously until - /// the AfPauseResume control is sent. - Pausing = 1, - /// Continuous AF is paused. - /// - /// No further state changes or lens movements will occur until the - /// AfPauseResume control is sent. - Paused = 2, -} -impl TryFrom for AfPauseState { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: AfPauseState) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for AfPauseState { - const ID: u32 = ControlId::AfPauseState as _; -} -impl Control for AfPauseState {} -/// Set the mode to be used for High Dynamic Range (HDR) imaging. -/// -/// HDR techniques typically include multiple exposure, image fusion and -/// tone mapping techniques to improve the dynamic range of the resulting -/// images. -/// -/// When using an HDR mode, images are captured with different sets of AGC -/// settings called HDR channels. Channels indicate in particular the type -/// of exposure (short, medium or long) used to capture the raw image, -/// before fusion. Each HDR image is tagged with the corresponding channel -/// using the HdrChannel control. -/// -/// \sa HdrChannel -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum HdrMode { - /// HDR is disabled. - /// - /// Metadata for this frame will not include the HdrChannel control. 
- Off = 0, - /// Multiple exposures will be generated in an alternating fashion. - /// - /// The multiple exposures will not be merged together and will be - /// returned to the application as they are. Each image will be tagged - /// with the correct HDR channel, indicating what kind of exposure it - /// is. The tag should be the same as in the HdrModeMultiExposure case. - /// - /// The expectation is that an application using this mode would merge - /// the frames to create HDR images for itself if it requires them. - MultiExposureUnmerged = 1, - /// Multiple exposures will be generated and merged to create HDR - /// images. - /// - /// Each image will be tagged with the HDR channel (long, medium or - /// short) that arrived and which caused this image to be output. - /// - /// Systems that use two channels for HDR will return images tagged - /// alternately as the short and long channel. Systems that use three - /// channels for HDR will cycle through the short, medium and long - /// channel before repeating. - MultiExposure = 2, - /// Multiple frames all at a single exposure will be used to create HDR - /// images. - /// - /// These images should be reported as all corresponding to the HDR - /// short channel. - SingleExposure = 3, - /// Multiple frames will be combined to produce "night mode" images. - /// - /// It is up to the implementation exactly which HDR channels it uses, - /// and the images will all be tagged accordingly with the correct HDR - /// channel information. - Night = 4, -} -impl TryFrom for HdrMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: HdrMode) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for HdrMode { - const ID: u32 = ControlId::HdrMode as _; -} -impl Control for HdrMode {} -/// The HDR channel used to capture the frame. -/// -/// This value is reported back to the application so that it can discover -/// whether this capture corresponds to the short or long exposure image -/// (or any other image used by the HDR procedure). An application can -/// monitor the HDR channel to discover when the differently exposed images -/// have arrived. -/// -/// This metadata is only available when an HDR mode has been enabled. -/// -/// \sa HdrMode -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum HdrChannel { - /// This image does not correspond to any of the captures used to create - /// an HDR image. - None = 0, - /// This is a short exposure image. - Short = 1, - /// This is a medium exposure image. - Medium = 2, - /// This is a long exposure image. - Long = 3, -} -impl TryFrom for HdrChannel { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: HdrChannel) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for HdrChannel { - const ID: u32 = ControlId::HdrChannel as _; -} -impl Control for HdrChannel {} -/// Specify a fixed gamma value. -/// -/// The default gamma value must be 2.2 which closely mimics sRGB gamma. -/// Note that this is camera gamma, so it is applied as 1.0/gamma. 
-#[derive(Debug, Clone)] -pub struct Gamma(pub f32); -impl Deref for Gamma { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Gamma { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Gamma { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Gamma) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Gamma { - const ID: u32 = ControlId::Gamma as _; -} -impl Control for Gamma {} -/// Enable or disable the debug metadata. -#[derive(Debug, Clone)] -pub struct DebugMetadataEnable(pub bool); -impl Deref for DebugMetadataEnable { - type Target = bool; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for DebugMetadataEnable { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for DebugMetadataEnable { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: DebugMetadataEnable) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for DebugMetadataEnable { - const ID: u32 = ControlId::DebugMetadataEnable as _; -} -impl Control for DebugMetadataEnable {} -/// Control for AE metering trigger. Currently identical to -/// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. -/// -/// Whether the camera device will trigger a precapture metering sequence -/// when it processes this request. -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AePrecaptureTrigger { - /// The trigger is idle. - Idle = 0, - /// The pre-capture AE metering is started by the camera. - Start = 1, - /// The camera will cancel any active or completed metering sequence. - /// The AE algorithm is reset to its initial state. 
- Cancel = 2, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for AePrecaptureTrigger { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: AePrecaptureTrigger) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for AePrecaptureTrigger { - const ID: u32 = ControlId::AePrecaptureTrigger as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for AePrecaptureTrigger {} -/// Control to select the noise reduction algorithm mode. Currently -/// identical to ANDROID_NOISE_REDUCTION_MODE. -/// -/// Mode of operation for the noise reduction algorithm. -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum NoiseReductionMode { - /// No noise reduction is applied - Off = 0, - /// Noise reduction is applied without reducing the frame rate. - Fast = 1, - /// High quality noise reduction at the expense of frame rate. - HighQuality = 2, - /// Minimal noise reduction is applied without reducing the frame rate. - Minimal = 3, - /// Noise reduction is applied at different levels to different streams. - ZSL = 4, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for NoiseReductionMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: NoiseReductionMode) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for NoiseReductionMode { - const ID: u32 = ControlId::NoiseReductionMode as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for NoiseReductionMode {} -/// Control to select the color correction aberration mode. Currently -/// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. -/// -/// Mode of operation for the chromatic aberration correction algorithm. -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum ColorCorrectionAberrationMode { - /// No aberration correction is applied. - ColorCorrectionAberrationOff = 0, - /// Aberration correction will not slow down the frame rate. - ColorCorrectionAberrationFast = 1, - /// High quality aberration correction which might reduce the frame - /// rate. - ColorCorrectionAberrationHighQuality = 2, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for ColorCorrectionAberrationMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: ColorCorrectionAberrationMode) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for ColorCorrectionAberrationMode { - const ID: u32 = ControlId::ColorCorrectionAberrationMode as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for ColorCorrectionAberrationMode {} -/// Control to report the current AE algorithm state. Currently identical to -/// ANDROID_CONTROL_AE_STATE. -/// -/// Current state of the AE algorithm. 
-#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AeState { - /// The AE algorithm is inactive. - Inactive = 0, - /// The AE algorithm has not converged yet. - Searching = 1, - /// The AE algorithm has converged. - Converged = 2, - /// The AE algorithm is locked. - Locked = 3, - /// The AE algorithm would need a flash for good results - FlashRequired = 4, - /// The AE algorithm has started a pre-capture metering session. - /// \sa AePrecaptureTrigger - Precapture = 5, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for AeState { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: AeState) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for AeState { - const ID: u32 = ControlId::AeState as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for AeState {} -/// Control to report the current AWB algorithm state. Currently identical -/// to ANDROID_CONTROL_AWB_STATE. -/// -/// Current state of the AWB algorithm. -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum AwbState { - /// The AWB algorithm is inactive. - Inactive = 0, - /// The AWB algorithm has not converged yet. - Searching = 1, - /// The AWB algorithm has converged. - AwbConverged = 2, - /// The AWB algorithm is locked. - AwbLocked = 3, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for AwbState { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: AwbState) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for AwbState { - const ID: u32 = ControlId::AwbState as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for AwbState {} -/// Control to report the time between the start of exposure of the first -/// row and the start of exposure of the last row. Currently identical to -/// ANDROID_SENSOR_ROLLING_SHUTTER_SKEW -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone)] -pub struct SensorRollingShutterSkew(pub i64); -#[cfg(feature = "vendor_draft")] -impl Deref for SensorRollingShutterSkew { - type Target = i64; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl DerefMut for SensorRollingShutterSkew { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for SensorRollingShutterSkew { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: SensorRollingShutterSkew) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for SensorRollingShutterSkew { - const ID: u32 = ControlId::SensorRollingShutterSkew as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for SensorRollingShutterSkew {} -/// Control to report if the lens shading map is available. Currently -/// identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum LensShadingMapMode { - /// No lens shading map mode is available. - Off = 0, - /// The lens shading map mode is available. 
- On = 1, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for LensShadingMapMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: LensShadingMapMode) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for LensShadingMapMode { - const ID: u32 = ControlId::LensShadingMapMode as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for LensShadingMapMode {} -/// Specifies the number of pipeline stages the frame went through from when -/// it was exposed to when the final completed result was available to the -/// framework. Always less than or equal to PipelineMaxDepth. Currently -/// identical to ANDROID_REQUEST_PIPELINE_DEPTH. -/// -/// The typical value for this control is 3 as a frame is first exposed, -/// captured and then processed in a single pass through the ISP. Any -/// additional processing step performed after the ISP pass (in example face -/// detection, additional format conversions etc) count as an additional -/// pipeline stage. 
-#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone)] -pub struct PipelineDepth(pub i32); -#[cfg(feature = "vendor_draft")] -impl Deref for PipelineDepth { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl DerefMut for PipelineDepth { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for PipelineDepth { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: PipelineDepth) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for PipelineDepth { - const ID: u32 = ControlId::PipelineDepth as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for PipelineDepth {} -/// The maximum number of frames that can occur after a request (different -/// than the previous) has been submitted, and before the result's state -/// becomes synchronized. A value of -1 indicates unknown latency, and 0 -/// indicates per-frame control. Currently identical to -/// ANDROID_SYNC_MAX_LATENCY. 
-#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone)] -pub struct MaxLatency(pub i32); -#[cfg(feature = "vendor_draft")] -impl Deref for MaxLatency { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl DerefMut for MaxLatency { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for MaxLatency { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: MaxLatency) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for MaxLatency { - const ID: u32 = ControlId::MaxLatency as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for MaxLatency {} -/// Control to select the test pattern mode. Currently identical to -/// ANDROID_SENSOR_TEST_PATTERN_MODE. -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum TestPatternMode { - /// No test pattern mode is used. The camera device returns frames from - /// the image sensor. - Off = 0, - /// Each pixel in [R, G_even, G_odd, B] is replaced by its respective - /// color channel provided in test pattern data. - /// \todo Add control for test pattern data. - SolidColor = 1, - /// All pixel data is replaced with an 8-bar color pattern. The vertical - /// bars (left-to-right) are as follows; white, yellow, cyan, green, - /// magenta, red, blue and black. Each bar should take up 1/8 of the - /// sensor pixel array width. When this is not possible, the bar size - /// should be rounded down to the nearest integer and the pattern can - /// repeat on the right side. Each bar's height must always take up the - /// full sensor pixel array height. 
- ColorBars = 2, - /// The test pattern is similar to TestPatternModeColorBars, - /// except that each bar should start at its specified color at the top - /// and fade to gray at the bottom. Furthermore each bar is further - /// subdevided into a left and right half. The left half should have a - /// smooth gradient, and the right half should have a quantized - /// gradient. In particular, the right half's should consist of blocks - /// of the same color for 1/16th active sensor pixel array width. The - /// least significant bits in the quantized gradient should be copied - /// from the most significant bits of the smooth gradient. The height of - /// each bar should always be a multiple of 128. When this is not the - /// case, the pattern should repeat at the bottom of the image. - ColorBarsFadeToGray = 3, - /// All pixel data is replaced by a pseudo-random sequence generated - /// from a PN9 512-bit sequence (typically implemented in hardware with - /// a linear feedback shift register). The generator should be reset at - /// the beginning of each frame, and thus each subsequent raw frame with - /// this test pattern should be exactly the same as the last. - Pn9 = 4, - /// The first custom test pattern. All custom patterns that are - /// available only on this camera device are at least this numeric - /// value. All of the custom test patterns will be static (that is the - /// raw image must not vary from frame to frame). - Custom1 = 256, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for TestPatternMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: TestPatternMode) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for TestPatternMode { - const ID: u32 = ControlId::TestPatternMode as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for TestPatternMode {} -/// Control to select the face detection mode used by the pipeline. -/// -/// Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. -/// -/// \sa FaceDetectFaceRectangles -/// \sa FaceDetectFaceScores -/// \sa FaceDetectFaceLandmarks -/// \sa FaceDetectFaceIds -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum FaceDetectMode { - /// Pipeline doesn't perform face detection and doesn't report any - /// control related to face detection. - Off = 0, - /// Pipeline performs face detection and reports the - /// FaceDetectFaceRectangles and FaceDetectFaceScores controls for each - /// detected face. FaceDetectFaceLandmarks and FaceDetectFaceIds are - /// optional. - Simple = 1, - /// Pipeline performs face detection and reports all the controls - /// related to face detection including FaceDetectFaceRectangles, - /// FaceDetectFaceScores, FaceDetectFaceLandmarks, and - /// FaceDeteceFaceIds for each detected face. - Full = 2, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for FaceDetectMode { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) 
- .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: FaceDetectMode) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for FaceDetectMode { - const ID: u32 = ControlId::FaceDetectMode as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for FaceDetectMode {} -/// Boundary rectangles of the detected faces. The number of values is -/// the number of detected faces. -/// -/// The FaceDetectFaceRectangles control can only be returned in metadata. -/// -/// Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. -#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone)] -pub struct FaceDetectFaceRectangles(pub Vec); -#[cfg(feature = "vendor_draft")] -impl Deref for FaceDetectFaceRectangles { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl DerefMut for FaceDetectFaceRectangles { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for FaceDetectFaceRectangles { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: FaceDetectFaceRectangles) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for FaceDetectFaceRectangles { - const ID: u32 = ControlId::FaceDetectFaceRectangles as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for FaceDetectFaceRectangles {} -/// Confidence score of each of the detected faces. The range of score is -/// [0, 100]. The number of values should be the number of faces reported -/// in FaceDetectFaceRectangles. -/// -/// The FaceDetectFaceScores control can only be returned in metadata. -/// -/// Currently identical to ANDROID_STATISTICS_FACE_SCORES. 
-#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone)] -pub struct FaceDetectFaceScores(pub Vec); -#[cfg(feature = "vendor_draft")] -impl Deref for FaceDetectFaceScores { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl DerefMut for FaceDetectFaceScores { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for FaceDetectFaceScores { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: FaceDetectFaceScores) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for FaceDetectFaceScores { - const ID: u32 = ControlId::FaceDetectFaceScores as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for FaceDetectFaceScores {} -/// Array of human face landmark coordinates in format [..., left_eye_i, -/// right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. The -/// number of values should be 3 * the number of faces reported in -/// FaceDetectFaceRectangles. -/// -/// The FaceDetectFaceLandmarks control can only be returned in metadata. -/// -/// Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. 
-#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone)] -pub struct FaceDetectFaceLandmarks(pub Vec); -#[cfg(feature = "vendor_draft")] -impl Deref for FaceDetectFaceLandmarks { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl DerefMut for FaceDetectFaceLandmarks { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for FaceDetectFaceLandmarks { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: FaceDetectFaceLandmarks) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for FaceDetectFaceLandmarks { - const ID: u32 = ControlId::FaceDetectFaceLandmarks as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for FaceDetectFaceLandmarks {} -/// Each detected face is given a unique ID that is valid for as long as the -/// face is visible to the camera device. A face that leaves the field of -/// view and later returns may be assigned a new ID. The number of values -/// should be the number of faces reported in FaceDetectFaceRectangles. -/// -/// The FaceDetectFaceIds control can only be returned in metadata. -/// -/// Currently identical to ANDROID_STATISTICS_FACE_IDS. 
-#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone)] -pub struct FaceDetectFaceIds(pub Vec); -#[cfg(feature = "vendor_draft")] -impl Deref for FaceDetectFaceIds { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl DerefMut for FaceDetectFaceIds { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for FaceDetectFaceIds { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: FaceDetectFaceIds) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for FaceDetectFaceIds { - const ID: u32 = ControlId::FaceDetectFaceIds as _; -} -#[cfg(feature = "vendor_draft")] -impl Control for FaceDetectFaceIds {} -/// Toggles the Raspberry Pi IPA to output the hardware generated statistics. -/// -/// When this control is set to true, the IPA outputs a binary dump of the -/// hardware generated statistics through the Request metadata in the -/// Bcm2835StatsOutput control. 
-/// -/// \sa Bcm2835StatsOutput -#[cfg(feature = "vendor_rpi")] -#[derive(Debug, Clone)] -pub struct StatsOutputEnable(pub bool); -#[cfg(feature = "vendor_rpi")] -impl Deref for StatsOutputEnable { - type Target = bool; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_rpi")] -impl DerefMut for StatsOutputEnable { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_rpi")] -impl TryFrom for StatsOutputEnable { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -#[cfg(feature = "vendor_rpi")] -impl From for ControlValue { - fn from(val: StatsOutputEnable) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_rpi")] -impl ControlEntry for StatsOutputEnable { - const ID: u32 = ControlId::StatsOutputEnable as _; -} -#[cfg(feature = "vendor_rpi")] -impl Control for StatsOutputEnable {} -/// Span of the BCM2835 ISP generated statistics for the current frame. -/// -/// This is sent in the Request metadata if the StatsOutputEnable is set to -/// true. The statistics struct definition can be found in -/// include/linux/bcm2835-isp.h. 
-/// -/// \sa StatsOutputEnable -#[cfg(feature = "vendor_rpi")] -#[derive(Debug, Clone)] -pub struct Bcm2835StatsOutput(pub Vec); -#[cfg(feature = "vendor_rpi")] -impl Deref for Bcm2835StatsOutput { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_rpi")] -impl DerefMut for Bcm2835StatsOutput { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_rpi")] -impl TryFrom for Bcm2835StatsOutput { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -#[cfg(feature = "vendor_rpi")] -impl From for ControlValue { - fn from(val: Bcm2835StatsOutput) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_rpi")] -impl ControlEntry for Bcm2835StatsOutput { - const ID: u32 = ControlId::Bcm2835StatsOutput as _; -} -#[cfg(feature = "vendor_rpi")] -impl Control for Bcm2835StatsOutput {} -/// An array of rectangles, where each singular value has identical -/// functionality to the ScalerCrop control. This control allows the -/// Raspberry Pi pipeline handler to control individual scaler crops per -/// output stream. -/// -/// The order of rectangles passed into the control must match the order of -/// streams configured by the application. The pipeline handler will only -/// configure crop retangles up-to the number of output streams configured. -/// All subsequent rectangles passed into this control are ignored by the -/// pipeline handler. -/// -/// If both rpi::ScalerCrops and ScalerCrop controls are present in a -/// ControlList, the latter is discarded, and crops are obtained from this -/// control. -/// -/// Note that using different crop rectangles for each output stream with -/// this control is only applicable on the Pi5/PiSP platform. This control -/// should also be considered temporary/draft and will be replaced with -/// official libcamera API support for per-stream controls in the future. 
-/// -/// \sa ScalerCrop -#[cfg(feature = "vendor_rpi")] -#[derive(Debug, Clone)] -pub struct ScalerCrops(pub Vec); -#[cfg(feature = "vendor_rpi")] -impl Deref for ScalerCrops { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -#[cfg(feature = "vendor_rpi")] -impl DerefMut for ScalerCrops { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -#[cfg(feature = "vendor_rpi")] -impl TryFrom for ScalerCrops { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -#[cfg(feature = "vendor_rpi")] -impl From for ControlValue { - fn from(val: ScalerCrops) -> Self { - ControlValue::from(val.0) - } -} -#[cfg(feature = "vendor_rpi")] -impl ControlEntry for ScalerCrops { - const ID: u32 = ControlId::ScalerCrops as _; -} -#[cfg(feature = "vendor_rpi")] -impl Control for ScalerCrops {} -pub fn make_dyn( - id: ControlId, - val: ControlValue, -) -> Result, ControlValueError> { - match id { - ControlId::AeEnable => Ok(Box::new(AeEnable::try_from(val)?)), - ControlId::AeLocked => Ok(Box::new(AeLocked::try_from(val)?)), - ControlId::AeMeteringMode => Ok(Box::new(AeMeteringMode::try_from(val)?)), - ControlId::AeConstraintMode => Ok(Box::new(AeConstraintMode::try_from(val)?)), - ControlId::AeExposureMode => Ok(Box::new(AeExposureMode::try_from(val)?)), - ControlId::ExposureValue => Ok(Box::new(ExposureValue::try_from(val)?)), - ControlId::ExposureTime => Ok(Box::new(ExposureTime::try_from(val)?)), - ControlId::AnalogueGain => Ok(Box::new(AnalogueGain::try_from(val)?)), - ControlId::AeFlickerMode => Ok(Box::new(AeFlickerMode::try_from(val)?)), - ControlId::AeFlickerPeriod => Ok(Box::new(AeFlickerPeriod::try_from(val)?)), - ControlId::AeFlickerDetected => Ok(Box::new(AeFlickerDetected::try_from(val)?)), - ControlId::Brightness => Ok(Box::new(Brightness::try_from(val)?)), - ControlId::Contrast => Ok(Box::new(Contrast::try_from(val)?)), - ControlId::Lux => 
Ok(Box::new(Lux::try_from(val)?)), - ControlId::AwbEnable => Ok(Box::new(AwbEnable::try_from(val)?)), - ControlId::AwbMode => Ok(Box::new(AwbMode::try_from(val)?)), - ControlId::AwbLocked => Ok(Box::new(AwbLocked::try_from(val)?)), - ControlId::ColourGains => Ok(Box::new(ColourGains::try_from(val)?)), - ControlId::ColourTemperature => Ok(Box::new(ColourTemperature::try_from(val)?)), - ControlId::Saturation => Ok(Box::new(Saturation::try_from(val)?)), - ControlId::SensorBlackLevels => Ok(Box::new(SensorBlackLevels::try_from(val)?)), - ControlId::Sharpness => Ok(Box::new(Sharpness::try_from(val)?)), - ControlId::FocusFoM => Ok(Box::new(FocusFoM::try_from(val)?)), - ControlId::ColourCorrectionMatrix => { - Ok(Box::new(ColourCorrectionMatrix::try_from(val)?)) - } - ControlId::ScalerCrop => Ok(Box::new(ScalerCrop::try_from(val)?)), - ControlId::DigitalGain => Ok(Box::new(DigitalGain::try_from(val)?)), - ControlId::FrameDuration => Ok(Box::new(FrameDuration::try_from(val)?)), - ControlId::FrameDurationLimits => { - Ok(Box::new(FrameDurationLimits::try_from(val)?)) - } - ControlId::SensorTemperature => Ok(Box::new(SensorTemperature::try_from(val)?)), - ControlId::SensorTimestamp => Ok(Box::new(SensorTimestamp::try_from(val)?)), - ControlId::AfMode => Ok(Box::new(AfMode::try_from(val)?)), - ControlId::AfRange => Ok(Box::new(AfRange::try_from(val)?)), - ControlId::AfSpeed => Ok(Box::new(AfSpeed::try_from(val)?)), - ControlId::AfMetering => Ok(Box::new(AfMetering::try_from(val)?)), - ControlId::AfWindows => Ok(Box::new(AfWindows::try_from(val)?)), - ControlId::AfTrigger => Ok(Box::new(AfTrigger::try_from(val)?)), - ControlId::AfPause => Ok(Box::new(AfPause::try_from(val)?)), - ControlId::LensPosition => Ok(Box::new(LensPosition::try_from(val)?)), - ControlId::AfState => Ok(Box::new(AfState::try_from(val)?)), - ControlId::AfPauseState => Ok(Box::new(AfPauseState::try_from(val)?)), - ControlId::HdrMode => Ok(Box::new(HdrMode::try_from(val)?)), - ControlId::HdrChannel => 
Ok(Box::new(HdrChannel::try_from(val)?)), - ControlId::Gamma => Ok(Box::new(Gamma::try_from(val)?)), - ControlId::DebugMetadataEnable => { - Ok(Box::new(DebugMetadataEnable::try_from(val)?)) - } - #[cfg(feature = "vendor_draft")] - ControlId::AePrecaptureTrigger => { - Ok(Box::new(AePrecaptureTrigger::try_from(val)?)) - } - #[cfg(feature = "vendor_draft")] - ControlId::NoiseReductionMode => Ok(Box::new(NoiseReductionMode::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::ColorCorrectionAberrationMode => { - Ok(Box::new(ColorCorrectionAberrationMode::try_from(val)?)) - } - #[cfg(feature = "vendor_draft")] - ControlId::AeState => Ok(Box::new(AeState::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::AwbState => Ok(Box::new(AwbState::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::SensorRollingShutterSkew => { - Ok(Box::new(SensorRollingShutterSkew::try_from(val)?)) - } - #[cfg(feature = "vendor_draft")] - ControlId::LensShadingMapMode => Ok(Box::new(LensShadingMapMode::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::PipelineDepth => Ok(Box::new(PipelineDepth::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::MaxLatency => Ok(Box::new(MaxLatency::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::TestPatternMode => Ok(Box::new(TestPatternMode::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::FaceDetectMode => Ok(Box::new(FaceDetectMode::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - ControlId::FaceDetectFaceRectangles => { - Ok(Box::new(FaceDetectFaceRectangles::try_from(val)?)) - } - #[cfg(feature = "vendor_draft")] - ControlId::FaceDetectFaceScores => { - Ok(Box::new(FaceDetectFaceScores::try_from(val)?)) - } - #[cfg(feature = "vendor_draft")] - ControlId::FaceDetectFaceLandmarks => { - Ok(Box::new(FaceDetectFaceLandmarks::try_from(val)?)) - } - #[cfg(feature = "vendor_draft")] - ControlId::FaceDetectFaceIds => 
Ok(Box::new(FaceDetectFaceIds::try_from(val)?)), - #[cfg(feature = "vendor_rpi")] - ControlId::StatsOutputEnable => Ok(Box::new(StatsOutputEnable::try_from(val)?)), - #[cfg(feature = "vendor_rpi")] - ControlId::Bcm2835StatsOutput => Ok(Box::new(Bcm2835StatsOutput::try_from(val)?)), - #[cfg(feature = "vendor_rpi")] - ControlId::ScalerCrops => Ok(Box::new(ScalerCrops::try_from(val)?)), - } -} diff --git a/libcamera/versioned_files/0.4.0/properties.rs b/libcamera/versioned_files/0.4.0/properties.rs deleted file mode 100644 index 7548b75..0000000 --- a/libcamera/versioned_files/0.4.0/properties.rs +++ /dev/null @@ -1,1777 +0,0 @@ -use std::{ffi::CStr, ops::{Deref, DerefMut}}; -use num_enum::{IntoPrimitive, TryFromPrimitive}; -#[allow(unused_imports)] -use crate::control::{Control, Property, ControlEntry, DynControlEntry}; -use crate::control_value::{ControlValue, ControlValueError}; -#[allow(unused_imports)] -use crate::geometry::{Rectangle, Size, Point}; -#[allow(unused_imports)] -use libcamera_sys::*; -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(u32)] -pub enum PropertyId { - /// Camera mounting location - Location = LOCATION, - /// The camera physical mounting rotation. It is expressed as the angular - /// difference in degrees between two reference systems, one relative to the - /// camera module, and one defined on the external world scene to be - /// captured when projected on the image sensor pixel array. - /// - /// A camera sensor has a 2-dimensional reference system 'Rc' defined by - /// its pixel array read-out order. The origin is set to the first pixel - /// being read out, the X-axis points along the column read-out direction - /// towards the last columns, and the Y-axis along the row read-out - /// direction towards the last row. 
- /// - /// A typical example for a sensor with a 2592x1944 pixel array matrix - /// observed from the front is - /// - /// ```text - /// 2591 X-axis 0 - /// <------------------------+ 0 - /// .......... ... ..........! - /// .......... ... ..........! Y-axis - /// ... ! - /// .......... ... ..........! - /// .......... ... ..........! 1943 - /// V - /// ``` - /// - /// - /// The external world scene reference system 'Rs' is a 2-dimensional - /// reference system on the focal plane of the camera module. The origin is - /// placed on the top-left corner of the visible scene, the X-axis points - /// towards the right, and the Y-axis points towards the bottom of the - /// scene. The top, bottom, left and right directions are intentionally not - /// defined and depend on the environment in which the camera is used. - /// - /// A typical example of a (very common) picture of a shark swimming from - /// left to right, as seen from the camera, is - /// - /// ```text - /// 0 X-axis - /// 0 +-------------------------------------> - /// ! - /// ! - /// ! - /// ! |\____)\___ - /// ! ) _____ __`< - /// ! |/ )/ - /// ! - /// ! - /// ! - /// V - /// Y-axis - /// ``` - /// - /// With the reference system 'Rs' placed on the camera focal plane. - /// - /// ```text - /// ¸.·˙! - /// ¸.·˙ ! - /// _ ¸.·˙ ! - /// +-/ \-+¸.·˙ ! - /// | (o) | ! Camera focal plane - /// +-----+˙·.¸ ! - /// ˙·.¸ ! - /// ˙·.¸ ! - /// ˙·.¸! - /// ``` - /// - /// When projected on the sensor's pixel array, the image and the associated - /// reference system 'Rs' are typically (but not always) inverted, due to - /// the camera module's lens optical inversion effect. - /// - /// Assuming the above represented scene of the swimming shark, the lens - /// inversion projects the scene and its reference system onto the sensor - /// pixel array, seen from the front of the camera sensor, as follow - /// - /// ```text - /// Y-axis - /// ^ - /// ! - /// ! - /// ! - /// ! |\_____)\__ - /// ! ) ____ ___.< - /// ! 
|/ )/ - /// ! - /// ! - /// ! - /// 0 +-------------------------------------> - /// 0 X-axis - /// ``` - /// - /// Note the shark being upside-down. - /// - /// The resulting projected reference system is named 'Rp'. - /// - /// The camera rotation property is then defined as the angular difference - /// in the counter-clockwise direction between the camera reference system - /// 'Rc' and the projected scene reference system 'Rp'. It is expressed in - /// degrees as a number in the range [0, 360[. - /// - /// Examples - /// - /// 0 degrees camera rotation - /// - /// - /// ```text - /// Y-Rp - /// ^ - /// Y-Rc ! - /// ^ ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! 0 +-------------------------------------> - /// ! 0 X-Rp - /// 0 +-------------------------------------> - /// 0 X-Rc - /// ``` - /// - /// - /// ```text - /// X-Rc 0 - /// <------------------------------------+ 0 - /// X-Rp 0 ! - /// <------------------------------------+ 0 ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! V - /// ! Y-Rc - /// V - /// Y-Rp - /// ``` - /// - /// 90 degrees camera rotation - /// - /// ```text - /// 0 Y-Rc - /// 0 +--------------------> - /// ! Y-Rp - /// ! ^ - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! 0 +-------------------------------------> - /// ! 0 X-Rp - /// ! - /// ! - /// ! - /// ! - /// V - /// X-Rc - /// ``` - /// - /// 180 degrees camera rotation - /// - /// ```text - /// 0 - /// <------------------------------------+ 0 - /// X-Rc ! - /// Y-Rp ! - /// ^ ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! V - /// ! Y-Rc - /// 0 +-------------------------------------> - /// 0 X-Rp - /// ``` - /// - /// 270 degrees camera rotation - /// - /// ```text - /// 0 Y-Rc - /// 0 +--------------------> - /// ! 0 - /// ! <-----------------------------------+ 0 - /// ! X-Rp ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! ! - /// ! 
! - /// ! ! - /// ! V - /// ! Y-Rp - /// ! - /// ! - /// ! - /// ! - /// V - /// X-Rc - /// ``` - /// - /// - /// Example one - Webcam - /// - /// A camera module installed on the user facing part of a laptop screen - /// casing used for video calls. The captured images are meant to be - /// displayed in landscape mode (width > height) on the laptop screen. - /// - /// The camera is typically mounted upside-down to compensate the lens - /// optical inversion effect. - /// - /// ```text - /// Y-Rp - /// Y-Rc ^ - /// ^ ! - /// ! ! - /// ! ! |\_____)\__ - /// ! ! ) ____ ___.< - /// ! ! |/ )/ - /// ! ! - /// ! ! - /// ! ! - /// ! 0 +-------------------------------------> - /// ! 0 X-Rp - /// 0 +-------------------------------------> - /// 0 X-Rc - /// ``` - /// - /// The two reference systems are aligned, the resulting camera rotation is - /// 0 degrees, no rotation correction needs to be applied to the resulting - /// image once captured to memory buffers to correctly display it to users. - /// - /// ```text - /// +--------------------------------------+ - /// ! ! - /// ! ! - /// ! ! - /// ! |\____)\___ ! - /// ! ) _____ __`< ! - /// ! |/ )/ ! - /// ! ! - /// ! ! - /// ! ! - /// +--------------------------------------+ - /// ``` - /// - /// If the camera sensor is not mounted upside-down to compensate for the - /// lens optical inversion, the two reference systems will not be aligned, - /// with 'Rp' being rotated 180 degrees relatively to 'Rc'. - /// - /// - /// ```text - /// X-Rc 0 - /// <------------------------------------+ 0 - /// ! - /// Y-Rp ! - /// ^ ! - /// ! ! - /// ! |\_____)\__ ! - /// ! ) ____ ___.< ! - /// ! |/ )/ ! - /// ! ! - /// ! ! - /// ! V - /// ! Y-Rc - /// 0 +-------------------------------------> - /// 0 X-Rp - /// ``` - /// - /// The image once captured to memory will then be rotated by 180 degrees - /// - /// ```text - /// +--------------------------------------+ - /// ! ! - /// ! ! - /// ! ! - /// ! __/(_____/| ! - /// ! >.___ ____ ( ! 
- /// ! \( \| ! - /// ! ! - /// ! ! - /// ! ! - /// +--------------------------------------+ - /// ``` - /// - /// A software rotation correction of 180 degrees should be applied to - /// correctly display the image. - /// - /// ```text - /// +--------------------------------------+ - /// ! ! - /// ! ! - /// ! ! - /// ! |\____)\___ ! - /// ! ) _____ __`< ! - /// ! |/ )/ ! - /// ! ! - /// ! ! - /// ! ! - /// +--------------------------------------+ - /// ``` - /// - /// Example two - Phone camera - /// - /// A camera installed on the back side of a mobile device facing away from - /// the user. The captured images are meant to be displayed in portrait mode - /// (height > width) to match the device screen orientation and the device - /// usage orientation used when taking the picture. - /// - /// The camera sensor is typically mounted with its pixel array longer side - /// aligned to the device longer side, upside-down mounted to compensate for - /// the lens optical inversion effect. - /// - /// ```text - /// 0 Y-Rc - /// 0 +--------------------> - /// ! Y-Rp - /// ! ^ - /// ! ! - /// ! ! - /// ! ! - /// ! ! |\_____)\__ - /// ! ! ) ____ ___.< - /// ! ! |/ )/ - /// ! ! - /// ! ! - /// ! ! - /// ! 0 +-------------------------------------> - /// ! 0 X-Rp - /// ! - /// ! - /// ! - /// ! - /// V - /// X-Rc - /// ``` - /// - /// The two reference systems are not aligned and the 'Rp' reference - /// system is rotated by 90 degrees in the counter-clockwise direction - /// relatively to the 'Rc' reference system. - /// - /// The image once captured to memory will be rotated. - /// - /// ```text - /// +-------------------------------------+ - /// | _ _ | - /// | \ / | - /// | | | | - /// | | | | - /// | | > | - /// | < | | - /// | | | | - /// | . 
| - /// | V | - /// +-------------------------------------+ - /// ``` - /// - /// A correction of 90 degrees in counter-clockwise direction has to be - /// applied to correctly display the image in portrait mode on the device - /// screen. - /// - /// ```text - /// +--------------------+ - /// | | - /// | | - /// | | - /// | | - /// | | - /// | | - /// | |\____)\___ | - /// | ) _____ __`< | - /// | |/ )/ | - /// | | - /// | | - /// | | - /// | | - /// | | - /// +--------------------+ - Rotation = ROTATION, - /// The model name shall to the extent possible describe the sensor. For - /// most devices this is the model name of the sensor. While for some - /// devices the sensor model is unavailable as the sensor or the entire - /// camera is part of a larger unit and exposed as a black-box to the - /// system. In such cases the model name of the smallest device that - /// contains the camera sensor shall be used. - /// - /// The model name is not meant to be a camera name displayed to the - /// end-user, but may be combined with other camera information to create a - /// camera name. - /// - /// The model name is not guaranteed to be unique in the system nor is - /// it guaranteed to be stable or have any other properties required to make - /// it a good candidate to be used as a permanent identifier of a camera. - /// - /// The model name shall describe the camera in a human readable format and - /// shall be encoded in ASCII. - /// - /// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. - Model = MODEL, - /// The pixel unit cell physical size, in nanometers. - /// - /// The UnitCellSize properties defines the horizontal and vertical sizes of - /// a single pixel unit, including its active and non-active parts. In - /// other words, it expresses the horizontal and vertical distance between - /// the top-left corners of adjacent pixels. 
- /// - /// The property can be used to calculate the physical size of the sensor's - /// pixel array area and for calibration purposes. - UnitCellSize = UNIT_CELL_SIZE, - /// The camera sensor pixel array readable area vertical and horizontal - /// sizes, in pixels. - /// - /// The PixelArraySize property defines the size in pixel units of the - /// readable part of full pixel array matrix, including optical black - /// pixels used for calibration, pixels which are not considered valid for - /// capture and active pixels containing valid image data. - /// - /// The property describes the maximum size of the raw data captured by the - /// camera, which might not correspond to the physical size of the sensor - /// pixel array matrix, as some portions of the physical pixel array matrix - /// are not accessible and cannot be transmitted out. - /// - /// For example, let's consider a pixel array matrix assembled as follows - /// - /// ```text - /// +--------------------------------------------------+ - /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// ... ... ... ... ... - /// ``` - /// - /// ```text - /// ... ... ... ... ... 
- /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - /// +--------------------------------------------------+ - /// ``` - /// - /// starting with two lines of non-readable pixels (x), followed by N lines - /// of readable data (D) surrounded by two columns of non-readable pixels on - /// each side, and ending with two more lines of non-readable pixels. Only - /// the readable portion is transmitted to the receiving side, defining the - /// sizes of the largest possible buffer of raw data that can be presented - /// to applications. - /// - /// ```text - /// PixelArraySize.width - /// /----------------------------------------------/ - /// +----------------------------------------------+ / - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height - /// ... ... ... ... ... - /// ... ... ... ... ... - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - /// +----------------------------------------------+ / - /// ``` - /// - /// This defines a rectangle whose top-left corner is placed in position (0, - /// 0) and whose vertical and horizontal sizes are defined by this property. - /// All other rectangles that describe portions of the pixel array, such as - /// the optical black pixels rectangles and active pixel areas, are defined - /// relatively to this rectangle. 
- /// - /// All the coordinates are expressed relative to the default sensor readout - /// direction, without any transformation (such as horizontal and vertical - /// flipping) applied. When mapping them to the raw pixel buffer, - /// applications shall take any configured transformation into account. - /// - /// \todo Rename this property to Size once we will have property - /// ```text - /// categories (i.e. Properties::PixelArray::Size) - PixelArraySize = PIXEL_ARRAY_SIZE, - /// The pixel array region(s) which contain optical black pixels - /// considered valid for calibration purposes. - /// - /// This property describes the position and size of optical black pixel - /// regions in the raw data buffer as stored in memory, which might differ - /// from their actual physical location in the pixel array matrix. - /// - /// It is important to note, in fact, that camera sensors might - /// automatically reorder or skip portions of their pixels array matrix when - /// transmitting data to the receiver. For instance, a sensor may merge the - /// top and bottom optical black rectangles into a single rectangle, - /// transmitted at the beginning of the frame. - /// - /// The pixel array contains several areas with different purposes, - /// interleaved by lines and columns which are said not to be valid for - /// capturing purposes. Invalid lines and columns are defined as invalid as - /// they could be positioned too close to the chip margins or to the optical - /// black shielding placed on top of optical black pixels. 
- /// - /// ```text - /// PixelArraySize.width - /// /----------------------------------------------/ - /// x1 x2 - /// +--o---------------------------------------o---+ / - /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - /// y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - /// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - /// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - /// y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - /// y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | - /// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height - /// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | - /// ... ... ... ... ... - /// ... ... ... ... ... - /// y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | - /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - /// +----------------------------------------------+ / - /// ``` - /// - /// The readable pixel array matrix is composed by - /// 2 invalid lines (I) - /// 4 lines of valid optical black pixels (O) - /// 2 invalid lines (I) - /// n lines of valid pixel data (P) - /// 2 invalid lines (I) - /// - /// And the position of the optical black pixel rectangles is defined by - /// - /// ```text - /// PixelArrayOpticalBlackRectangles = { - /// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, - /// { x1, y3, 2, y4 - y3 + 1 }, - /// { x2, y3, 2, y4 - y3 + 1 }, - /// }; - /// ``` - /// - /// If the camera, when capturing the full pixel array matrix, automatically - /// skips the invalid lines and columns, producing the following data - /// buffer, when captured to memory - /// - /// ```text - /// PixelArraySize.width - /// /----------------------------------------------/ - /// x1 - /// +--------------------------------------------o-+ / - /// 
|OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - /// y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height - /// ... ... ... ... ... | - /// ... ... ... ... ... | - /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - /// +----------------------------------------------+ / - /// ``` - /// - /// then the invalid lines and columns should not be reported as part of the - /// PixelArraySize property in first place. - /// - /// In this case, the position of the black pixel rectangles will be - /// - /// ```text - /// PixelArrayOpticalBlackRectangles = { - /// { 0, 0, y1 + 1, PixelArraySize[0] }, - /// { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, - /// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, - /// }; - /// ``` - /// - /// \todo Rename this property to Size once we will have property - /// ```text - /// categories (i.e. Properties::PixelArray::OpticalBlackRectangles) - PixelArrayOpticalBlackRectangles = PIXEL_ARRAY_OPTICAL_BLACK_RECTANGLES, - /// The PixelArrayActiveAreas property defines the (possibly multiple and - /// overlapping) portions of the camera sensor readable pixel matrix - /// which are considered valid for image acquisition purposes. - /// - /// This property describes an arbitrary number of overlapping rectangles, - /// with each rectangle representing the maximum image size that the camera - /// sensor can produce for a particular aspect ratio. They are defined - /// relatively to the PixelArraySize rectangle. - /// - /// When multiple rectangles are reported, they shall be ordered from the - /// tallest to the shortest. 
- /// - /// Example 1 - /// A camera sensor which only produces images in the 4:3 image resolution - /// will report a single PixelArrayActiveAreas rectangle, from which all - /// other image formats are obtained by either cropping the field-of-view - /// and/or applying pixel sub-sampling techniques such as pixel skipping or - /// binning. - /// - /// ```text - /// PixelArraySize.width - /// /----------------/ - /// x1 x2 - /// (0,0)-> +-o------------o-+ / - /// y1 o +------------+ | | - /// | |////////////| | | - /// | |////////////| | | PixelArraySize.height - /// | |////////////| | | - /// y2 o +------------+ | | - /// +----------------+ / - /// ``` - /// - /// The property reports a single rectangle - /// - /// ```text - /// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) - /// ``` - /// - /// Example 2 - /// A camera sensor which can produce images in different native - /// resolutions will report several overlapping rectangles, one for each - /// natively supported resolution. - /// - /// ```text - /// PixelArraySize.width - /// /------------------/ - /// x1 x2 x3 x4 - /// (0,0)-> +o---o------o---o+ / - /// y1 o +------+ | | - /// | |//////| | | - /// y2 o+---+------+---+| | - /// ||///|//////|///|| | PixelArraySize.height - /// y3 o+---+------+---+| | - /// | |//////| | | - /// y4 o +------+ | | - /// +----+------+----+ / - /// ``` - /// - /// The property reports two rectangles - /// - /// ```text - /// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), - /// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) - /// ``` - /// - /// The first rectangle describes the maximum field-of-view of all image - /// formats in the 4:3 resolutions, while the second one describes the - /// maximum field of view for all image formats in the 16:9 resolutions. - /// - /// Multiple rectangles shall only be reported when the sensor can't capture - /// the pixels in the corner regions. 
If all the pixels in the (x1,y1) - - /// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall - /// contains the single rectangle (x1,y1) - (x4,y4). - /// - /// \todo Rename this property to ActiveAreas once we will have property - /// ```text - /// categories (i.e. Properties::PixelArray::ActiveAreas) - PixelArrayActiveAreas = PIXEL_ARRAY_ACTIVE_AREAS, - /// The maximum valid rectangle for the controls::ScalerCrop control. This - /// reflects the minimum mandatory cropping applied in the camera sensor and - /// the rest of the pipeline. Just as the ScalerCrop control, it defines a - /// rectangle taken from the sensor's active pixel array. - /// - /// This property is valid only after the camera has been successfully - /// configured and its value may change whenever a new configuration is - /// applied. - /// - /// \todo Turn this property into a "maximum control value" for the - /// ScalerCrop control once "dynamic" controls have been implemented. - ScalerCropMaximum = SCALER_CROP_MAXIMUM, - /// The relative sensitivity of the chosen sensor mode. - /// - /// Some sensors have readout modes with different sensitivities. For example, - /// a binned camera mode might, with the same exposure and gains, produce - /// twice the signal level of the full resolution readout. This would be - /// signalled by the binned mode, when it is chosen, indicating a value here - /// that is twice that of the full resolution mode. This value will be valid - /// after the configure method has returned successfully. - SensorSensitivity = SENSOR_SENSITIVITY, - /// A list of integer values of type dev_t denoting the major and minor - /// device numbers of the underlying devices used in the operation of this - /// camera. - /// - /// Different cameras may report identical devices. - SystemDevices = SYSTEM_DEVICES, - /// The arrangement of color filters on sensor; represents the colors in the - /// top-left 2x2 section of the sensor, in reading order. 
Currently - /// identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. - #[cfg(feature = "vendor_draft")] - ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, -} -impl PropertyId { - fn id(&self) -> u32 { - *self as u32 - } - pub fn name(&self) -> String { - unsafe { - let c_str = libcamera_property_name_by_id(self.id()); - if c_str.is_null() { - return "".into(); - } - CStr::from_ptr(c_str).to_str().unwrap().into() - } - } -} -/// Camera mounting location -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum Location { - /// The camera is mounted on the front side of the device, facing the - /// user - CameraFront = 0, - /// The camera is mounted on the back side of the device, facing away - /// from the user - CameraBack = 1, - /// The camera is attached to the device in a way that allows it to - /// be moved freely - CameraExternal = 2, -} -impl TryFrom for Location { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -impl From for ControlValue { - fn from(val: Location) -> Self { - ControlValue::from(::from(val)) - } -} -impl ControlEntry for Location { - const ID: u32 = PropertyId::Location as _; -} -impl Property for Location {} -/// The camera physical mounting rotation. It is expressed as the angular -/// difference in degrees between two reference systems, one relative to the -/// camera module, and one defined on the external world scene to be -/// captured when projected on the image sensor pixel array. -/// -/// A camera sensor has a 2-dimensional reference system 'Rc' defined by -/// its pixel array read-out order. The origin is set to the first pixel -/// being read out, the X-axis points along the column read-out direction -/// towards the last columns, and the Y-axis along the row read-out -/// direction towards the last row. 
-/// -/// A typical example for a sensor with a 2592x1944 pixel array matrix -/// observed from the front is -/// -/// ```text -/// 2591 X-axis 0 -/// <------------------------+ 0 -/// .......... ... ..........! -/// .......... ... ..........! Y-axis -/// ... ! -/// .......... ... ..........! -/// .......... ... ..........! 1943 -/// V -/// ``` -/// -/// -/// The external world scene reference system 'Rs' is a 2-dimensional -/// reference system on the focal plane of the camera module. The origin is -/// placed on the top-left corner of the visible scene, the X-axis points -/// towards the right, and the Y-axis points towards the bottom of the -/// scene. The top, bottom, left and right directions are intentionally not -/// defined and depend on the environment in which the camera is used. -/// -/// A typical example of a (very common) picture of a shark swimming from -/// left to right, as seen from the camera, is -/// -/// ```text -/// 0 X-axis -/// 0 +-------------------------------------> -/// ! -/// ! -/// ! -/// ! |\____)\___ -/// ! ) _____ __`< -/// ! |/ )/ -/// ! -/// ! -/// ! -/// V -/// Y-axis -/// ``` -/// -/// With the reference system 'Rs' placed on the camera focal plane. -/// -/// ```text -/// ¸.·˙! -/// ¸.·˙ ! -/// _ ¸.·˙ ! -/// +-/ \-+¸.·˙ ! -/// | (o) | ! Camera focal plane -/// +-----+˙·.¸ ! -/// ˙·.¸ ! -/// ˙·.¸ ! -/// ˙·.¸! -/// ``` -/// -/// When projected on the sensor's pixel array, the image and the associated -/// reference system 'Rs' are typically (but not always) inverted, due to -/// the camera module's lens optical inversion effect. -/// -/// Assuming the above represented scene of the swimming shark, the lens -/// inversion projects the scene and its reference system onto the sensor -/// pixel array, seen from the front of the camera sensor, as follow -/// -/// ```text -/// Y-axis -/// ^ -/// ! -/// ! -/// ! -/// ! |\_____)\__ -/// ! ) ____ ___.< -/// ! |/ )/ -/// ! -/// ! -/// ! 
-/// 0 +-------------------------------------> -/// 0 X-axis -/// ``` -/// -/// Note the shark being upside-down. -/// -/// The resulting projected reference system is named 'Rp'. -/// -/// The camera rotation property is then defined as the angular difference -/// in the counter-clockwise direction between the camera reference system -/// 'Rc' and the projected scene reference system 'Rp'. It is expressed in -/// degrees as a number in the range [0, 360[. -/// -/// Examples -/// -/// 0 degrees camera rotation -/// -/// -/// ```text -/// Y-Rp -/// ^ -/// Y-Rc ! -/// ^ ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! 0 +-------------------------------------> -/// ! 0 X-Rp -/// 0 +-------------------------------------> -/// 0 X-Rc -/// ``` -/// -/// -/// ```text -/// X-Rc 0 -/// <------------------------------------+ 0 -/// X-Rp 0 ! -/// <------------------------------------+ 0 ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! V -/// ! Y-Rc -/// V -/// Y-Rp -/// ``` -/// -/// 90 degrees camera rotation -/// -/// ```text -/// 0 Y-Rc -/// 0 +--------------------> -/// ! Y-Rp -/// ! ^ -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! 0 +-------------------------------------> -/// ! 0 X-Rp -/// ! -/// ! -/// ! -/// ! -/// V -/// X-Rc -/// ``` -/// -/// 180 degrees camera rotation -/// -/// ```text -/// 0 -/// <------------------------------------+ 0 -/// X-Rc ! -/// Y-Rp ! -/// ^ ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! V -/// ! Y-Rc -/// 0 +-------------------------------------> -/// 0 X-Rp -/// ``` -/// -/// 270 degrees camera rotation -/// -/// ```text -/// 0 Y-Rc -/// 0 +--------------------> -/// ! 0 -/// ! <-----------------------------------+ 0 -/// ! X-Rp ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! ! -/// ! V -/// ! Y-Rp -/// ! -/// ! -/// ! -/// ! 
-/// V -/// X-Rc -/// ``` -/// -/// -/// Example one - Webcam -/// -/// A camera module installed on the user facing part of a laptop screen -/// casing used for video calls. The captured images are meant to be -/// displayed in landscape mode (width > height) on the laptop screen. -/// -/// The camera is typically mounted upside-down to compensate the lens -/// optical inversion effect. -/// -/// ```text -/// Y-Rp -/// Y-Rc ^ -/// ^ ! -/// ! ! -/// ! ! |\_____)\__ -/// ! ! ) ____ ___.< -/// ! ! |/ )/ -/// ! ! -/// ! ! -/// ! ! -/// ! 0 +-------------------------------------> -/// ! 0 X-Rp -/// 0 +-------------------------------------> -/// 0 X-Rc -/// ``` -/// -/// The two reference systems are aligned, the resulting camera rotation is -/// 0 degrees, no rotation correction needs to be applied to the resulting -/// image once captured to memory buffers to correctly display it to users. -/// -/// ```text -/// +--------------------------------------+ -/// ! ! -/// ! ! -/// ! ! -/// ! |\____)\___ ! -/// ! ) _____ __`< ! -/// ! |/ )/ ! -/// ! ! -/// ! ! -/// ! ! -/// +--------------------------------------+ -/// ``` -/// -/// If the camera sensor is not mounted upside-down to compensate for the -/// lens optical inversion, the two reference systems will not be aligned, -/// with 'Rp' being rotated 180 degrees relatively to 'Rc'. -/// -/// -/// ```text -/// X-Rc 0 -/// <------------------------------------+ 0 -/// ! -/// Y-Rp ! -/// ^ ! -/// ! ! -/// ! |\_____)\__ ! -/// ! ) ____ ___.< ! -/// ! |/ )/ ! -/// ! ! -/// ! ! -/// ! V -/// ! Y-Rc -/// 0 +-------------------------------------> -/// 0 X-Rp -/// ``` -/// -/// The image once captured to memory will then be rotated by 180 degrees -/// -/// ```text -/// +--------------------------------------+ -/// ! ! -/// ! ! -/// ! ! -/// ! __/(_____/| ! -/// ! >.___ ____ ( ! -/// ! \( \| ! -/// ! ! -/// ! ! -/// ! ! 
-/// +--------------------------------------+ -/// ``` -/// -/// A software rotation correction of 180 degrees should be applied to -/// correctly display the image. -/// -/// ```text -/// +--------------------------------------+ -/// ! ! -/// ! ! -/// ! ! -/// ! |\____)\___ ! -/// ! ) _____ __`< ! -/// ! |/ )/ ! -/// ! ! -/// ! ! -/// ! ! -/// +--------------------------------------+ -/// ``` -/// -/// Example two - Phone camera -/// -/// A camera installed on the back side of a mobile device facing away from -/// the user. The captured images are meant to be displayed in portrait mode -/// (height > width) to match the device screen orientation and the device -/// usage orientation used when taking the picture. -/// -/// The camera sensor is typically mounted with its pixel array longer side -/// aligned to the device longer side, upside-down mounted to compensate for -/// the lens optical inversion effect. -/// -/// ```text -/// 0 Y-Rc -/// 0 +--------------------> -/// ! Y-Rp -/// ! ^ -/// ! ! -/// ! ! -/// ! ! -/// ! ! |\_____)\__ -/// ! ! ) ____ ___.< -/// ! ! |/ )/ -/// ! ! -/// ! ! -/// ! ! -/// ! 0 +-------------------------------------> -/// ! 0 X-Rp -/// ! -/// ! -/// ! -/// ! -/// V -/// X-Rc -/// ``` -/// -/// The two reference systems are not aligned and the 'Rp' reference -/// system is rotated by 90 degrees in the counter-clockwise direction -/// relatively to the 'Rc' reference system. -/// -/// The image once captured to memory will be rotated. -/// -/// ```text -/// +-------------------------------------+ -/// | _ _ | -/// | \ / | -/// | | | | -/// | | | | -/// | | > | -/// | < | | -/// | | | | -/// | . | -/// | V | -/// +-------------------------------------+ -/// ``` -/// -/// A correction of 90 degrees in counter-clockwise direction has to be -/// applied to correctly display the image in portrait mode on the device -/// screen. 
-/// -/// ```text -/// +--------------------+ -/// | | -/// | | -/// | | -/// | | -/// | | -/// | | -/// | |\____)\___ | -/// | ) _____ __`< | -/// | |/ )/ | -/// | | -/// | | -/// | | -/// | | -/// | | -/// +--------------------+ -#[derive(Debug, Clone)] -pub struct Rotation(pub i32); -impl Deref for Rotation { - type Target = i32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Rotation { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Rotation { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Rotation) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Rotation { - const ID: u32 = PropertyId::Rotation as _; -} -impl Property for Rotation {} -/// The model name shall to the extent possible describe the sensor. For -/// most devices this is the model name of the sensor. While for some -/// devices the sensor model is unavailable as the sensor or the entire -/// camera is part of a larger unit and exposed as a black-box to the -/// system. In such cases the model name of the smallest device that -/// contains the camera sensor shall be used. -/// -/// The model name is not meant to be a camera name displayed to the -/// end-user, but may be combined with other camera information to create a -/// camera name. -/// -/// The model name is not guaranteed to be unique in the system nor is -/// it guaranteed to be stable or have any other properties required to make -/// it a good candidate to be used as a permanent identifier of a camera. -/// -/// The model name shall describe the camera in a human readable format and -/// shall be encoded in ASCII. -/// -/// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. 
-#[derive(Debug, Clone)] -pub struct Model(pub String); -impl Deref for Model { - type Target = String; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Model { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for Model { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: Model) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for Model { - const ID: u32 = PropertyId::Model as _; -} -impl Property for Model {} -/// The pixel unit cell physical size, in nanometers. -/// -/// The UnitCellSize properties defines the horizontal and vertical sizes of -/// a single pixel unit, including its active and non-active parts. In -/// other words, it expresses the horizontal and vertical distance between -/// the top-left corners of adjacent pixels. -/// -/// The property can be used to calculate the physical size of the sensor's -/// pixel array area and for calibration purposes. -#[derive(Debug, Clone)] -pub struct UnitCellSize(pub Size); -impl Deref for UnitCellSize { - type Target = Size; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for UnitCellSize { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for UnitCellSize { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: UnitCellSize) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for UnitCellSize { - const ID: u32 = PropertyId::UnitCellSize as _; -} -impl Property for UnitCellSize {} -/// The camera sensor pixel array readable area vertical and horizontal -/// sizes, in pixels. 
-/// -/// The PixelArraySize property defines the size in pixel units of the -/// readable part of full pixel array matrix, including optical black -/// pixels used for calibration, pixels which are not considered valid for -/// capture and active pixels containing valid image data. -/// -/// The property describes the maximum size of the raw data captured by the -/// camera, which might not correspond to the physical size of the sensor -/// pixel array matrix, as some portions of the physical pixel array matrix -/// are not accessible and cannot be transmitted out. -/// -/// For example, let's consider a pixel array matrix assembled as follows -/// -/// ```text -/// +--------------------------------------------------+ -/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| -/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// ... ... ... ... ... -/// ``` -/// -/// ```text -/// ... ... ... ... ... -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| -/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| -/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| -/// +--------------------------------------------------+ -/// ``` -/// -/// starting with two lines of non-readable pixels (x), followed by N lines -/// of readable data (D) surrounded by two columns of non-readable pixels on -/// each side, and ending with two more lines of non-readable pixels. Only -/// the readable portion is transmitted to the receiving side, defining the -/// sizes of the largest possible buffer of raw data that can be presented -/// to applications. 
-/// -/// ```text -/// PixelArraySize.width -/// /----------------------------------------------/ -/// +----------------------------------------------+ / -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height -/// ... ... ... ... ... -/// ... ... ... ... ... -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | -/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | -/// +----------------------------------------------+ / -/// ``` -/// -/// This defines a rectangle whose top-left corner is placed in position (0, -/// 0) and whose vertical and horizontal sizes are defined by this property. -/// All other rectangles that describe portions of the pixel array, such as -/// the optical black pixels rectangles and active pixel areas, are defined -/// relatively to this rectangle. -/// -/// All the coordinates are expressed relative to the default sensor readout -/// direction, without any transformation (such as horizontal and vertical -/// flipping) applied. When mapping them to the raw pixel buffer, -/// applications shall take any configured transformation into account. -/// -/// \todo Rename this property to Size once we will have property -/// ```text -/// categories (i.e. 
Properties::PixelArray::Size) -#[derive(Debug, Clone)] -pub struct PixelArraySize(pub Size); -impl Deref for PixelArraySize { - type Target = Size; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for PixelArraySize { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for PixelArraySize { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: PixelArraySize) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for PixelArraySize { - const ID: u32 = PropertyId::PixelArraySize as _; -} -impl Property for PixelArraySize {} -/// The pixel array region(s) which contain optical black pixels -/// considered valid for calibration purposes. -/// -/// This property describes the position and size of optical black pixel -/// regions in the raw data buffer as stored in memory, which might differ -/// from their actual physical location in the pixel array matrix. -/// -/// It is important to note, in fact, that camera sensors might -/// automatically reorder or skip portions of their pixels array matrix when -/// transmitting data to the receiver. For instance, a sensor may merge the -/// top and bottom optical black rectangles into a single rectangle, -/// transmitted at the beginning of the frame. -/// -/// The pixel array contains several areas with different purposes, -/// interleaved by lines and columns which are said not to be valid for -/// capturing purposes. Invalid lines and columns are defined as invalid as -/// they could be positioned too close to the chip margins or to the optical -/// black shielding placed on top of optical black pixels. 
-/// -/// ```text -/// PixelArraySize.width -/// /----------------------------------------------/ -/// x1 x2 -/// +--o---------------------------------------o---+ / -/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | -/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | -/// y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | -/// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | -/// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | -/// y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | -/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | -/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | -/// y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | -/// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height -/// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | -/// ... ... ... ... ... -/// ... ... ... ... ... -/// y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | -/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | -/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | -/// +----------------------------------------------+ / -/// ``` -/// -/// The readable pixel array matrix is composed by -/// 2 invalid lines (I) -/// 4 lines of valid optical black pixels (O) -/// 2 invalid lines (I) -/// n lines of valid pixel data (P) -/// 2 invalid lines (I) -/// -/// And the position of the optical black pixel rectangles is defined by -/// -/// ```text -/// PixelArrayOpticalBlackRectangles = { -/// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, -/// { x1, y3, 2, y4 - y3 + 1 }, -/// { x2, y3, 2, y4 - y3 + 1 }, -/// }; -/// ``` -/// -/// If the camera, when capturing the full pixel array matrix, automatically -/// skips the invalid lines and columns, producing the following data -/// buffer, when captured to memory -/// -/// ```text -/// PixelArraySize.width -/// /----------------------------------------------/ -/// x1 -/// +--------------------------------------------o-+ / -/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | -/// 
|OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | -/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | -/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | -/// y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | -/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | -/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height -/// ... ... ... ... ... | -/// ... ... ... ... ... | -/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | -/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | -/// +----------------------------------------------+ / -/// ``` -/// -/// then the invalid lines and columns should not be reported as part of the -/// PixelArraySize property in first place. -/// -/// In this case, the position of the black pixel rectangles will be -/// -/// ```text -/// PixelArrayOpticalBlackRectangles = { -/// { 0, 0, y1 + 1, PixelArraySize[0] }, -/// { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, -/// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, -/// }; -/// ``` -/// -/// \todo Rename this property to Size once we will have property -/// ```text -/// categories (i.e. 
Properties::PixelArray::OpticalBlackRectangles) -#[derive(Debug, Clone)] -pub struct PixelArrayOpticalBlackRectangles(pub Vec); -impl Deref for PixelArrayOpticalBlackRectangles { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for PixelArrayOpticalBlackRectangles { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for PixelArrayOpticalBlackRectangles { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: PixelArrayOpticalBlackRectangles) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for PixelArrayOpticalBlackRectangles { - const ID: u32 = PropertyId::PixelArrayOpticalBlackRectangles as _; -} -impl Property for PixelArrayOpticalBlackRectangles {} -/// The PixelArrayActiveAreas property defines the (possibly multiple and -/// overlapping) portions of the camera sensor readable pixel matrix -/// which are considered valid for image acquisition purposes. -/// -/// This property describes an arbitrary number of overlapping rectangles, -/// with each rectangle representing the maximum image size that the camera -/// sensor can produce for a particular aspect ratio. They are defined -/// relatively to the PixelArraySize rectangle. -/// -/// When multiple rectangles are reported, they shall be ordered from the -/// tallest to the shortest. -/// -/// Example 1 -/// A camera sensor which only produces images in the 4:3 image resolution -/// will report a single PixelArrayActiveAreas rectangle, from which all -/// other image formats are obtained by either cropping the field-of-view -/// and/or applying pixel sub-sampling techniques such as pixel skipping or -/// binning. 
-/// -/// ```text -/// PixelArraySize.width -/// /----------------/ -/// x1 x2 -/// (0,0)-> +-o------------o-+ / -/// y1 o +------------+ | | -/// | |////////////| | | -/// | |////////////| | | PixelArraySize.height -/// | |////////////| | | -/// y2 o +------------+ | | -/// +----------------+ / -/// ``` -/// -/// The property reports a single rectangle -/// -/// ```text -/// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) -/// ``` -/// -/// Example 2 -/// A camera sensor which can produce images in different native -/// resolutions will report several overlapping rectangles, one for each -/// natively supported resolution. -/// -/// ```text -/// PixelArraySize.width -/// /------------------/ -/// x1 x2 x3 x4 -/// (0,0)-> +o---o------o---o+ / -/// y1 o +------+ | | -/// | |//////| | | -/// y2 o+---+------+---+| | -/// ||///|//////|///|| | PixelArraySize.height -/// y3 o+---+------+---+| | -/// | |//////| | | -/// y4 o +------+ | | -/// +----+------+----+ / -/// ``` -/// -/// The property reports two rectangles -/// -/// ```text -/// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), -/// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) -/// ``` -/// -/// The first rectangle describes the maximum field-of-view of all image -/// formats in the 4:3 resolutions, while the second one describes the -/// maximum field of view for all image formats in the 16:9 resolutions. -/// -/// Multiple rectangles shall only be reported when the sensor can't capture -/// the pixels in the corner regions. If all the pixels in the (x1,y1) - -/// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall -/// contains the single rectangle (x1,y1) - (x4,y4). -/// -/// \todo Rename this property to ActiveAreas once we will have property -/// ```text -/// categories (i.e. 
Properties::PixelArray::ActiveAreas) -#[derive(Debug, Clone)] -pub struct PixelArrayActiveAreas(pub Vec); -impl Deref for PixelArrayActiveAreas { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for PixelArrayActiveAreas { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for PixelArrayActiveAreas { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: PixelArrayActiveAreas) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for PixelArrayActiveAreas { - const ID: u32 = PropertyId::PixelArrayActiveAreas as _; -} -impl Property for PixelArrayActiveAreas {} -/// The maximum valid rectangle for the controls::ScalerCrop control. This -/// reflects the minimum mandatory cropping applied in the camera sensor and -/// the rest of the pipeline. Just as the ScalerCrop control, it defines a -/// rectangle taken from the sensor's active pixel array. -/// -/// This property is valid only after the camera has been successfully -/// configured and its value may change whenever a new configuration is -/// applied. -/// -/// \todo Turn this property into a "maximum control value" for the -/// ScalerCrop control once "dynamic" controls have been implemented. 
-#[derive(Debug, Clone)] -pub struct ScalerCropMaximum(pub Rectangle); -impl Deref for ScalerCropMaximum { - type Target = Rectangle; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for ScalerCropMaximum { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for ScalerCropMaximum { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: ScalerCropMaximum) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for ScalerCropMaximum { - const ID: u32 = PropertyId::ScalerCropMaximum as _; -} -impl Property for ScalerCropMaximum {} -/// The relative sensitivity of the chosen sensor mode. -/// -/// Some sensors have readout modes with different sensitivities. For example, -/// a binned camera mode might, with the same exposure and gains, produce -/// twice the signal level of the full resolution readout. This would be -/// signalled by the binned mode, when it is chosen, indicating a value here -/// that is twice that of the full resolution mode. This value will be valid -/// after the configure method has returned successfully. 
-#[derive(Debug, Clone)] -pub struct SensorSensitivity(pub f32); -impl Deref for SensorSensitivity { - type Target = f32; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for SensorSensitivity { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for SensorSensitivity { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: SensorSensitivity) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for SensorSensitivity { - const ID: u32 = PropertyId::SensorSensitivity as _; -} -impl Property for SensorSensitivity {} -/// A list of integer values of type dev_t denoting the major and minor -/// device numbers of the underlying devices used in the operation of this -/// camera. -/// -/// Different cameras may report identical devices. -#[derive(Debug, Clone)] -pub struct SystemDevices(pub Vec); -impl Deref for SystemDevices { - type Target = Vec; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for SystemDevices { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl TryFrom for SystemDevices { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Ok(Self(>::try_from(value)?)) - } -} -impl From for ControlValue { - fn from(val: SystemDevices) -> Self { - ControlValue::from(val.0) - } -} -impl ControlEntry for SystemDevices { - const ID: u32 = PropertyId::SystemDevices as _; -} -impl Property for SystemDevices {} -/// The arrangement of color filters on sensor; represents the colors in the -/// top-left 2x2 section of the sensor, in reading order. Currently -/// identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. 
-#[cfg(feature = "vendor_draft")] -#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] -#[repr(i32)] -pub enum ColorFilterArrangement { - /// RGGB Bayer pattern - RGGB = 0, - /// GRBG Bayer pattern - GRBG = 1, - /// GBRG Bayer pattern - GBRG = 2, - /// BGGR Bayer pattern - BGGR = 3, - /// Sensor is not Bayer; output has 3 16-bit values for each pixel, - /// instead of just 1 16-bit value per pixel. - RGB = 4, - /// Sensor is not Bayer; output consists of a single colour channel. - MONO = 5, -} -#[cfg(feature = "vendor_draft")] -impl TryFrom for ColorFilterArrangement { - type Error = ControlValueError; - fn try_from(value: ControlValue) -> Result { - Self::try_from(i32::try_from(value.clone())?) - .map_err(|_| ControlValueError::UnknownVariant(value)) - } -} -#[cfg(feature = "vendor_draft")] -impl From for ControlValue { - fn from(val: ColorFilterArrangement) -> Self { - ControlValue::from(::from(val)) - } -} -#[cfg(feature = "vendor_draft")] -impl ControlEntry for ColorFilterArrangement { - const ID: u32 = PropertyId::ColorFilterArrangement as _; -} -#[cfg(feature = "vendor_draft")] -impl Property for ColorFilterArrangement {} -pub fn make_dyn( - id: PropertyId, - val: ControlValue, -) -> Result, ControlValueError> { - match id { - PropertyId::Location => Ok(Box::new(Location::try_from(val)?)), - PropertyId::Rotation => Ok(Box::new(Rotation::try_from(val)?)), - PropertyId::Model => Ok(Box::new(Model::try_from(val)?)), - PropertyId::UnitCellSize => Ok(Box::new(UnitCellSize::try_from(val)?)), - PropertyId::PixelArraySize => Ok(Box::new(PixelArraySize::try_from(val)?)), - PropertyId::PixelArrayOpticalBlackRectangles => { - Ok(Box::new(PixelArrayOpticalBlackRectangles::try_from(val)?)) - } - PropertyId::PixelArrayActiveAreas => { - Ok(Box::new(PixelArrayActiveAreas::try_from(val)?)) - } - PropertyId::ScalerCropMaximum => Ok(Box::new(ScalerCropMaximum::try_from(val)?)), - PropertyId::SensorSensitivity => 
Ok(Box::new(SensorSensitivity::try_from(val)?)), - PropertyId::SystemDevices => Ok(Box::new(SystemDevices::try_from(val)?)), - #[cfg(feature = "vendor_draft")] - PropertyId::ColorFilterArrangement => { - Ok(Box::new(ColorFilterArrangement::try_from(val)?)) - } - } -} diff --git a/libcamera/versioned_files/0.4.0/property_ids_core.yaml b/libcamera/versioned_files/0.4.0/property_ids_core.yaml deleted file mode 100644 index 834454a..0000000 --- a/libcamera/versioned_files/0.4.0/property_ids_core.yaml +++ /dev/null @@ -1,704 +0,0 @@ -# SPDX-License-Identifier: LGPL-2.1-or-later -# -# Copyright (C) 2019, Google Inc. -# -%YAML 1.1 ---- -vendor: libcamera -controls: - - Location: - type: int32_t - description: | - Camera mounting location - enum: - - name: CameraLocationFront - value: 0 - description: | - The camera is mounted on the front side of the device, facing the - user - - name: CameraLocationBack - value: 1 - description: | - The camera is mounted on the back side of the device, facing away - from the user - - name: CameraLocationExternal - value: 2 - description: | - The camera is attached to the device in a way that allows it to - be moved freely - - - Rotation: - type: int32_t - description: | - The camera physical mounting rotation. It is expressed as the angular - difference in degrees between two reference systems, one relative to the - camera module, and one defined on the external world scene to be - captured when projected on the image sensor pixel array. - - A camera sensor has a 2-dimensional reference system 'Rc' defined by - its pixel array read-out order. The origin is set to the first pixel - being read out, the X-axis points along the column read-out direction - towards the last columns, and the Y-axis along the row read-out - direction towards the last row. - - A typical example for a sensor with a 2592x1944 pixel array matrix - observed from the front is - - 2591 X-axis 0 - <------------------------+ 0 - .......... ... ..........! - .......... 
... ..........! Y-axis - ... ! - .......... ... ..........! - .......... ... ..........! 1943 - V - - - The external world scene reference system 'Rs' is a 2-dimensional - reference system on the focal plane of the camera module. The origin is - placed on the top-left corner of the visible scene, the X-axis points - towards the right, and the Y-axis points towards the bottom of the - scene. The top, bottom, left and right directions are intentionally not - defined and depend on the environment in which the camera is used. - - A typical example of a (very common) picture of a shark swimming from - left to right, as seen from the camera, is - - 0 X-axis - 0 +-------------------------------------> - ! - ! - ! - ! |\____)\___ - ! ) _____ __`< - ! |/ )/ - ! - ! - ! - V - Y-axis - - With the reference system 'Rs' placed on the camera focal plane. - - ¸.·˙! - ¸.·˙ ! - _ ¸.·˙ ! - +-/ \-+¸.·˙ ! - | (o) | ! Camera focal plane - +-----+˙·.¸ ! - ˙·.¸ ! - ˙·.¸ ! - ˙·.¸! - - When projected on the sensor's pixel array, the image and the associated - reference system 'Rs' are typically (but not always) inverted, due to - the camera module's lens optical inversion effect. - - Assuming the above represented scene of the swimming shark, the lens - inversion projects the scene and its reference system onto the sensor - pixel array, seen from the front of the camera sensor, as follow - - Y-axis - ^ - ! - ! - ! - ! |\_____)\__ - ! ) ____ ___.< - ! |/ )/ - ! - ! - ! - 0 +-------------------------------------> - 0 X-axis - - Note the shark being upside-down. - - The resulting projected reference system is named 'Rp'. - - The camera rotation property is then defined as the angular difference - in the counter-clockwise direction between the camera reference system - 'Rc' and the projected scene reference system 'Rp'. It is expressed in - degrees as a number in the range [0, 360[. - - Examples - - 0 degrees camera rotation - - - Y-Rp - ^ - Y-Rc ! - ^ ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! 
! - ! 0 +-------------------------------------> - ! 0 X-Rp - 0 +-------------------------------------> - 0 X-Rc - - - X-Rc 0 - <------------------------------------+ 0 - X-Rp 0 ! - <------------------------------------+ 0 ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! V - ! Y-Rc - V - Y-Rp - - 90 degrees camera rotation - - 0 Y-Rc - 0 +--------------------> - ! Y-Rp - ! ^ - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! 0 +-------------------------------------> - ! 0 X-Rp - ! - ! - ! - ! - V - X-Rc - - 180 degrees camera rotation - - 0 - <------------------------------------+ 0 - X-Rc ! - Y-Rp ! - ^ ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! V - ! Y-Rc - 0 +-------------------------------------> - 0 X-Rp - - 270 degrees camera rotation - - 0 Y-Rc - 0 +--------------------> - ! 0 - ! <-----------------------------------+ 0 - ! X-Rp ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! ! - ! V - ! Y-Rp - ! - ! - ! - ! - V - X-Rc - - - Example one - Webcam - - A camera module installed on the user facing part of a laptop screen - casing used for video calls. The captured images are meant to be - displayed in landscape mode (width > height) on the laptop screen. - - The camera is typically mounted upside-down to compensate the lens - optical inversion effect. - - Y-Rp - Y-Rc ^ - ^ ! - ! ! - ! ! |\_____)\__ - ! ! ) ____ ___.< - ! ! |/ )/ - ! ! - ! ! - ! ! - ! 0 +-------------------------------------> - ! 0 X-Rp - 0 +-------------------------------------> - 0 X-Rc - - The two reference systems are aligned, the resulting camera rotation is - 0 degrees, no rotation correction needs to be applied to the resulting - image once captured to memory buffers to correctly display it to users. - - +--------------------------------------+ - ! ! - ! ! - ! ! - ! |\____)\___ ! - ! ) _____ __`< ! - ! |/ )/ ! - ! ! - ! ! - ! ! 
- +--------------------------------------+ - - If the camera sensor is not mounted upside-down to compensate for the - lens optical inversion, the two reference systems will not be aligned, - with 'Rp' being rotated 180 degrees relatively to 'Rc'. - - - X-Rc 0 - <------------------------------------+ 0 - ! - Y-Rp ! - ^ ! - ! ! - ! |\_____)\__ ! - ! ) ____ ___.< ! - ! |/ )/ ! - ! ! - ! ! - ! V - ! Y-Rc - 0 +-------------------------------------> - 0 X-Rp - - The image once captured to memory will then be rotated by 180 degrees - - +--------------------------------------+ - ! ! - ! ! - ! ! - ! __/(_____/| ! - ! >.___ ____ ( ! - ! \( \| ! - ! ! - ! ! - ! ! - +--------------------------------------+ - - A software rotation correction of 180 degrees should be applied to - correctly display the image. - - +--------------------------------------+ - ! ! - ! ! - ! ! - ! |\____)\___ ! - ! ) _____ __`< ! - ! |/ )/ ! - ! ! - ! ! - ! ! - +--------------------------------------+ - - Example two - Phone camera - - A camera installed on the back side of a mobile device facing away from - the user. The captured images are meant to be displayed in portrait mode - (height > width) to match the device screen orientation and the device - usage orientation used when taking the picture. - - The camera sensor is typically mounted with its pixel array longer side - aligned to the device longer side, upside-down mounted to compensate for - the lens optical inversion effect. - - 0 Y-Rc - 0 +--------------------> - ! Y-Rp - ! ^ - ! ! - ! ! - ! ! - ! ! |\_____)\__ - ! ! ) ____ ___.< - ! ! |/ )/ - ! ! - ! ! - ! ! - ! 0 +-------------------------------------> - ! 0 X-Rp - ! - ! - ! - ! - V - X-Rc - - The two reference systems are not aligned and the 'Rp' reference - system is rotated by 90 degrees in the counter-clockwise direction - relatively to the 'Rc' reference system. - - The image once captured to memory will be rotated. 
- - +-------------------------------------+ - | _ _ | - | \ / | - | | | | - | | | | - | | > | - | < | | - | | | | - | . | - | V | - +-------------------------------------+ - - A correction of 90 degrees in counter-clockwise direction has to be - applied to correctly display the image in portrait mode on the device - screen. - - +--------------------+ - | | - | | - | | - | | - | | - | | - | |\____)\___ | - | ) _____ __`< | - | |/ )/ | - | | - | | - | | - | | - | | - +--------------------+ - - - Model: - type: string - description: | - The model name shall to the extent possible describe the sensor. For - most devices this is the model name of the sensor. While for some - devices the sensor model is unavailable as the sensor or the entire - camera is part of a larger unit and exposed as a black-box to the - system. In such cases the model name of the smallest device that - contains the camera sensor shall be used. - - The model name is not meant to be a camera name displayed to the - end-user, but may be combined with other camera information to create a - camera name. - - The model name is not guaranteed to be unique in the system nor is - it guaranteed to be stable or have any other properties required to make - it a good candidate to be used as a permanent identifier of a camera. - - The model name shall describe the camera in a human readable format and - shall be encoded in ASCII. - - Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. - - - UnitCellSize: - type: Size - description: | - The pixel unit cell physical size, in nanometers. - - The UnitCellSize properties defines the horizontal and vertical sizes of - a single pixel unit, including its active and non-active parts. In - other words, it expresses the horizontal and vertical distance between - the top-left corners of adjacent pixels. - - The property can be used to calculate the physical size of the sensor's - pixel array area and for calibration purposes. 
- - - PixelArraySize: - type: Size - description: | - The camera sensor pixel array readable area vertical and horizontal - sizes, in pixels. - - The PixelArraySize property defines the size in pixel units of the - readable part of full pixel array matrix, including optical black - pixels used for calibration, pixels which are not considered valid for - capture and active pixels containing valid image data. - - The property describes the maximum size of the raw data captured by the - camera, which might not correspond to the physical size of the sensor - pixel array matrix, as some portions of the physical pixel array matrix - are not accessible and cannot be transmitted out. - - For example, let's consider a pixel array matrix assembled as follows - - +--------------------------------------------------+ - |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - ... ... ... ... ... - - ... ... ... ... ... - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| - |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| - +--------------------------------------------------+ - - starting with two lines of non-readable pixels (x), followed by N lines - of readable data (D) surrounded by two columns of non-readable pixels on - each side, and ending with two more lines of non-readable pixels. Only - the readable portion is transmitted to the receiving side, defining the - sizes of the largest possible buffer of raw data that can be presented - to applications. 
- - PixelArraySize.width - /----------------------------------------------/ - +----------------------------------------------+ / - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height - ... ... ... ... ... - ... ... ... ... ... - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | - +----------------------------------------------+ / - - This defines a rectangle whose top-left corner is placed in position (0, - 0) and whose vertical and horizontal sizes are defined by this property. - All other rectangles that describe portions of the pixel array, such as - the optical black pixels rectangles and active pixel areas, are defined - relatively to this rectangle. - - All the coordinates are expressed relative to the default sensor readout - direction, without any transformation (such as horizontal and vertical - flipping) applied. When mapping them to the raw pixel buffer, - applications shall take any configured transformation into account. - - \todo Rename this property to Size once we will have property - categories (i.e. Properties::PixelArray::Size) - - - PixelArrayOpticalBlackRectangles: - type: Rectangle - size: [n] - description: | - The pixel array region(s) which contain optical black pixels - considered valid for calibration purposes. - - This property describes the position and size of optical black pixel - regions in the raw data buffer as stored in memory, which might differ - from their actual physical location in the pixel array matrix. - - It is important to note, in fact, that camera sensors might - automatically reorder or skip portions of their pixels array matrix when - transmitting data to the receiver. 
For instance, a sensor may merge the - top and bottom optical black rectangles into a single rectangle, - transmitted at the beginning of the frame. - - The pixel array contains several areas with different purposes, - interleaved by lines and columns which are said not to be valid for - capturing purposes. Invalid lines and columns are defined as invalid as - they could be positioned too close to the chip margins or to the optical - black shielding placed on top of optical black pixels. - - PixelArraySize.width - /----------------------------------------------/ - x1 x2 - +--o---------------------------------------o---+ / - |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | - |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | - |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height - |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | - ... ... ... ... ... - ... ... ... ... ... 
- y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | - |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | - +----------------------------------------------+ / - - The readable pixel array matrix is composed by - 2 invalid lines (I) - 4 lines of valid optical black pixels (O) - 2 invalid lines (I) - n lines of valid pixel data (P) - 2 invalid lines (I) - - And the position of the optical black pixel rectangles is defined by - - PixelArrayOpticalBlackRectangles = { - { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, - { x1, y3, 2, y4 - y3 + 1 }, - { x2, y3, 2, y4 - y3 + 1 }, - }; - - If the camera, when capturing the full pixel array matrix, automatically - skips the invalid lines and columns, producing the following data - buffer, when captured to memory - - PixelArraySize.width - /----------------------------------------------/ - x1 - +--------------------------------------------o-+ / - |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | - y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height - ... ... ... ... ... | - ... ... ... ... ... | - |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | - +----------------------------------------------+ / - - then the invalid lines and columns should not be reported as part of the - PixelArraySize property in first place. - - In this case, the position of the black pixel rectangles will be - - PixelArrayOpticalBlackRectangles = { - { 0, 0, y1 + 1, PixelArraySize[0] }, - { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, - { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, - }; - - \todo Rename this property to Size once we will have property - categories (i.e. 
Properties::PixelArray::OpticalBlackRectangles) - - - PixelArrayActiveAreas: - type: Rectangle - size: [n] - description: | - The PixelArrayActiveAreas property defines the (possibly multiple and - overlapping) portions of the camera sensor readable pixel matrix - which are considered valid for image acquisition purposes. - - This property describes an arbitrary number of overlapping rectangles, - with each rectangle representing the maximum image size that the camera - sensor can produce for a particular aspect ratio. They are defined - relatively to the PixelArraySize rectangle. - - When multiple rectangles are reported, they shall be ordered from the - tallest to the shortest. - - Example 1 - A camera sensor which only produces images in the 4:3 image resolution - will report a single PixelArrayActiveAreas rectangle, from which all - other image formats are obtained by either cropping the field-of-view - and/or applying pixel sub-sampling techniques such as pixel skipping or - binning. - - PixelArraySize.width - /----------------/ - x1 x2 - (0,0)-> +-o------------o-+ / - y1 o +------------+ | | - | |////////////| | | - | |////////////| | | PixelArraySize.height - | |////////////| | | - y2 o +------------+ | | - +----------------+ / - - The property reports a single rectangle - - PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) - - Example 2 - A camera sensor which can produce images in different native - resolutions will report several overlapping rectangles, one for each - natively supported resolution. 
- - PixelArraySize.width - /------------------/ - x1 x2 x3 x4 - (0,0)-> +o---o------o---o+ / - y1 o +------+ | | - | |//////| | | - y2 o+---+------+---+| | - ||///|//////|///|| | PixelArraySize.height - y3 o+---+------+---+| | - | |//////| | | - y4 o +------+ | | - +----+------+----+ / - - The property reports two rectangles - - PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), - (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) - - The first rectangle describes the maximum field-of-view of all image - formats in the 4:3 resolutions, while the second one describes the - maximum field of view for all image formats in the 16:9 resolutions. - - Multiple rectangles shall only be reported when the sensor can't capture - the pixels in the corner regions. If all the pixels in the (x1,y1) - - (x4,y4) area can be captured, the PixelArrayActiveAreas property shall - contains the single rectangle (x1,y1) - (x4,y4). - - \todo Rename this property to ActiveAreas once we will have property - categories (i.e. Properties::PixelArray::ActiveAreas) - - - ScalerCropMaximum: - type: Rectangle - description: | - The maximum valid rectangle for the controls::ScalerCrop control. This - reflects the minimum mandatory cropping applied in the camera sensor and - the rest of the pipeline. Just as the ScalerCrop control, it defines a - rectangle taken from the sensor's active pixel array. - - This property is valid only after the camera has been successfully - configured and its value may change whenever a new configuration is - applied. - - \todo Turn this property into a "maximum control value" for the - ScalerCrop control once "dynamic" controls have been implemented. - - - SensorSensitivity: - type: float - description: | - The relative sensitivity of the chosen sensor mode. - - Some sensors have readout modes with different sensitivities. For example, - a binned camera mode might, with the same exposure and gains, produce - twice the signal level of the full resolution readout. 
This would be - signalled by the binned mode, when it is chosen, indicating a value here - that is twice that of the full resolution mode. This value will be valid - after the configure method has returned successfully. - - - SystemDevices: - type: int64_t - size: [n] - description: | - A list of integer values of type dev_t denoting the major and minor - device numbers of the underlying devices used in the operation of this - camera. - - Different cameras may report identical devices. - -... diff --git a/libcamera/versioned_files/0.4.0/property_ids_draft.yaml b/libcamera/versioned_files/0.4.0/property_ids_draft.yaml deleted file mode 100644 index 62f0e24..0000000 --- a/libcamera/versioned_files/0.4.0/property_ids_draft.yaml +++ /dev/null @@ -1,39 +0,0 @@ -# SPDX-License-Identifier: LGPL-2.1-or-later -# -# Copyright (C) 2019, Google Inc. -# -%YAML 1.1 ---- -vendor: draft -controls: - - ColorFilterArrangement: - type: int32_t - vendor: draft - description: | - The arrangement of color filters on sensor; represents the colors in the - top-left 2x2 section of the sensor, in reading order. Currently - identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. - enum: - - name: RGGB - value: 0 - description: RGGB Bayer pattern - - name: GRBG - value: 1 - description: GRBG Bayer pattern - - name: GBRG - value: 2 - description: GBRG Bayer pattern - - name: BGGR - value: 3 - description: BGGR Bayer pattern - - name: RGB - value: 4 - description: | - Sensor is not Bayer; output has 3 16-bit values for each pixel, - instead of just 1 16-bit value per pixel. - - name: MONO - value: 5 - description: | - Sensor is not Bayer; output consists of a single colour channel. - -... 
From a08bd85dbf72acecd6274b3109f644720e8d1a1e Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Fri, 30 May 2025 20:43:06 -0700 Subject: [PATCH 10/14] Corrected lifetimes --- libcamera/src/camera.rs | 2 +- libcamera/src/camera_manager.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/libcamera/src/camera.rs b/libcamera/src/camera.rs index 96eb004..5b15728 100644 --- a/libcamera/src/camera.rs +++ b/libcamera/src/camera.rs @@ -130,7 +130,7 @@ pub struct Camera<'d> { _phantom: PhantomData<&'d ()>, } -impl Camera<'_> { +impl<'d> Camera<'d> { pub(crate) unsafe fn from_ptr(ptr: NonNull) -> Self { Self { ptr, diff --git a/libcamera/src/camera_manager.rs b/libcamera/src/camera_manager.rs index 463c2dc..de466c0 100644 --- a/libcamera/src/camera_manager.rs +++ b/libcamera/src/camera_manager.rs @@ -72,7 +72,7 @@ pub struct CameraList<'d> { _phantom: PhantomData<&'d ()>, } -impl CameraList<'_> { +impl<'d> CameraList<'d> { pub(crate) unsafe fn from_ptr(ptr: NonNull) -> Self { Self { ptr, From ec4916d0f549730cf2bec837e61e8d95c7129c2b Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Fri, 30 May 2025 20:43:19 -0700 Subject: [PATCH 11/14] Missing control values implamented --- libcamera/src/control_value.rs | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/libcamera/src/control_value.rs b/libcamera/src/control_value.rs index b4f8b08..e4a909b 100644 --- a/libcamera/src/control_value.rs +++ b/libcamera/src/control_value.rs @@ -28,6 +28,8 @@ pub enum ControlValue { None, Bool(SmallVec<[bool; 1]>), Byte(SmallVec<[u8; 1]>), + Uint16(SmallVec<[u16; 1]>), + Uint32(SmallVec<[u32; 1]>), Int32(SmallVec<[i32; 1]>), Int64(SmallVec<[i64; 1]>), Float(SmallVec<[f32; 1]>), @@ -74,11 +76,14 @@ macro_rules! 
impl_control_value { impl_control_value!(ControlValue::Bool, bool); impl_control_value!(ControlValue::Byte, u8); +impl_control_value!(ControlValue::Uint16, u16); +impl_control_value!(ControlValue::Uint32, u32); impl_control_value!(ControlValue::Int32, i32); impl_control_value!(ControlValue::Int64, i64); impl_control_value!(ControlValue::Float, f32); impl_control_value!(ControlValue::Rectangle, Rectangle); impl_control_value!(ControlValue::Size, Size); +impl_control_value!(ControlValue::Point, Point); macro_rules! impl_control_value_vec { ($p:path, $type:ty) => { @@ -107,6 +112,8 @@ macro_rules! impl_control_value_vec { impl_control_value_vec!(ControlValue::Bool, bool); impl_control_value_vec!(ControlValue::Byte, u8); +impl_control_value_vec!(ControlValue::Uint16, u16); +impl_control_value_vec!(ControlValue::Uint32, u32); impl_control_value_vec!(ControlValue::Int32, i32); impl_control_value_vec!(ControlValue::Int64, i64); impl_control_value_vec!(ControlValue::Float, f32); @@ -182,11 +189,14 @@ macro_rules! 
impl_control_value_array { impl_control_value_array!(ControlValue::Bool, bool); impl_control_value_array!(ControlValue::Byte, u8); +impl_control_value_array!(ControlValue::Uint16, u16); +impl_control_value_array!(ControlValue::Uint32, u32); impl_control_value_array!(ControlValue::Int32, i32); impl_control_value_array!(ControlValue::Int64, i64); impl_control_value_array!(ControlValue::Float, f32); impl_control_value_array!(ControlValue::Rectangle, Rectangle); impl_control_value_array!(ControlValue::Size, Size); +impl_control_value_array!(ControlValue::Point, Point); impl From for ControlValue { fn from(val: String) -> Self { @@ -225,6 +235,14 @@ impl ControlValue { let slice = core::slice::from_raw_parts(data as *const u8, num_elements); Ok(Self::Byte(SmallVec::from_slice(slice))) } + LIBCAMERA_CONTROL_TYPE_UINT16 => { + let slice = core::slice::from_raw_parts(data as *const u16, num_elements); + Ok(Self::Uint16(SmallVec::from_slice(slice))) + } + LIBCAMERA_CONTROL_TYPE_UINT32 => { + let slice = core::slice::from_raw_parts(data as *const u32, num_elements); + Ok(Self::Uint32(SmallVec::from_slice(slice))) + } LIBCAMERA_CONTROL_TYPE_INT32 => { let slice = core::slice::from_raw_parts(data as *const i32, num_elements); Ok(Self::Int32(SmallVec::from_slice(slice))) @@ -251,6 +269,10 @@ impl ControlValue { let slice = core::slice::from_raw_parts(data as *const libcamera_size_t, num_elements); Ok(Self::Size(SmallVec::from_iter(slice.iter().map(|r| Size::from(*r))))) } + LIBCAMERA_CONTROL_TYPE_POINT=> { + let slice = core::slice::from_raw_parts(data as *const libcamera_point_t, num_elements); + Ok(Self::Point(SmallVec::from_iter(slice.iter().map(|r| Point::from(*r))))) + } _ => Err(ControlValueError::UnknownType(ty)), } } @@ -260,6 +282,8 @@ impl ControlValue { ControlValue::None => (core::ptr::null(), 0), ControlValue::Bool(v) => (v.as_ptr().cast(), v.len()), ControlValue::Byte(v) => (v.as_ptr().cast(), v.len()), + ControlValue::Uint16(v) => (v.as_ptr().cast(), v.len()), + 
ControlValue::Uint32(v) => (v.as_ptr().cast(), v.len()), ControlValue::Int32(v) => (v.as_ptr().cast(), v.len()), ControlValue::Int64(v) => (v.as_ptr().cast(), v.len()), ControlValue::Float(v) => (v.as_ptr().cast(), v.len()), @@ -267,6 +291,7 @@ impl ControlValue { ControlValue::Rectangle(v) => (v.as_ptr().cast(), v.len()), ControlValue::Size(v) => (v.as_ptr().cast(), v.len()), ControlValue::Point(v) => (v.as_ptr().cast(), v.len()), + }; let ty = self.ty(); @@ -285,6 +310,8 @@ impl ControlValue { ControlValue::None => LIBCAMERA_CONTROL_TYPE_NONE, ControlValue::Bool(_) => LIBCAMERA_CONTROL_TYPE_BOOL, ControlValue::Byte(_) => LIBCAMERA_CONTROL_TYPE_BYTE, + ControlValue::Uint16(_) => LIBCAMERA_CONTROL_TYPE_UINT16, + ControlValue::Uint32(_) => LIBCAMERA_CONTROL_TYPE_UINT32, ControlValue::Int32(_) => LIBCAMERA_CONTROL_TYPE_INT32, ControlValue::Int64(_) => LIBCAMERA_CONTROL_TYPE_INT64, ControlValue::Float(_) => LIBCAMERA_CONTROL_TYPE_FLOAT, From e9b6c2b799066523848a08853ef5fab8cc219830 Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Fri, 30 May 2025 20:50:27 -0700 Subject: [PATCH 12/14] cargo format and clippy - ran a short test and should be ready to merge --- libcamera/src/control.rs | 14 +++++--------- libcamera/src/control_value.rs | 3 +-- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index f14a90f..3265db2 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -400,8 +400,12 @@ impl<'a> ControlInfoMapIter<'a> { } } } +} + +impl<'a> Iterator for ControlInfoMapIter<'a> { + type Item = (u32, &'a ControlInfo); - pub fn next(&mut self) -> Option<(u32, &'a ControlInfo)> { + fn next(&mut self) -> Option { unsafe { if libcamera_control_info_map_iter_has_next(self.iter) { let key = libcamera_control_info_map_iter_key(self.iter); @@ -420,14 +424,6 @@ impl<'a> ControlInfoMapIter<'a> { } } -impl<'a> Iterator for ControlInfoMapIter<'a> { - type Item = (u32, 
&'a ControlInfo); - - fn next(&mut self) -> Option { - self.next() - } -} - impl<'a> Drop for ControlInfoMapIter<'a> { fn drop(&mut self) { unsafe { diff --git a/libcamera/src/control_value.rs b/libcamera/src/control_value.rs index e4a909b..3c4f643 100644 --- a/libcamera/src/control_value.rs +++ b/libcamera/src/control_value.rs @@ -269,7 +269,7 @@ impl ControlValue { let slice = core::slice::from_raw_parts(data as *const libcamera_size_t, num_elements); Ok(Self::Size(SmallVec::from_iter(slice.iter().map(|r| Size::from(*r))))) } - LIBCAMERA_CONTROL_TYPE_POINT=> { + LIBCAMERA_CONTROL_TYPE_POINT => { let slice = core::slice::from_raw_parts(data as *const libcamera_point_t, num_elements); Ok(Self::Point(SmallVec::from_iter(slice.iter().map(|r| Point::from(*r))))) } @@ -291,7 +291,6 @@ impl ControlValue { ControlValue::Rectangle(v) => (v.as_ptr().cast(), v.len()), ControlValue::Size(v) => (v.as_ptr().cast(), v.len()), ControlValue::Point(v) => (v.as_ptr().cast(), v.len()), - }; let ty = self.ty(); From cedb2670017bcf8aa875de21f30a6dff31cff54f Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Mon, 2 Jun 2025 18:10:44 -0700 Subject: [PATCH 13/14] updated examples and new examples. 
impl debug for ControlInfoMap and ControlInfo --- libcamera/examples/cameras_iter.rs | 20 ++++++++++++++++ libcamera/examples/list_cameras.rs | 1 + libcamera/examples/read_control.rs | 38 ++++++++++++++++++++++++++++++ libcamera/src/control.rs | 24 +++++++++++++++++++ 4 files changed, 83 insertions(+) create mode 100644 libcamera/examples/cameras_iter.rs create mode 100644 libcamera/examples/read_control.rs diff --git a/libcamera/examples/cameras_iter.rs b/libcamera/examples/cameras_iter.rs new file mode 100644 index 0000000..d4c8516 --- /dev/null +++ b/libcamera/examples/cameras_iter.rs @@ -0,0 +1,20 @@ +use libcamera::{camera_manager::CameraManager, logging::LoggingLevel, stream::StreamRole}; + +fn main() { + let mgr = CameraManager::new().unwrap(); + + mgr.log_set_level("Camera", LoggingLevel::Error); + + let cameras = mgr.cameras(); + + for cam in cameras.iter() { + println!("ID: {}", cam.id()); + + println!("Properties: {:#?}", cam.properties()); + println!("Controls: {:#?}", cam.controls()); + + let config = cam.generate_configuration(&[StreamRole::ViewFinder]).unwrap(); + let view_finder_cfg = config.get(0).unwrap(); + println!("Available formats: {:#?}", view_finder_cfg.formats()); + } +} diff --git a/libcamera/examples/list_cameras.rs b/libcamera/examples/list_cameras.rs index 35a9867..5fe1330 100644 --- a/libcamera/examples/list_cameras.rs +++ b/libcamera/examples/list_cameras.rs @@ -13,6 +13,7 @@ fn main() { println!("ID: {}", cam.id()); println!("Properties: {:#?}", cam.properties()); + println!("Controls: {:#?}", cam.controls()); let config = cam.generate_configuration(&[StreamRole::ViewFinder]).unwrap(); let view_finder_cfg = config.get(0).unwrap(); diff --git a/libcamera/examples/read_control.rs b/libcamera/examples/read_control.rs new file mode 100644 index 0000000..60dcd78 --- /dev/null +++ b/libcamera/examples/read_control.rs @@ -0,0 +1,38 @@ +use libcamera::{camera_manager::CameraManager, controls::ControlId, logging::LoggingLevel}; + +fn main() 
{ + let mgr = CameraManager::new().unwrap(); + + mgr.log_set_level("Camera", LoggingLevel::Error); + + let cameras = mgr.cameras(); + + //Grab the first camera, if one exists + for cam in cameras.iter().take(1) { + println!("ID: {}", cam.id()); + + //Read the first ControlInfo + for (id, control_info) in cam.controls().into_iter().take(1) { + //Attempt to get ControlID + match ControlId::try_from(id) { + Ok(id) => println!("Control Id {:?} - {:?}", id as u32, id.name()), + Err(_) => println!("Control Id {:?} - UNKOWN", id), + } + + println!("Control Max: {:?}", control_info.max()); + println!("Control Min: {:?}", control_info.min()); + println!("Control Defualt: {:?}", control_info.def()); + + let values = control_info.values(); + + //Some controls only support specific values within their ranges. + //this will display those possible values if they exist + if values.len() > 0 { + println!("Supported Values:"); + for value in values { + println!("{:?}", value); + } + } + } + } +} diff --git a/libcamera/src/control.rs b/libcamera/src/control.rs index 3265db2..a91573d 100644 --- a/libcamera/src/control.rs +++ b/libcamera/src/control.rs @@ -118,6 +118,17 @@ impl ControlInfo { } } +impl core::fmt::Debug for ControlInfo { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("ControlInfo") + .field("min", &self.min()) + .field("max", &self.max()) + .field("def", &self.def()) + .field("values", &self.values()) + .finish() + } +} + #[repr(transparent)] pub struct ControlInfoMap(libcamera_control_info_map_t); @@ -171,6 +182,19 @@ impl<'a> IntoIterator for &'a ControlInfoMap { } } +impl core::fmt::Debug for ControlInfoMap { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut dm = f.debug_map(); + for (key, value) in self.into_iter() { + match ControlId::try_from(key) { + Ok(id) => dm.entry(&id, value), + Err(_) => dm.entry(&key, value), + }; + } + dm.finish() + } +} + #[repr(transparent)] pub struct 
ControlList(libcamera_control_list_t); From c5830ddef51944d9b38906a147edfbe4ce91771f Mon Sep 17 00:00:00 2001 From: Mathias <70431366+SoZ0@users.noreply.github.com> Date: Mon, 2 Jun 2025 18:16:10 -0700 Subject: [PATCH 14/14] regnerate and example fix --- libcamera/examples/read_control.rs | 2 +- libcamera/versioned_files/0.5.0/controls.rs | 16 +- .../0.5.1/control_ids_core.yaml | 1271 ++++++ .../0.5.1/control_ids_debug.yaml | 6 + .../0.5.1/control_ids_draft.yaml | 297 ++ .../0.5.1/control_ids_rpi.yaml | 74 + libcamera/versioned_files/0.5.1/controls.rs | 3675 +++++++++++++++++ libcamera/versioned_files/0.5.1/properties.rs | 1777 ++++++++ .../0.5.1/property_ids_core.yaml | 704 ++++ .../0.5.1/property_ids_draft.yaml | 39 + 10 files changed, 7859 insertions(+), 2 deletions(-) create mode 100644 libcamera/versioned_files/0.5.1/control_ids_core.yaml create mode 100644 libcamera/versioned_files/0.5.1/control_ids_debug.yaml create mode 100644 libcamera/versioned_files/0.5.1/control_ids_draft.yaml create mode 100644 libcamera/versioned_files/0.5.1/control_ids_rpi.yaml create mode 100644 libcamera/versioned_files/0.5.1/controls.rs create mode 100644 libcamera/versioned_files/0.5.1/properties.rs create mode 100644 libcamera/versioned_files/0.5.1/property_ids_core.yaml create mode 100644 libcamera/versioned_files/0.5.1/property_ids_draft.yaml diff --git a/libcamera/examples/read_control.rs b/libcamera/examples/read_control.rs index 60dcd78..f6f1022 100644 --- a/libcamera/examples/read_control.rs +++ b/libcamera/examples/read_control.rs @@ -15,7 +15,7 @@ fn main() { for (id, control_info) in cam.controls().into_iter().take(1) { //Attempt to get ControlID match ControlId::try_from(id) { - Ok(id) => println!("Control Id {:?} - {:?}", id as u32, id.name()), + Ok(id) => println!("Control Id {:?} - {:?}", id as u32, id), Err(_) => println!("Control Id {:?} - UNKOWN", id), } diff --git a/libcamera/versioned_files/0.5.0/controls.rs b/libcamera/versioned_files/0.5.0/controls.rs index 
1d94a86..9b2dc07 100644 --- a/libcamera/versioned_files/0.5.0/controls.rs +++ b/libcamera/versioned_files/0.5.0/controls.rs @@ -1,4 +1,4 @@ -use std::ops::{Deref, DerefMut}; +use std::{ffi::CStr, ops::{Deref, DerefMut}}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[allow(unused_imports)] use crate::control::{Control, Property, ControlEntry, DynControlEntry}; @@ -766,6 +766,20 @@ pub enum ControlId { #[cfg(feature = "vendor_rpi")] PispStatsOutput = PISP_STATS_OUTPUT, } +impl ControlId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_control_name_from_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} /// Enable or disable the AEGC algorithm. When this control is set to true, /// both ExposureTimeMode and AnalogueGainMode are set to auto, and if this /// control is set to false then both are set to manual. diff --git a/libcamera/versioned_files/0.5.1/control_ids_core.yaml b/libcamera/versioned_files/0.5.1/control_ids_core.yaml new file mode 100644 index 0000000..aa74486 --- /dev/null +++ b/libcamera/versioned_files/0.5.1/control_ids_core.yaml @@ -0,0 +1,1271 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +# Unless otherwise stated, all controls are bi-directional, i.e. they can be +# set through Request::controls() and returned out through Request::metadata(). +vendor: libcamera +controls: + - AeEnable: + type: bool + direction: in + description: | + Enable or disable the AEGC algorithm. When this control is set to true, + both ExposureTimeMode and AnalogueGainMode are set to auto, and if this + control is set to false then both are set to manual. + + If ExposureTimeMode or AnalogueGainMode are also set in the same + request as AeEnable, then the modes supplied by ExposureTimeMode or + AnalogueGainMode will take precedence. 
+ + \sa ExposureTimeMode AnalogueGainMode + + - AeState: + type: int32_t + direction: out + description: | + Report the AEGC algorithm state. + + The AEGC algorithm computes the exposure time and the analogue gain + to be applied to the image sensor. + + The AEGC algorithm behaviour is controlled by the ExposureTimeMode and + AnalogueGainMode controls, which allow applications to decide how + the exposure time and gain are computed, in Auto or Manual mode, + independently from one another. + + The AeState control reports the AEGC algorithm state through a single + value and describes it as a single computation block which computes + both the exposure time and the analogue gain values. + + When both the exposure time and analogue gain values are configured to + be in Manual mode, the AEGC algorithm is quiescent and does not actively + compute any value and the AeState control will report AeStateIdle. + + When at least the exposure time or analogue gain are configured to be + computed by the AEGC algorithm, the AeState control will report if the + algorithm has converged to stable values for all of the controls set + to be computed in Auto mode. + + \sa AnalogueGainMode + \sa ExposureTimeMode + + enum: + - name: AeStateIdle + value: 0 + description: | + The AEGC algorithm is inactive. + + This state is returned when both AnalogueGainMode and + ExposureTimeMode are set to Manual and the algorithm is not + actively computing any value. + - name: AeStateSearching + value: 1 + description: | + The AEGC algorithm is actively computing new values, for either the + exposure time or the analogue gain, but has not converged to a + stable result yet. + + This state is returned if at least one of AnalogueGainMode or + ExposureTimeMode is auto and the algorithm hasn't converged yet. + + The AEGC algorithm converges once stable values are computed for + all of the controls set to be computed in Auto mode. Once the + algorithm converges the state is moved to AeStateConverged. 
+ - name: AeStateConverged + value: 2 + description: | + The AEGC algorithm has converged. + + This state is returned if at least one of AnalogueGainMode or + ExposureTimeMode is Auto, and the AEGC algorithm has converged to a + stable value. + + If the measurements move too far away from the convergence point + then the AEGC algorithm might start adjusting again, in which case + the state is moved to AeStateSearching. + + # AeMeteringMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AeMeteringMode: + type: int32_t + direction: inout + description: | + Specify a metering mode for the AE algorithm to use. + + The metering modes determine which parts of the image are used to + determine the scene brightness. Metering modes may be platform specific + and not all metering modes may be supported. + enum: + - name: MeteringCentreWeighted + value: 0 + description: Centre-weighted metering mode. + - name: MeteringSpot + value: 1 + description: Spot metering mode. + - name: MeteringMatrix + value: 2 + description: Matrix metering mode. + - name: MeteringCustom + value: 3 + description: Custom metering mode. + + # AeConstraintMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AeConstraintMode: + type: int32_t + direction: inout + description: | + Specify a constraint mode for the AE algorithm to use. + + The constraint modes determine how the measured scene brightness is + adjusted to reach the desired target exposure. Constraint modes may be + platform specific, and not all constraint modes may be supported. + enum: + - name: ConstraintNormal + value: 0 + description: | + Default constraint mode. + + This mode aims to balance the exposure of different parts of the + image so as to reach a reasonable average level. However, highlights + in the image may appear over-exposed and lowlights may appear + under-exposed. 
+ - name: ConstraintHighlight + value: 1 + description: | + Highlight constraint mode. + + This mode adjusts the exposure levels in order to try and avoid + over-exposing the brightest parts (highlights) of an image. + Other non-highlight parts of the image may appear under-exposed. + - name: ConstraintShadows + value: 2 + description: | + Shadows constraint mode. + + This mode adjusts the exposure levels in order to try and avoid + under-exposing the dark parts (shadows) of an image. Other normally + exposed parts of the image may appear over-exposed. + - name: ConstraintCustom + value: 3 + description: | + Custom constraint mode. + + # AeExposureMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AeExposureMode: + type: int32_t + direction: inout + description: | + Specify an exposure mode for the AE algorithm to use. + + The exposure modes specify how the desired total exposure is divided + between the exposure time and the sensor's analogue gain. They are + platform specific, and not all exposure modes may be supported. + + When one of AnalogueGainMode or ExposureTimeMode is set to Manual, + the fixed values will override any choices made by AeExposureMode. + + \sa AnalogueGainMode + \sa ExposureTimeMode + + enum: + - name: ExposureNormal + value: 0 + description: Default exposure mode. + - name: ExposureShort + value: 1 + description: Exposure mode allowing only short exposure times. + - name: ExposureLong + value: 2 + description: Exposure mode allowing long exposure times. + - name: ExposureCustom + value: 3 + description: Custom exposure mode. + + - ExposureValue: + type: float + direction: inout + description: | + Specify an Exposure Value (EV) parameter. + + The EV parameter will only be applied if the AE algorithm is currently + enabled, that is, at least one of AnalogueGainMode and ExposureTimeMode + are in Auto mode. + + By convention EV adjusts the exposure as log2. 
For example + EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment + of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. + + \sa AnalogueGainMode + \sa ExposureTimeMode + + - ExposureTime: + type: int32_t + direction: inout + description: | + Exposure time for the frame applied in the sensor device. + + This value is specified in micro-seconds. + + This control will only take effect if ExposureTimeMode is Manual. If + this control is set when ExposureTimeMode is Auto, the value will be + ignored and will not be retained. + + When reported in metadata, this control indicates what exposure time + was used for the current frame, regardless of ExposureTimeMode. + ExposureTimeMode will indicate the source of the exposure time value, + whether it came from the AE algorithm or not. + + \sa AnalogueGain + \sa ExposureTimeMode + + - ExposureTimeMode: + type: int32_t + direction: inout + description: | + Controls the source of the exposure time that is applied to the image + sensor. + + When set to Auto, the AE algorithm computes the exposure time and + configures the image sensor accordingly. When set to Manual, the value + of the ExposureTime control is used. + + When transitioning from Auto to Manual mode and no ExposureTime control + is provided by the application, the last value computed by the AE + algorithm when the mode was Auto will be used. If the ExposureTimeMode + was never set to Auto (either because the camera started in Manual mode, + or Auto is not supported by the camera), the camera should use a + best-effort default value. + + If ExposureTimeModeManual is supported, the ExposureTime control must + also be supported. + + Cameras that support manual control of the sensor shall support manual + mode for both ExposureTimeMode and AnalogueGainMode, and shall expose + the ExposureTime and AnalogueGain controls. If the camera also has an + AEGC implementation, both ExposureTimeMode and AnalogueGainMode shall + support both manual and auto mode. 
If auto mode is available, it shall + be the default mode. These rules do not apply to black box cameras + such as UVC cameras, where the available gain and exposure modes are + completely dependent on what the device exposes. + + \par Flickerless exposure mode transitions + + Applications that wish to transition from ExposureTimeModeAuto to direct + control of the exposure time without causing extra flicker can do so by + selecting an ExposureTime value as close as possible to the last value + computed by the auto exposure algorithm in order to avoid any visible + flickering. + + To select the correct value to use as ExposureTime value, applications + should accommodate the natural delay in applying controls caused by the + capture pipeline frame depth. + + When switching to manual exposure mode, applications should not + immediately specify an ExposureTime value in the same request where + ExposureTimeMode is set to Manual. They should instead wait for the + first Request where ExposureTimeMode is reported as + ExposureTimeModeManual in the Request metadata, and use the reported + ExposureTime to populate the control value in the next Request to be + queued to the Camera. + + The implementation of the auto-exposure algorithm should equally try to + minimize flickering and when transitioning from manual exposure mode to + auto exposure use the last value provided by the application as starting + point. + + 1. Start with ExposureTimeMode set to Auto + + 2. Set ExposureTimeMode to Manual + + 3. Wait for the first completed request that has ExposureTimeMode + set to Manual + + 4. Copy the value reported in ExposureTime into a new request, and + submit it + + 5. Proceed to run manual exposure time as desired + + \sa ExposureTime + enum: + - name: ExposureTimeModeAuto + value: 0 + description: | + The exposure time will be calculated automatically and set by the + AE algorithm. 
+ + If ExposureTime is set while this mode is active, it will be + ignored, and its value will not be retained. + + When transitioning from Manual to Auto mode, the AEGC should start + its adjustments based on the last set manual ExposureTime value. + - name: ExposureTimeModeManual + value: 1 + description: | + The exposure time will not be updated by the AE algorithm. + + When transitioning from Auto to Manual mode, the last computed + exposure value is used until a new value is specified through the + ExposureTime control. If an ExposureTime value is specified in the + same request where the ExposureTimeMode is changed from Auto to + Manual, the provided ExposureTime is applied immediately. + + - AnalogueGain: + type: float + direction: inout + description: | + Analogue gain value applied in the sensor device. + + The value of the control specifies the gain multiplier applied to all + colour channels. This value cannot be lower than 1.0. + + This control will only take effect if AnalogueGainMode is Manual. If + this control is set when AnalogueGainMode is Auto, the value will be + ignored and will not be retained. + + When reported in metadata, this control indicates what analogue gain + was used for the current request, regardless of AnalogueGainMode. + AnalogueGainMode will indicate the source of the analogue gain value, + whether it came from the AEGC algorithm or not. + + \sa ExposureTime + \sa AnalogueGainMode + + - AnalogueGainMode: + type: int32_t + direction: inout + description: | + Controls the source of the analogue gain that is applied to the image + sensor. + + When set to Auto, the AEGC algorithm computes the analogue gain and + configures the image sensor accordingly. When set to Manual, the value + of the AnalogueGain control is used. + + When transitioning from Auto to Manual mode and no AnalogueGain control + is provided by the application, the last value computed by the AEGC + algorithm when the mode was Auto will be used. 
If the AnalogueGainMode + was never set to Auto (either because the camera started in Manual mode, + or Auto is not supported by the camera), the camera should use a + best-effort default value. + + If AnalogueGainModeManual is supported, the AnalogueGain control must + also be supported. + + For cameras where we have control over the ISP, both ExposureTimeMode + and AnalogueGainMode are expected to support manual mode, and both + controls (as well as ExposureTimeMode and AnalogueGain) are expected to + be present. If the camera also has an AEGC implementation, both + ExposureTimeMode and AnalogueGainMode shall support both manual and + auto mode. If auto mode is available, it shall be the default mode. + These rules do not apply to black box cameras such as UVC cameras, + where the available gain and exposure modes are completely dependent on + what the hardware exposes. + + The same procedure described for performing flickerless transitions in + the ExposureTimeMode control documentation can be applied to analogue + gain. + + \sa ExposureTimeMode + \sa AnalogueGain + enum: + - name: AnalogueGainModeAuto + value: 0 + description: | + The analogue gain will be calculated automatically and set by the + AEGC algorithm. + + If AnalogueGain is set while this mode is active, it will be + ignored, and it will also not be retained. + + When transitioning from Manual to Auto mode, the AEGC should start + its adjustments based on the last set manual AnalogueGain value. + - name: AnalogueGainModeManual + value: 1 + description: | + The analogue gain will not be updated by the AEGC algorithm. + + When transitioning from Auto to Manual mode, the last computed + gain value is used until a new value is specified through the + AnalogueGain control. If an AnalogueGain value is specified in the + same request where the AnalogueGainMode is changed from Auto to + Manual, the provided AnalogueGain is applied immediately. 
+ + - AeFlickerMode: + type: int32_t + direction: inout + description: | + Set the flicker avoidance mode for AGC/AEC. + + The flicker mode determines whether, and how, the AGC/AEC algorithm + attempts to hide flicker effects caused by the duty cycle of artificial + lighting. + + Although implementation dependent, many algorithms for "flicker + avoidance" work by restricting this exposure time to integer multiples + of the cycle period, wherever possible. + + Implementations may not support all of the flicker modes listed below. + + By default the system will start in FlickerAuto mode if this is + supported, otherwise the flicker mode will be set to FlickerOff. + + enum: + - name: FlickerOff + value: 0 + description: | + No flicker avoidance is performed. + - name: FlickerManual + value: 1 + description: | + Manual flicker avoidance. + + Suppress flicker effects caused by lighting running with a period + specified by the AeFlickerPeriod control. + \sa AeFlickerPeriod + - name: FlickerAuto + value: 2 + description: | + Automatic flicker period detection and avoidance. + + The system will automatically determine the most likely value of + flicker period, and avoid flicker of this frequency. Once flicker + is being corrected, it is implementation dependent whether the + system is still able to detect a change in the flicker period. + \sa AeFlickerDetected + + - AeFlickerPeriod: + type: int32_t + direction: inout + description: | + Manual flicker period in microseconds. + + This value sets the current flicker period to avoid. It is used when + AeFlickerMode is set to FlickerManual. + + To cancel 50Hz mains flicker, this should be set to 10000 (corresponding + to 100Hz), or 8333 (120Hz) for 60Hz mains. + + Setting the mode to FlickerManual when no AeFlickerPeriod has ever been + set means that no flicker cancellation occurs (until the value of this + control is updated). 
+ + Switching to modes other than FlickerManual has no effect on the + value of the AeFlickerPeriod control. + + \sa AeFlickerMode + + - AeFlickerDetected: + type: int32_t + direction: out + description: | + Flicker period detected in microseconds. + + The value reported here indicates the currently detected flicker + period, or zero if no flicker at all is detected. + + When AeFlickerMode is set to FlickerAuto, there may be a period during + which the value reported here remains zero. Once a non-zero value is + reported, then this is the flicker period that has been detected and is + now being cancelled. + + In the case of 50Hz mains flicker, the value would be 10000 + (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. + + It is implementation dependent whether the system can continue to detect + flicker of different periods when another frequency is already being + cancelled. + + \sa AeFlickerMode + + - Brightness: + type: float + direction: inout + description: | + Specify a fixed brightness parameter. + + Positive values (up to 1.0) produce brighter images; negative values + (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. + + - Contrast: + type: float + direction: inout + description: | + Specify a fixed contrast parameter. + + Normal contrast is given by the value 1.0; larger values produce images + with more contrast. + + - Lux: + type: float + direction: out + description: | + Report an estimate of the current illuminance level in lux. + + The Lux control can only be returned in metadata. + + - AwbEnable: + type: bool + direction: inout + description: | + Enable or disable the AWB. + + When AWB is enabled, the algorithm estimates the colour temperature of + the scene and computes colour gains and the colour correction matrix + automatically. The computed colour temperature, gains and correction + matrix are reported in metadata. The corresponding controls are ignored + if set in a request. 
+ + When AWB is disabled, the colour temperature, gains and correction + matrix are not updated automatically and can be set manually in + requests. + + \sa ColourCorrectionMatrix + \sa ColourGains + \sa ColourTemperature + + # AwbMode needs further attention: + # - Auto-generate max enum value. + # - Better handling of custom types. + - AwbMode: + type: int32_t + direction: inout + description: | + Specify the range of illuminants to use for the AWB algorithm. + + The modes supported are platform specific, and not all modes may be + supported. + enum: + - name: AwbAuto + value: 0 + description: Search over the whole colour temperature range. + - name: AwbIncandescent + value: 1 + description: Incandescent AWB lamp mode. + - name: AwbTungsten + value: 2 + description: Tungsten AWB lamp mode. + - name: AwbFluorescent + value: 3 + description: Fluorescent AWB lamp mode. + - name: AwbIndoor + value: 4 + description: Indoor AWB lighting mode. + - name: AwbDaylight + value: 5 + description: Daylight AWB lighting mode. + - name: AwbCloudy + value: 6 + description: Cloudy AWB lighting mode. + - name: AwbCustom + value: 7 + description: Custom AWB mode. + + - AwbLocked: + type: bool + direction: out + description: | + Report the lock status of a running AWB algorithm. + + If the AWB algorithm is locked the value shall be set to true, if it's + converging it shall be set to false. If the AWB algorithm is not + running the control shall not be present in the metadata control list. + + \sa AwbEnable + + - ColourGains: + type: float + direction: inout + description: | + Pair of gain values for the Red and Blue colour channels, in that + order. + + ColourGains can only be applied in a Request when the AWB is disabled. + If ColourGains is set in a request but ColourTemperature is not, the + implementation shall calculate and set the ColourTemperature based on + the ColourGains. 
+ + \sa AwbEnable + \sa ColourTemperature + size: [2] + + - ColourTemperature: + type: int32_t + direction: out + description: | + ColourTemperature of the frame, in kelvin. + + ColourTemperature can only be applied in a Request when the AWB is + disabled. + + If ColourTemperature is set in a request but ColourGains is not, the + implementation shall calculate and set the ColourGains based on the + given ColourTemperature. If ColourTemperature is set (either directly, + or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, + the ColourCorrectionMatrix is updated based on the ColourTemperature. + + The ColourTemperature used to process the frame is reported in metadata. + + \sa AwbEnable + \sa ColourCorrectionMatrix + \sa ColourGains + + - Saturation: + type: float + direction: inout + description: | + Specify a fixed saturation parameter. + + Normal saturation is given by the value 1.0; larger values produce more + saturated colours; 0.0 produces a greyscale image. + + - SensorBlackLevels: + type: int32_t + direction: out + description: | + Reports the sensor black levels used for processing a frame. + + The values are in the order R, Gr, Gb, B. They are returned as numbers + out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The + SensorBlackLevels control can only be returned in metadata. + size: [4] + + - Sharpness: + type: float + direction: inout + description: | + Intensity of the sharpening applied to the image. + + A value of 0.0 means no sharpening. The minimum value means + minimal sharpening, and shall be 0.0 unless the camera can't + disable sharpening completely. The default value shall give a + "reasonable" level of sharpening, suitable for most use cases. + The maximum value may apply extremely high levels of sharpening, + higher than anyone could reasonably want. Negative values are + not allowed. Note also that sharpening is not applied to raw + streams. 
+ + - FocusFoM: + type: int32_t + direction: out + description: | + Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. + + A larger FocusFoM value indicates a more in-focus frame. This singular + value may be based on a combination of statistics gathered from + multiple focus regions within an image. The number of focus regions and + method of combination is platform dependent. In this respect, it is not + necessarily aimed at providing a way to implement a focus algorithm by + the application, rather an indication of how in-focus a frame is. + + - ColourCorrectionMatrix: + type: float + direction: inout + description: | + The 3x3 matrix that converts camera RGB to sRGB within the imaging + pipeline. + + This should describe the matrix that is used after pixels have been + white-balanced, but before any gamma transformation. The 3x3 matrix is + stored in conventional reading order in an array of 9 floating point + values. + + ColourCorrectionMatrix can only be applied in a Request when the AWB is + disabled. + + \sa AwbEnable + \sa ColourTemperature + size: [3,3] + + - ScalerCrop: + type: Rectangle + direction: inout + description: | + Sets the image portion that will be scaled to form the whole of + the final output image. + + The (x,y) location of this rectangle is relative to the + PixelArrayActiveAreas that is being used. The units remain native + sensor pixels, even if the sensor is being used in a binning or + skipping mode. + + This control is only present when the pipeline supports scaling. Its + maximum valid value is given by the properties::ScalerCropMaximum + property, and the two can be used to implement digital zoom. + + - DigitalGain: + type: float + direction: inout + description: | + Digital gain value applied during the processing steps applied + to the image as captured from the sensor. + + The global digital gain factor is applied to all the colour channels + of the RAW image. 
Different pipeline models are free to + specify how the global gain factor applies to each separate + channel. + + If an imaging pipeline applies digital gain in distinct + processing steps, this value indicates their total sum. + Pipelines are free to decide how to adjust each processing + step to respect the received gain factor and shall report + their total value in the request metadata. + + - FrameDuration: + type: int64_t + direction: out + description: | + The instantaneous frame duration from start of frame exposure to start + of next exposure, expressed in microseconds. + + This control is meant to be returned in metadata. + + - FrameDurationLimits: + type: int64_t + direction: inout + description: | + The minimum and maximum (in that order) frame duration, expressed in + microseconds. + + When provided by applications, the control specifies the sensor frame + duration interval the pipeline has to use. This limits the largest + exposure time the sensor can use. For example, if a maximum frame + duration of 33ms is requested (corresponding to 30 frames per second), + the sensor will not be able to raise the exposure time above 33ms. + A fixed frame duration is achieved by setting the minimum and maximum + values to be the same. Setting both values to 0 reverts to using the + camera defaults. + + The maximum frame duration provides the absolute limit to the exposure + time computed by the AE algorithm and it overrides any exposure mode + setting specified with controls::AeExposureMode. Similarly, when a + manual exposure time is set through controls::ExposureTime, it also + gets clipped to the limits set by this control. When reported in + metadata, the control expresses the minimum and maximum frame durations + used after being clipped to the sensor provided frame duration limits. 
+ + \sa AeExposureMode + \sa ExposureTime + + \todo Define how to calculate the capture frame rate by + defining controls to report additional delays introduced by + the capture pipeline or post-processing stages (ie JPEG + conversion, frame scaling). + + \todo Provide an explicit definition of default control values, for + this and all other controls. + + size: [2] + + - SensorTemperature: + type: float + direction: out + description: | + Temperature measure from the camera sensor in Celsius. + + This value is typically obtained by a thermal sensor present on-die or + in the camera module. The range of reported temperatures is device + dependent. + + The SensorTemperature control will only be returned in metadata if a + thermal sensor is present. + + - SensorTimestamp: + type: int64_t + direction: out + description: | + The time when the first row of the image sensor active array is exposed. + + The timestamp, expressed in nanoseconds, represents a monotonically + increasing counter since the system boot time, as defined by the + Linux-specific CLOCK_BOOTTIME clock id. + + The SensorTimestamp control can only be returned in metadata. + + \todo Define how the sensor timestamp has to be used in the reprocessing + use case. + + - AfMode: + type: int32_t + direction: inout + description: | + The mode of the AF (autofocus) algorithm. + + An implementation may choose not to implement all the modes. + + enum: + - name: AfModeManual + value: 0 + description: | + The AF algorithm is in manual mode. + + In this mode it will never perform any action nor move the lens of + its own accord, but an application can specify the desired lens + position using the LensPosition control. The AfState will always + report AfStateIdle. + + If the camera is started in AfModeManual, it will move the focus + lens to the position specified by the LensPosition control. + + This mode is the recommended default value for the AfMode control. 
+ External cameras (as reported by the Location property set to + CameraLocationExternal) may use a different default value. + - name: AfModeAuto + value: 1 + description: | + The AF algorithm is in auto mode. + + In this mode the algorithm will never move the lens or change state + unless the AfTrigger control is used. The AfTrigger control can be + used to initiate a focus scan, the results of which will be + reported by AfState. + + If the autofocus algorithm is moved from AfModeAuto to another mode + while a scan is in progress, the scan is cancelled immediately, + without waiting for the scan to finish. + + When first entering this mode the AfState will report AfStateIdle. + When a trigger control is sent, AfState will report AfStateScanning + for a period before spontaneously changing to AfStateFocused or + AfStateFailed, depending on the outcome of the scan. It will remain + in this state until another scan is initiated by the AfTrigger + control. If a scan is cancelled (without changing to another mode), + AfState will return to AfStateIdle. + - name: AfModeContinuous + value: 2 + description: | + The AF algorithm is in continuous mode. + + In this mode the lens can re-start a scan spontaneously at any + moment, without any user intervention. The AfState still reports + whether the algorithm is currently scanning or not, though the + application has no ability to initiate or cancel scans, nor to move + the lens for itself. + + However, applications can pause the AF algorithm from continuously + scanning by using the AfPause control. This allows video or still + images to be captured whilst guaranteeing that the focus is fixed. + + When set to AfModeContinuous, the system will immediately initiate a + scan so AfState will report AfStateScanning, and will settle on one + of AfStateFocused or AfStateFailed, depending on the scan result. + + - AfRange: + type: int32_t + direction: inout + description: | + The range of focus distances that is scanned. 
+ + An implementation may choose not to implement all the options here. + enum: + - name: AfRangeNormal + value: 0 + description: | + A wide range of focus distances is scanned. + + Scanned distances cover all the way from infinity down to close + distances, though depending on the implementation, possibly not + including the very closest macro positions. + - name: AfRangeMacro + value: 1 + description: | + Only close distances are scanned. + - name: AfRangeFull + value: 2 + description: | + The full range of focus distances is scanned. + + This range is similar to AfRangeNormal but includes the very + closest macro positions. + + - AfSpeed: + type: int32_t + direction: inout + description: | + Determine whether the AF is to move the lens as quickly as possible or + more steadily. + + For example, during video recording it may be desirable not to move the + lens too abruptly, but when in a preview mode (waiting for a still + capture) it may be helpful to move the lens as quickly as is reasonably + possible. + enum: + - name: AfSpeedNormal + value: 0 + description: Move the lens at its usual speed. + - name: AfSpeedFast + value: 1 + description: Move the lens more quickly. + + - AfMetering: + type: int32_t + direction: inout + description: | + The parts of the image used by the AF algorithm to measure focus. + enum: + - name: AfMeteringAuto + value: 0 + description: | + Let the AF algorithm decide for itself where it will measure focus. + - name: AfMeteringWindows + value: 1 + description: | + Use the rectangles defined by the AfWindows control to measure focus. + + If no windows are specified the behaviour is platform dependent. + + - AfWindows: + type: Rectangle + direction: inout + description: | + The focus windows used by the AF algorithm when AfMetering is set to + AfMeteringWindows. + + The units used are pixels within the rectangle returned by the + ScalerCropMaximum property. 
+ + In order to be activated, a rectangle must be programmed with non-zero + width and height. Internally, these rectangles are intersected with the + ScalerCropMaximum rectangle. If the window becomes empty after this + operation, then the window is ignored. If all the windows end up being + ignored, then the behaviour is platform dependent. + + On platforms that support the ScalerCrop control (for implementing + digital zoom, for example), no automatic recalculation or adjustment of + AF windows is performed internally if the ScalerCrop is changed. If any + window lies outside the output image after the scaler crop has been + applied, it is up to the application to recalculate them. + + The details of how the windows are used are platform dependent. We note + that when there is more than one AF window, a typical implementation + might find the optimal focus position for each one and finally select + the window where the focal distance for the objects shown in that part + of the image are closest to the camera. + + size: [n] + + - AfTrigger: + type: int32_t + direction: in + description: | + Start an autofocus scan. + + This control starts an autofocus scan when AfMode is set to AfModeAuto, + and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It + can also be used to terminate a scan early. + + enum: + - name: AfTriggerStart + value: 0 + description: | + Start an AF scan. + + Setting the control to AfTriggerStart is ignored if a scan is in + progress. + - name: AfTriggerCancel + value: 1 + description: | + Cancel an AF scan. + + This does not cause the lens to move anywhere else. Ignored if no + scan is in progress. + + - AfPause: + type: int32_t + direction: in + description: | + Pause lens movements when in continuous autofocus mode. + + This control has no effect except when in continuous autofocus mode + (AfModeContinuous). It can be used to pause any lens movements while + (for example) images are captured. 
The algorithm remains inactive + until it is instructed to resume. + + enum: + - name: AfPauseImmediate + value: 0 + description: | + Pause the continuous autofocus algorithm immediately. + + The autofocus algorithm is paused whether or not any kind of scan + is underway. AfPauseState will subsequently report + AfPauseStatePaused. AfState may report any of AfStateScanning, + AfStateFocused or AfStateFailed, depending on the algorithm's state + when it received this control. + - name: AfPauseDeferred + value: 1 + description: | + Pause the continuous autofocus algorithm at the end of the scan. + + This is similar to AfPauseImmediate, and if the AfState is + currently reporting AfStateFocused or AfStateFailed it will remain + in that state and AfPauseState will report AfPauseStatePaused. + + However, if the algorithm is scanning (AfStateScanning), + AfPauseState will report AfPauseStatePausing until the scan is + finished, at which point AfState will report one of AfStateFocused + or AfStateFailed, and AfPauseState will change to + AfPauseStatePaused. + + - name: AfPauseResume + value: 2 + description: | + Resume continuous autofocus operation. + + The algorithm starts again from exactly where it left off, and + AfPauseState will report AfPauseStateRunning. + + - LensPosition: + type: float + direction: inout + description: | + Set and report the focus lens position. + + This control instructs the lens to move to a particular position and + also reports back the position of the lens for each frame. + + The LensPosition control is ignored unless the AfMode is set to + AfModeManual, though the value is reported back unconditionally in all + modes. + + This value, which is generally a non-integer, is the reciprocal of the + focal distance in metres, also known as dioptres. That is, to set a + focal distance D, the lens position LP is given by + + \f$LP = \frac{1\mathrm{m}}{D}\f$ + + For example: + + - 0 moves the lens to infinity. 
+ - 0.5 moves the lens to focus on objects 2m away. + - 2 moves the lens to focus on objects 50cm away. + - And larger values will focus the lens closer. + + The default value of the control should indicate a good general + position for the lens, often corresponding to the hyperfocal distance + (the closest position for which objects at infinity are still + acceptably sharp). The minimum will often be zero (meaning infinity), + and the maximum value defines the closest focus position. + + \todo Define a property to report the Hyperfocal distance of calibrated + lenses. + + - AfState: + type: int32_t + direction: out + description: | + The current state of the AF algorithm. + + This control reports the current state of the AF algorithm in + conjunction with the reported AfMode value and (in continuous AF mode) + the AfPauseState value. The possible state changes are described below, + though we note the following state transitions that occur when the + AfMode is changed. + + If the AfMode is set to AfModeManual, then the AfState will always + report AfStateIdle (even if the lens is subsequently moved). Changing + to the AfModeManual state does not initiate any lens movement. + + If the AfMode is set to AfModeAuto then the AfState will report + AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent + together then AfState will omit AfStateIdle and move straight to + AfStateScanning (and start a scan). + + If the AfMode is set to AfModeContinuous then the AfState will + initially report AfStateScanning. + + enum: + - name: AfStateIdle + value: 0 + description: | + The AF algorithm is in manual mode (AfModeManual) or in auto mode + (AfModeAuto) and a scan has not yet been triggered, or an + in-progress scan was cancelled. + - name: AfStateScanning + value: 1 + description: | + The AF algorithm is in auto mode (AfModeAuto), and a scan has been + started using the AfTrigger control. 
+
+          The scan can be cancelled by sending AfTriggerCancel at which point
+          the algorithm will either move back to AfStateIdle or, if the scan
+          actually completes before the cancel request is processed, to one
+          of AfStateFocused or AfStateFailed.
+
+          Alternatively the AF algorithm could be in continuous mode
+          (AfModeContinuous) at which point it may enter this state
+          spontaneously whenever it determines that a rescan is needed.
+      - name: AfStateFocused
+        value: 2
+        description: |
+          The AF algorithm is in auto (AfModeAuto) or continuous
+          (AfModeContinuous) mode and a scan has completed with the result
+          that the algorithm believes the image is now in focus.
+      - name: AfStateFailed
+        value: 3
+        description: |
+          The AF algorithm is in auto (AfModeAuto) or continuous
+          (AfModeContinuous) mode and a scan has completed with the result
+          that the algorithm did not find a good focus position.
+
+  - AfPauseState:
+      type: int32_t
+      direction: out
+      description: |
+        Report whether the autofocus is currently running, paused or pausing.
+
+        This control is only applicable in continuous (AfModeContinuous) mode,
+        and reports whether the algorithm is currently running, paused or
+        pausing (that is, will pause as soon as any in-progress scan
+        completes).
+
+        Any change to AfMode will cause AfPauseStateRunning to be reported.
+
+      enum:
+      - name: AfPauseStateRunning
+        value: 0
+        description: |
+          Continuous AF is running and the algorithm may restart a scan
+          spontaneously.
+      - name: AfPauseStatePausing
+        value: 1
+        description: |
+          Continuous AF has been sent an AfPauseDeferred control, and will
+          pause as soon as any in-progress scan completes.
+
+          When the scan completes, the AfPauseState control will report
+          AfPauseStatePaused. No new scans will be started spontaneously until
+          the AfPauseResume control is sent.
+      - name: AfPauseStatePaused
+        value: 2
+        description: |
+          Continuous AF is paused. 
+ + No further state changes or lens movements will occur until the + AfPauseResume control is sent. + + - HdrMode: + type: int32_t + direction: inout + description: | + Set the mode to be used for High Dynamic Range (HDR) imaging. + + HDR techniques typically include multiple exposure, image fusion and + tone mapping techniques to improve the dynamic range of the resulting + images. + + When using an HDR mode, images are captured with different sets of AGC + settings called HDR channels. Channels indicate in particular the type + of exposure (short, medium or long) used to capture the raw image, + before fusion. Each HDR image is tagged with the corresponding channel + using the HdrChannel control. + + \sa HdrChannel + + enum: + - name: HdrModeOff + value: 0 + description: | + HDR is disabled. + + Metadata for this frame will not include the HdrChannel control. + - name: HdrModeMultiExposureUnmerged + value: 1 + description: | + Multiple exposures will be generated in an alternating fashion. + + The multiple exposures will not be merged together and will be + returned to the application as they are. Each image will be tagged + with the correct HDR channel, indicating what kind of exposure it + is. The tag should be the same as in the HdrModeMultiExposure case. + + The expectation is that an application using this mode would merge + the frames to create HDR images for itself if it requires them. + - name: HdrModeMultiExposure + value: 2 + description: | + Multiple exposures will be generated and merged to create HDR + images. + + Each image will be tagged with the HDR channel (long, medium or + short) that arrived and which caused this image to be output. + + Systems that use two channels for HDR will return images tagged + alternately as the short and long channel. Systems that use three + channels for HDR will cycle through the short, medium and long + channel before repeating. 
+ - name: HdrModeSingleExposure + value: 3 + description: | + Multiple frames all at a single exposure will be used to create HDR + images. + + These images should be reported as all corresponding to the HDR + short channel. + - name: HdrModeNight + value: 4 + description: | + Multiple frames will be combined to produce "night mode" images. + + It is up to the implementation exactly which HDR channels it uses, + and the images will all be tagged accordingly with the correct HDR + channel information. + + - HdrChannel: + type: int32_t + direction: out + description: | + The HDR channel used to capture the frame. + + This value is reported back to the application so that it can discover + whether this capture corresponds to the short or long exposure image + (or any other image used by the HDR procedure). An application can + monitor the HDR channel to discover when the differently exposed images + have arrived. + + This metadata is only available when an HDR mode has been enabled. + + \sa HdrMode + + enum: + - name: HdrChannelNone + value: 0 + description: | + This image does not correspond to any of the captures used to create + an HDR image. + - name: HdrChannelShort + value: 1 + description: | + This is a short exposure image. + - name: HdrChannelMedium + value: 2 + description: | + This is a medium exposure image. + - name: HdrChannelLong + value: 3 + description: | + This is a long exposure image. + + - Gamma: + type: float + direction: inout + description: | + Specify a fixed gamma value. + + The default gamma value must be 2.2 which closely mimics sRGB gamma. + Note that this is camera gamma, so it is applied as 1.0/gamma. + + - DebugMetadataEnable: + type: bool + direction: inout + description: | + Enable or disable the debug metadata. + +... 
diff --git a/libcamera/versioned_files/0.5.1/control_ids_debug.yaml b/libcamera/versioned_files/0.5.1/control_ids_debug.yaml new file mode 100644 index 0000000..7975327 --- /dev/null +++ b/libcamera/versioned_files/0.5.1/control_ids_debug.yaml @@ -0,0 +1,6 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +%YAML 1.1 +--- +vendor: debug +controls: [] diff --git a/libcamera/versioned_files/0.5.1/control_ids_draft.yaml b/libcamera/versioned_files/0.5.1/control_ids_draft.yaml new file mode 100644 index 0000000..03309ee --- /dev/null +++ b/libcamera/versioned_files/0.5.1/control_ids_draft.yaml @@ -0,0 +1,297 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +# Unless otherwise stated, all controls are bi-directional, i.e. they can be +# set through Request::controls() and returned out through Request::metadata(). +vendor: draft +controls: + - AePrecaptureTrigger: + type: int32_t + direction: inout + description: | + Control for AE metering trigger. Currently identical to + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. + + Whether the camera device will trigger a precapture metering sequence + when it processes this request. + enum: + - name: AePrecaptureTriggerIdle + value: 0 + description: The trigger is idle. + - name: AePrecaptureTriggerStart + value: 1 + description: The pre-capture AE metering is started by the camera. + - name: AePrecaptureTriggerCancel + value: 2 + description: | + The camera will cancel any active or completed metering sequence. + The AE algorithm is reset to its initial state. + + - NoiseReductionMode: + type: int32_t + direction: inout + description: | + Control to select the noise reduction algorithm mode. Currently + identical to ANDROID_NOISE_REDUCTION_MODE. + + Mode of operation for the noise reduction algorithm. 
+ enum: + - name: NoiseReductionModeOff + value: 0 + description: No noise reduction is applied + - name: NoiseReductionModeFast + value: 1 + description: | + Noise reduction is applied without reducing the frame rate. + - name: NoiseReductionModeHighQuality + value: 2 + description: | + High quality noise reduction at the expense of frame rate. + - name: NoiseReductionModeMinimal + value: 3 + description: | + Minimal noise reduction is applied without reducing the frame rate. + - name: NoiseReductionModeZSL + value: 4 + description: | + Noise reduction is applied at different levels to different streams. + + - ColorCorrectionAberrationMode: + type: int32_t + direction: inout + description: | + Control to select the color correction aberration mode. Currently + identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. + + Mode of operation for the chromatic aberration correction algorithm. + enum: + - name: ColorCorrectionAberrationOff + value: 0 + description: No aberration correction is applied. + - name: ColorCorrectionAberrationFast + value: 1 + description: Aberration correction will not slow down the frame rate. + - name: ColorCorrectionAberrationHighQuality + value: 2 + description: | + High quality aberration correction which might reduce the frame + rate. + + - AwbState: + type: int32_t + direction: out + description: | + Control to report the current AWB algorithm state. Currently identical + to ANDROID_CONTROL_AWB_STATE. + + Current state of the AWB algorithm. + enum: + - name: AwbStateInactive + value: 0 + description: The AWB algorithm is inactive. + - name: AwbStateSearching + value: 1 + description: The AWB algorithm has not converged yet. + - name: AwbConverged + value: 2 + description: The AWB algorithm has converged. + - name: AwbLocked + value: 3 + description: The AWB algorithm is locked. 
+ + - SensorRollingShutterSkew: + type: int64_t + direction: out + description: | + Control to report the time between the start of exposure of the first + row and the start of exposure of the last row. Currently identical to + ANDROID_SENSOR_ROLLING_SHUTTER_SKEW + + - LensShadingMapMode: + type: int32_t + direction: inout + description: | + Control to report if the lens shading map is available. Currently + identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. + enum: + - name: LensShadingMapModeOff + value: 0 + description: No lens shading map mode is available. + - name: LensShadingMapModeOn + value: 1 + description: The lens shading map mode is available. + + - PipelineDepth: + type: int32_t + direction: out + description: | + Specifies the number of pipeline stages the frame went through from when + it was exposed to when the final completed result was available to the + framework. Always less than or equal to PipelineMaxDepth. Currently + identical to ANDROID_REQUEST_PIPELINE_DEPTH. + + The typical value for this control is 3 as a frame is first exposed, + captured and then processed in a single pass through the ISP. Any + additional processing step performed after the ISP pass (in example face + detection, additional format conversions etc) count as an additional + pipeline stage. + + - MaxLatency: + type: int32_t + direction: out + description: | + The maximum number of frames that can occur after a request (different + than the previous) has been submitted, and before the result's state + becomes synchronized. A value of -1 indicates unknown latency, and 0 + indicates per-frame control. Currently identical to + ANDROID_SYNC_MAX_LATENCY. + + - TestPatternMode: + type: int32_t + direction: inout + description: | + Control to select the test pattern mode. Currently identical to + ANDROID_SENSOR_TEST_PATTERN_MODE. + enum: + - name: TestPatternModeOff + value: 0 + description: | + No test pattern mode is used. 
The camera device returns frames from + the image sensor. + - name: TestPatternModeSolidColor + value: 1 + description: | + Each pixel in [R, G_even, G_odd, B] is replaced by its respective + color channel provided in test pattern data. + \todo Add control for test pattern data. + - name: TestPatternModeColorBars + value: 2 + description: | + All pixel data is replaced with an 8-bar color pattern. The vertical + bars (left-to-right) are as follows; white, yellow, cyan, green, + magenta, red, blue and black. Each bar should take up 1/8 of the + sensor pixel array width. When this is not possible, the bar size + should be rounded down to the nearest integer and the pattern can + repeat on the right side. Each bar's height must always take up the + full sensor pixel array height. + - name: TestPatternModeColorBarsFadeToGray + value: 3 + description: | + The test pattern is similar to TestPatternModeColorBars, + except that each bar should start at its specified color at the top + and fade to gray at the bottom. Furthermore each bar is further + subdivided into a left and right half. The left half should have a + smooth gradient, and the right half should have a quantized + gradient. In particular, the right half should consist of blocks + of the same color for 1/16th active sensor pixel array width. The + least significant bits in the quantized gradient should be copied + from the most significant bits of the smooth gradient. The height of + each bar should always be a multiple of 128. When this is not the + case, the pattern should repeat at the bottom of the image. + - name: TestPatternModePn9 + value: 4 + description: | + All pixel data is replaced by a pseudo-random sequence generated + from a PN9 512-bit sequence (typically implemented in hardware with + a linear feedback shift register). The generator should be reset at + the beginning of each frame, and thus each subsequent raw frame with + this test pattern should be exactly the same as the last. 
+ - name: TestPatternModeCustom1 + value: 256 + description: | + The first custom test pattern. All custom patterns that are + available only on this camera device are at least this numeric + value. All of the custom test patterns will be static (that is the + raw image must not vary from frame to frame). + + - FaceDetectMode: + type: int32_t + direction: inout + description: | + Control to select the face detection mode used by the pipeline. + + Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. + + \sa FaceDetectFaceRectangles + \sa FaceDetectFaceScores + \sa FaceDetectFaceLandmarks + \sa FaceDetectFaceIds + + enum: + - name: FaceDetectModeOff + value: 0 + description: | + Pipeline doesn't perform face detection and doesn't report any + control related to face detection. + - name: FaceDetectModeSimple + value: 1 + description: | + Pipeline performs face detection and reports the + FaceDetectFaceRectangles and FaceDetectFaceScores controls for each + detected face. FaceDetectFaceLandmarks and FaceDetectFaceIds are + optional. + - name: FaceDetectModeFull + value: 2 + description: | + Pipeline performs face detection and reports all the controls + related to face detection including FaceDetectFaceRectangles, + FaceDetectFaceScores, FaceDetectFaceLandmarks, and + FaceDetectFaceIds for each detected face. + + - FaceDetectFaceRectangles: + type: Rectangle + direction: out + description: | + Boundary rectangles of the detected faces. The number of values is + the number of detected faces. + + The FaceDetectFaceRectangles control can only be returned in metadata. + + Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. + size: [n] + + - FaceDetectFaceScores: + type: uint8_t + direction: out + description: | + Confidence score of each of the detected faces. The range of score is + [0, 100]. The number of values should be the number of faces reported + in FaceDetectFaceRectangles. + + The FaceDetectFaceScores control can only be returned in metadata. 
+ + Currently identical to ANDROID_STATISTICS_FACE_SCORES. + size: [n] + + - FaceDetectFaceLandmarks: + type: Point + direction: out + description: | + Array of human face landmark coordinates in format [..., left_eye_i, + right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. The + number of values should be 3 * the number of faces reported in + FaceDetectFaceRectangles. + + The FaceDetectFaceLandmarks control can only be returned in metadata. + + Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. + size: [n] + + - FaceDetectFaceIds: + type: int32_t + direction: out + description: | + Each detected face is given a unique ID that is valid for as long as the + face is visible to the camera device. A face that leaves the field of + view and later returns may be assigned a new ID. The number of values + should be the number of faces reported in FaceDetectFaceRectangles. + + The FaceDetectFaceIds control can only be returned in metadata. + + Currently identical to ANDROID_STATISTICS_FACE_IDS. + size: [n] + +... diff --git a/libcamera/versioned_files/0.5.1/control_ids_rpi.yaml b/libcamera/versioned_files/0.5.1/control_ids_rpi.yaml new file mode 100644 index 0000000..8d1e8b4 --- /dev/null +++ b/libcamera/versioned_files/0.5.1/control_ids_rpi.yaml @@ -0,0 +1,74 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2023, Raspberry Pi Ltd +# +%YAML 1.1 +--- +# Raspberry Pi (VC4 and PiSP) specific vendor controls +vendor: rpi +controls: + - StatsOutputEnable: + type: bool + direction: inout + description: | + Toggles the Raspberry Pi IPA to output the hardware generated statistics. + + When this control is set to true, the IPA outputs a binary dump of the + hardware generated statistics through the Request metadata in the + Bcm2835StatsOutput control. + + \sa Bcm2835StatsOutput + + - Bcm2835StatsOutput: + type: uint8_t + size: [n] + direction: out + description: | + Span of the BCM2835 ISP generated statistics for the current frame. 
+ + This is sent in the Request metadata if the StatsOutputEnable is set to + true. The statistics struct definition can be found in + include/linux/bcm2835-isp.h. + + \sa StatsOutputEnable + + - ScalerCrops: + type: Rectangle + size: [n] + direction: out + description: | + An array of rectangles, where each singular value has identical + functionality to the ScalerCrop control. This control allows the + Raspberry Pi pipeline handler to control individual scaler crops per + output stream. + + The order of rectangles passed into the control must match the order of + streams configured by the application. The pipeline handler will only + configure crop rectangles up to the number of output streams configured. + All subsequent rectangles passed into this control are ignored by the + pipeline handler. + + If both rpi::ScalerCrops and ScalerCrop controls are present in a + ControlList, the latter is discarded, and crops are obtained from this + control. + + Note that using different crop rectangles for each output stream with + this control is only applicable on the Pi5/PiSP platform. This control + should also be considered temporary/draft and will be replaced with + official libcamera API support for per-stream controls in the future. + + \sa ScalerCrop + + - PispStatsOutput: + type: uint8_t + direction: out + size: [n] + description: | + Span of the PiSP Frontend ISP generated statistics for the current + frame. This is sent in the Request metadata if the StatsOutputEnable is + set to true. The statistics struct definition can be found in + https://github.com/raspberrypi/libpisp/blob/main/src/libpisp/frontend/pisp_statistics.h + + \sa StatsOutputEnable + +... 
diff --git a/libcamera/versioned_files/0.5.1/controls.rs b/libcamera/versioned_files/0.5.1/controls.rs new file mode 100644 index 0000000..9b2dc07 --- /dev/null +++ b/libcamera/versioned_files/0.5.1/controls.rs @@ -0,0 +1,3675 @@ +use std::{ffi::CStr, ops::{Deref, DerefMut}}; +use num_enum::{IntoPrimitive, TryFromPrimitive}; +#[allow(unused_imports)] +use crate::control::{Control, Property, ControlEntry, DynControlEntry}; +use crate::control_value::{ControlValue, ControlValueError}; +#[allow(unused_imports)] +use crate::geometry::{Rectangle, Point, Size}; +#[allow(unused_imports)] +use libcamera_sys::*; +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(u32)] +pub enum ControlId { + /// Enable or disable the AEGC algorithm. When this control is set to true, + /// both ExposureTimeMode and AnalogueGainMode are set to auto, and if this + /// control is set to false then both are set to manual. + /// + /// If ExposureTimeMode or AnalogueGainMode are also set in the same + /// request as AeEnable, then the modes supplied by ExposureTimeMode or + /// AnalogueGainMode will take precedence. + /// + /// \sa ExposureTimeMode AnalogueGainMode + AeEnable = AE_ENABLE, + /// Report the AEGC algorithm state. + /// + /// The AEGC algorithm computes the exposure time and the analogue gain + /// to be applied to the image sensor. + /// + /// The AEGC algorithm behaviour is controlled by the ExposureTimeMode and + /// AnalogueGainMode controls, which allow applications to decide how + /// the exposure time and gain are computed, in Auto or Manual mode, + /// independently from one another. + /// + /// The AeState control reports the AEGC algorithm state through a single + /// value and describes it as a single computation block which computes + /// both the exposure time and the analogue gain values. 
+ /// + /// When both the exposure time and analogue gain values are configured to + /// be in Manual mode, the AEGC algorithm is quiescent and does not actively + /// compute any value and the AeState control will report AeStateIdle. + /// + /// When at least the exposure time or analogue gain are configured to be + /// computed by the AEGC algorithm, the AeState control will report if the + /// algorithm has converged to stable values for all of the controls set + /// to be computed in Auto mode. + /// + /// \sa AnalogueGainMode + /// \sa ExposureTimeMode + AeState = AE_STATE, + /// Specify a metering mode for the AE algorithm to use. + /// + /// The metering modes determine which parts of the image are used to + /// determine the scene brightness. Metering modes may be platform specific + /// and not all metering modes may be supported. + AeMeteringMode = AE_METERING_MODE, + /// Specify a constraint mode for the AE algorithm to use. + /// + /// The constraint modes determine how the measured scene brightness is + /// adjusted to reach the desired target exposure. Constraint modes may be + /// platform specific, and not all constraint modes may be supported. + AeConstraintMode = AE_CONSTRAINT_MODE, + /// Specify an exposure mode for the AE algorithm to use. + /// + /// The exposure modes specify how the desired total exposure is divided + /// between the exposure time and the sensor's analogue gain. They are + /// platform specific, and not all exposure modes may be supported. + /// + /// When one of AnalogueGainMode or ExposureTimeMode is set to Manual, + /// the fixed values will override any choices made by AeExposureMode. + /// + /// \sa AnalogueGainMode + /// \sa ExposureTimeMode + AeExposureMode = AE_EXPOSURE_MODE, + /// Specify an Exposure Value (EV) parameter. + /// + /// The EV parameter will only be applied if the AE algorithm is currently + /// enabled, that is, at least one of AnalogueGainMode and ExposureTimeMode + /// are in Auto mode. 
+ /// + /// By convention EV adjusts the exposure as log2. For example + /// EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment + /// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. + /// + /// \sa AnalogueGainMode + /// \sa ExposureTimeMode + ExposureValue = EXPOSURE_VALUE, + /// Exposure time for the frame applied in the sensor device. + /// + /// This value is specified in micro-seconds. + /// + /// This control will only take effect if ExposureTimeMode is Manual. If + /// this control is set when ExposureTimeMode is Auto, the value will be + /// ignored and will not be retained. + /// + /// When reported in metadata, this control indicates what exposure time + /// was used for the current frame, regardless of ExposureTimeMode. + /// ExposureTimeMode will indicate the source of the exposure time value, + /// whether it came from the AE algorithm or not. + /// + /// \sa AnalogueGain + /// \sa ExposureTimeMode + ExposureTime = EXPOSURE_TIME, + /// Controls the source of the exposure time that is applied to the image + /// sensor. + /// + /// When set to Auto, the AE algorithm computes the exposure time and + /// configures the image sensor accordingly. When set to Manual, the value + /// of the ExposureTime control is used. + /// + /// When transitioning from Auto to Manual mode and no ExposureTime control + /// is provided by the application, the last value computed by the AE + /// algorithm when the mode was Auto will be used. If the ExposureTimeMode + /// was never set to Auto (either because the camera started in Manual mode, + /// or Auto is not supported by the camera), the camera should use a + /// best-effort default value. + /// + /// If ExposureTimeModeManual is supported, the ExposureTime control must + /// also be supported. + /// + /// Cameras that support manual control of the sensor shall support manual + /// mode for both ExposureTimeMode and AnalogueGainMode, and shall expose + /// the ExposureTime and AnalogueGain controls. 
If the camera also has an + /// AEGC implementation, both ExposureTimeMode and AnalogueGainMode shall + /// support both manual and auto mode. If auto mode is available, it shall + /// be the default mode. These rules do not apply to black box cameras + /// such as UVC cameras, where the available gain and exposure modes are + /// completely dependent on what the device exposes. + /// + /// \par Flickerless exposure mode transitions + /// + /// Applications that wish to transition from ExposureTimeModeAuto to direct + /// control of the exposure time without causing extra flicker can do so by + /// selecting an ExposureTime value as close as possible to the last value + /// computed by the auto exposure algorithm in order to avoid any visible + /// flickering. + /// + /// To select the correct value to use as ExposureTime value, applications + /// should accommodate the natural delay in applying controls caused by the + /// capture pipeline frame depth. + /// + /// When switching to manual exposure mode, applications should not + /// immediately specify an ExposureTime value in the same request where + /// ExposureTimeMode is set to Manual. They should instead wait for the + /// first Request where ExposureTimeMode is reported as + /// ExposureTimeModeManual in the Request metadata, and use the reported + /// ExposureTime to populate the control value in the next Request to be + /// queued to the Camera. + /// + /// The implementation of the auto-exposure algorithm should equally try to + /// minimize flickering and when transitioning from manual exposure mode to + /// auto exposure use the last value provided by the application as starting + /// point. + /// + /// 1. Start with ExposureTimeMode set to Auto + /// + /// 2. Set ExposureTimeMode to Manual + /// + /// 3. Wait for the first completed request that has ExposureTimeMode + /// set to Manual + /// + /// 4. Copy the value reported in ExposureTime into a new request, and + /// submit it + /// + /// 5. 
Proceed to run manual exposure time as desired + /// + /// \sa ExposureTime + ExposureTimeMode = EXPOSURE_TIME_MODE, + /// Analogue gain value applied in the sensor device. + /// + /// The value of the control specifies the gain multiplier applied to all + /// colour channels. This value cannot be lower than 1.0. + /// + /// This control will only take effect if AnalogueGainMode is Manual. If + /// this control is set when AnalogueGainMode is Auto, the value will be + /// ignored and will not be retained. + /// + /// When reported in metadata, this control indicates what analogue gain + /// was used for the current request, regardless of AnalogueGainMode. + /// AnalogueGainMode will indicate the source of the analogue gain value, + /// whether it came from the AEGC algorithm or not. + /// + /// \sa ExposureTime + /// \sa AnalogueGainMode + AnalogueGain = ANALOGUE_GAIN, + /// Controls the source of the analogue gain that is applied to the image + /// sensor. + /// + /// When set to Auto, the AEGC algorithm computes the analogue gain and + /// configures the image sensor accordingly. When set to Manual, the value + /// of the AnalogueGain control is used. + /// + /// When transitioning from Auto to Manual mode and no AnalogueGain control + /// is provided by the application, the last value computed by the AEGC + /// algorithm when the mode was Auto will be used. If the AnalogueGainMode + /// was never set to Auto (either because the camera started in Manual mode, + /// or Auto is not supported by the camera), the camera should use a + /// best-effort default value. + /// + /// If AnalogueGainModeManual is supported, the AnalogueGain control must + /// also be supported. + /// + /// For cameras where we have control over the ISP, both ExposureTimeMode + /// and AnalogueGainMode are expected to support manual mode, and both + /// controls (as well as ExposureTimeMode and AnalogueGain) are expected to + /// be present. 
If the camera also has an AEGC implementation, both + /// ExposureTimeMode and AnalogueGainMode shall support both manual and + /// auto mode. If auto mode is available, it shall be the default mode. + /// These rules do not apply to black box cameras such as UVC cameras, + /// where the available gain and exposure modes are completely dependent on + /// what the hardware exposes. + /// + /// The same procedure described for performing flickerless transitions in + /// the ExposureTimeMode control documentation can be applied to analogue + /// gain. + /// + /// \sa ExposureTimeMode + /// \sa AnalogueGain + AnalogueGainMode = ANALOGUE_GAIN_MODE, + /// Set the flicker avoidance mode for AGC/AEC. + /// + /// The flicker mode determines whether, and how, the AGC/AEC algorithm + /// attempts to hide flicker effects caused by the duty cycle of artificial + /// lighting. + /// + /// Although implementation dependent, many algorithms for "flicker + /// avoidance" work by restricting this exposure time to integer multiples + /// of the cycle period, wherever possible. + /// + /// Implementations may not support all of the flicker modes listed below. + /// + /// By default the system will start in FlickerAuto mode if this is + /// supported, otherwise the flicker mode will be set to FlickerOff. + AeFlickerMode = AE_FLICKER_MODE, + /// Manual flicker period in microseconds. + /// + /// This value sets the current flicker period to avoid. It is used when + /// AeFlickerMode is set to FlickerManual. + /// + /// To cancel 50Hz mains flicker, this should be set to 10000 (corresponding + /// to 100Hz), or 8333 (120Hz) for 60Hz mains. + /// + /// Setting the mode to FlickerManual when no AeFlickerPeriod has ever been + /// set means that no flicker cancellation occurs (until the value of this + /// control is updated). + /// + /// Switching to modes other than FlickerManual has no effect on the + /// value of the AeFlickerPeriod control. 
+ /// + /// \sa AeFlickerMode + AeFlickerPeriod = AE_FLICKER_PERIOD, + /// Flicker period detected in microseconds. + /// + /// The value reported here indicates the currently detected flicker + /// period, or zero if no flicker at all is detected. + /// + /// When AeFlickerMode is set to FlickerAuto, there may be a period during + /// which the value reported here remains zero. Once a non-zero value is + /// reported, then this is the flicker period that has been detected and is + /// now being cancelled. + /// + /// In the case of 50Hz mains flicker, the value would be 10000 + /// (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. + /// + /// It is implementation dependent whether the system can continue to detect + /// flicker of different periods when another frequency is already being + /// cancelled. + /// + /// \sa AeFlickerMode + AeFlickerDetected = AE_FLICKER_DETECTED, + /// Specify a fixed brightness parameter. + /// + /// Positive values (up to 1.0) produce brighter images; negative values + /// (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. + Brightness = BRIGHTNESS, + /// Specify a fixed contrast parameter. + /// + /// Normal contrast is given by the value 1.0; larger values produce images + /// with more contrast. + Contrast = CONTRAST, + /// Report an estimate of the current illuminance level in lux. + /// + /// The Lux control can only be returned in metadata. + Lux = LUX, + /// Enable or disable the AWB. + /// + /// When AWB is enabled, the algorithm estimates the colour temperature of + /// the scene and computes colour gains and the colour correction matrix + /// automatically. The computed colour temperature, gains and correction + /// matrix are reported in metadata. The corresponding controls are ignored + /// if set in a request. + /// + /// When AWB is disabled, the colour temperature, gains and correction + /// matrix are not updated automatically and can be set manually in + /// requests. 
+ /// + /// \sa ColourCorrectionMatrix + /// \sa ColourGains + /// \sa ColourTemperature + AwbEnable = AWB_ENABLE, + /// Specify the range of illuminants to use for the AWB algorithm. + /// + /// The modes supported are platform specific, and not all modes may be + /// supported. + AwbMode = AWB_MODE, + /// Report the lock status of a running AWB algorithm. + /// + /// If the AWB algorithm is locked the value shall be set to true, if it's + /// converging it shall be set to false. If the AWB algorithm is not + /// running the control shall not be present in the metadata control list. + /// + /// \sa AwbEnable + AwbLocked = AWB_LOCKED, + /// Pair of gain values for the Red and Blue colour channels, in that + /// order. + /// + /// ColourGains can only be applied in a Request when the AWB is disabled. + /// If ColourGains is set in a request but ColourTemperature is not, the + /// implementation shall calculate and set the ColourTemperature based on + /// the ColourGains. + /// + /// \sa AwbEnable + /// \sa ColourTemperature + ColourGains = COLOUR_GAINS, + /// ColourTemperature of the frame, in kelvin. + /// + /// ColourTemperature can only be applied in a Request when the AWB is + /// disabled. + /// + /// If ColourTemperature is set in a request but ColourGains is not, the + /// implementation shall calculate and set the ColourGains based on the + /// given ColourTemperature. If ColourTemperature is set (either directly, + /// or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, + /// the ColourCorrectionMatrix is updated based on the ColourTemperature. + /// + /// The ColourTemperature used to process the frame is reported in metadata. + /// + /// \sa AwbEnable + /// \sa ColourCorrectionMatrix + /// \sa ColourGains + ColourTemperature = COLOUR_TEMPERATURE, + /// Specify a fixed saturation parameter. + /// + /// Normal saturation is given by the value 1.0; larger values produce more + /// saturated colours; 0.0 produces a greyscale image. 
+ Saturation = SATURATION, + /// Reports the sensor black levels used for processing a frame. + /// + /// The values are in the order R, Gr, Gb, B. They are returned as numbers + /// out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The + /// SensorBlackLevels control can only be returned in metadata. + SensorBlackLevels = SENSOR_BLACK_LEVELS, + /// Intensity of the sharpening applied to the image. + /// + /// A value of 0.0 means no sharpening. The minimum value means + /// minimal sharpening, and shall be 0.0 unless the camera can't + /// disable sharpening completely. The default value shall give a + /// "reasonable" level of sharpening, suitable for most use cases. + /// The maximum value may apply extremely high levels of sharpening, + /// higher than anyone could reasonably want. Negative values are + /// not allowed. Note also that sharpening is not applied to raw + /// streams. + Sharpness = SHARPNESS, + /// Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. + /// + /// A larger FocusFoM value indicates a more in-focus frame. This singular + /// value may be based on a combination of statistics gathered from + /// multiple focus regions within an image. The number of focus regions and + /// method of combination is platform dependent. In this respect, it is not + /// necessarily aimed at providing a way to implement a focus algorithm by + /// the application, rather an indication of how in-focus a frame is. + FocusFoM = FOCUS_FO_M, + /// The 3x3 matrix that converts camera RGB to sRGB within the imaging + /// pipeline. + /// + /// This should describe the matrix that is used after pixels have been + /// white-balanced, but before any gamma transformation. The 3x3 matrix is + /// stored in conventional reading order in an array of 9 floating point + /// values. + /// + /// ColourCorrectionMatrix can only be applied in a Request when the AWB is + /// disabled. 
+ /// + /// \sa AwbEnable + /// \sa ColourTemperature + ColourCorrectionMatrix = COLOUR_CORRECTION_MATRIX, + /// Sets the image portion that will be scaled to form the whole of + /// the final output image. + /// + /// The (x,y) location of this rectangle is relative to the + /// PixelArrayActiveAreas that is being used. The units remain native + /// sensor pixels, even if the sensor is being used in a binning or + /// skipping mode. + /// + /// This control is only present when the pipeline supports scaling. Its + /// maximum valid value is given by the properties::ScalerCropMaximum + /// property, and the two can be used to implement digital zoom. + ScalerCrop = SCALER_CROP, + /// Digital gain value applied during the processing steps applied + /// to the image as captured from the sensor. + /// + /// The global digital gain factor is applied to all the colour channels + /// of the RAW image. Different pipeline models are free to + /// specify how the global gain factor applies to each separate + /// channel. + /// + /// If an imaging pipeline applies digital gain in distinct + /// processing steps, this value indicates their total sum. + /// Pipelines are free to decide how to adjust each processing + /// step to respect the received gain factor and shall report + /// their total value in the request metadata. + DigitalGain = DIGITAL_GAIN, + /// The instantaneous frame duration from start of frame exposure to start + /// of next exposure, expressed in microseconds. + /// + /// This control is meant to be returned in metadata. + FrameDuration = FRAME_DURATION, + /// The minimum and maximum (in that order) frame duration, expressed in + /// microseconds. + /// + /// When provided by applications, the control specifies the sensor frame + /// duration interval the pipeline has to use. This limits the largest + /// exposure time the sensor can use. 
For example, if a maximum frame + /// duration of 33ms is requested (corresponding to 30 frames per second), + /// the sensor will not be able to raise the exposure time above 33ms. + /// A fixed frame duration is achieved by setting the minimum and maximum + /// values to be the same. Setting both values to 0 reverts to using the + /// camera defaults. + /// + /// The maximum frame duration provides the absolute limit to the exposure + /// time computed by the AE algorithm and it overrides any exposure mode + /// setting specified with controls::AeExposureMode. Similarly, when a + /// manual exposure time is set through controls::ExposureTime, it also + /// gets clipped to the limits set by this control. When reported in + /// metadata, the control expresses the minimum and maximum frame durations + /// used after being clipped to the sensor provided frame duration limits. + /// + /// \sa AeExposureMode + /// \sa ExposureTime + /// + /// \todo Define how to calculate the capture frame rate by + /// defining controls to report additional delays introduced by + /// the capture pipeline or post-processing stages (ie JPEG + /// conversion, frame scaling). + /// + /// \todo Provide an explicit definition of default control values, for + /// this and all other controls. + FrameDurationLimits = FRAME_DURATION_LIMITS, + /// Temperature measure from the camera sensor in Celsius. + /// + /// This value is typically obtained by a thermal sensor present on-die or + /// in the camera module. The range of reported temperatures is device + /// dependent. + /// + /// The SensorTemperature control will only be returned in metadata if a + /// thermal sensor is present. + SensorTemperature = SENSOR_TEMPERATURE, + /// The time when the first row of the image sensor active array is exposed. + /// + /// The timestamp, expressed in nanoseconds, represents a monotonically + /// increasing counter since the system boot time, as defined by the + /// Linux-specific CLOCK_BOOTTIME clock id. 
+ /// + /// The SensorTimestamp control can only be returned in metadata. + /// + /// \todo Define how the sensor timestamp has to be used in the reprocessing + /// use case. + SensorTimestamp = SENSOR_TIMESTAMP, + /// The mode of the AF (autofocus) algorithm. + /// + /// An implementation may choose not to implement all the modes. + AfMode = AF_MODE, + /// The range of focus distances that is scanned. + /// + /// An implementation may choose not to implement all the options here. + AfRange = AF_RANGE, + /// Determine whether the AF is to move the lens as quickly as possible or + /// more steadily. + /// + /// For example, during video recording it may be desirable not to move the + /// lens too abruptly, but when in a preview mode (waiting for a still + /// capture) it may be helpful to move the lens as quickly as is reasonably + /// possible. + AfSpeed = AF_SPEED, + /// The parts of the image used by the AF algorithm to measure focus. + AfMetering = AF_METERING, + /// The focus windows used by the AF algorithm when AfMetering is set to + /// AfMeteringWindows. + /// + /// The units used are pixels within the rectangle returned by the + /// ScalerCropMaximum property. + /// + /// In order to be activated, a rectangle must be programmed with non-zero + /// width and height. Internally, these rectangles are intersected with the + /// ScalerCropMaximum rectangle. If the window becomes empty after this + /// operation, then the window is ignored. If all the windows end up being + /// ignored, then the behaviour is platform dependent. + /// + /// On platforms that support the ScalerCrop control (for implementing + /// digital zoom, for example), no automatic recalculation or adjustment of + /// AF windows is performed internally if the ScalerCrop is changed. If any + /// window lies outside the output image after the scaler crop has been + /// applied, it is up to the application to recalculate them. 
+ /// + /// The details of how the windows are used are platform dependent. We note + /// that when there is more than one AF window, a typical implementation + /// might find the optimal focus position for each one and finally select + /// the window where the focal distance for the objects shown in that part + /// of the image are closest to the camera. + AfWindows = AF_WINDOWS, + /// Start an autofocus scan. + /// + /// This control starts an autofocus scan when AfMode is set to AfModeAuto, + /// and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It + /// can also be used to terminate a scan early. + AfTrigger = AF_TRIGGER, + /// Pause lens movements when in continuous autofocus mode. + /// + /// This control has no effect except when in continuous autofocus mode + /// (AfModeContinuous). It can be used to pause any lens movements while + /// (for example) images are captured. The algorithm remains inactive + /// until it is instructed to resume. + AfPause = AF_PAUSE, + /// Set and report the focus lens position. + /// + /// This control instructs the lens to move to a particular position and + /// also reports back the position of the lens for each frame. + /// + /// The LensPosition control is ignored unless the AfMode is set to + /// AfModeManual, though the value is reported back unconditionally in all + /// modes. + /// + /// This value, which is generally a non-integer, is the reciprocal of the + /// focal distance in metres, also known as dioptres. That is, to set a + /// focal distance D, the lens position LP is given by + /// + /// \f$LP = \frac{1\mathrm{m}}{D}\f$ + /// + /// For example: + /// + /// - 0 moves the lens to infinity. + /// - 0.5 moves the lens to focus on objects 2m away. + /// - 2 moves the lens to focus on objects 50cm away. + /// - And larger values will focus the lens closer. 
+ /// + /// The default value of the control should indicate a good general + /// position for the lens, often corresponding to the hyperfocal distance + /// (the closest position for which objects at infinity are still + /// acceptably sharp). The minimum will often be zero (meaning infinity), + /// and the maximum value defines the closest focus position. + /// + /// \todo Define a property to report the Hyperfocal distance of calibrated + /// lenses. + LensPosition = LENS_POSITION, + /// The current state of the AF algorithm. + /// + /// This control reports the current state of the AF algorithm in + /// conjunction with the reported AfMode value and (in continuous AF mode) + /// the AfPauseState value. The possible state changes are described below, + /// though we note the following state transitions that occur when the + /// AfMode is changed. + /// + /// If the AfMode is set to AfModeManual, then the AfState will always + /// report AfStateIdle (even if the lens is subsequently moved). Changing + /// to the AfModeManual state does not initiate any lens movement. + /// + /// If the AfMode is set to AfModeAuto then the AfState will report + /// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent + /// together then AfState will omit AfStateIdle and move straight to + /// AfStateScanning (and start a scan). + /// + /// If the AfMode is set to AfModeContinuous then the AfState will + /// initially report AfStateScanning. + AfState = AF_STATE, + /// Report whether the autofocus is currently running, paused or pausing. + /// + /// This control is only applicable in continuous (AfModeContinuous) mode, + /// and reports whether the algorithm is currently running, paused or + /// pausing (that is, will pause as soon as any in-progress scan + /// completes). + /// + /// Any change to AfMode will cause AfPauseStateRunning to be reported. + AfPauseState = AF_PAUSE_STATE, + /// Set the mode to be used for High Dynamic Range (HDR) imaging. 
+ /// + /// HDR techniques typically include multiple exposure, image fusion and + /// tone mapping techniques to improve the dynamic range of the resulting + /// images. + /// + /// When using an HDR mode, images are captured with different sets of AGC + /// settings called HDR channels. Channels indicate in particular the type + /// of exposure (short, medium or long) used to capture the raw image, + /// before fusion. Each HDR image is tagged with the corresponding channel + /// using the HdrChannel control. + /// + /// \sa HdrChannel + HdrMode = HDR_MODE, + /// The HDR channel used to capture the frame. + /// + /// This value is reported back to the application so that it can discover + /// whether this capture corresponds to the short or long exposure image + /// (or any other image used by the HDR procedure). An application can + /// monitor the HDR channel to discover when the differently exposed images + /// have arrived. + /// + /// This metadata is only available when an HDR mode has been enabled. + /// + /// \sa HdrMode + HdrChannel = HDR_CHANNEL, + /// Specify a fixed gamma value. + /// + /// The default gamma value must be 2.2 which closely mimics sRGB gamma. + /// Note that this is camera gamma, so it is applied as 1.0/gamma. + Gamma = GAMMA, + /// Enable or disable the debug metadata. + DebugMetadataEnable = DEBUG_METADATA_ENABLE, + /// Control for AE metering trigger. Currently identical to + /// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. + /// + /// Whether the camera device will trigger a precapture metering sequence + /// when it processes this request. + #[cfg(feature = "vendor_draft")] + AePrecaptureTrigger = AE_PRECAPTURE_TRIGGER, + /// Control to select the noise reduction algorithm mode. Currently + /// identical to ANDROID_NOISE_REDUCTION_MODE. + /// + /// Mode of operation for the noise reduction algorithm. 
+ #[cfg(feature = "vendor_draft")] + NoiseReductionMode = NOISE_REDUCTION_MODE, + /// Control to select the color correction aberration mode. Currently + /// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. + /// + /// Mode of operation for the chromatic aberration correction algorithm. + #[cfg(feature = "vendor_draft")] + ColorCorrectionAberrationMode = COLOR_CORRECTION_ABERRATION_MODE, + /// Control to report the current AWB algorithm state. Currently identical + /// to ANDROID_CONTROL_AWB_STATE. + /// + /// Current state of the AWB algorithm. + #[cfg(feature = "vendor_draft")] + AwbState = AWB_STATE, + /// Control to report the time between the start of exposure of the first + /// row and the start of exposure of the last row. Currently identical to + /// ANDROID_SENSOR_ROLLING_SHUTTER_SKEW + #[cfg(feature = "vendor_draft")] + SensorRollingShutterSkew = SENSOR_ROLLING_SHUTTER_SKEW, + /// Control to report if the lens shading map is available. Currently + /// identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. + #[cfg(feature = "vendor_draft")] + LensShadingMapMode = LENS_SHADING_MAP_MODE, + /// Specifies the number of pipeline stages the frame went through from when + /// it was exposed to when the final completed result was available to the + /// framework. Always less than or equal to PipelineMaxDepth. Currently + /// identical to ANDROID_REQUEST_PIPELINE_DEPTH. + /// + /// The typical value for this control is 3 as a frame is first exposed, + /// captured and then processed in a single pass through the ISP. Any + /// additional processing step performed after the ISP pass (in example face + /// detection, additional format conversions etc) count as an additional + /// pipeline stage. + #[cfg(feature = "vendor_draft")] + PipelineDepth = PIPELINE_DEPTH, + /// The maximum number of frames that can occur after a request (different + /// than the previous) has been submitted, and before the result's state + /// becomes synchronized. 
A value of -1 indicates unknown latency, and 0 + /// indicates per-frame control. Currently identical to + /// ANDROID_SYNC_MAX_LATENCY. + #[cfg(feature = "vendor_draft")] + MaxLatency = MAX_LATENCY, + /// Control to select the test pattern mode. Currently identical to + /// ANDROID_SENSOR_TEST_PATTERN_MODE. + #[cfg(feature = "vendor_draft")] + TestPatternMode = TEST_PATTERN_MODE, + /// Control to select the face detection mode used by the pipeline. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. + /// + /// \sa FaceDetectFaceRectangles + /// \sa FaceDetectFaceScores + /// \sa FaceDetectFaceLandmarks + /// \sa FaceDetectFaceIds + #[cfg(feature = "vendor_draft")] + FaceDetectMode = FACE_DETECT_MODE, + /// Boundary rectangles of the detected faces. The number of values is + /// the number of detected faces. + /// + /// The FaceDetectFaceRectangles control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. + #[cfg(feature = "vendor_draft")] + FaceDetectFaceRectangles = FACE_DETECT_FACE_RECTANGLES, + /// Confidence score of each of the detected faces. The range of score is + /// [0, 100]. The number of values should be the number of faces reported + /// in FaceDetectFaceRectangles. + /// + /// The FaceDetectFaceScores control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_SCORES. + #[cfg(feature = "vendor_draft")] + FaceDetectFaceScores = FACE_DETECT_FACE_SCORES, + /// Array of human face landmark coordinates in format [..., left_eye_i, + /// right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. The + /// number of values should be 3 * the number of faces reported in + /// FaceDetectFaceRectangles. + /// + /// The FaceDetectFaceLandmarks control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. 
+ #[cfg(feature = "vendor_draft")] + FaceDetectFaceLandmarks = FACE_DETECT_FACE_LANDMARKS, + /// Each detected face is given a unique ID that is valid for as long as the + /// face is visible to the camera device. A face that leaves the field of + /// view and later returns may be assigned a new ID. The number of values + /// should be the number of faces reported in FaceDetectFaceRectangles. + /// + /// The FaceDetectFaceIds control can only be returned in metadata. + /// + /// Currently identical to ANDROID_STATISTICS_FACE_IDS. + #[cfg(feature = "vendor_draft")] + FaceDetectFaceIds = FACE_DETECT_FACE_IDS, + /// Toggles the Raspberry Pi IPA to output the hardware generated statistics. + /// + /// When this control is set to true, the IPA outputs a binary dump of the + /// hardware generated statistics through the Request metadata in the + /// Bcm2835StatsOutput control. + /// + /// \sa Bcm2835StatsOutput + #[cfg(feature = "vendor_rpi")] + StatsOutputEnable = STATS_OUTPUT_ENABLE, + /// Span of the BCM2835 ISP generated statistics for the current frame. + /// + /// This is sent in the Request metadata if the StatsOutputEnable is set to + /// true. The statistics struct definition can be found in + /// include/linux/bcm2835-isp.h. + /// + /// \sa StatsOutputEnable + #[cfg(feature = "vendor_rpi")] + Bcm2835StatsOutput = BCM2835_STATS_OUTPUT, + /// An array of rectangles, where each singular value has identical + /// functionality to the ScalerCrop control. This control allows the + /// Raspberry Pi pipeline handler to control individual scaler crops per + /// output stream. + /// + /// The order of rectangles passed into the control must match the order of + /// streams configured by the application. The pipeline handler will only + /// configure crop retangles up-to the number of output streams configured. + /// All subsequent rectangles passed into this control are ignored by the + /// pipeline handler. 
+    ///
+    /// If both rpi::ScalerCrops and ScalerCrop controls are present in a
+    /// ControlList, the latter is discarded, and crops are obtained from this
+    /// control.
+    ///
+    /// Note that using different crop rectangles for each output stream with
+    /// this control is only applicable on the Pi5/PiSP platform. This control
+    /// should also be considered temporary/draft and will be replaced with
+    /// official libcamera API support for per-stream controls in the future.
+    ///
+    /// \sa ScalerCrop
+    #[cfg(feature = "vendor_rpi")]
+    ScalerCrops = SCALER_CROPS,
+    /// Span of the PiSP Frontend ISP generated statistics for the current
+    /// frame. This is sent in the Request metadata if the StatsOutputEnable is
+    /// set to true. The statistics struct definition can be found in
+    /// https://github.com/raspberrypi/libpisp/blob/main/src/libpisp/frontend/pisp_statistics.h
+    ///
+    /// \sa StatsOutputEnable
+    #[cfg(feature = "vendor_rpi")]
+    PispStatsOutput = PISP_STATS_OUTPUT,
+}
+impl ControlId {
+    // Numeric value of this control id, identical to the C enum value
+    // (the enum is #[repr]-compatible with libcamera_control_id_enum).
+    fn id(&self) -> u32 {
+        *self as u32
+    }
+    /// Returns the control's name as reported by libcamera.
+    ///
+    /// Falls back to an empty string when the C helper returns a null
+    /// pointer, i.e. when the id is unknown to the linked libcamera
+    /// version. The returned C string is owned by libcamera's static
+    /// control table, so copying it into a `String` here is safe.
+    pub fn name(&self) -> String {
+        unsafe {
+            let c_str = libcamera_control_name_from_id(self.id());
+            if c_str.is_null() {
+                return "".into();
+            }
+            CStr::from_ptr(c_str).to_str().unwrap().into()
+        }
+    }
+}
+/// Enable or disable the AEGC algorithm. When this control is set to true,
+/// both ExposureTimeMode and AnalogueGainMode are set to auto, and if this
+/// control is set to false then both are set to manual.
+///
+/// If ExposureTimeMode or AnalogueGainMode are also set in the same
+/// request as AeEnable, then the modes supplied by ExposureTimeMode or
+/// AnalogueGainMode will take precedence.
+/// +/// \sa ExposureTimeMode AnalogueGainMode +#[derive(Debug, Clone)] +pub struct AeEnable(pub bool); +impl Deref for AeEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AeEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AeEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AeEnable) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AeEnable { + const ID: u32 = ControlId::AeEnable as _; +} +impl Control for AeEnable {} +/// Report the AEGC algorithm state. +/// +/// The AEGC algorithm computes the exposure time and the analogue gain +/// to be applied to the image sensor. +/// +/// The AEGC algorithm behaviour is controlled by the ExposureTimeMode and +/// AnalogueGainMode controls, which allow applications to decide how +/// the exposure time and gain are computed, in Auto or Manual mode, +/// independently from one another. +/// +/// The AeState control reports the AEGC algorithm state through a single +/// value and describes it as a single computation block which computes +/// both the exposure time and the analogue gain values. +/// +/// When both the exposure time and analogue gain values are configured to +/// be in Manual mode, the AEGC algorithm is quiescent and does not actively +/// compute any value and the AeState control will report AeStateIdle. +/// +/// When at least the exposure time or analogue gain are configured to be +/// computed by the AEGC algorithm, the AeState control will report if the +/// algorithm has converged to stable values for all of the controls set +/// to be computed in Auto mode. +/// +/// \sa AnalogueGainMode +/// \sa ExposureTimeMode +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeState { + /// The AEGC algorithm is inactive. 
+ /// + /// This state is returned when both AnalogueGainMode and + /// ExposureTimeMode are set to Manual and the algorithm is not + /// actively computing any value. + Idle = 0, + /// The AEGC algorithm is actively computing new values, for either the + /// exposure time or the analogue gain, but has not converged to a + /// stable result yet. + /// + /// This state is returned if at least one of AnalogueGainMode or + /// ExposureTimeMode is auto and the algorithm hasn't converged yet. + /// + /// The AEGC algorithm converges once stable values are computed for + /// all of the controls set to be computed in Auto mode. Once the + /// algorithm converges the state is moved to AeStateConverged. + Searching = 1, + /// The AEGC algorithm has converged. + /// + /// This state is returned if at least one of AnalogueGainMode or + /// ExposureTimeMode is Auto, and the AEGC algorithm has converged to a + /// stable value. + /// + /// If the measurements move too far away from the convergence point + /// then the AEGC algorithm might start adjusting again, in which case + /// the state is moved to AeStateSearching. + Converged = 2, +} +impl TryFrom for AeState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AeState) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AeState { + const ID: u32 = ControlId::AeState as _; +} +impl Control for AeState {} +/// Specify a metering mode for the AE algorithm to use. +/// +/// The metering modes determine which parts of the image are used to +/// determine the scene brightness. Metering modes may be platform specific +/// and not all metering modes may be supported. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeMeteringMode { + /// Centre-weighted metering mode. 
+ MeteringCentreWeighted = 0, + /// Spot metering mode. + MeteringSpot = 1, + /// Matrix metering mode. + MeteringMatrix = 2, + /// Custom metering mode. + MeteringCustom = 3, +} +impl TryFrom for AeMeteringMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AeMeteringMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AeMeteringMode { + const ID: u32 = ControlId::AeMeteringMode as _; +} +impl Control for AeMeteringMode {} +/// Specify a constraint mode for the AE algorithm to use. +/// +/// The constraint modes determine how the measured scene brightness is +/// adjusted to reach the desired target exposure. Constraint modes may be +/// platform specific, and not all constraint modes may be supported. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeConstraintMode { + /// Default constraint mode. + /// + /// This mode aims to balance the exposure of different parts of the + /// image so as to reach a reasonable average level. However, highlights + /// in the image may appear over-exposed and lowlights may appear + /// under-exposed. + ConstraintNormal = 0, + /// Highlight constraint mode. + /// + /// This mode adjusts the exposure levels in order to try and avoid + /// over-exposing the brightest parts (highlights) of an image. + /// Other non-highlight parts of the image may appear under-exposed. + ConstraintHighlight = 1, + /// Shadows constraint mode. + /// + /// This mode adjusts the exposure levels in order to try and avoid + /// under-exposing the dark parts (shadows) of an image. Other normally + /// exposed parts of the image may appear over-exposed. + ConstraintShadows = 2, + /// Custom constraint mode. 
+    ConstraintCustom = 3,
+}
+// NOTE(review): restored stripped `<…>` generic arguments in the
+// conversion impls for AeConstraintMode and AeExposureMode below; the
+// extracted text did not parse.
+impl TryFrom<ControlValue> for AeConstraintMode {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Self::try_from(i32::try_from(value.clone())?)
+            .map_err(|_| ControlValueError::UnknownVariant(value))
+    }
+}
+impl From<AeConstraintMode> for ControlValue {
+    fn from(val: AeConstraintMode) -> Self {
+        ControlValue::from(<i32>::from(val))
+    }
+}
+impl ControlEntry for AeConstraintMode {
+    const ID: u32 = ControlId::AeConstraintMode as _;
+}
+impl Control for AeConstraintMode {}
+/// Specify an exposure mode for the AE algorithm to use.
+///
+/// The exposure modes specify how the desired total exposure is divided
+/// between the exposure time and the sensor's analogue gain. They are
+/// platform specific, and not all exposure modes may be supported.
+///
+/// When one of AnalogueGainMode or ExposureTimeMode is set to Manual,
+/// the fixed values will override any choices made by AeExposureMode.
+///
+/// \sa AnalogueGainMode
+/// \sa ExposureTimeMode
+#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)]
+#[repr(i32)]
+pub enum AeExposureMode {
+    /// Default exposure mode.
+    ExposureNormal = 0,
+    /// Exposure mode allowing only short exposure times.
+    ExposureShort = 1,
+    /// Exposure mode allowing long exposure times.
+    ExposureLong = 2,
+    /// Custom exposure mode.
+    ExposureCustom = 3,
+}
+impl TryFrom<ControlValue> for AeExposureMode {
+    type Error = ControlValueError;
+    fn try_from(value: ControlValue) -> Result<Self, Self::Error> {
+        Self::try_from(i32::try_from(value.clone())?)
+            .map_err(|_| ControlValueError::UnknownVariant(value))
+    }
+}
+impl From<AeExposureMode> for ControlValue {
+    fn from(val: AeExposureMode) -> Self {
+        ControlValue::from(<i32>::from(val))
+    }
+}
+impl ControlEntry for AeExposureMode {
+    const ID: u32 = ControlId::AeExposureMode as _;
+}
+impl Control for AeExposureMode {}
+/// Specify an Exposure Value (EV) parameter.
+/// +/// The EV parameter will only be applied if the AE algorithm is currently +/// enabled, that is, at least one of AnalogueGainMode and ExposureTimeMode +/// are in Auto mode. +/// +/// By convention EV adjusts the exposure as log2. For example +/// EV = [-2, -1, -0.5, 0, 0.5, 1, 2] results in an exposure adjustment +/// of [1/4x, 1/2x, 1/sqrt(2)x, 1x, sqrt(2)x, 2x, 4x]. +/// +/// \sa AnalogueGainMode +/// \sa ExposureTimeMode +#[derive(Debug, Clone)] +pub struct ExposureValue(pub f32); +impl Deref for ExposureValue { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ExposureValue { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ExposureValue { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ExposureValue) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ExposureValue { + const ID: u32 = ControlId::ExposureValue as _; +} +impl Control for ExposureValue {} +/// Exposure time for the frame applied in the sensor device. +/// +/// This value is specified in micro-seconds. +/// +/// This control will only take effect if ExposureTimeMode is Manual. If +/// this control is set when ExposureTimeMode is Auto, the value will be +/// ignored and will not be retained. +/// +/// When reported in metadata, this control indicates what exposure time +/// was used for the current frame, regardless of ExposureTimeMode. +/// ExposureTimeMode will indicate the source of the exposure time value, +/// whether it came from the AE algorithm or not. 
+/// +/// \sa AnalogueGain +/// \sa ExposureTimeMode +#[derive(Debug, Clone)] +pub struct ExposureTime(pub i32); +impl Deref for ExposureTime { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ExposureTime { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ExposureTime { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ExposureTime) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ExposureTime { + const ID: u32 = ControlId::ExposureTime as _; +} +impl Control for ExposureTime {} +/// Controls the source of the exposure time that is applied to the image +/// sensor. +/// +/// When set to Auto, the AE algorithm computes the exposure time and +/// configures the image sensor accordingly. When set to Manual, the value +/// of the ExposureTime control is used. +/// +/// When transitioning from Auto to Manual mode and no ExposureTime control +/// is provided by the application, the last value computed by the AE +/// algorithm when the mode was Auto will be used. If the ExposureTimeMode +/// was never set to Auto (either because the camera started in Manual mode, +/// or Auto is not supported by the camera), the camera should use a +/// best-effort default value. +/// +/// If ExposureTimeModeManual is supported, the ExposureTime control must +/// also be supported. +/// +/// Cameras that support manual control of the sensor shall support manual +/// mode for both ExposureTimeMode and AnalogueGainMode, and shall expose +/// the ExposureTime and AnalogueGain controls. If the camera also has an +/// AEGC implementation, both ExposureTimeMode and AnalogueGainMode shall +/// support both manual and auto mode. If auto mode is available, it shall +/// be the default mode. 
These rules do not apply to black box cameras +/// such as UVC cameras, where the available gain and exposure modes are +/// completely dependent on what the device exposes. +/// +/// \par Flickerless exposure mode transitions +/// +/// Applications that wish to transition from ExposureTimeModeAuto to direct +/// control of the exposure time without causing extra flicker can do so by +/// selecting an ExposureTime value as close as possible to the last value +/// computed by the auto exposure algorithm in order to avoid any visible +/// flickering. +/// +/// To select the correct value to use as ExposureTime value, applications +/// should accommodate the natural delay in applying controls caused by the +/// capture pipeline frame depth. +/// +/// When switching to manual exposure mode, applications should not +/// immediately specify an ExposureTime value in the same request where +/// ExposureTimeMode is set to Manual. They should instead wait for the +/// first Request where ExposureTimeMode is reported as +/// ExposureTimeModeManual in the Request metadata, and use the reported +/// ExposureTime to populate the control value in the next Request to be +/// queued to the Camera. +/// +/// The implementation of the auto-exposure algorithm should equally try to +/// minimize flickering and when transitioning from manual exposure mode to +/// auto exposure use the last value provided by the application as starting +/// point. +/// +/// 1. Start with ExposureTimeMode set to Auto +/// +/// 2. Set ExposureTimeMode to Manual +/// +/// 3. Wait for the first completed request that has ExposureTimeMode +/// set to Manual +/// +/// 4. Copy the value reported in ExposureTime into a new request, and +/// submit it +/// +/// 5. 
Proceed to run manual exposure time as desired +/// +/// \sa ExposureTime +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum ExposureTimeMode { + /// The exposure time will be calculated automatically and set by the + /// AE algorithm. + /// + /// If ExposureTime is set while this mode is active, it will be + /// ignored, and its value will not be retained. + /// + /// When transitioning from Manual to Auto mode, the AEGC should start + /// its adjustments based on the last set manual ExposureTime value. + Auto = 0, + /// The exposure time will not be updated by the AE algorithm. + /// + /// When transitioning from Auto to Manual mode, the last computed + /// exposure value is used until a new value is specified through the + /// ExposureTime control. If an ExposureTime value is specified in the + /// same request where the ExposureTimeMode is changed from Auto to + /// Manual, the provided ExposureTime is applied immediately. + Manual = 1, +} +impl TryFrom for ExposureTimeMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: ExposureTimeMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for ExposureTimeMode { + const ID: u32 = ControlId::ExposureTimeMode as _; +} +impl Control for ExposureTimeMode {} +/// Analogue gain value applied in the sensor device. +/// +/// The value of the control specifies the gain multiplier applied to all +/// colour channels. This value cannot be lower than 1.0. +/// +/// This control will only take effect if AnalogueGainMode is Manual. If +/// this control is set when AnalogueGainMode is Auto, the value will be +/// ignored and will not be retained. 
+/// +/// When reported in metadata, this control indicates what analogue gain +/// was used for the current request, regardless of AnalogueGainMode. +/// AnalogueGainMode will indicate the source of the analogue gain value, +/// whether it came from the AEGC algorithm or not. +/// +/// \sa ExposureTime +/// \sa AnalogueGainMode +#[derive(Debug, Clone)] +pub struct AnalogueGain(pub f32); +impl Deref for AnalogueGain { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AnalogueGain { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AnalogueGain { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AnalogueGain) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AnalogueGain { + const ID: u32 = ControlId::AnalogueGain as _; +} +impl Control for AnalogueGain {} +/// Controls the source of the analogue gain that is applied to the image +/// sensor. +/// +/// When set to Auto, the AEGC algorithm computes the analogue gain and +/// configures the image sensor accordingly. When set to Manual, the value +/// of the AnalogueGain control is used. +/// +/// When transitioning from Auto to Manual mode and no AnalogueGain control +/// is provided by the application, the last value computed by the AEGC +/// algorithm when the mode was Auto will be used. If the AnalogueGainMode +/// was never set to Auto (either because the camera started in Manual mode, +/// or Auto is not supported by the camera), the camera should use a +/// best-effort default value. +/// +/// If AnalogueGainModeManual is supported, the AnalogueGain control must +/// also be supported. 
+/// +/// For cameras where we have control over the ISP, both ExposureTimeMode +/// and AnalogueGainMode are expected to support manual mode, and both +/// controls (as well as ExposureTimeMode and AnalogueGain) are expected to +/// be present. If the camera also has an AEGC implementation, both +/// ExposureTimeMode and AnalogueGainMode shall support both manual and +/// auto mode. If auto mode is available, it shall be the default mode. +/// These rules do not apply to black box cameras such as UVC cameras, +/// where the available gain and exposure modes are completely dependent on +/// what the hardware exposes. +/// +/// The same procedure described for performing flickerless transitions in +/// the ExposureTimeMode control documentation can be applied to analogue +/// gain. +/// +/// \sa ExposureTimeMode +/// \sa AnalogueGain +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AnalogueGainMode { + /// The analogue gain will be calculated automatically and set by the + /// AEGC algorithm. + /// + /// If AnalogueGain is set while this mode is active, it will be + /// ignored, and it will also not be retained. + /// + /// When transitioning from Manual to Auto mode, the AEGC should start + /// its adjustments based on the last set manual AnalogueGain value. + Auto = 0, + /// The analogue gain will not be updated by the AEGC algorithm. + /// + /// When transitioning from Auto to Manual mode, the last computed + /// gain value is used until a new value is specified through the + /// AnalogueGain control. If an AnalogueGain value is specified in the + /// same request where the AnalogueGainMode is changed from Auto to + /// Manual, the provided AnalogueGain is applied immediately. + Manual = 1, +} +impl TryFrom for AnalogueGainMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AnalogueGainMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AnalogueGainMode { + const ID: u32 = ControlId::AnalogueGainMode as _; +} +impl Control for AnalogueGainMode {} +/// Set the flicker avoidance mode for AGC/AEC. +/// +/// The flicker mode determines whether, and how, the AGC/AEC algorithm +/// attempts to hide flicker effects caused by the duty cycle of artificial +/// lighting. +/// +/// Although implementation dependent, many algorithms for "flicker +/// avoidance" work by restricting this exposure time to integer multiples +/// of the cycle period, wherever possible. +/// +/// Implementations may not support all of the flicker modes listed below. +/// +/// By default the system will start in FlickerAuto mode if this is +/// supported, otherwise the flicker mode will be set to FlickerOff. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AeFlickerMode { + /// No flicker avoidance is performed. + FlickerOff = 0, + /// Manual flicker avoidance. + /// + /// Suppress flicker effects caused by lighting running with a period + /// specified by the AeFlickerPeriod control. + /// \sa AeFlickerPeriod + FlickerManual = 1, + /// Automatic flicker period detection and avoidance. + /// + /// The system will automatically determine the most likely value of + /// flicker period, and avoid flicker of this frequency. Once flicker + /// is being corrected, it is implementation dependent whether the + /// system is still able to detect a change in the flicker period. + /// \sa AeFlickerDetected + FlickerAuto = 2, +} +impl TryFrom for AeFlickerMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AeFlickerMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AeFlickerMode { + const ID: u32 = ControlId::AeFlickerMode as _; +} +impl Control for AeFlickerMode {} +/// Manual flicker period in microseconds. +/// +/// This value sets the current flicker period to avoid. It is used when +/// AeFlickerMode is set to FlickerManual. +/// +/// To cancel 50Hz mains flicker, this should be set to 10000 (corresponding +/// to 100Hz), or 8333 (120Hz) for 60Hz mains. +/// +/// Setting the mode to FlickerManual when no AeFlickerPeriod has ever been +/// set means that no flicker cancellation occurs (until the value of this +/// control is updated). +/// +/// Switching to modes other than FlickerManual has no effect on the +/// value of the AeFlickerPeriod control. +/// +/// \sa AeFlickerMode +#[derive(Debug, Clone)] +pub struct AeFlickerPeriod(pub i32); +impl Deref for AeFlickerPeriod { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AeFlickerPeriod { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AeFlickerPeriod { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AeFlickerPeriod) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AeFlickerPeriod { + const ID: u32 = ControlId::AeFlickerPeriod as _; +} +impl Control for AeFlickerPeriod {} +/// Flicker period detected in microseconds. +/// +/// The value reported here indicates the currently detected flicker +/// period, or zero if no flicker at all is detected. +/// +/// When AeFlickerMode is set to FlickerAuto, there may be a period during +/// which the value reported here remains zero. 
Once a non-zero value is +/// reported, then this is the flicker period that has been detected and is +/// now being cancelled. +/// +/// In the case of 50Hz mains flicker, the value would be 10000 +/// (corresponding to 100Hz), or 8333 (120Hz) for 60Hz mains flicker. +/// +/// It is implementation dependent whether the system can continue to detect +/// flicker of different periods when another frequency is already being +/// cancelled. +/// +/// \sa AeFlickerMode +#[derive(Debug, Clone)] +pub struct AeFlickerDetected(pub i32); +impl Deref for AeFlickerDetected { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AeFlickerDetected { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AeFlickerDetected { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AeFlickerDetected) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AeFlickerDetected { + const ID: u32 = ControlId::AeFlickerDetected as _; +} +impl Control for AeFlickerDetected {} +/// Specify a fixed brightness parameter. +/// +/// Positive values (up to 1.0) produce brighter images; negative values +/// (up to -1.0) produce darker images and 0.0 leaves pixels unchanged. 
+#[derive(Debug, Clone)] +pub struct Brightness(pub f32); +impl Deref for Brightness { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Brightness { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Brightness { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Brightness) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Brightness { + const ID: u32 = ControlId::Brightness as _; +} +impl Control for Brightness {} +/// Specify a fixed contrast parameter. +/// +/// Normal contrast is given by the value 1.0; larger values produce images +/// with more contrast. +#[derive(Debug, Clone)] +pub struct Contrast(pub f32); +impl Deref for Contrast { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Contrast { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Contrast { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Contrast) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Contrast { + const ID: u32 = ControlId::Contrast as _; +} +impl Control for Contrast {} +/// Report an estimate of the current illuminance level in lux. +/// +/// The Lux control can only be returned in metadata. 
+#[derive(Debug, Clone)] +pub struct Lux(pub f32); +impl Deref for Lux { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Lux { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Lux { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Lux) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Lux { + const ID: u32 = ControlId::Lux as _; +} +impl Control for Lux {} +/// Enable or disable the AWB. +/// +/// When AWB is enabled, the algorithm estimates the colour temperature of +/// the scene and computes colour gains and the colour correction matrix +/// automatically. The computed colour temperature, gains and correction +/// matrix are reported in metadata. The corresponding controls are ignored +/// if set in a request. +/// +/// When AWB is disabled, the colour temperature, gains and correction +/// matrix are not updated automatically and can be set manually in +/// requests. +/// +/// \sa ColourCorrectionMatrix +/// \sa ColourGains +/// \sa ColourTemperature +#[derive(Debug, Clone)] +pub struct AwbEnable(pub bool); +impl Deref for AwbEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AwbEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AwbEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AwbEnable) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AwbEnable { + const ID: u32 = ControlId::AwbEnable as _; +} +impl Control for AwbEnable {} +/// Specify the range of illuminants to use for the AWB algorithm. +/// +/// The modes supported are platform specific, and not all modes may be +/// supported. 
+#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AwbMode { + /// Search over the whole colour temperature range. + AwbAuto = 0, + /// Incandescent AWB lamp mode. + AwbIncandescent = 1, + /// Tungsten AWB lamp mode. + AwbTungsten = 2, + /// Fluorescent AWB lamp mode. + AwbFluorescent = 3, + /// Indoor AWB lighting mode. + AwbIndoor = 4, + /// Daylight AWB lighting mode. + AwbDaylight = 5, + /// Cloudy AWB lighting mode. + AwbCloudy = 6, + /// Custom AWB mode. + AwbCustom = 7, +} +impl TryFrom for AwbMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AwbMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AwbMode { + const ID: u32 = ControlId::AwbMode as _; +} +impl Control for AwbMode {} +/// Report the lock status of a running AWB algorithm. +/// +/// If the AWB algorithm is locked the value shall be set to true, if it's +/// converging it shall be set to false. If the AWB algorithm is not +/// running the control shall not be present in the metadata control list. +/// +/// \sa AwbEnable +#[derive(Debug, Clone)] +pub struct AwbLocked(pub bool); +impl Deref for AwbLocked { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AwbLocked { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AwbLocked { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AwbLocked) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AwbLocked { + const ID: u32 = ControlId::AwbLocked as _; +} +impl Control for AwbLocked {} +/// Pair of gain values for the Red and Blue colour channels, in that +/// order. 
+/// +/// ColourGains can only be applied in a Request when the AWB is disabled. +/// If ColourGains is set in a request but ColourTemperature is not, the +/// implementation shall calculate and set the ColourTemperature based on +/// the ColourGains. +/// +/// \sa AwbEnable +/// \sa ColourTemperature +#[derive(Debug, Clone)] +pub struct ColourGains(pub [f32; 2]); +impl Deref for ColourGains { + type Target = [f32; 2]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ColourGains { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ColourGains { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[f32; 2]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ColourGains) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ColourGains { + const ID: u32 = ControlId::ColourGains as _; +} +impl Control for ColourGains {} +/// ColourTemperature of the frame, in kelvin. +/// +/// ColourTemperature can only be applied in a Request when the AWB is +/// disabled. +/// +/// If ColourTemperature is set in a request but ColourGains is not, the +/// implementation shall calculate and set the ColourGains based on the +/// given ColourTemperature. If ColourTemperature is set (either directly, +/// or indirectly by setting ColourGains) but ColourCorrectionMatrix is not, +/// the ColourCorrectionMatrix is updated based on the ColourTemperature. +/// +/// The ColourTemperature used to process the frame is reported in metadata. 
+/// +/// \sa AwbEnable +/// \sa ColourCorrectionMatrix +/// \sa ColourGains +#[derive(Debug, Clone)] +pub struct ColourTemperature(pub i32); +impl Deref for ColourTemperature { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ColourTemperature { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ColourTemperature { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ColourTemperature) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ColourTemperature { + const ID: u32 = ControlId::ColourTemperature as _; +} +impl Control for ColourTemperature {} +/// Specify a fixed saturation parameter. +/// +/// Normal saturation is given by the value 1.0; larger values produce more +/// saturated colours; 0.0 produces a greyscale image. +#[derive(Debug, Clone)] +pub struct Saturation(pub f32); +impl Deref for Saturation { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Saturation { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Saturation { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Saturation) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Saturation { + const ID: u32 = ControlId::Saturation as _; +} +impl Control for Saturation {} +/// Reports the sensor black levels used for processing a frame. +/// +/// The values are in the order R, Gr, Gb, B. They are returned as numbers +/// out of a 16-bit pixel range (as if pixels ranged from 0 to 65535). The +/// SensorBlackLevels control can only be returned in metadata. 
+#[derive(Debug, Clone)] +pub struct SensorBlackLevels(pub [i32; 4]); +impl Deref for SensorBlackLevels { + type Target = [i32; 4]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorBlackLevels { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorBlackLevels { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[i32; 4]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorBlackLevels) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorBlackLevels { + const ID: u32 = ControlId::SensorBlackLevels as _; +} +impl Control for SensorBlackLevels {} +/// Intensity of the sharpening applied to the image. +/// +/// A value of 0.0 means no sharpening. The minimum value means +/// minimal sharpening, and shall be 0.0 unless the camera can't +/// disable sharpening completely. The default value shall give a +/// "reasonable" level of sharpening, suitable for most use cases. +/// The maximum value may apply extremely high levels of sharpening, +/// higher than anyone could reasonably want. Negative values are +/// not allowed. Note also that sharpening is not applied to raw +/// streams. +#[derive(Debug, Clone)] +pub struct Sharpness(pub f32); +impl Deref for Sharpness { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Sharpness { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Sharpness { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Sharpness) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Sharpness { + const ID: u32 = ControlId::Sharpness as _; +} +impl Control for Sharpness {} +/// Reports a Figure of Merit (FoM) to indicate how in-focus the frame is. 
+/// +/// A larger FocusFoM value indicates a more in-focus frame. This singular +/// value may be based on a combination of statistics gathered from +/// multiple focus regions within an image. The number of focus regions and +/// method of combination is platform dependent. In this respect, it is not +/// necessarily aimed at providing a way to implement a focus algorithm by +/// the application, rather an indication of how in-focus a frame is. +#[derive(Debug, Clone)] +pub struct FocusFoM(pub i32); +impl Deref for FocusFoM { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for FocusFoM { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for FocusFoM { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: FocusFoM) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for FocusFoM { + const ID: u32 = ControlId::FocusFoM as _; +} +impl Control for FocusFoM {} +/// The 3x3 matrix that converts camera RGB to sRGB within the imaging +/// pipeline. +/// +/// This should describe the matrix that is used after pixels have been +/// white-balanced, but before any gamma transformation. The 3x3 matrix is +/// stored in conventional reading order in an array of 9 floating point +/// values. +/// +/// ColourCorrectionMatrix can only be applied in a Request when the AWB is +/// disabled. 
+/// +/// \sa AwbEnable +/// \sa ColourTemperature +#[derive(Debug, Clone)] +pub struct ColourCorrectionMatrix(pub [[f32; 3]; 3]); +impl Deref for ColourCorrectionMatrix { + type Target = [[f32; 3]; 3]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ColourCorrectionMatrix { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ColourCorrectionMatrix { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[[f32; 3]; 3]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ColourCorrectionMatrix) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ColourCorrectionMatrix { + const ID: u32 = ControlId::ColourCorrectionMatrix as _; +} +impl Control for ColourCorrectionMatrix {} +/// Sets the image portion that will be scaled to form the whole of +/// the final output image. +/// +/// The (x,y) location of this rectangle is relative to the +/// PixelArrayActiveAreas that is being used. The units remain native +/// sensor pixels, even if the sensor is being used in a binning or +/// skipping mode. +/// +/// This control is only present when the pipeline supports scaling. Its +/// maximum valid value is given by the properties::ScalerCropMaximum +/// property, and the two can be used to implement digital zoom. 
+#[derive(Debug, Clone)] +pub struct ScalerCrop(pub Rectangle); +impl Deref for ScalerCrop { + type Target = Rectangle; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ScalerCrop { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ScalerCrop { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ScalerCrop) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ScalerCrop { + const ID: u32 = ControlId::ScalerCrop as _; +} +impl Control for ScalerCrop {} +/// Digital gain value applied during the processing steps applied +/// to the image as captured from the sensor. +/// +/// The global digital gain factor is applied to all the colour channels +/// of the RAW image. Different pipeline models are free to +/// specify how the global gain factor applies to each separate +/// channel. +/// +/// If an imaging pipeline applies digital gain in distinct +/// processing steps, this value indicates their total sum. +/// Pipelines are free to decide how to adjust each processing +/// step to respect the received gain factor and shall report +/// their total value in the request metadata. 
+#[derive(Debug, Clone)] +pub struct DigitalGain(pub f32); +impl Deref for DigitalGain { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for DigitalGain { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for DigitalGain { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: DigitalGain) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for DigitalGain { + const ID: u32 = ControlId::DigitalGain as _; +} +impl Control for DigitalGain {} +/// The instantaneous frame duration from start of frame exposure to start +/// of next exposure, expressed in microseconds. +/// +/// This control is meant to be returned in metadata. +#[derive(Debug, Clone)] +pub struct FrameDuration(pub i64); +impl Deref for FrameDuration { + type Target = i64; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for FrameDuration { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for FrameDuration { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: FrameDuration) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for FrameDuration { + const ID: u32 = ControlId::FrameDuration as _; +} +impl Control for FrameDuration {} +/// The minimum and maximum (in that order) frame duration, expressed in +/// microseconds. +/// +/// When provided by applications, the control specifies the sensor frame +/// duration interval the pipeline has to use. This limits the largest +/// exposure time the sensor can use. For example, if a maximum frame +/// duration of 33ms is requested (corresponding to 30 frames per second), +/// the sensor will not be able to raise the exposure time above 33ms. 
+/// A fixed frame duration is achieved by setting the minimum and maximum +/// values to be the same. Setting both values to 0 reverts to using the +/// camera defaults. +/// +/// The maximum frame duration provides the absolute limit to the exposure +/// time computed by the AE algorithm and it overrides any exposure mode +/// setting specified with controls::AeExposureMode. Similarly, when a +/// manual exposure time is set through controls::ExposureTime, it also +/// gets clipped to the limits set by this control. When reported in +/// metadata, the control expresses the minimum and maximum frame durations +/// used after being clipped to the sensor provided frame duration limits. +/// +/// \sa AeExposureMode +/// \sa ExposureTime +/// +/// \todo Define how to calculate the capture frame rate by +/// defining controls to report additional delays introduced by +/// the capture pipeline or post-processing stages (ie JPEG +/// conversion, frame scaling). +/// +/// \todo Provide an explicit definition of default control values, for +/// this and all other controls. +#[derive(Debug, Clone)] +pub struct FrameDurationLimits(pub [i64; 2]); +impl Deref for FrameDurationLimits { + type Target = [i64; 2]; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for FrameDurationLimits { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for FrameDurationLimits { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(<[i64; 2]>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: FrameDurationLimits) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for FrameDurationLimits { + const ID: u32 = ControlId::FrameDurationLimits as _; +} +impl Control for FrameDurationLimits {} +/// Temperature measure from the camera sensor in Celsius. +/// +/// This value is typically obtained by a thermal sensor present on-die or +/// in the camera module. 
The range of reported temperatures is device +/// dependent. +/// +/// The SensorTemperature control will only be returned in metadata if a +/// thermal sensor is present. +#[derive(Debug, Clone)] +pub struct SensorTemperature(pub f32); +impl Deref for SensorTemperature { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorTemperature { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorTemperature { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorTemperature) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorTemperature { + const ID: u32 = ControlId::SensorTemperature as _; +} +impl Control for SensorTemperature {} +/// The time when the first row of the image sensor active array is exposed. +/// +/// The timestamp, expressed in nanoseconds, represents a monotonically +/// increasing counter since the system boot time, as defined by the +/// Linux-specific CLOCK_BOOTTIME clock id. +/// +/// The SensorTimestamp control can only be returned in metadata. +/// +/// \todo Define how the sensor timestamp has to be used in the reprocessing +/// use case. 
+#[derive(Debug, Clone)] +pub struct SensorTimestamp(pub i64); +impl Deref for SensorTimestamp { + type Target = i64; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorTimestamp { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorTimestamp { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorTimestamp) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorTimestamp { + const ID: u32 = ControlId::SensorTimestamp as _; +} +impl Control for SensorTimestamp {} +/// The mode of the AF (autofocus) algorithm. +/// +/// An implementation may choose not to implement all the modes. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfMode { + /// The AF algorithm is in manual mode. + /// + /// In this mode it will never perform any action nor move the lens of + /// its own accord, but an application can specify the desired lens + /// position using the LensPosition control. The AfState will always + /// report AfStateIdle. + /// + /// If the camera is started in AfModeManual, it will move the focus + /// lens to the position specified by the LensPosition control. + /// + /// This mode is the recommended default value for the AfMode control. + /// External cameras (as reported by the Location property set to + /// CameraLocationExternal) may use a different default value. + Manual = 0, + /// The AF algorithm is in auto mode. + /// + /// In this mode the algorithm will never move the lens or change state + /// unless the AfTrigger control is used. The AfTrigger control can be + /// used to initiate a focus scan, the results of which will be + /// reported by AfState. 
+ /// + /// If the autofocus algorithm is moved from AfModeAuto to another mode + /// while a scan is in progress, the scan is cancelled immediately, + /// without waiting for the scan to finish. + /// + /// When first entering this mode the AfState will report AfStateIdle. + /// When a trigger control is sent, AfState will report AfStateScanning + /// for a period before spontaneously changing to AfStateFocused or + /// AfStateFailed, depending on the outcome of the scan. It will remain + /// in this state until another scan is initiated by the AfTrigger + /// control. If a scan is cancelled (without changing to another mode), + /// AfState will return to AfStateIdle. + Auto = 1, + /// The AF algorithm is in continuous mode. + /// + /// In this mode the lens can re-start a scan spontaneously at any + /// moment, without any user intervention. The AfState still reports + /// whether the algorithm is currently scanning or not, though the + /// application has no ability to initiate or cancel scans, nor to move + /// the lens for itself. + /// + /// However, applications can pause the AF algorithm from continuously + /// scanning by using the AfPause control. This allows video or still + /// images to be captured whilst guaranteeing that the focus is fixed. + /// + /// When set to AfModeContinuous, the system will immediately initiate a + /// scan so AfState will report AfStateScanning, and will settle on one + /// of AfStateFocused or AfStateFailed, depending on the scan result. + Continuous = 2, +} +impl TryFrom for AfMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfMode { + const ID: u32 = ControlId::AfMode as _; +} +impl Control for AfMode {} +/// The range of focus distances that is scanned. +/// +/// An implementation may choose not to implement all the options here. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfRange { + /// A wide range of focus distances is scanned. + /// + /// Scanned distances cover all the way from infinity down to close + /// distances, though depending on the implementation, possibly not + /// including the very closest macro positions. + Normal = 0, + /// Only close distances are scanned. + Macro = 1, + /// The full range of focus distances is scanned. + /// + /// This range is similar to AfRangeNormal but includes the very + /// closest macro positions. + Full = 2, +} +impl TryFrom for AfRange { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfRange) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfRange { + const ID: u32 = ControlId::AfRange as _; +} +impl Control for AfRange {} +/// Determine whether the AF is to move the lens as quickly as possible or +/// more steadily. +/// +/// For example, during video recording it may be desirable not to move the +/// lens too abruptly, but when in a preview mode (waiting for a still +/// capture) it may be helpful to move the lens as quickly as is reasonably +/// possible. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfSpeed { + /// Move the lens at its usual speed. + Normal = 0, + /// Move the lens more quickly. 
+ Fast = 1, +} +impl TryFrom for AfSpeed { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfSpeed) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfSpeed { + const ID: u32 = ControlId::AfSpeed as _; +} +impl Control for AfSpeed {} +/// The parts of the image used by the AF algorithm to measure focus. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfMetering { + /// Let the AF algorithm decide for itself where it will measure focus. + Auto = 0, + /// Use the rectangles defined by the AfWindows control to measure focus. + /// + /// If no windows are specified the behaviour is platform dependent. + Windows = 1, +} +impl TryFrom for AfMetering { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfMetering) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfMetering { + const ID: u32 = ControlId::AfMetering as _; +} +impl Control for AfMetering {} +/// The focus windows used by the AF algorithm when AfMetering is set to +/// AfMeteringWindows. +/// +/// The units used are pixels within the rectangle returned by the +/// ScalerCropMaximum property. +/// +/// In order to be activated, a rectangle must be programmed with non-zero +/// width and height. Internally, these rectangles are intersected with the +/// ScalerCropMaximum rectangle. If the window becomes empty after this +/// operation, then the window is ignored. If all the windows end up being +/// ignored, then the behaviour is platform dependent. 
+/// +/// On platforms that support the ScalerCrop control (for implementing +/// digital zoom, for example), no automatic recalculation or adjustment of +/// AF windows is performed internally if the ScalerCrop is changed. If any +/// window lies outside the output image after the scaler crop has been +/// applied, it is up to the application to recalculate them. +/// +/// The details of how the windows are used are platform dependent. We note +/// that when there is more than one AF window, a typical implementation +/// might find the optimal focus position for each one and finally select +/// the window where the focal distance for the objects shown in that part +/// of the image are closest to the camera. +#[derive(Debug, Clone)] +pub struct AfWindows(pub Vec); +impl Deref for AfWindows { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for AfWindows { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for AfWindows { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: AfWindows) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for AfWindows { + const ID: u32 = ControlId::AfWindows as _; +} +impl Control for AfWindows {} +/// Start an autofocus scan. +/// +/// This control starts an autofocus scan when AfMode is set to AfModeAuto, +/// and is ignored if AfMode is set to AfModeManual or AfModeContinuous. It +/// can also be used to terminate a scan early. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfTrigger { + /// Start an AF scan. + /// + /// Setting the control to AfTriggerStart is ignored if a scan is in + /// progress. + Start = 0, + /// Cancel an AF scan. + /// + /// This does not cause the lens to move anywhere else. Ignored if no + /// scan is in progress. 
+ Cancel = 1, +} +impl TryFrom for AfTrigger { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfTrigger) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfTrigger { + const ID: u32 = ControlId::AfTrigger as _; +} +impl Control for AfTrigger {} +/// Pause lens movements when in continuous autofocus mode. +/// +/// This control has no effect except when in continuous autofocus mode +/// (AfModeContinuous). It can be used to pause any lens movements while +/// (for example) images are captured. The algorithm remains inactive +/// until it is instructed to resume. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfPause { + /// Pause the continuous autofocus algorithm immediately. + /// + /// The autofocus algorithm is paused whether or not any kind of scan + /// is underway. AfPauseState will subsequently report + /// AfPauseStatePaused. AfState may report any of AfStateScanning, + /// AfStateFocused or AfStateFailed, depending on the algorithm's state + /// when it received this control. + Immediate = 0, + /// Pause the continuous autofocus algorithm at the end of the scan. + /// + /// This is similar to AfPauseImmediate, and if the AfState is + /// currently reporting AfStateFocused or AfStateFailed it will remain + /// in that state and AfPauseState will report AfPauseStatePaused. + /// + /// However, if the algorithm is scanning (AfStateScanning), + /// AfPauseState will report AfPauseStatePausing until the scan is + /// finished, at which point AfState will report one of AfStateFocused + /// or AfStateFailed, and AfPauseState will change to + /// AfPauseStatePaused. + Deferred = 1, + /// Resume continuous autofocus operation. 
+ /// + /// The algorithm starts again from exactly where it left off, and + /// AfPauseState will report AfPauseStateRunning. + Resume = 2, +} +impl TryFrom for AfPause { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfPause) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfPause { + const ID: u32 = ControlId::AfPause as _; +} +impl Control for AfPause {} +/// Set and report the focus lens position. +/// +/// This control instructs the lens to move to a particular position and +/// also reports back the position of the lens for each frame. +/// +/// The LensPosition control is ignored unless the AfMode is set to +/// AfModeManual, though the value is reported back unconditionally in all +/// modes. +/// +/// This value, which is generally a non-integer, is the reciprocal of the +/// focal distance in metres, also known as dioptres. That is, to set a +/// focal distance D, the lens position LP is given by +/// +/// \f$LP = \frac{1\mathrm{m}}{D}\f$ +/// +/// For example: +/// +/// - 0 moves the lens to infinity. +/// - 0.5 moves the lens to focus on objects 2m away. +/// - 2 moves the lens to focus on objects 50cm away. +/// - And larger values will focus the lens closer. +/// +/// The default value of the control should indicate a good general +/// position for the lens, often corresponding to the hyperfocal distance +/// (the closest position for which objects at infinity are still +/// acceptably sharp). The minimum will often be zero (meaning infinity), +/// and the maximum value defines the closest focus position. +/// +/// \todo Define a property to report the Hyperfocal distance of calibrated +/// lenses. 
+#[derive(Debug, Clone)] +pub struct LensPosition(pub f32); +impl Deref for LensPosition { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for LensPosition { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for LensPosition { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: LensPosition) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for LensPosition { + const ID: u32 = ControlId::LensPosition as _; +} +impl Control for LensPosition {} +/// The current state of the AF algorithm. +/// +/// This control reports the current state of the AF algorithm in +/// conjunction with the reported AfMode value and (in continuous AF mode) +/// the AfPauseState value. The possible state changes are described below, +/// though we note the following state transitions that occur when the +/// AfMode is changed. +/// +/// If the AfMode is set to AfModeManual, then the AfState will always +/// report AfStateIdle (even if the lens is subsequently moved). Changing +/// to the AfModeManual state does not initiate any lens movement. +/// +/// If the AfMode is set to AfModeAuto then the AfState will report +/// AfStateIdle. However, if AfModeAuto and AfTriggerStart are sent +/// together then AfState will omit AfStateIdle and move straight to +/// AfStateScanning (and start a scan). +/// +/// If the AfMode is set to AfModeContinuous then the AfState will +/// initially report AfStateScanning. +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfState { + /// The AF algorithm is in manual mode (AfModeManual) or in auto mode + /// (AfModeAuto) and a scan has not yet been triggered, or an + /// in-progress scan was cancelled. 
+ Idle = 0, + /// The AF algorithm is in auto mode (AfModeAuto), and a scan has been + /// started using the AfTrigger control. + /// + /// The scan can be cancelled by sending AfTriggerCancel at which point + /// the algorithm will either move back to AfStateIdle or, if the scan + /// actually completes before the cancel request is processed, to one + /// of AfStateFocused or AfStateFailed. + /// + /// Alternatively the AF algorithm could be in continuous mode + /// (AfModeContinuous) at which point it may enter this state + /// spontaneously whenever it determines that a rescan is needed. + Scanning = 1, + /// The AF algorithm is in auto (AfModeAuto) or continuous + /// (AfModeContinuous) mode and a scan has completed with the result + /// that the algorithm believes the image is now in focus. + Focused = 2, + /// The AF algorithm is in auto (AfModeAuto) or continuous + /// (AfModeContinuous) mode and a scan has completed with the result + /// that the algorithm did not find a good focus position. + Failed = 3, +} +impl TryFrom for AfState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfState) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfState { + const ID: u32 = ControlId::AfState as _; +} +impl Control for AfState {} +/// Report whether the autofocus is currently running, paused or pausing. +/// +/// This control is only applicable in continuous (AfModeContinuous) mode, +/// and reports whether the algorithm is currently running, paused or +/// pausing (that is, will pause as soon as any in-progress scan +/// completes). +/// +/// Any change to AfMode will cause AfPauseStateRunning to be reported. 
+#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AfPauseState { + /// Continuous AF is running and the algorithm may restart a scan + /// spontaneously. + Running = 0, + /// Continuous AF has been sent an AfPauseDeferred control, and will + /// pause as soon as any in-progress scan completes. + /// + /// When the scan completes, the AfPauseState control will report + /// AfPauseStatePaused. No new scans will be start spontaneously until + /// the AfPauseResume control is sent. + Pausing = 1, + /// Continuous AF is paused. + /// + /// No further state changes or lens movements will occur until the + /// AfPauseResume control is sent. + Paused = 2, +} +impl TryFrom for AfPauseState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: AfPauseState) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for AfPauseState { + const ID: u32 = ControlId::AfPauseState as _; +} +impl Control for AfPauseState {} +/// Set the mode to be used for High Dynamic Range (HDR) imaging. +/// +/// HDR techniques typically include multiple exposure, image fusion and +/// tone mapping techniques to improve the dynamic range of the resulting +/// images. +/// +/// When using an HDR mode, images are captured with different sets of AGC +/// settings called HDR channels. Channels indicate in particular the type +/// of exposure (short, medium or long) used to capture the raw image, +/// before fusion. Each HDR image is tagged with the corresponding channel +/// using the HdrChannel control. +/// +/// \sa HdrChannel +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum HdrMode { + /// HDR is disabled. + /// + /// Metadata for this frame will not include the HdrChannel control. 
+ Off = 0, + /// Multiple exposures will be generated in an alternating fashion. + /// + /// The multiple exposures will not be merged together and will be + /// returned to the application as they are. Each image will be tagged + /// with the correct HDR channel, indicating what kind of exposure it + /// is. The tag should be the same as in the HdrModeMultiExposure case. + /// + /// The expectation is that an application using this mode would merge + /// the frames to create HDR images for itself if it requires them. + MultiExposureUnmerged = 1, + /// Multiple exposures will be generated and merged to create HDR + /// images. + /// + /// Each image will be tagged with the HDR channel (long, medium or + /// short) that arrived and which caused this image to be output. + /// + /// Systems that use two channels for HDR will return images tagged + /// alternately as the short and long channel. Systems that use three + /// channels for HDR will cycle through the short, medium and long + /// channel before repeating. + MultiExposure = 2, + /// Multiple frames all at a single exposure will be used to create HDR + /// images. + /// + /// These images should be reported as all corresponding to the HDR + /// short channel. + SingleExposure = 3, + /// Multiple frames will be combined to produce "night mode" images. + /// + /// It is up to the implementation exactly which HDR channels it uses, + /// and the images will all be tagged accordingly with the correct HDR + /// channel information. + Night = 4, +} +impl TryFrom for HdrMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: HdrMode) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for HdrMode { + const ID: u32 = ControlId::HdrMode as _; +} +impl Control for HdrMode {} +/// The HDR channel used to capture the frame. +/// +/// This value is reported back to the application so that it can discover +/// whether this capture corresponds to the short or long exposure image +/// (or any other image used by the HDR procedure). An application can +/// monitor the HDR channel to discover when the differently exposed images +/// have arrived. +/// +/// This metadata is only available when an HDR mode has been enabled. +/// +/// \sa HdrMode +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum HdrChannel { + /// This image does not correspond to any of the captures used to create + /// an HDR image. + None = 0, + /// This is a short exposure image. + Short = 1, + /// This is a medium exposure image. + Medium = 2, + /// This is a long exposure image. + Long = 3, +} +impl TryFrom for HdrChannel { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: HdrChannel) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for HdrChannel { + const ID: u32 = ControlId::HdrChannel as _; +} +impl Control for HdrChannel {} +/// Specify a fixed gamma value. +/// +/// The default gamma value must be 2.2 which closely mimics sRGB gamma. +/// Note that this is camera gamma, so it is applied as 1.0/gamma. 
+#[derive(Debug, Clone)] +pub struct Gamma(pub f32); +impl Deref for Gamma { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Gamma { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Gamma { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Gamma) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Gamma { + const ID: u32 = ControlId::Gamma as _; +} +impl Control for Gamma {} +/// Enable or disable the debug metadata. +#[derive(Debug, Clone)] +pub struct DebugMetadataEnable(pub bool); +impl Deref for DebugMetadataEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for DebugMetadataEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for DebugMetadataEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: DebugMetadataEnable) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for DebugMetadataEnable { + const ID: u32 = ControlId::DebugMetadataEnable as _; +} +impl Control for DebugMetadataEnable {} +/// Control for AE metering trigger. Currently identical to +/// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER. +/// +/// Whether the camera device will trigger a precapture metering sequence +/// when it processes this request. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AePrecaptureTrigger { + /// The trigger is idle. + Idle = 0, + /// The pre-capture AE metering is started by the camera. + Start = 1, + /// The camera will cancel any active or completed metering sequence. + /// The AE algorithm is reset to its initial state. 
+ Cancel = 2, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for AePrecaptureTrigger { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: AePrecaptureTrigger) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for AePrecaptureTrigger { + const ID: u32 = ControlId::AePrecaptureTrigger as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for AePrecaptureTrigger {} +/// Control to select the noise reduction algorithm mode. Currently +/// identical to ANDROID_NOISE_REDUCTION_MODE. +/// +/// Mode of operation for the noise reduction algorithm. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum NoiseReductionMode { + /// No noise reduction is applied + Off = 0, + /// Noise reduction is applied without reducing the frame rate. + Fast = 1, + /// High quality noise reduction at the expense of frame rate. + HighQuality = 2, + /// Minimal noise reduction is applied without reducing the frame rate. + Minimal = 3, + /// Noise reduction is applied at different levels to different streams. + ZSL = 4, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for NoiseReductionMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: NoiseReductionMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for NoiseReductionMode { + const ID: u32 = ControlId::NoiseReductionMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for NoiseReductionMode {} +/// Control to select the color correction aberration mode. Currently +/// identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE. +/// +/// Mode of operation for the chromatic aberration correction algorithm. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum ColorCorrectionAberrationMode { + /// No aberration correction is applied. + ColorCorrectionAberrationOff = 0, + /// Aberration correction will not slow down the frame rate. + ColorCorrectionAberrationFast = 1, + /// High quality aberration correction which might reduce the frame + /// rate. + ColorCorrectionAberrationHighQuality = 2, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for ColorCorrectionAberrationMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: ColorCorrectionAberrationMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for ColorCorrectionAberrationMode { + const ID: u32 = ControlId::ColorCorrectionAberrationMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for ColorCorrectionAberrationMode {} +/// Control to report the current AWB algorithm state. Currently identical +/// to ANDROID_CONTROL_AWB_STATE. +/// +/// Current state of the AWB algorithm. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum AwbState { + /// The AWB algorithm is inactive. + Inactive = 0, + /// The AWB algorithm has not converged yet. + Searching = 1, + /// The AWB algorithm has converged. + AwbConverged = 2, + /// The AWB algorithm is locked. + AwbLocked = 3, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for AwbState { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: AwbState) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for AwbState { + const ID: u32 = ControlId::AwbState as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for AwbState {} +/// Control to report the time between the start of exposure of the first +/// row and the start of exposure of the last row. 
Currently identical to +/// ANDROID_SENSOR_ROLLING_SHUTTER_SKEW +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct SensorRollingShutterSkew(pub i64); +#[cfg(feature = "vendor_draft")] +impl Deref for SensorRollingShutterSkew { + type Target = i64; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for SensorRollingShutterSkew { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for SensorRollingShutterSkew { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: SensorRollingShutterSkew) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for SensorRollingShutterSkew { + const ID: u32 = ControlId::SensorRollingShutterSkew as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for SensorRollingShutterSkew {} +/// Control to report if the lens shading map is available. Currently +/// identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum LensShadingMapMode { + /// No lens shading map mode is available. + Off = 0, + /// The lens shading map mode is available. + On = 1, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for LensShadingMapMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: LensShadingMapMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for LensShadingMapMode { + const ID: u32 = ControlId::LensShadingMapMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for LensShadingMapMode {} +/// Specifies the number of pipeline stages the frame went through from when +/// it was exposed to when the final completed result was available to the +/// framework. Always less than or equal to PipelineMaxDepth. Currently +/// identical to ANDROID_REQUEST_PIPELINE_DEPTH. +/// +/// The typical value for this control is 3 as a frame is first exposed, +/// captured and then processed in a single pass through the ISP. Any +/// additional processing step performed after the ISP pass (in example face +/// detection, additional format conversions etc) count as an additional +/// pipeline stage. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct PipelineDepth(pub i32); +#[cfg(feature = "vendor_draft")] +impl Deref for PipelineDepth { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for PipelineDepth { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for PipelineDepth { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: PipelineDepth) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for PipelineDepth { + const ID: u32 = ControlId::PipelineDepth as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for PipelineDepth {} +/// The maximum number of frames that can occur after a request (different +/// than the previous) has been submitted, and before the result's state +/// becomes synchronized. A value of -1 indicates unknown latency, and 0 +/// indicates per-frame control. Currently identical to +/// ANDROID_SYNC_MAX_LATENCY. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct MaxLatency(pub i32); +#[cfg(feature = "vendor_draft")] +impl Deref for MaxLatency { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for MaxLatency { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for MaxLatency { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: MaxLatency) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for MaxLatency { + const ID: u32 = ControlId::MaxLatency as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for MaxLatency {} +/// Control to select the test pattern mode. Currently identical to +/// ANDROID_SENSOR_TEST_PATTERN_MODE. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum TestPatternMode { + /// No test pattern mode is used. The camera device returns frames from + /// the image sensor. + Off = 0, + /// Each pixel in [R, G_even, G_odd, B] is replaced by its respective + /// color channel provided in test pattern data. + /// \todo Add control for test pattern data. + SolidColor = 1, + /// All pixel data is replaced with an 8-bar color pattern. The vertical + /// bars (left-to-right) are as follows; white, yellow, cyan, green, + /// magenta, red, blue and black. Each bar should take up 1/8 of the + /// sensor pixel array width. When this is not possible, the bar size + /// should be rounded down to the nearest integer and the pattern can + /// repeat on the right side. Each bar's height must always take up the + /// full sensor pixel array height. 
+ ColorBars = 2, + /// The test pattern is similar to TestPatternModeColorBars, + /// except that each bar should start at its specified color at the top + /// and fade to gray at the bottom. Furthermore each bar is further + /// subdevided into a left and right half. The left half should have a + /// smooth gradient, and the right half should have a quantized + /// gradient. In particular, the right half's should consist of blocks + /// of the same color for 1/16th active sensor pixel array width. The + /// least significant bits in the quantized gradient should be copied + /// from the most significant bits of the smooth gradient. The height of + /// each bar should always be a multiple of 128. When this is not the + /// case, the pattern should repeat at the bottom of the image. + ColorBarsFadeToGray = 3, + /// All pixel data is replaced by a pseudo-random sequence generated + /// from a PN9 512-bit sequence (typically implemented in hardware with + /// a linear feedback shift register). The generator should be reset at + /// the beginning of each frame, and thus each subsequent raw frame with + /// this test pattern should be exactly the same as the last. + Pn9 = 4, + /// The first custom test pattern. All custom patterns that are + /// available only on this camera device are at least this numeric + /// value. All of the custom test patterns will be static (that is the + /// raw image must not vary from frame to frame). + Custom1 = 256, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for TestPatternMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: TestPatternMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for TestPatternMode { + const ID: u32 = ControlId::TestPatternMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for TestPatternMode {} +/// Control to select the face detection mode used by the pipeline. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_DETECT_MODE. +/// +/// \sa FaceDetectFaceRectangles +/// \sa FaceDetectFaceScores +/// \sa FaceDetectFaceLandmarks +/// \sa FaceDetectFaceIds +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum FaceDetectMode { + /// Pipeline doesn't perform face detection and doesn't report any + /// control related to face detection. + Off = 0, + /// Pipeline performs face detection and reports the + /// FaceDetectFaceRectangles and FaceDetectFaceScores controls for each + /// detected face. FaceDetectFaceLandmarks and FaceDetectFaceIds are + /// optional. + Simple = 1, + /// Pipeline performs face detection and reports all the controls + /// related to face detection including FaceDetectFaceRectangles, + /// FaceDetectFaceScores, FaceDetectFaceLandmarks, and + /// FaceDeteceFaceIds for each detected face. + Full = 2, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectMode { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) 
+ .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectMode) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectMode { + const ID: u32 = ControlId::FaceDetectMode as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectMode {} +/// Boundary rectangles of the detected faces. The number of values is +/// the number of detected faces. +/// +/// The FaceDetectFaceRectangles control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_RECTANGLES. +#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceRectangles(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceRectangles { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceRectangles { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceRectangles { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceRectangles) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceRectangles { + const ID: u32 = ControlId::FaceDetectFaceRectangles as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceRectangles {} +/// Confidence score of each of the detected faces. The range of score is +/// [0, 100]. The number of values should be the number of faces reported +/// in FaceDetectFaceRectangles. +/// +/// The FaceDetectFaceScores control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_SCORES. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceScores(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceScores { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceScores { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceScores { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceScores) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceScores { + const ID: u32 = ControlId::FaceDetectFaceScores as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceScores {} +/// Array of human face landmark coordinates in format [..., left_eye_i, +/// right_eye_i, mouth_i, left_eye_i+1, ...], with i = index of face. The +/// number of values should be 3 * the number of faces reported in +/// FaceDetectFaceRectangles. +/// +/// The FaceDetectFaceLandmarks control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_LANDMARKS. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceLandmarks(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceLandmarks { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceLandmarks { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceLandmarks { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceLandmarks) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceLandmarks { + const ID: u32 = ControlId::FaceDetectFaceLandmarks as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceLandmarks {} +/// Each detected face is given a unique ID that is valid for as long as the +/// face is visible to the camera device. A face that leaves the field of +/// view and later returns may be assigned a new ID. The number of values +/// should be the number of faces reported in FaceDetectFaceRectangles. +/// +/// The FaceDetectFaceIds control can only be returned in metadata. +/// +/// Currently identical to ANDROID_STATISTICS_FACE_IDS. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone)] +pub struct FaceDetectFaceIds(pub Vec); +#[cfg(feature = "vendor_draft")] +impl Deref for FaceDetectFaceIds { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl DerefMut for FaceDetectFaceIds { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for FaceDetectFaceIds { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: FaceDetectFaceIds) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for FaceDetectFaceIds { + const ID: u32 = ControlId::FaceDetectFaceIds as _; +} +#[cfg(feature = "vendor_draft")] +impl Control for FaceDetectFaceIds {} +/// Toggles the Raspberry Pi IPA to output the hardware generated statistics. +/// +/// When this control is set to true, the IPA outputs a binary dump of the +/// hardware generated statistics through the Request metadata in the +/// Bcm2835StatsOutput control. 
+/// +/// \sa Bcm2835StatsOutput +#[cfg(feature = "vendor_rpi")] +#[derive(Debug, Clone)] +pub struct StatsOutputEnable(pub bool); +#[cfg(feature = "vendor_rpi")] +impl Deref for StatsOutputEnable { + type Target = bool; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl DerefMut for StatsOutputEnable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl TryFrom for StatsOutputEnable { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +#[cfg(feature = "vendor_rpi")] +impl From for ControlValue { + fn from(val: StatsOutputEnable) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_rpi")] +impl ControlEntry for StatsOutputEnable { + const ID: u32 = ControlId::StatsOutputEnable as _; +} +#[cfg(feature = "vendor_rpi")] +impl Control for StatsOutputEnable {} +/// Span of the BCM2835 ISP generated statistics for the current frame. +/// +/// This is sent in the Request metadata if the StatsOutputEnable is set to +/// true. The statistics struct definition can be found in +/// include/linux/bcm2835-isp.h. 
+/// +/// \sa StatsOutputEnable +#[cfg(feature = "vendor_rpi")] +#[derive(Debug, Clone)] +pub struct Bcm2835StatsOutput(pub Vec); +#[cfg(feature = "vendor_rpi")] +impl Deref for Bcm2835StatsOutput { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl DerefMut for Bcm2835StatsOutput { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl TryFrom for Bcm2835StatsOutput { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_rpi")] +impl From for ControlValue { + fn from(val: Bcm2835StatsOutput) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_rpi")] +impl ControlEntry for Bcm2835StatsOutput { + const ID: u32 = ControlId::Bcm2835StatsOutput as _; +} +#[cfg(feature = "vendor_rpi")] +impl Control for Bcm2835StatsOutput {} +/// An array of rectangles, where each singular value has identical +/// functionality to the ScalerCrop control. This control allows the +/// Raspberry Pi pipeline handler to control individual scaler crops per +/// output stream. +/// +/// The order of rectangles passed into the control must match the order of +/// streams configured by the application. The pipeline handler will only +/// configure crop retangles up-to the number of output streams configured. +/// All subsequent rectangles passed into this control are ignored by the +/// pipeline handler. +/// +/// If both rpi::ScalerCrops and ScalerCrop controls are present in a +/// ControlList, the latter is discarded, and crops are obtained from this +/// control. +/// +/// Note that using different crop rectangles for each output stream with +/// this control is only applicable on the Pi5/PiSP platform. This control +/// should also be considered temporary/draft and will be replaced with +/// official libcamera API support for per-stream controls in the future. 
+/// +/// \sa ScalerCrop +#[cfg(feature = "vendor_rpi")] +#[derive(Debug, Clone)] +pub struct ScalerCrops(pub Vec); +#[cfg(feature = "vendor_rpi")] +impl Deref for ScalerCrops { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl DerefMut for ScalerCrops { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl TryFrom for ScalerCrops { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_rpi")] +impl From for ControlValue { + fn from(val: ScalerCrops) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_rpi")] +impl ControlEntry for ScalerCrops { + const ID: u32 = ControlId::ScalerCrops as _; +} +#[cfg(feature = "vendor_rpi")] +impl Control for ScalerCrops {} +/// Span of the PiSP Frontend ISP generated statistics for the current +/// frame. This is sent in the Request metadata if the StatsOutputEnable is +/// set to true. 
The statistics struct definition can be found in +/// https://github.com/raspberrypi/libpisp/blob/main/src/libpisp/frontend/pisp_statistics.h +/// +/// \sa StatsOutputEnable +#[cfg(feature = "vendor_rpi")] +#[derive(Debug, Clone)] +pub struct PispStatsOutput(pub Vec); +#[cfg(feature = "vendor_rpi")] +impl Deref for PispStatsOutput { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl DerefMut for PispStatsOutput { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +#[cfg(feature = "vendor_rpi")] +impl TryFrom for PispStatsOutput { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +#[cfg(feature = "vendor_rpi")] +impl From for ControlValue { + fn from(val: PispStatsOutput) -> Self { + ControlValue::from(val.0) + } +} +#[cfg(feature = "vendor_rpi")] +impl ControlEntry for PispStatsOutput { + const ID: u32 = ControlId::PispStatsOutput as _; +} +#[cfg(feature = "vendor_rpi")] +impl Control for PispStatsOutput {} +pub fn make_dyn( + id: ControlId, + val: ControlValue, +) -> Result, ControlValueError> { + match id { + ControlId::AeEnable => Ok(Box::new(AeEnable::try_from(val)?)), + ControlId::AeState => Ok(Box::new(AeState::try_from(val)?)), + ControlId::AeMeteringMode => Ok(Box::new(AeMeteringMode::try_from(val)?)), + ControlId::AeConstraintMode => Ok(Box::new(AeConstraintMode::try_from(val)?)), + ControlId::AeExposureMode => Ok(Box::new(AeExposureMode::try_from(val)?)), + ControlId::ExposureValue => Ok(Box::new(ExposureValue::try_from(val)?)), + ControlId::ExposureTime => Ok(Box::new(ExposureTime::try_from(val)?)), + ControlId::ExposureTimeMode => Ok(Box::new(ExposureTimeMode::try_from(val)?)), + ControlId::AnalogueGain => Ok(Box::new(AnalogueGain::try_from(val)?)), + ControlId::AnalogueGainMode => Ok(Box::new(AnalogueGainMode::try_from(val)?)), + ControlId::AeFlickerMode => Ok(Box::new(AeFlickerMode::try_from(val)?)), + 
ControlId::AeFlickerPeriod => Ok(Box::new(AeFlickerPeriod::try_from(val)?)), + ControlId::AeFlickerDetected => Ok(Box::new(AeFlickerDetected::try_from(val)?)), + ControlId::Brightness => Ok(Box::new(Brightness::try_from(val)?)), + ControlId::Contrast => Ok(Box::new(Contrast::try_from(val)?)), + ControlId::Lux => Ok(Box::new(Lux::try_from(val)?)), + ControlId::AwbEnable => Ok(Box::new(AwbEnable::try_from(val)?)), + ControlId::AwbMode => Ok(Box::new(AwbMode::try_from(val)?)), + ControlId::AwbLocked => Ok(Box::new(AwbLocked::try_from(val)?)), + ControlId::ColourGains => Ok(Box::new(ColourGains::try_from(val)?)), + ControlId::ColourTemperature => Ok(Box::new(ColourTemperature::try_from(val)?)), + ControlId::Saturation => Ok(Box::new(Saturation::try_from(val)?)), + ControlId::SensorBlackLevels => Ok(Box::new(SensorBlackLevels::try_from(val)?)), + ControlId::Sharpness => Ok(Box::new(Sharpness::try_from(val)?)), + ControlId::FocusFoM => Ok(Box::new(FocusFoM::try_from(val)?)), + ControlId::ColourCorrectionMatrix => { + Ok(Box::new(ColourCorrectionMatrix::try_from(val)?)) + } + ControlId::ScalerCrop => Ok(Box::new(ScalerCrop::try_from(val)?)), + ControlId::DigitalGain => Ok(Box::new(DigitalGain::try_from(val)?)), + ControlId::FrameDuration => Ok(Box::new(FrameDuration::try_from(val)?)), + ControlId::FrameDurationLimits => { + Ok(Box::new(FrameDurationLimits::try_from(val)?)) + } + ControlId::SensorTemperature => Ok(Box::new(SensorTemperature::try_from(val)?)), + ControlId::SensorTimestamp => Ok(Box::new(SensorTimestamp::try_from(val)?)), + ControlId::AfMode => Ok(Box::new(AfMode::try_from(val)?)), + ControlId::AfRange => Ok(Box::new(AfRange::try_from(val)?)), + ControlId::AfSpeed => Ok(Box::new(AfSpeed::try_from(val)?)), + ControlId::AfMetering => Ok(Box::new(AfMetering::try_from(val)?)), + ControlId::AfWindows => Ok(Box::new(AfWindows::try_from(val)?)), + ControlId::AfTrigger => Ok(Box::new(AfTrigger::try_from(val)?)), + ControlId::AfPause => 
Ok(Box::new(AfPause::try_from(val)?)), + ControlId::LensPosition => Ok(Box::new(LensPosition::try_from(val)?)), + ControlId::AfState => Ok(Box::new(AfState::try_from(val)?)), + ControlId::AfPauseState => Ok(Box::new(AfPauseState::try_from(val)?)), + ControlId::HdrMode => Ok(Box::new(HdrMode::try_from(val)?)), + ControlId::HdrChannel => Ok(Box::new(HdrChannel::try_from(val)?)), + ControlId::Gamma => Ok(Box::new(Gamma::try_from(val)?)), + ControlId::DebugMetadataEnable => { + Ok(Box::new(DebugMetadataEnable::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::AePrecaptureTrigger => { + Ok(Box::new(AePrecaptureTrigger::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::NoiseReductionMode => Ok(Box::new(NoiseReductionMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::ColorCorrectionAberrationMode => { + Ok(Box::new(ColorCorrectionAberrationMode::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::AwbState => Ok(Box::new(AwbState::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::SensorRollingShutterSkew => { + Ok(Box::new(SensorRollingShutterSkew::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::LensShadingMapMode => Ok(Box::new(LensShadingMapMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::PipelineDepth => Ok(Box::new(PipelineDepth::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::MaxLatency => Ok(Box::new(MaxLatency::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::TestPatternMode => Ok(Box::new(TestPatternMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectMode => Ok(Box::new(FaceDetectMode::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceRectangles => { + Ok(Box::new(FaceDetectFaceRectangles::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceScores => { + Ok(Box::new(FaceDetectFaceScores::try_from(val)?)) + } 
+ #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceLandmarks => { + Ok(Box::new(FaceDetectFaceLandmarks::try_from(val)?)) + } + #[cfg(feature = "vendor_draft")] + ControlId::FaceDetectFaceIds => Ok(Box::new(FaceDetectFaceIds::try_from(val)?)), + #[cfg(feature = "vendor_rpi")] + ControlId::StatsOutputEnable => Ok(Box::new(StatsOutputEnable::try_from(val)?)), + #[cfg(feature = "vendor_rpi")] + ControlId::Bcm2835StatsOutput => Ok(Box::new(Bcm2835StatsOutput::try_from(val)?)), + #[cfg(feature = "vendor_rpi")] + ControlId::ScalerCrops => Ok(Box::new(ScalerCrops::try_from(val)?)), + #[cfg(feature = "vendor_rpi")] + ControlId::PispStatsOutput => Ok(Box::new(PispStatsOutput::try_from(val)?)), + } +} diff --git a/libcamera/versioned_files/0.5.1/properties.rs b/libcamera/versioned_files/0.5.1/properties.rs new file mode 100644 index 0000000..d7f2836 --- /dev/null +++ b/libcamera/versioned_files/0.5.1/properties.rs @@ -0,0 +1,1777 @@ +use std::{ffi::CStr, ops::{Deref, DerefMut}}; +use num_enum::{IntoPrimitive, TryFromPrimitive}; +#[allow(unused_imports)] +use crate::control::{Control, Property, ControlEntry, DynControlEntry}; +use crate::control_value::{ControlValue, ControlValueError}; +#[allow(unused_imports)] +use crate::geometry::{Rectangle, Point, Size}; +#[allow(unused_imports)] +use libcamera_sys::*; +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(u32)] +pub enum PropertyId { + /// Camera mounting location + Location = LOCATION, + /// The camera physical mounting rotation. It is expressed as the angular + /// difference in degrees between two reference systems, one relative to the + /// camera module, and one defined on the external world scene to be + /// captured when projected on the image sensor pixel array. + /// + /// A camera sensor has a 2-dimensional reference system 'Rc' defined by + /// its pixel array read-out order. 
The origin is set to the first pixel + /// being read out, the X-axis points along the column read-out direction + /// towards the last columns, and the Y-axis along the row read-out + /// direction towards the last row. + /// + /// A typical example for a sensor with a 2592x1944 pixel array matrix + /// observed from the front is + /// + /// ```text + /// 2591 X-axis 0 + /// <------------------------+ 0 + /// .......... ... ..........! + /// .......... ... ..........! Y-axis + /// ... ! + /// .......... ... ..........! + /// .......... ... ..........! 1943 + /// V + /// ``` + /// + /// + /// The external world scene reference system 'Rs' is a 2-dimensional + /// reference system on the focal plane of the camera module. The origin is + /// placed on the top-left corner of the visible scene, the X-axis points + /// towards the right, and the Y-axis points towards the bottom of the + /// scene. The top, bottom, left and right directions are intentionally not + /// defined and depend on the environment in which the camera is used. + /// + /// A typical example of a (very common) picture of a shark swimming from + /// left to right, as seen from the camera, is + /// + /// ```text + /// 0 X-axis + /// 0 +-------------------------------------> + /// ! + /// ! + /// ! + /// ! |\____)\___ + /// ! ) _____ __`< + /// ! |/ )/ + /// ! + /// ! + /// ! + /// V + /// Y-axis + /// ``` + /// + /// With the reference system 'Rs' placed on the camera focal plane. + /// + /// ```text + /// ¸.·˙! + /// ¸.·˙ ! + /// _ ¸.·˙ ! + /// +-/ \-+¸.·˙ ! + /// | (o) | ! Camera focal plane + /// +-----+˙·.¸ ! + /// ˙·.¸ ! + /// ˙·.¸ ! + /// ˙·.¸! + /// ``` + /// + /// When projected on the sensor's pixel array, the image and the associated + /// reference system 'Rs' are typically (but not always) inverted, due to + /// the camera module's lens optical inversion effect. 
+ /// + /// Assuming the above represented scene of the swimming shark, the lens + /// inversion projects the scene and its reference system onto the sensor + /// pixel array, seen from the front of the camera sensor, as follow + /// + /// ```text + /// Y-axis + /// ^ + /// ! + /// ! + /// ! + /// ! |\_____)\__ + /// ! ) ____ ___.< + /// ! |/ )/ + /// ! + /// ! + /// ! + /// 0 +-------------------------------------> + /// 0 X-axis + /// ``` + /// + /// Note the shark being upside-down. + /// + /// The resulting projected reference system is named 'Rp'. + /// + /// The camera rotation property is then defined as the angular difference + /// in the counter-clockwise direction between the camera reference system + /// 'Rc' and the projected scene reference system 'Rp'. It is expressed in + /// degrees as a number in the range [0, 360[. + /// + /// Examples + /// + /// 0 degrees camera rotation + /// + /// + /// ```text + /// Y-Rp + /// ^ + /// Y-Rc ! + /// ^ ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// 0 +-------------------------------------> + /// 0 X-Rc + /// ``` + /// + /// + /// ```text + /// X-Rc 0 + /// <------------------------------------+ 0 + /// X-Rp 0 ! + /// <------------------------------------+ 0 ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! V + /// ! Y-Rc + /// V + /// Y-Rp + /// ``` + /// + /// 90 degrees camera rotation + /// + /// ```text + /// 0 Y-Rc + /// 0 +--------------------> + /// ! Y-Rp + /// ! ^ + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// ! + /// ! + /// ! + /// ! + /// V + /// X-Rc + /// ``` + /// + /// 180 degrees camera rotation + /// + /// ```text + /// 0 + /// <------------------------------------+ 0 + /// X-Rc ! + /// Y-Rp ! + /// ^ ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! 
+ /// ! ! + /// ! V + /// ! Y-Rc + /// 0 +-------------------------------------> + /// 0 X-Rp + /// ``` + /// + /// 270 degrees camera rotation + /// + /// ```text + /// 0 Y-Rc + /// 0 +--------------------> + /// ! 0 + /// ! <-----------------------------------+ 0 + /// ! X-Rp ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! ! + /// ! V + /// ! Y-Rp + /// ! + /// ! + /// ! + /// ! + /// V + /// X-Rc + /// ``` + /// + /// + /// Example one - Webcam + /// + /// A camera module installed on the user facing part of a laptop screen + /// casing used for video calls. The captured images are meant to be + /// displayed in landscape mode (width > height) on the laptop screen. + /// + /// The camera is typically mounted upside-down to compensate the lens + /// optical inversion effect. + /// + /// ```text + /// Y-Rp + /// Y-Rc ^ + /// ^ ! + /// ! ! + /// ! ! |\_____)\__ + /// ! ! ) ____ ___.< + /// ! ! |/ )/ + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// 0 +-------------------------------------> + /// 0 X-Rc + /// ``` + /// + /// The two reference systems are aligned, the resulting camera rotation is + /// 0 degrees, no rotation correction needs to be applied to the resulting + /// image once captured to memory buffers to correctly display it to users. + /// + /// ```text + /// +--------------------------------------+ + /// ! ! + /// ! ! + /// ! ! + /// ! |\____)\___ ! + /// ! ) _____ __`< ! + /// ! |/ )/ ! + /// ! ! + /// ! ! + /// ! ! + /// +--------------------------------------+ + /// ``` + /// + /// If the camera sensor is not mounted upside-down to compensate for the + /// lens optical inversion, the two reference systems will not be aligned, + /// with 'Rp' being rotated 180 degrees relatively to 'Rc'. + /// + /// + /// ```text + /// X-Rc 0 + /// <------------------------------------+ 0 + /// ! + /// Y-Rp ! + /// ^ ! + /// ! ! + /// ! |\_____)\__ ! + /// ! ) ____ ___.< ! + /// ! 
|/ )/ ! + /// ! ! + /// ! ! + /// ! V + /// ! Y-Rc + /// 0 +-------------------------------------> + /// 0 X-Rp + /// ``` + /// + /// The image once captured to memory will then be rotated by 180 degrees + /// + /// ```text + /// +--------------------------------------+ + /// ! ! + /// ! ! + /// ! ! + /// ! __/(_____/| ! + /// ! >.___ ____ ( ! + /// ! \( \| ! + /// ! ! + /// ! ! + /// ! ! + /// +--------------------------------------+ + /// ``` + /// + /// A software rotation correction of 180 degrees should be applied to + /// correctly display the image. + /// + /// ```text + /// +--------------------------------------+ + /// ! ! + /// ! ! + /// ! ! + /// ! |\____)\___ ! + /// ! ) _____ __`< ! + /// ! |/ )/ ! + /// ! ! + /// ! ! + /// ! ! + /// +--------------------------------------+ + /// ``` + /// + /// Example two - Phone camera + /// + /// A camera installed on the back side of a mobile device facing away from + /// the user. The captured images are meant to be displayed in portrait mode + /// (height > width) to match the device screen orientation and the device + /// usage orientation used when taking the picture. + /// + /// The camera sensor is typically mounted with its pixel array longer side + /// aligned to the device longer side, upside-down mounted to compensate for + /// the lens optical inversion effect. + /// + /// ```text + /// 0 Y-Rc + /// 0 +--------------------> + /// ! Y-Rp + /// ! ^ + /// ! ! + /// ! ! + /// ! ! + /// ! ! |\_____)\__ + /// ! ! ) ____ ___.< + /// ! ! |/ )/ + /// ! ! + /// ! ! + /// ! ! + /// ! 0 +-------------------------------------> + /// ! 0 X-Rp + /// ! + /// ! + /// ! + /// ! + /// V + /// X-Rc + /// ``` + /// + /// The two reference systems are not aligned and the 'Rp' reference + /// system is rotated by 90 degrees in the counter-clockwise direction + /// relatively to the 'Rc' reference system. + /// + /// The image once captured to memory will be rotated. 
+ /// + /// ```text + /// +-------------------------------------+ + /// | _ _ | + /// | \ / | + /// | | | | + /// | | | | + /// | | > | + /// | < | | + /// | | | | + /// | . | + /// | V | + /// +-------------------------------------+ + /// ``` + /// + /// A correction of 90 degrees in counter-clockwise direction has to be + /// applied to correctly display the image in portrait mode on the device + /// screen. + /// + /// ```text + /// +--------------------+ + /// | | + /// | | + /// | | + /// | | + /// | | + /// | | + /// | |\____)\___ | + /// | ) _____ __`< | + /// | |/ )/ | + /// | | + /// | | + /// | | + /// | | + /// | | + /// +--------------------+ + Rotation = ROTATION, + /// The model name shall to the extent possible describe the sensor. For + /// most devices this is the model name of the sensor. While for some + /// devices the sensor model is unavailable as the sensor or the entire + /// camera is part of a larger unit and exposed as a black-box to the + /// system. In such cases the model name of the smallest device that + /// contains the camera sensor shall be used. + /// + /// The model name is not meant to be a camera name displayed to the + /// end-user, but may be combined with other camera information to create a + /// camera name. + /// + /// The model name is not guaranteed to be unique in the system nor is + /// it guaranteed to be stable or have any other properties required to make + /// it a good candidate to be used as a permanent identifier of a camera. + /// + /// The model name shall describe the camera in a human readable format and + /// shall be encoded in ASCII. + /// + /// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. + Model = MODEL, + /// The pixel unit cell physical size, in nanometers. + /// + /// The UnitCellSize properties defines the horizontal and vertical sizes of + /// a single pixel unit, including its active and non-active parts. 
In + /// other words, it expresses the horizontal and vertical distance between + /// the top-left corners of adjacent pixels. + /// + /// The property can be used to calculate the physical size of the sensor's + /// pixel array area and for calibration purposes. + UnitCellSize = UNIT_CELL_SIZE, + /// The camera sensor pixel array readable area vertical and horizontal + /// sizes, in pixels. + /// + /// The PixelArraySize property defines the size in pixel units of the + /// readable part of full pixel array matrix, including optical black + /// pixels used for calibration, pixels which are not considered valid for + /// capture and active pixels containing valid image data. + /// + /// The property describes the maximum size of the raw data captured by the + /// camera, which might not correspond to the physical size of the sensor + /// pixel array matrix, as some portions of the physical pixel array matrix + /// are not accessible and cannot be transmitted out. + /// + /// For example, let's consider a pixel array matrix assembled as follows + /// + /// ```text + /// +--------------------------------------------------+ + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// ... ... ... ... ... + /// ``` + /// + /// ```text + /// ... ... ... ... ... 
+ /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + /// +--------------------------------------------------+ + /// ``` + /// + /// starting with two lines of non-readable pixels (x), followed by N lines + /// of readable data (D) surrounded by two columns of non-readable pixels on + /// each side, and ending with two more lines of non-readable pixels. Only + /// the readable portion is transmitted to the receiving side, defining the + /// sizes of the largest possible buffer of raw data that can be presented + /// to applications. + /// + /// ```text + /// PixelArraySize.width + /// /----------------------------------------------/ + /// +----------------------------------------------+ / + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height + /// ... ... ... ... ... + /// ... ... ... ... ... + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + /// +----------------------------------------------+ / + /// ``` + /// + /// This defines a rectangle whose top-left corner is placed in position (0, + /// 0) and whose vertical and horizontal sizes are defined by this property. + /// All other rectangles that describe portions of the pixel array, such as + /// the optical black pixels rectangles and active pixel areas, are defined + /// relatively to this rectangle. 
+ /// + /// All the coordinates are expressed relative to the default sensor readout + /// direction, without any transformation (such as horizontal and vertical + /// flipping) applied. When mapping them to the raw pixel buffer, + /// applications shall take any configured transformation into account. + /// + /// \todo Rename this property to Size once we will have property + /// ```text + /// categories (i.e. Properties::PixelArray::Size) + PixelArraySize = PIXEL_ARRAY_SIZE, + /// The pixel array region(s) which contain optical black pixels + /// considered valid for calibration purposes. + /// + /// This property describes the position and size of optical black pixel + /// regions in the raw data buffer as stored in memory, which might differ + /// from their actual physical location in the pixel array matrix. + /// + /// It is important to note, in fact, that camera sensors might + /// automatically reorder or skip portions of their pixels array matrix when + /// transmitting data to the receiver. For instance, a sensor may merge the + /// top and bottom optical black rectangles into a single rectangle, + /// transmitted at the beginning of the frame. + /// + /// The pixel array contains several areas with different purposes, + /// interleaved by lines and columns which are said not to be valid for + /// capturing purposes. Invalid lines and columns are defined as invalid as + /// they could be positioned too close to the chip margins or to the optical + /// black shielding placed on top of optical black pixels. 
+ /// + /// ```text + /// PixelArraySize.width + /// /----------------------------------------------/ + /// x1 x2 + /// +--o---------------------------------------o---+ / + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + /// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height + /// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + /// ... ... ... ... ... + /// ... ... ... ... ... + /// y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + /// +----------------------------------------------+ / + /// ``` + /// + /// The readable pixel array matrix is composed by + /// 2 invalid lines (I) + /// 4 lines of valid optical black pixels (O) + /// 2 invalid lines (I) + /// n lines of valid pixel data (P) + /// 2 invalid lines (I) + /// + /// And the position of the optical black pixel rectangles is defined by + /// + /// ```text + /// PixelArrayOpticalBlackRectangles = { + /// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, + /// { x1, y3, 2, y4 - y3 + 1 }, + /// { x2, y3, 2, y4 - y3 + 1 }, + /// }; + /// ``` + /// + /// If the camera, when capturing the full pixel array matrix, automatically + /// skips the invalid lines and columns, producing the following data + /// buffer, when captured to memory + /// + /// ```text + /// PixelArraySize.width + /// /----------------------------------------------/ + /// x1 + /// +--------------------------------------------o-+ / + /// 
|OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + /// y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height + /// ... ... ... ... ... | + /// ... ... ... ... ... | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + /// +----------------------------------------------+ / + /// ``` + /// + /// then the invalid lines and columns should not be reported as part of the + /// PixelArraySize property in first place. + /// + /// In this case, the position of the black pixel rectangles will be + /// + /// ```text + /// PixelArrayOpticalBlackRectangles = { + /// { 0, 0, y1 + 1, PixelArraySize[0] }, + /// { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, + /// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, + /// }; + /// ``` + /// + /// \todo Rename this property to Size once we will have property + /// ```text + /// categories (i.e. Properties::PixelArray::OpticalBlackRectangles) + PixelArrayOpticalBlackRectangles = PIXEL_ARRAY_OPTICAL_BLACK_RECTANGLES, + /// The PixelArrayActiveAreas property defines the (possibly multiple and + /// overlapping) portions of the camera sensor readable pixel matrix + /// which are considered valid for image acquisition purposes. + /// + /// This property describes an arbitrary number of overlapping rectangles, + /// with each rectangle representing the maximum image size that the camera + /// sensor can produce for a particular aspect ratio. They are defined + /// relatively to the PixelArraySize rectangle. + /// + /// When multiple rectangles are reported, they shall be ordered from the + /// tallest to the shortest. 
+ /// + /// Example 1 + /// A camera sensor which only produces images in the 4:3 image resolution + /// will report a single PixelArrayActiveAreas rectangle, from which all + /// other image formats are obtained by either cropping the field-of-view + /// and/or applying pixel sub-sampling techniques such as pixel skipping or + /// binning. + /// + /// ```text + /// PixelArraySize.width + /// /----------------/ + /// x1 x2 + /// (0,0)-> +-o------------o-+ / + /// y1 o +------------+ | | + /// | |////////////| | | + /// | |////////////| | | PixelArraySize.height + /// | |////////////| | | + /// y2 o +------------+ | | + /// +----------------+ / + /// ``` + /// + /// The property reports a single rectangle + /// + /// ```text + /// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) + /// ``` + /// + /// Example 2 + /// A camera sensor which can produce images in different native + /// resolutions will report several overlapping rectangles, one for each + /// natively supported resolution. + /// + /// ```text + /// PixelArraySize.width + /// /------------------/ + /// x1 x2 x3 x4 + /// (0,0)-> +o---o------o---o+ / + /// y1 o +------+ | | + /// | |//////| | | + /// y2 o+---+------+---+| | + /// ||///|//////|///|| | PixelArraySize.height + /// y3 o+---+------+---+| | + /// | |//////| | | + /// y4 o +------+ | | + /// +----+------+----+ / + /// ``` + /// + /// The property reports two rectangles + /// + /// ```text + /// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), + /// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) + /// ``` + /// + /// The first rectangle describes the maximum field-of-view of all image + /// formats in the 4:3 resolutions, while the second one describes the + /// maximum field of view for all image formats in the 16:9 resolutions. + /// + /// Multiple rectangles shall only be reported when the sensor can't capture + /// the pixels in the corner regions. 
If all the pixels in the (x1,y1) - + /// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall + /// contains the single rectangle (x1,y1) - (x4,y4). + /// + /// \todo Rename this property to ActiveAreas once we will have property + /// ```text + /// categories (i.e. Properties::PixelArray::ActiveAreas) + PixelArrayActiveAreas = PIXEL_ARRAY_ACTIVE_AREAS, + /// The maximum valid rectangle for the controls::ScalerCrop control. This + /// reflects the minimum mandatory cropping applied in the camera sensor and + /// the rest of the pipeline. Just as the ScalerCrop control, it defines a + /// rectangle taken from the sensor's active pixel array. + /// + /// This property is valid only after the camera has been successfully + /// configured and its value may change whenever a new configuration is + /// applied. + /// + /// \todo Turn this property into a "maximum control value" for the + /// ScalerCrop control once "dynamic" controls have been implemented. + ScalerCropMaximum = SCALER_CROP_MAXIMUM, + /// The relative sensitivity of the chosen sensor mode. + /// + /// Some sensors have readout modes with different sensitivities. For example, + /// a binned camera mode might, with the same exposure and gains, produce + /// twice the signal level of the full resolution readout. This would be + /// signalled by the binned mode, when it is chosen, indicating a value here + /// that is twice that of the full resolution mode. This value will be valid + /// after the configure method has returned successfully. + SensorSensitivity = SENSOR_SENSITIVITY, + /// A list of integer values of type dev_t denoting the major and minor + /// device numbers of the underlying devices used in the operation of this + /// camera. + /// + /// Different cameras may report identical devices. + SystemDevices = SYSTEM_DEVICES, + /// The arrangement of color filters on sensor; represents the colors in the + /// top-left 2x2 section of the sensor, in reading order. 
Currently + /// identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. + #[cfg(feature = "vendor_draft")] + ColorFilterArrangement = COLOR_FILTER_ARRANGEMENT, +} +impl PropertyId { + fn id(&self) -> u32 { + *self as u32 + } + pub fn name(&self) -> String { + unsafe { + let c_str = libcamera_property_name_by_id(self.id()); + if c_str.is_null() { + return "".into(); + } + CStr::from_ptr(c_str).to_str().unwrap().into() + } + } +} +/// Camera mounting location +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum Location { + /// The camera is mounted on the front side of the device, facing the + /// user + CameraFront = 0, + /// The camera is mounted on the back side of the device, facing away + /// from the user + CameraBack = 1, + /// The camera is attached to the device in a way that allows it to + /// be moved freely + CameraExternal = 2, +} +impl TryFrom for Location { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +impl From for ControlValue { + fn from(val: Location) -> Self { + ControlValue::from(::from(val)) + } +} +impl ControlEntry for Location { + const ID: u32 = PropertyId::Location as _; +} +impl Property for Location {} +/// The camera physical mounting rotation. It is expressed as the angular +/// difference in degrees between two reference systems, one relative to the +/// camera module, and one defined on the external world scene to be +/// captured when projected on the image sensor pixel array. +/// +/// A camera sensor has a 2-dimensional reference system 'Rc' defined by +/// its pixel array read-out order. The origin is set to the first pixel +/// being read out, the X-axis points along the column read-out direction +/// towards the last columns, and the Y-axis along the row read-out +/// direction towards the last row. 
+/// +/// A typical example for a sensor with a 2592x1944 pixel array matrix +/// observed from the front is +/// +/// ```text +/// 2591 X-axis 0 +/// <------------------------+ 0 +/// .......... ... ..........! +/// .......... ... ..........! Y-axis +/// ... ! +/// .......... ... ..........! +/// .......... ... ..........! 1943 +/// V +/// ``` +/// +/// +/// The external world scene reference system 'Rs' is a 2-dimensional +/// reference system on the focal plane of the camera module. The origin is +/// placed on the top-left corner of the visible scene, the X-axis points +/// towards the right, and the Y-axis points towards the bottom of the +/// scene. The top, bottom, left and right directions are intentionally not +/// defined and depend on the environment in which the camera is used. +/// +/// A typical example of a (very common) picture of a shark swimming from +/// left to right, as seen from the camera, is +/// +/// ```text +/// 0 X-axis +/// 0 +-------------------------------------> +/// ! +/// ! +/// ! +/// ! |\____)\___ +/// ! ) _____ __`< +/// ! |/ )/ +/// ! +/// ! +/// ! +/// V +/// Y-axis +/// ``` +/// +/// With the reference system 'Rs' placed on the camera focal plane. +/// +/// ```text +/// ¸.·˙! +/// ¸.·˙ ! +/// _ ¸.·˙ ! +/// +-/ \-+¸.·˙ ! +/// | (o) | ! Camera focal plane +/// +-----+˙·.¸ ! +/// ˙·.¸ ! +/// ˙·.¸ ! +/// ˙·.¸! +/// ``` +/// +/// When projected on the sensor's pixel array, the image and the associated +/// reference system 'Rs' are typically (but not always) inverted, due to +/// the camera module's lens optical inversion effect. +/// +/// Assuming the above represented scene of the swimming shark, the lens +/// inversion projects the scene and its reference system onto the sensor +/// pixel array, seen from the front of the camera sensor, as follow +/// +/// ```text +/// Y-axis +/// ^ +/// ! +/// ! +/// ! +/// ! |\_____)\__ +/// ! ) ____ ___.< +/// ! |/ )/ +/// ! +/// ! +/// ! 
+/// 0 +-------------------------------------> +/// 0 X-axis +/// ``` +/// +/// Note the shark being upside-down. +/// +/// The resulting projected reference system is named 'Rp'. +/// +/// The camera rotation property is then defined as the angular difference +/// in the counter-clockwise direction between the camera reference system +/// 'Rc' and the projected scene reference system 'Rp'. It is expressed in +/// degrees as a number in the range [0, 360[. +/// +/// Examples +/// +/// 0 degrees camera rotation +/// +/// +/// ```text +/// Y-Rp +/// ^ +/// Y-Rc ! +/// ^ ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// 0 +-------------------------------------> +/// 0 X-Rc +/// ``` +/// +/// +/// ```text +/// X-Rc 0 +/// <------------------------------------+ 0 +/// X-Rp 0 ! +/// <------------------------------------+ 0 ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rc +/// V +/// Y-Rp +/// ``` +/// +/// 90 degrees camera rotation +/// +/// ```text +/// 0 Y-Rc +/// 0 +--------------------> +/// ! Y-Rp +/// ! ^ +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// ! +/// ! +/// ! +/// ! +/// V +/// X-Rc +/// ``` +/// +/// 180 degrees camera rotation +/// +/// ```text +/// 0 +/// <------------------------------------+ 0 +/// X-Rc ! +/// Y-Rp ! +/// ^ ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rc +/// 0 +-------------------------------------> +/// 0 X-Rp +/// ``` +/// +/// 270 degrees camera rotation +/// +/// ```text +/// 0 Y-Rc +/// 0 +--------------------> +/// ! 0 +/// ! <-----------------------------------+ 0 +/// ! X-Rp ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rp +/// ! +/// ! +/// ! +/// ! 
+/// V +/// X-Rc +/// ``` +/// +/// +/// Example one - Webcam +/// +/// A camera module installed on the user facing part of a laptop screen +/// casing used for video calls. The captured images are meant to be +/// displayed in landscape mode (width > height) on the laptop screen. +/// +/// The camera is typically mounted upside-down to compensate the lens +/// optical inversion effect. +/// +/// ```text +/// Y-Rp +/// Y-Rc ^ +/// ^ ! +/// ! ! +/// ! ! |\_____)\__ +/// ! ! ) ____ ___.< +/// ! ! |/ )/ +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// 0 +-------------------------------------> +/// 0 X-Rc +/// ``` +/// +/// The two reference systems are aligned, the resulting camera rotation is +/// 0 degrees, no rotation correction needs to be applied to the resulting +/// image once captured to memory buffers to correctly display it to users. +/// +/// ```text +/// +--------------------------------------+ +/// ! ! +/// ! ! +/// ! ! +/// ! |\____)\___ ! +/// ! ) _____ __`< ! +/// ! |/ )/ ! +/// ! ! +/// ! ! +/// ! ! +/// +--------------------------------------+ +/// ``` +/// +/// If the camera sensor is not mounted upside-down to compensate for the +/// lens optical inversion, the two reference systems will not be aligned, +/// with 'Rp' being rotated 180 degrees relatively to 'Rc'. +/// +/// +/// ```text +/// X-Rc 0 +/// <------------------------------------+ 0 +/// ! +/// Y-Rp ! +/// ^ ! +/// ! ! +/// ! |\_____)\__ ! +/// ! ) ____ ___.< ! +/// ! |/ )/ ! +/// ! ! +/// ! ! +/// ! V +/// ! Y-Rc +/// 0 +-------------------------------------> +/// 0 X-Rp +/// ``` +/// +/// The image once captured to memory will then be rotated by 180 degrees +/// +/// ```text +/// +--------------------------------------+ +/// ! ! +/// ! ! +/// ! ! +/// ! __/(_____/| ! +/// ! >.___ ____ ( ! +/// ! \( \| ! +/// ! ! +/// ! ! +/// ! ! 
+/// +--------------------------------------+ +/// ``` +/// +/// A software rotation correction of 180 degrees should be applied to +/// correctly display the image. +/// +/// ```text +/// +--------------------------------------+ +/// ! ! +/// ! ! +/// ! ! +/// ! |\____)\___ ! +/// ! ) _____ __`< ! +/// ! |/ )/ ! +/// ! ! +/// ! ! +/// ! ! +/// +--------------------------------------+ +/// ``` +/// +/// Example two - Phone camera +/// +/// A camera installed on the back side of a mobile device facing away from +/// the user. The captured images are meant to be displayed in portrait mode +/// (height > width) to match the device screen orientation and the device +/// usage orientation used when taking the picture. +/// +/// The camera sensor is typically mounted with its pixel array longer side +/// aligned to the device longer side, upside-down mounted to compensate for +/// the lens optical inversion effect. +/// +/// ```text +/// 0 Y-Rc +/// 0 +--------------------> +/// ! Y-Rp +/// ! ^ +/// ! ! +/// ! ! +/// ! ! +/// ! ! |\_____)\__ +/// ! ! ) ____ ___.< +/// ! ! |/ )/ +/// ! ! +/// ! ! +/// ! ! +/// ! 0 +-------------------------------------> +/// ! 0 X-Rp +/// ! +/// ! +/// ! +/// ! +/// V +/// X-Rc +/// ``` +/// +/// The two reference systems are not aligned and the 'Rp' reference +/// system is rotated by 90 degrees in the counter-clockwise direction +/// relatively to the 'Rc' reference system. +/// +/// The image once captured to memory will be rotated. +/// +/// ```text +/// +-------------------------------------+ +/// | _ _ | +/// | \ / | +/// | | | | +/// | | | | +/// | | > | +/// | < | | +/// | | | | +/// | . | +/// | V | +/// +-------------------------------------+ +/// ``` +/// +/// A correction of 90 degrees in counter-clockwise direction has to be +/// applied to correctly display the image in portrait mode on the device +/// screen. 
+/// +/// ```text +/// +--------------------+ +/// | | +/// | | +/// | | +/// | | +/// | | +/// | | +/// | |\____)\___ | +/// | ) _____ __`< | +/// | |/ )/ | +/// | | +/// | | +/// | | +/// | | +/// | | +/// +--------------------+ +#[derive(Debug, Clone)] +pub struct Rotation(pub i32); +impl Deref for Rotation { + type Target = i32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Rotation { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Rotation { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Rotation) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Rotation { + const ID: u32 = PropertyId::Rotation as _; +} +impl Property for Rotation {} +/// The model name shall to the extent possible describe the sensor. For +/// most devices this is the model name of the sensor. While for some +/// devices the sensor model is unavailable as the sensor or the entire +/// camera is part of a larger unit and exposed as a black-box to the +/// system. In such cases the model name of the smallest device that +/// contains the camera sensor shall be used. +/// +/// The model name is not meant to be a camera name displayed to the +/// end-user, but may be combined with other camera information to create a +/// camera name. +/// +/// The model name is not guaranteed to be unique in the system nor is +/// it guaranteed to be stable or have any other properties required to make +/// it a good candidate to be used as a permanent identifier of a camera. +/// +/// The model name shall describe the camera in a human readable format and +/// shall be encoded in ASCII. +/// +/// Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. 
+#[derive(Debug, Clone)] +pub struct Model(pub String); +impl Deref for Model { + type Target = String; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for Model { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for Model { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: Model) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for Model { + const ID: u32 = PropertyId::Model as _; +} +impl Property for Model {} +/// The pixel unit cell physical size, in nanometers. +/// +/// The UnitCellSize properties defines the horizontal and vertical sizes of +/// a single pixel unit, including its active and non-active parts. In +/// other words, it expresses the horizontal and vertical distance between +/// the top-left corners of adjacent pixels. +/// +/// The property can be used to calculate the physical size of the sensor's +/// pixel array area and for calibration purposes. +#[derive(Debug, Clone)] +pub struct UnitCellSize(pub Size); +impl Deref for UnitCellSize { + type Target = Size; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for UnitCellSize { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for UnitCellSize { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: UnitCellSize) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for UnitCellSize { + const ID: u32 = PropertyId::UnitCellSize as _; +} +impl Property for UnitCellSize {} +/// The camera sensor pixel array readable area vertical and horizontal +/// sizes, in pixels. 
+/// +/// The PixelArraySize property defines the size in pixel units of the +/// readable part of full pixel array matrix, including optical black +/// pixels used for calibration, pixels which are not considered valid for +/// capture and active pixels containing valid image data. +/// +/// The property describes the maximum size of the raw data captured by the +/// camera, which might not correspond to the physical size of the sensor +/// pixel array matrix, as some portions of the physical pixel array matrix +/// are not accessible and cannot be transmitted out. +/// +/// For example, let's consider a pixel array matrix assembled as follows +/// +/// ```text +/// +--------------------------------------------------+ +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// ... ... ... ... ... +/// ``` +/// +/// ```text +/// ... ... ... ... ... +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| +/// +--------------------------------------------------+ +/// ``` +/// +/// starting with two lines of non-readable pixels (x), followed by N lines +/// of readable data (D) surrounded by two columns of non-readable pixels on +/// each side, and ending with two more lines of non-readable pixels. Only +/// the readable portion is transmitted to the receiving side, defining the +/// sizes of the largest possible buffer of raw data that can be presented +/// to applications. 
+/// +/// ```text +/// PixelArraySize.width +/// /----------------------------------------------/ +/// +----------------------------------------------+ / +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height +/// ... ... ... ... ... +/// ... ... ... ... ... +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | +/// +----------------------------------------------+ / +/// ``` +/// +/// This defines a rectangle whose top-left corner is placed in position (0, +/// 0) and whose vertical and horizontal sizes are defined by this property. +/// All other rectangles that describe portions of the pixel array, such as +/// the optical black pixels rectangles and active pixel areas, are defined +/// relatively to this rectangle. +/// +/// All the coordinates are expressed relative to the default sensor readout +/// direction, without any transformation (such as horizontal and vertical +/// flipping) applied. When mapping them to the raw pixel buffer, +/// applications shall take any configured transformation into account. +/// +/// \todo Rename this property to Size once we will have property +/// ```text +/// categories (i.e. 
Properties::PixelArray::Size) +#[derive(Debug, Clone)] +pub struct PixelArraySize(pub Size); +impl Deref for PixelArraySize { + type Target = Size; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for PixelArraySize { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for PixelArraySize { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: PixelArraySize) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for PixelArraySize { + const ID: u32 = PropertyId::PixelArraySize as _; +} +impl Property for PixelArraySize {} +/// The pixel array region(s) which contain optical black pixels +/// considered valid for calibration purposes. +/// +/// This property describes the position and size of optical black pixel +/// regions in the raw data buffer as stored in memory, which might differ +/// from their actual physical location in the pixel array matrix. +/// +/// It is important to note, in fact, that camera sensors might +/// automatically reorder or skip portions of their pixels array matrix when +/// transmitting data to the receiver. For instance, a sensor may merge the +/// top and bottom optical black rectangles into a single rectangle, +/// transmitted at the beginning of the frame. +/// +/// The pixel array contains several areas with different purposes, +/// interleaved by lines and columns which are said not to be valid for +/// capturing purposes. Invalid lines and columns are defined as invalid as +/// they could be positioned too close to the chip margins or to the optical +/// black shielding placed on top of optical black pixels. 
+/// +/// ```text +/// PixelArraySize.width +/// /----------------------------------------------/ +/// x1 x2 +/// +--o---------------------------------------o---+ / +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | +/// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height +/// |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | +/// ... ... ... ... ... +/// ... ... ... ... ... +/// y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | +/// +----------------------------------------------+ / +/// ``` +/// +/// The readable pixel array matrix is composed by +/// 2 invalid lines (I) +/// 4 lines of valid optical black pixels (O) +/// 2 invalid lines (I) +/// n lines of valid pixel data (P) +/// 2 invalid lines (I) +/// +/// And the position of the optical black pixel rectangles is defined by +/// +/// ```text +/// PixelArrayOpticalBlackRectangles = { +/// { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, +/// { x1, y3, 2, y4 - y3 + 1 }, +/// { x2, y3, 2, y4 - y3 + 1 }, +/// }; +/// ``` +/// +/// If the camera, when capturing the full pixel array matrix, automatically +/// skips the invalid lines and columns, producing the following data +/// buffer, when captured to memory +/// +/// ```text +/// PixelArraySize.width +/// /----------------------------------------------/ +/// x1 +/// +--------------------------------------------o-+ / +/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// 
|OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | +/// y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height +/// ... ... ... ... ... | +/// ... ... ... ... ... | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | +/// +----------------------------------------------+ / +/// ``` +/// +/// then the invalid lines and columns should not be reported as part of the +/// PixelArraySize property in first place. +/// +/// In this case, the position of the black pixel rectangles will be +/// +/// ```text +/// PixelArrayOpticalBlackRectangles = { +/// { 0, 0, y1 + 1, PixelArraySize[0] }, +/// { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, +/// { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, +/// }; +/// ``` +/// +/// \todo Rename this property to Size once we will have property +/// ```text +/// categories (i.e. 
Properties::PixelArray::OpticalBlackRectangles) +#[derive(Debug, Clone)] +pub struct PixelArrayOpticalBlackRectangles(pub Vec); +impl Deref for PixelArrayOpticalBlackRectangles { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for PixelArrayOpticalBlackRectangles { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for PixelArrayOpticalBlackRectangles { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: PixelArrayOpticalBlackRectangles) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for PixelArrayOpticalBlackRectangles { + const ID: u32 = PropertyId::PixelArrayOpticalBlackRectangles as _; +} +impl Property for PixelArrayOpticalBlackRectangles {} +/// The PixelArrayActiveAreas property defines the (possibly multiple and +/// overlapping) portions of the camera sensor readable pixel matrix +/// which are considered valid for image acquisition purposes. +/// +/// This property describes an arbitrary number of overlapping rectangles, +/// with each rectangle representing the maximum image size that the camera +/// sensor can produce for a particular aspect ratio. They are defined +/// relatively to the PixelArraySize rectangle. +/// +/// When multiple rectangles are reported, they shall be ordered from the +/// tallest to the shortest. +/// +/// Example 1 +/// A camera sensor which only produces images in the 4:3 image resolution +/// will report a single PixelArrayActiveAreas rectangle, from which all +/// other image formats are obtained by either cropping the field-of-view +/// and/or applying pixel sub-sampling techniques such as pixel skipping or +/// binning. 
+/// +/// ```text +/// PixelArraySize.width +/// /----------------/ +/// x1 x2 +/// (0,0)-> +-o------------o-+ / +/// y1 o +------------+ | | +/// | |////////////| | | +/// | |////////////| | | PixelArraySize.height +/// | |////////////| | | +/// y2 o +------------+ | | +/// +----------------+ / +/// ``` +/// +/// The property reports a single rectangle +/// +/// ```text +/// PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) +/// ``` +/// +/// Example 2 +/// A camera sensor which can produce images in different native +/// resolutions will report several overlapping rectangles, one for each +/// natively supported resolution. +/// +/// ```text +/// PixelArraySize.width +/// /------------------/ +/// x1 x2 x3 x4 +/// (0,0)-> +o---o------o---o+ / +/// y1 o +------+ | | +/// | |//////| | | +/// y2 o+---+------+---+| | +/// ||///|//////|///|| | PixelArraySize.height +/// y3 o+---+------+---+| | +/// | |//////| | | +/// y4 o +------+ | | +/// +----+------+----+ / +/// ``` +/// +/// The property reports two rectangles +/// +/// ```text +/// PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), +/// (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) +/// ``` +/// +/// The first rectangle describes the maximum field-of-view of all image +/// formats in the 4:3 resolutions, while the second one describes the +/// maximum field of view for all image formats in the 16:9 resolutions. +/// +/// Multiple rectangles shall only be reported when the sensor can't capture +/// the pixels in the corner regions. If all the pixels in the (x1,y1) - +/// (x4,y4) area can be captured, the PixelArrayActiveAreas property shall +/// contains the single rectangle (x1,y1) - (x4,y4). +/// +/// \todo Rename this property to ActiveAreas once we will have property +/// ```text +/// categories (i.e. 
Properties::PixelArray::ActiveAreas) +#[derive(Debug, Clone)] +pub struct PixelArrayActiveAreas(pub Vec); +impl Deref for PixelArrayActiveAreas { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for PixelArrayActiveAreas { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for PixelArrayActiveAreas { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: PixelArrayActiveAreas) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for PixelArrayActiveAreas { + const ID: u32 = PropertyId::PixelArrayActiveAreas as _; +} +impl Property for PixelArrayActiveAreas {} +/// The maximum valid rectangle for the controls::ScalerCrop control. This +/// reflects the minimum mandatory cropping applied in the camera sensor and +/// the rest of the pipeline. Just as the ScalerCrop control, it defines a +/// rectangle taken from the sensor's active pixel array. +/// +/// This property is valid only after the camera has been successfully +/// configured and its value may change whenever a new configuration is +/// applied. +/// +/// \todo Turn this property into a "maximum control value" for the +/// ScalerCrop control once "dynamic" controls have been implemented. 
+#[derive(Debug, Clone)] +pub struct ScalerCropMaximum(pub Rectangle); +impl Deref for ScalerCropMaximum { + type Target = Rectangle; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for ScalerCropMaximum { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for ScalerCropMaximum { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: ScalerCropMaximum) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for ScalerCropMaximum { + const ID: u32 = PropertyId::ScalerCropMaximum as _; +} +impl Property for ScalerCropMaximum {} +/// The relative sensitivity of the chosen sensor mode. +/// +/// Some sensors have readout modes with different sensitivities. For example, +/// a binned camera mode might, with the same exposure and gains, produce +/// twice the signal level of the full resolution readout. This would be +/// signalled by the binned mode, when it is chosen, indicating a value here +/// that is twice that of the full resolution mode. This value will be valid +/// after the configure method has returned successfully. 
+#[derive(Debug, Clone)] +pub struct SensorSensitivity(pub f32); +impl Deref for SensorSensitivity { + type Target = f32; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SensorSensitivity { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SensorSensitivity { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SensorSensitivity) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SensorSensitivity { + const ID: u32 = PropertyId::SensorSensitivity as _; +} +impl Property for SensorSensitivity {} +/// A list of integer values of type dev_t denoting the major and minor +/// device numbers of the underlying devices used in the operation of this +/// camera. +/// +/// Different cameras may report identical devices. +#[derive(Debug, Clone)] +pub struct SystemDevices(pub Vec); +impl Deref for SystemDevices { + type Target = Vec; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl DerefMut for SystemDevices { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl TryFrom for SystemDevices { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Ok(Self(>::try_from(value)?)) + } +} +impl From for ControlValue { + fn from(val: SystemDevices) -> Self { + ControlValue::from(val.0) + } +} +impl ControlEntry for SystemDevices { + const ID: u32 = PropertyId::SystemDevices as _; +} +impl Property for SystemDevices {} +/// The arrangement of color filters on sensor; represents the colors in the +/// top-left 2x2 section of the sensor, in reading order. Currently +/// identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. 
+#[cfg(feature = "vendor_draft")] +#[derive(Debug, Clone, Copy, Eq, PartialEq, TryFromPrimitive, IntoPrimitive)] +#[repr(i32)] +pub enum ColorFilterArrangement { + /// RGGB Bayer pattern + RGGB = 0, + /// GRBG Bayer pattern + GRBG = 1, + /// GBRG Bayer pattern + GBRG = 2, + /// BGGR Bayer pattern + BGGR = 3, + /// Sensor is not Bayer; output has 3 16-bit values for each pixel, + /// instead of just 1 16-bit value per pixel. + RGB = 4, + /// Sensor is not Bayer; output consists of a single colour channel. + MONO = 5, +} +#[cfg(feature = "vendor_draft")] +impl TryFrom for ColorFilterArrangement { + type Error = ControlValueError; + fn try_from(value: ControlValue) -> Result { + Self::try_from(i32::try_from(value.clone())?) + .map_err(|_| ControlValueError::UnknownVariant(value)) + } +} +#[cfg(feature = "vendor_draft")] +impl From for ControlValue { + fn from(val: ColorFilterArrangement) -> Self { + ControlValue::from(::from(val)) + } +} +#[cfg(feature = "vendor_draft")] +impl ControlEntry for ColorFilterArrangement { + const ID: u32 = PropertyId::ColorFilterArrangement as _; +} +#[cfg(feature = "vendor_draft")] +impl Property for ColorFilterArrangement {} +pub fn make_dyn( + id: PropertyId, + val: ControlValue, +) -> Result, ControlValueError> { + match id { + PropertyId::Location => Ok(Box::new(Location::try_from(val)?)), + PropertyId::Rotation => Ok(Box::new(Rotation::try_from(val)?)), + PropertyId::Model => Ok(Box::new(Model::try_from(val)?)), + PropertyId::UnitCellSize => Ok(Box::new(UnitCellSize::try_from(val)?)), + PropertyId::PixelArraySize => Ok(Box::new(PixelArraySize::try_from(val)?)), + PropertyId::PixelArrayOpticalBlackRectangles => { + Ok(Box::new(PixelArrayOpticalBlackRectangles::try_from(val)?)) + } + PropertyId::PixelArrayActiveAreas => { + Ok(Box::new(PixelArrayActiveAreas::try_from(val)?)) + } + PropertyId::ScalerCropMaximum => Ok(Box::new(ScalerCropMaximum::try_from(val)?)), + PropertyId::SensorSensitivity => 
Ok(Box::new(SensorSensitivity::try_from(val)?)), + PropertyId::SystemDevices => Ok(Box::new(SystemDevices::try_from(val)?)), + #[cfg(feature = "vendor_draft")] + PropertyId::ColorFilterArrangement => { + Ok(Box::new(ColorFilterArrangement::try_from(val)?)) + } + } +} diff --git a/libcamera/versioned_files/0.5.1/property_ids_core.yaml b/libcamera/versioned_files/0.5.1/property_ids_core.yaml new file mode 100644 index 0000000..834454a --- /dev/null +++ b/libcamera/versioned_files/0.5.1/property_ids_core.yaml @@ -0,0 +1,704 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +vendor: libcamera +controls: + - Location: + type: int32_t + description: | + Camera mounting location + enum: + - name: CameraLocationFront + value: 0 + description: | + The camera is mounted on the front side of the device, facing the + user + - name: CameraLocationBack + value: 1 + description: | + The camera is mounted on the back side of the device, facing away + from the user + - name: CameraLocationExternal + value: 2 + description: | + The camera is attached to the device in a way that allows it to + be moved freely + + - Rotation: + type: int32_t + description: | + The camera physical mounting rotation. It is expressed as the angular + difference in degrees between two reference systems, one relative to the + camera module, and one defined on the external world scene to be + captured when projected on the image sensor pixel array. + + A camera sensor has a 2-dimensional reference system 'Rc' defined by + its pixel array read-out order. The origin is set to the first pixel + being read out, the X-axis points along the column read-out direction + towards the last columns, and the Y-axis along the row read-out + direction towards the last row. + + A typical example for a sensor with a 2592x1944 pixel array matrix + observed from the front is + + 2591 X-axis 0 + <------------------------+ 0 + .......... ... ..........! + .......... ... 
..........! Y-axis + ... ! + .......... ... ..........! + .......... ... ..........! 1943 + V + + + The external world scene reference system 'Rs' is a 2-dimensional + reference system on the focal plane of the camera module. The origin is + placed on the top-left corner of the visible scene, the X-axis points + towards the right, and the Y-axis points towards the bottom of the + scene. The top, bottom, left and right directions are intentionally not + defined and depend on the environment in which the camera is used. + + A typical example of a (very common) picture of a shark swimming from + left to right, as seen from the camera, is + + 0 X-axis + 0 +-------------------------------------> + ! + ! + ! + ! |\____)\___ + ! ) _____ __`< + ! |/ )/ + ! + ! + ! + V + Y-axis + + With the reference system 'Rs' placed on the camera focal plane. + + ¸.·˙! + ¸.·˙ ! + _ ¸.·˙ ! + +-/ \-+¸.·˙ ! + | (o) | ! Camera focal plane + +-----+˙·.¸ ! + ˙·.¸ ! + ˙·.¸ ! + ˙·.¸! + + When projected on the sensor's pixel array, the image and the associated + reference system 'Rs' are typically (but not always) inverted, due to + the camera module's lens optical inversion effect. + + Assuming the above represented scene of the swimming shark, the lens + inversion projects the scene and its reference system onto the sensor + pixel array, seen from the front of the camera sensor, as follow + + Y-axis + ^ + ! + ! + ! + ! |\_____)\__ + ! ) ____ ___.< + ! |/ )/ + ! + ! + ! + 0 +-------------------------------------> + 0 X-axis + + Note the shark being upside-down. + + The resulting projected reference system is named 'Rp'. + + The camera rotation property is then defined as the angular difference + in the counter-clockwise direction between the camera reference system + 'Rc' and the projected scene reference system 'Rp'. It is expressed in + degrees as a number in the range [0, 360[. + + Examples + + 0 degrees camera rotation + + + Y-Rp + ^ + Y-Rc ! + ^ ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! 
+ ! 0 +-------------------------------------> + ! 0 X-Rp + 0 +-------------------------------------> + 0 X-Rc + + + X-Rc 0 + <------------------------------------+ 0 + X-Rp 0 ! + <------------------------------------+ 0 ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! V + ! Y-Rc + V + Y-Rp + + 90 degrees camera rotation + + 0 Y-Rc + 0 +--------------------> + ! Y-Rp + ! ^ + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! 0 +-------------------------------------> + ! 0 X-Rp + ! + ! + ! + ! + V + X-Rc + + 180 degrees camera rotation + + 0 + <------------------------------------+ 0 + X-Rc ! + Y-Rp ! + ^ ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! V + ! Y-Rc + 0 +-------------------------------------> + 0 X-Rp + + 270 degrees camera rotation + + 0 Y-Rc + 0 +--------------------> + ! 0 + ! <-----------------------------------+ 0 + ! X-Rp ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! ! + ! V + ! Y-Rp + ! + ! + ! + ! + V + X-Rc + + + Example one - Webcam + + A camera module installed on the user facing part of a laptop screen + casing used for video calls. The captured images are meant to be + displayed in landscape mode (width > height) on the laptop screen. + + The camera is typically mounted upside-down to compensate the lens + optical inversion effect. + + Y-Rp + Y-Rc ^ + ^ ! + ! ! + ! ! |\_____)\__ + ! ! ) ____ ___.< + ! ! |/ )/ + ! ! + ! ! + ! ! + ! 0 +-------------------------------------> + ! 0 X-Rp + 0 +-------------------------------------> + 0 X-Rc + + The two reference systems are aligned, the resulting camera rotation is + 0 degrees, no rotation correction needs to be applied to the resulting + image once captured to memory buffers to correctly display it to users. + + +--------------------------------------+ + ! ! + ! ! + ! ! + ! |\____)\___ ! + ! ) _____ __`< ! + ! |/ )/ ! + ! ! + ! ! + ! ! 
+ +--------------------------------------+ + + If the camera sensor is not mounted upside-down to compensate for the + lens optical inversion, the two reference systems will not be aligned, + with 'Rp' being rotated 180 degrees relatively to 'Rc'. + + + X-Rc 0 + <------------------------------------+ 0 + ! + Y-Rp ! + ^ ! + ! ! + ! |\_____)\__ ! + ! ) ____ ___.< ! + ! |/ )/ ! + ! ! + ! ! + ! V + ! Y-Rc + 0 +-------------------------------------> + 0 X-Rp + + The image once captured to memory will then be rotated by 180 degrees + + +--------------------------------------+ + ! ! + ! ! + ! ! + ! __/(_____/| ! + ! >.___ ____ ( ! + ! \( \| ! + ! ! + ! ! + ! ! + +--------------------------------------+ + + A software rotation correction of 180 degrees should be applied to + correctly display the image. + + +--------------------------------------+ + ! ! + ! ! + ! ! + ! |\____)\___ ! + ! ) _____ __`< ! + ! |/ )/ ! + ! ! + ! ! + ! ! + +--------------------------------------+ + + Example two - Phone camera + + A camera installed on the back side of a mobile device facing away from + the user. The captured images are meant to be displayed in portrait mode + (height > width) to match the device screen orientation and the device + usage orientation used when taking the picture. + + The camera sensor is typically mounted with its pixel array longer side + aligned to the device longer side, upside-down mounted to compensate for + the lens optical inversion effect. + + 0 Y-Rc + 0 +--------------------> + ! Y-Rp + ! ^ + ! ! + ! ! + ! ! + ! ! |\_____)\__ + ! ! ) ____ ___.< + ! ! |/ )/ + ! ! + ! ! + ! ! + ! 0 +-------------------------------------> + ! 0 X-Rp + ! + ! + ! + ! + V + X-Rc + + The two reference systems are not aligned and the 'Rp' reference + system is rotated by 90 degrees in the counter-clockwise direction + relatively to the 'Rc' reference system. + + The image once captured to memory will be rotated. 
+ + +-------------------------------------+ + | _ _ | + | \ / | + | | | | + | | | | + | | > | + | < | | + | | | | + | . | + | V | + +-------------------------------------+ + + A correction of 90 degrees in counter-clockwise direction has to be + applied to correctly display the image in portrait mode on the device + screen. + + +--------------------+ + | | + | | + | | + | | + | | + | | + | |\____)\___ | + | ) _____ __`< | + | |/ )/ | + | | + | | + | | + | | + | | + +--------------------+ + + - Model: + type: string + description: | + The model name shall to the extent possible describe the sensor. For + most devices this is the model name of the sensor. While for some + devices the sensor model is unavailable as the sensor or the entire + camera is part of a larger unit and exposed as a black-box to the + system. In such cases the model name of the smallest device that + contains the camera sensor shall be used. + + The model name is not meant to be a camera name displayed to the + end-user, but may be combined with other camera information to create a + camera name. + + The model name is not guaranteed to be unique in the system nor is + it guaranteed to be stable or have any other properties required to make + it a good candidate to be used as a permanent identifier of a camera. + + The model name shall describe the camera in a human readable format and + shall be encoded in ASCII. + + Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'. + + - UnitCellSize: + type: Size + description: | + The pixel unit cell physical size, in nanometers. + + The UnitCellSize properties defines the horizontal and vertical sizes of + a single pixel unit, including its active and non-active parts. In + other words, it expresses the horizontal and vertical distance between + the top-left corners of adjacent pixels. + + The property can be used to calculate the physical size of the sensor's + pixel array area and for calibration purposes. 
+ + - PixelArraySize: + type: Size + description: | + The camera sensor pixel array readable area vertical and horizontal + sizes, in pixels. + + The PixelArraySize property defines the size in pixel units of the + readable part of full pixel array matrix, including optical black + pixels used for calibration, pixels which are not considered valid for + capture and active pixels containing valid image data. + + The property describes the maximum size of the raw data captured by the + camera, which might not correspond to the physical size of the sensor + pixel array matrix, as some portions of the physical pixel array matrix + are not accessible and cannot be transmitted out. + + For example, let's consider a pixel array matrix assembled as follows + + +--------------------------------------------------+ + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + ... ... ... ... ... + + ... ... ... ... ... + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDxx| + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + |xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx| + +--------------------------------------------------+ + + starting with two lines of non-readable pixels (x), followed by N lines + of readable data (D) surrounded by two columns of non-readable pixels on + each side, and ending with two more lines of non-readable pixels. Only + the readable portion is transmitted to the receiving side, defining the + sizes of the largest possible buffer of raw data that can be presented + to applications. 
+ + PixelArraySize.width + /----------------------------------------------/ + +----------------------------------------------+ / + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | PixelArraySize.height + ... ... ... ... ... + ... ... ... ... ... + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + |DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD| | + +----------------------------------------------+ / + + This defines a rectangle whose top-left corner is placed in position (0, + 0) and whose vertical and horizontal sizes are defined by this property. + All other rectangles that describe portions of the pixel array, such as + the optical black pixels rectangles and active pixel areas, are defined + relatively to this rectangle. + + All the coordinates are expressed relative to the default sensor readout + direction, without any transformation (such as horizontal and vertical + flipping) applied. When mapping them to the raw pixel buffer, + applications shall take any configured transformation into account. + + \todo Rename this property to Size once we will have property + categories (i.e. Properties::PixelArray::Size) + + - PixelArrayOpticalBlackRectangles: + type: Rectangle + size: [n] + description: | + The pixel array region(s) which contain optical black pixels + considered valid for calibration purposes. + + This property describes the position and size of optical black pixel + regions in the raw data buffer as stored in memory, which might differ + from their actual physical location in the pixel array matrix. + + It is important to note, in fact, that camera sensors might + automatically reorder or skip portions of their pixels array matrix when + transmitting data to the receiver. 
For instance, a sensor may merge the + top and bottom optical black rectangles into a single rectangle, + transmitted at the beginning of the frame. + + The pixel array contains several areas with different purposes, + interleaved by lines and columns which are said not to be valid for + capturing purposes. Invalid lines and columns are defined as invalid as + they could be positioned too close to the chip margins or to the optical + black shielding placed on top of optical black pixels. + + PixelArraySize.width + /----------------------------------------------/ + x1 x2 + +--o---------------------------------------o---+ / + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + y1 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + |IIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + y2 oIIOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + y3 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | PixelArraySize.height + |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + ... ... ... ... ... + ... ... ... ... ... 
+ y4 |IIOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOOII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + |IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII| | + +----------------------------------------------+ / + + The readable pixel array matrix is composed by + 2 invalid lines (I) + 4 lines of valid optical black pixels (O) + 2 invalid lines (I) + n lines of valid pixel data (P) + 2 invalid lines (I) + + And the position of the optical black pixel rectangles is defined by + + PixelArrayOpticalBlackRectangles = { + { x1, y1, x2 - x1 + 1, y2 - y1 + 1 }, + { x1, y3, 2, y4 - y3 + 1 }, + { x2, y3, 2, y4 - y3 + 1 }, + }; + + If the camera, when capturing the full pixel array matrix, automatically + skips the invalid lines and columns, producing the following data + buffer, when captured to memory + + PixelArraySize.width + /----------------------------------------------/ + x1 + +--------------------------------------------o-+ / + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + |OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO| | + y1 oOOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | PixelArraySize.height + ... ... ... ... ... | + ... ... ... ... ... | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + |OOPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPOO| | + +----------------------------------------------+ / + + then the invalid lines and columns should not be reported as part of the + PixelArraySize property in first place. + + In this case, the position of the black pixel rectangles will be + + PixelArrayOpticalBlackRectangles = { + { 0, 0, y1 + 1, PixelArraySize[0] }, + { 0, y1, 2, PixelArraySize[1] - y1 + 1 }, + { x1, y1, 2, PixelArraySize[1] - y1 + 1 }, + }; + + \todo Rename this property to Size once we will have property + categories (i.e. 
Properties::PixelArray::OpticalBlackRectangles) + + - PixelArrayActiveAreas: + type: Rectangle + size: [n] + description: | + The PixelArrayActiveAreas property defines the (possibly multiple and + overlapping) portions of the camera sensor readable pixel matrix + which are considered valid for image acquisition purposes. + + This property describes an arbitrary number of overlapping rectangles, + with each rectangle representing the maximum image size that the camera + sensor can produce for a particular aspect ratio. They are defined + relatively to the PixelArraySize rectangle. + + When multiple rectangles are reported, they shall be ordered from the + tallest to the shortest. + + Example 1 + A camera sensor which only produces images in the 4:3 image resolution + will report a single PixelArrayActiveAreas rectangle, from which all + other image formats are obtained by either cropping the field-of-view + and/or applying pixel sub-sampling techniques such as pixel skipping or + binning. + + PixelArraySize.width + /----------------/ + x1 x2 + (0,0)-> +-o------------o-+ / + y1 o +------------+ | | + | |////////////| | | + | |////////////| | | PixelArraySize.height + | |////////////| | | + y2 o +------------+ | | + +----------------+ / + + The property reports a single rectangle + + PixelArrayActiveAreas = (x1, y1, x2 - x1 + 1, y2 - y1 + 1) + + Example 2 + A camera sensor which can produce images in different native + resolutions will report several overlapping rectangles, one for each + natively supported resolution. 
+ + PixelArraySize.width + /------------------/ + x1 x2 x3 x4 + (0,0)-> +o---o------o---o+ / + y1 o +------+ | | + | |//////| | | + y2 o+---+------+---+| | + ||///|//////|///|| | PixelArraySize.height + y3 o+---+------+---+| | + | |//////| | | + y4 o +------+ | | + +----+------+----+ / + + The property reports two rectangles + + PixelArrayActiveAreas = ((x2, y1, x3 - x2 + 1, y4 - y1 + 1), + (x1, y2, x4 - x1 + 1, y3 - y2 + 1)) + + The first rectangle describes the maximum field-of-view of all image + formats in the 4:3 resolutions, while the second one describes the + maximum field of view for all image formats in the 16:9 resolutions. + + Multiple rectangles shall only be reported when the sensor can't capture + the pixels in the corner regions. If all the pixels in the (x1,y1) - + (x4,y4) area can be captured, the PixelArrayActiveAreas property shall + contains the single rectangle (x1,y1) - (x4,y4). + + \todo Rename this property to ActiveAreas once we will have property + categories (i.e. Properties::PixelArray::ActiveAreas) + + - ScalerCropMaximum: + type: Rectangle + description: | + The maximum valid rectangle for the controls::ScalerCrop control. This + reflects the minimum mandatory cropping applied in the camera sensor and + the rest of the pipeline. Just as the ScalerCrop control, it defines a + rectangle taken from the sensor's active pixel array. + + This property is valid only after the camera has been successfully + configured and its value may change whenever a new configuration is + applied. + + \todo Turn this property into a "maximum control value" for the + ScalerCrop control once "dynamic" controls have been implemented. + + - SensorSensitivity: + type: float + description: | + The relative sensitivity of the chosen sensor mode. + + Some sensors have readout modes with different sensitivities. For example, + a binned camera mode might, with the same exposure and gains, produce + twice the signal level of the full resolution readout. 
This would be + signalled by the binned mode, when it is chosen, indicating a value here + that is twice that of the full resolution mode. This value will be valid + after the configure method has returned successfully. + + - SystemDevices: + type: int64_t + size: [n] + description: | + A list of integer values of type dev_t denoting the major and minor + device numbers of the underlying devices used in the operation of this + camera. + + Different cameras may report identical devices. + +... diff --git a/libcamera/versioned_files/0.5.1/property_ids_draft.yaml b/libcamera/versioned_files/0.5.1/property_ids_draft.yaml new file mode 100644 index 0000000..62f0e24 --- /dev/null +++ b/libcamera/versioned_files/0.5.1/property_ids_draft.yaml @@ -0,0 +1,39 @@ +# SPDX-License-Identifier: LGPL-2.1-or-later +# +# Copyright (C) 2019, Google Inc. +# +%YAML 1.1 +--- +vendor: draft +controls: + - ColorFilterArrangement: + type: int32_t + vendor: draft + description: | + The arrangement of color filters on sensor; represents the colors in the + top-left 2x2 section of the sensor, in reading order. Currently + identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT. + enum: + - name: RGGB + value: 0 + description: RGGB Bayer pattern + - name: GRBG + value: 1 + description: GRBG Bayer pattern + - name: GBRG + value: 2 + description: GBRG Bayer pattern + - name: BGGR + value: 3 + description: BGGR Bayer pattern + - name: RGB + value: 4 + description: | + Sensor is not Bayer; output has 3 16-bit values for each pixel, + instead of just 1 16-bit value per pixel. + - name: MONO + value: 5 + description: | + Sensor is not Bayer; output consists of a single colour channel. + +...