diff --git a/Cargo.toml b/Cargo.toml index 1adaca296c..62a55693c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -89,7 +89,7 @@ default-features = false # path = "module/core/type_constructor_derive_pair_meta" [workspace.dependencies.interval_adapter] -version = "~0.8.0" +version = "~0.9.0" path = "module/core/interval_adapter" default-features = false features = [ "enabled" ] @@ -173,7 +173,7 @@ default-features = false ## iter [workspace.dependencies.iter_tools] -version = "~0.7.0" +version = "~0.8.0" path = "module/core/iter_tools" default-features = false @@ -198,10 +198,7 @@ default-features = false [workspace.dependencies.former_meta] version = "~0.7.0" path = "module/core/former_meta" - -# [workspace.dependencies.former_runtime] -# version = "~0.1.12" -# path = "module/core/former_runtime" +default-features = false [workspace.dependencies.impls_index] version = "~0.3.0" @@ -244,7 +241,7 @@ default-features = false ## proc macro tools [workspace.dependencies.macro_tools] -version = "~0.8.0" +version = "~0.9.0" path = "module/core/macro_tools" default-features = false diff --git a/module/alias/instance_of/src/typing/instance_of_lib.rs b/module/alias/instance_of/src/typing/instance_of_lib.rs index 1c462b5f90..2f552e12b2 100644 --- a/module/alias/instance_of/src/typing/instance_of_lib.rs +++ b/module/alias/instance_of/src/typing/instance_of_lib.rs @@ -2,14 +2,6 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/instance_of/latest/instance_of/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -//! -//! Macro to answer the question: does it implement a trait? -//! - #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] #[ doc( inline ) ] diff --git a/module/alias/proc_macro_tools/Readme.md b/module/alias/proc_macro_tools/Readme.md index 97176f6bed..288a6b53f4 100644 --- a/module/alias/proc_macro_tools/Readme.md +++ b/module/alias/proc_macro_tools/Readme.md @@ -12,15 +12,13 @@ Tools for writing procedural macros. ```rust ignore use proc_macro_tools::*; -use proc_macro_tools::dependency::*; -use quote::quote; fn main() { - let code = quote!( core::option::Option< i8, i16, i32, i64 > ); + let code = qt!( core::option::Option< i8, i16, i32, i64 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); let got = type_parameters( &tree_type, 0..=2 ); - got.iter().for_each( | e | println!( "{}", quote!( #e ) ) ); + got.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); // < i8 // < i16 // < i32 diff --git a/module/alias/proc_macro_tools/examples/proc_macro_tools_trivial.rs b/module/alias/proc_macro_tools/examples/proc_macro_tools_trivial.rs index df3d8151e7..aa45c5ab86 100644 --- a/module/alias/proc_macro_tools/examples/proc_macro_tools_trivial.rs +++ b/module/alias/proc_macro_tools/examples/proc_macro_tools_trivial.rs @@ -1,19 +1,20 @@ //! 
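Note on the Readme change above: the example now uses the re-exported `qt!` and `type_parameters` instead of pulling in `quote::quote` directly. As a point of reference, the standalone sketch below spells out with plain `syn`/`quote` what `type_parameters( &tree_type, 0..=2 )` is expected to return; it is only an illustration of the expected output, not the crate's implementation, and assumes `syn` and `quote` as direct dependencies.

```rust
use quote::{ quote, ToTokens };

fn main()
{
  // Same input as the Readme example.
  let code = quote!( core::option::Option< i8, i16, i32, i64 > );
  let tree_type : syn::Type = syn::parse2( code ).unwrap();

  // Walk to the last path segment and print its first three generic arguments.
  if let syn::Type::Path( type_path ) = &tree_type
  {
    if let syn::PathArguments::AngleBracketed( args ) = &type_path.path.segments.last().unwrap().arguments
    {
      for arg in args.args.iter().take( 3 )
      {
        println!( "{}", arg.to_token_stream() );
      }
    }
  }
  // Prints i8, i16, i32, matching the comment in the updated example.
}
```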
example +#[ cfg( feature = "no_std" ) ] +fn main(){} + +#[ cfg( not( feature = "no_std" ) ) ] fn main() { - #[ cfg( not( feature = "no_std" ) ) ] - { - use proc_macro_tools::*; + use proc_macro_tools::{ typ, qt }; - let code = qt!( core::option::Option< i8, i16, i32, i64 > ); - let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = type_parameters( &tree_type, 0..=2 ); - got.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); - /* print : - i8 - i16 - i32 - */ - } + let code = qt!( core::option::Option< i8, i16, i32, i64 > ); + let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); + let got = typ::type_parameters( &tree_type, 0..=2 ); + got.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); + /* print : + i8 + i16 + i32 + */ } \ No newline at end of file diff --git a/module/core/clone_dyn/Cargo.toml b/module/core/clone_dyn/Cargo.toml index b268e237c9..9fb7caecee 100644 --- a/module/core/clone_dyn/Cargo.toml +++ b/module/core/clone_dyn/Cargo.toml @@ -23,7 +23,6 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false - exclude = [ "/tests", "/examples", "-*" ] [features] @@ -32,7 +31,7 @@ default = [ "enabled" ] full = [ "enabled" ] no_std = [] use_alloc = [] -enabled = [] +enabled = [ "clone_dyn_meta/enabled" ] [dependencies] clone_dyn_meta = { workspace = true } diff --git a/module/core/clone_dyn/tests/inc/mod.rs b/module/core/clone_dyn/tests/inc/mod.rs index 9d3ce53b88..2b3e377fa5 100644 --- a/module/core/clone_dyn/tests/inc/mod.rs +++ b/module/core/clone_dyn/tests/inc/mod.rs @@ -9,6 +9,7 @@ tests_impls! // + // qqq : organize tests in the same way tests organized for derive_tools fn manual() { @@ -34,6 +35,7 @@ tests_impls! // + #[ allow( non_local_definitions ) ] impl < 'c > Clone for Box< dyn Trait1 + 'c > { @@ -41,6 +43,7 @@ tests_impls! fn clone( &self ) -> Self { _clone_boxed( &**self ) } } + #[ allow( non_local_definitions ) ] impl < 'c > Clone for Box< dyn Trait1 + Send + 'c > { @@ -48,6 +51,7 @@ tests_impls! fn clone( &self ) -> Self { _clone_boxed( &**self ) } } + #[ allow( non_local_definitions ) ] impl < 'c > Clone for Box< dyn Trait1 + Sync + 'c > { @@ -55,6 +59,7 @@ tests_impls! fn clone( &self ) -> Self { _clone_boxed( &**self ) } } + #[ allow( non_local_definitions ) ] impl < 'c > Clone for Box< dyn Trait1 + Send + Sync + 'c > { diff --git a/module/core/clone_dyn_meta/src/lib.rs b/module/core/clone_dyn_meta/src/lib.rs index 27efb3e841..752d3dc344 100644 --- a/module/core/clone_dyn_meta/src/lib.rs +++ b/module/core/clone_dyn_meta/src/lib.rs @@ -2,17 +2,6 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/clone_dyn_meta/latest/clone_dyn_meta/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -// #![ feature( type_name_of_val ) ] -// #![ feature( trace_macros ) ] - -//! -//! Derive to clone dyn structures. -//! 
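Note on the added `#[ allow( non_local_definitions ) ]` attributes in the tests above and in the macro output below: recent compilers lint on `impl` blocks expanded inside a function body, which is exactly where `tests_impls!` and the attribute macro can place them. A minimal hand-written sketch of the pattern the macro produces, using a hypothetical `clone_box` method as a stand-in for `clone_dyn::_clone_boxed`:

```rust
// Hand-written equivalent of one generated impl; the real expansion emits four
// ( plain, + Send, + Sync, + Send + Sync ), parameterized over the trait's generics.
trait Trait1
{
  fn val( &self ) -> i32;
  // Hypothetical helper standing in for `clone_dyn::_clone_boxed`.
  fn clone_box( &self ) -> Box< dyn Trait1 >;
}

#[ derive( Clone ) ]
struct A( i32 );

impl Trait1 for A
{
  fn val( &self ) -> i32 { self.0 }
  fn clone_box( &self ) -> Box< dyn Trait1 > { Box::new( self.clone() ) }
}

// When this impl is expanded inside a function body, the compiler raises
// `non_local_definitions`, hence the allow attribute added by this diff.
#[ allow( non_local_definitions ) ]
impl Clone for Box< dyn Trait1 >
{
  #[ inline( always ) ]
  fn clone( &self ) -> Self { ( **self ).clone_box() }
}

fn main()
{
  let original : Box< dyn Trait1 > = Box::new( A( 13 ) );
  let copy = original.clone();
  assert_eq!( copy.val(), 13 );
}
```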
- #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] #[ cfg( feature = "enabled" ) ] diff --git a/module/core/clone_dyn_meta/src/meta_impl.rs b/module/core/clone_dyn_meta/src/meta_impl.rs index c9b5e930c2..883d7b9bf6 100644 --- a/module/core/clone_dyn_meta/src/meta_impl.rs +++ b/module/core/clone_dyn_meta/src/meta_impl.rs @@ -1,6 +1,6 @@ use macro_tools::prelude::*; -pub type Result< T > = std::result::Result< T, syn::Error >; +use macro_tools::Result; // @@ -24,6 +24,7 @@ pub fn clone_dyn( _attr : proc_macro::TokenStream, item : proc_macro::TokenStrea { #item_parsed + #[ allow( non_local_definitions ) ] impl < 'c, #generics_params > Clone for Box< dyn #name_ident< #( #generics_names ),* > + 'c > // where @@ -33,6 +34,7 @@ pub fn clone_dyn( _attr : proc_macro::TokenStream, item : proc_macro::TokenStrea fn clone( &self ) -> Self { clone_dyn::_clone_boxed( &**self ) } } + #[ allow( non_local_definitions ) ] impl < 'c, #generics_params > Clone for Box< dyn #name_ident< #( #generics_names ),* > + Send + 'c > // where @@ -42,6 +44,7 @@ pub fn clone_dyn( _attr : proc_macro::TokenStream, item : proc_macro::TokenStrea fn clone( &self ) -> Self { clone_dyn::_clone_boxed( &**self ) } } + #[ allow( non_local_definitions ) ] impl < 'c, #generics_params > Clone for Box< dyn #name_ident< #( #generics_names ),* > + Sync + 'c > // where @@ -51,6 +54,7 @@ pub fn clone_dyn( _attr : proc_macro::TokenStream, item : proc_macro::TokenStrea fn clone( &self ) -> Self { clone_dyn::_clone_boxed( &**self ) } } + #[ allow( non_local_definitions ) ] impl < 'c, #generics_params > Clone for Box< dyn #name_ident< #( #generics_names ),* > + Send + Sync + 'c > // where diff --git a/module/core/derive_tools/Cargo.toml b/module/core/derive_tools/Cargo.toml index 0c0bf16f33..3fe98d4e80 100644 --- a/module/core/derive_tools/Cargo.toml +++ b/module/core/derive_tools/Cargo.toml @@ -125,34 +125,34 @@ derive_variadic_from = [ "type_variadic_from", "derive_tools_meta/derive_variadi # enable_track_caller = [ "derive_more", "derive_more/track-caller" ] -derive_add_assign = [ "derive_more", "derive_more/add_assign" ] -derive_add = [ "derive_more", "derive_more/add" ] +derive_add_assign = [ "derive_more", "derive_more/std", "derive_more/add_assign" ] +derive_add = [ "derive_more", "derive_more/std", "derive_more/add" ] derive_as_mut = [ "derive_tools_meta/derive_as_mut" ] derive_as_ref = [ "derive_tools_meta/derive_as_ref" ] # derive_as_mut = [ "derive_more", "derive_more/as_mut" ] # derive_as_ref = [ "derive_more", "derive_more/as_ref" ] -derive_constructor = [ "derive_more", "derive_more/constructor" ] +derive_constructor = [ "derive_more", "derive_more/std", "derive_more/constructor" ] derive_deref = [ "derive_tools_meta/derive_deref" ] derive_deref_mut = [ "derive_tools_meta/derive_deref_mut" ] # derive_deref = [ "derive_more", "derive_more/deref" ] # derive_deref_mut = [ "derive_more", "derive_more/deref_mut" ] -derive_error = [ "derive_more", "derive_more/error" ] +derive_error = [ "derive_more", "derive_more/std", "derive_more/error" ] # derive_from = [ "derive_more", "derive_more/from" ] # derive_from = [ "derive_tools_meta/derive_from" ] # derive_reflect = [ "derive_tools_meta/derive_reflect" ] -derive_index = [ "derive_more", "derive_more/index" ] -derive_index_mut = [ "derive_more", "derive_more/index_mut" ] +derive_index = [ "derive_more", "derive_more/std", "derive_more/index" ] +derive_index_mut = [ "derive_more", "derive_more/std", "derive_more/index_mut" ] # derive_inner_from = [ 
"derive_more", "derive_more/into" ] -derive_into_iterator = [ "derive_more", "derive_more/into_iterator" ] +derive_into_iterator = [ "derive_more", "derive_more/std", "derive_more/into_iterator" ] # derive_iterator = [ "derive_more", "derive_more/iterator" ] -derive_mul_assign = [ "derive_more", "derive_more/mul_assign" ] -derive_mul = [ "derive_more", "derive_more/mul" ] -derive_not = [ "derive_more", "derive_more/not" ] -derive_sum = [ "derive_more", "derive_more/sum" ] -derive_try_into = [ "derive_more", "derive_more/try_into" ] +derive_mul_assign = [ "derive_more", "derive_more/std", "derive_more/mul_assign" ] +derive_mul = [ "derive_more", "derive_more/std", "derive_more/mul" ] +derive_not = [ "derive_more", "derive_more/std", "derive_more/not" ] +derive_sum = [ "derive_more", "derive_more/std", "derive_more/sum" ] +derive_try_into = [ "derive_more", "derive_more/std", "derive_more/try_into" ] derive_is_variant = [ "derive_more", "derive_more/is_variant" ] -derive_unwrap = [ "derive_more", "derive_more/unwrap" ] +derive_unwrap = [ "derive_more", "derive_more/std", "derive_more/unwrap" ] # derive_convert_case = [ "derive_more", "derive_more/convert_case" ] derive_display = [ "parse-display" ] diff --git a/module/core/derive_tools/src/lib.rs b/module/core/derive_tools/src/lib.rs index 5033fe4364..e69ec9864a 100644 --- a/module/core/derive_tools/src/lib.rs +++ b/module/core/derive_tools/src/lib.rs @@ -12,8 +12,8 @@ #[ cfg( feature = "enabled" ) ] pub mod wtools; -#[ cfg( feature = "derive_reflect" ) ] -pub mod reflect; +// #[ cfg( feature = "derive_reflect" ) ] +// pub mod reflect; // use derive_tools_meta::Deref; // use derive_tools_meta::VariadicFrom; @@ -55,10 +55,10 @@ pub mod protected #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::wtools::orphan::*; - #[ cfg( feature = "derive_reflect" ) ] - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::reflect::orphan::*; + // #[ cfg( feature = "derive_reflect" ) ] + // #[ doc( inline ) ] + // #[ allow( unused_imports ) ] + // pub use super::reflect::orphan::*; } #[ cfg( all( feature = "derive_more" ) ) ] @@ -66,9 +66,37 @@ pub mod protected mod derive_more { #[ cfg( feature = "derive_add" ) ] - pub use ::derive_more::Add; + pub use ::derive_more::{ Add, Sub }; + #[ cfg( feature = "derive_add_assign" ) ] + pub use ::derive_more::{ AddAssign, SubAssign }; + #[ cfg( feature = "derive_constructor" ) ] + pub use ::derive_more::Constructor; + #[ cfg( feature = "derive_error" ) ] + pub use ::derive_more::Error; + #[ cfg( feature = "derive_index_mut" ) ] + pub use ::derive_more::IndexMut; + #[ cfg( feature = "derive_index" ) ] + pub use ::derive_more::Index; + #[ cfg( feature = "derive_into" ) ] + pub use ::derive_more::Into; + #[ cfg( feature = "derive_iterator" ) ] + pub use ::derive_more::Iterator; + #[ cfg( feature = "derive_into_iterator" ) ] + pub use ::derive_more::IntoIterator; + #[ cfg( feature = "derive_mul" ) ] + pub use ::derive_more::{ Mul, Div }; + #[ cfg( feature = "derive_mul_assign" ) ] + pub use ::derive_more::{ MulAssign, DivAssign }; + #[ cfg( feature = "derive_not" ) ] + pub use ::derive_more::Not; + #[ cfg( feature = "derive_sum" ) ] + pub use ::derive_more::Sum; + #[ cfg( feature = "derive_try_into" ) ] + pub use ::derive_more::TryInto; #[ cfg( feature = "derive_is_variant" ) ] pub use ::derive_more::IsVariant; + #[ cfg( feature = "derive_unwrap" ) ] + pub use ::derive_more::Unwrap; // qqq2 : list all // qqq2 : make sure all features of derive_more is reexported @@ -183,10 +211,10 @@ pub mod 
exposed #[ allow( unused_imports ) ] pub use super::wtools::exposed::*; - #[ cfg( feature = "derive_reflect" ) ] - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::reflect::exposed::*; + // #[ cfg( feature = "derive_reflect" ) ] + // #[ doc( inline ) ] + // #[ allow( unused_imports ) ] + // pub use super::reflect::exposed::*; // #[ cfg( any_derive ) ] #[ doc( inline ) ] @@ -217,10 +245,10 @@ pub mod prelude #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use ::clone_dyn::clone_dyn; - #[ cfg( feature = "derive_reflect" ) ] - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::reflect::prelude::*; + // #[ cfg( feature = "derive_reflect" ) ] + // #[ doc( inline ) ] + // #[ allow( unused_imports ) ] + // pub use super::reflect::prelude::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/derive_tools/src/reflect.rs b/module/core/derive_tools/src/reflect.rs deleted file mode 100644 index e6fd3c6192..0000000000 --- a/module/core/derive_tools/src/reflect.rs +++ /dev/null @@ -1,147 +0,0 @@ -//! -//! # System of Types for Reflection -//! -//! This crate provides a comprehensive system for runtime type reflection, enabling dynamic type inspection and manipulation. It is designed to facilitate the integration of types into systems that require advanced operations such as serialization, deserialization, object-relational mapping (ORM), and interaction with generic containers and algorithms that operate on heterogeneous collections of entities. -//! -//! ## Features -//! -//! - **Dynamic Type Inspection**: Retrieve detailed type information at runtime, supporting complex scenarios like serialization frameworks that need to dynamically handle different data types. -//! - **Entity Manipulation**: Manipulate entities in a type-safe manner, leveraging Rust's powerful type system to ensure correctness while allowing dynamic behavior. -//! - **Reflection API**: Utilize a rich set of APIs to introspect and manipulate entities based on their runtime type information, enabling patterns that are not possible with static typing alone. -//! - **Support for Primitive and Composite Types**: Handle both primitive types (e.g., integers, floating-point numbers, strings) and composite entities (e.g., structs, arrays, maps) with a unified interface. -//! -//! ## Use Cases -//! -//! - **Serialization/Deserialization**: Automatically convert Rust structs to and from formats like JSON, XML, or binary representations, based on their runtime type information. -//! - **Dynamic ORM**: Map Rust entities to database tables dynamically, enabling flexible schema evolution and complex queries without sacrificing type safety. -//! - **Generic Algorithms**: Implement algorithms that operate on collections of heterogeneous types, performing runtime type checks and conversions as necessary. -//! - **Plugin Architectures**: Build systems that load and interact with plugins or modules of unknown types at compile time, facilitating extensibility and modularity. -//! -//! ## Getting Started -//! -//! To start using the reflection system, define your entities using the provided traits and enums, and then use the `reflect` function to introspect their properties and behavior at runtime. The system is designed to be intuitive for Rust developers familiar with traits and enums, with minimal boilerplate required to make existing types compatible. -//! -//! ## Example -//! -//! ```rust, ignore -//! # use derive_tools::reflect::{ reflect, Entity }; -//! -//! 
// Define an entity that implements the Instance trait. -//! #[ derive( Debug ) ] -//! struct MyEntity -//! { -//! id : i32, -//! name : String, -//! // other fields -//! } -//! -//! // Implement the required traits for MyEntity. -//! // ... -//! -//! // Use the reflection API to inspect `MyEntity`. -//! let entity = MyEntity { id: 1, name: "Entity Name".to_string() /*, other fields*/ }; -//! let reflected = reflect( &entity ); -//! println!( "{:?}", reflected.type_name() ); // Outputs "MyEntity" -//! ``` -//! -//! ## Extending the System -//! -//! Implement additional traits for your types as needed to leverage the full power of the reflection system. The crate is designed to be extensible, allowing custom types to integrate seamlessly with the reflection mechanism. -//! - -// qqq : make the example working. use tests for inpsisrations - -/// Internal namespace. -pub( crate ) mod private -{ -} - -pub mod axiomatic; -pub mod entity_array; -pub mod entity_slice; -pub mod entity_vec; -pub mod entity_hashmap; -pub mod entity_hashset; -pub mod primitive; - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::axiomatic::orphan::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_array::orphan::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_slice::orphan::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_vec::orphan::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_hashmap::orphan::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_hashset::orphan::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::primitive::orphan::*; - // pub use super::private:: - // { - // }; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::axiomatic::exposed::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_array::exposed::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_slice::exposed::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_vec::exposed::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_hashmap::exposed::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::entity_hashset::exposed::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::primitive::exposed::*; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. -pub mod prelude -{ -} diff --git a/module/core/derive_tools/src/reflect/axiomatic.rs b/module/core/derive_tools/src/reflect/axiomatic.rs deleted file mode 100644 index 47d26b07a4..0000000000 --- a/module/core/derive_tools/src/reflect/axiomatic.rs +++ /dev/null @@ -1,550 +0,0 @@ -//! -//! Mechanism for reflection. -//! - -use super::*; - -/// Internal namespace. 
-pub( crate ) mod private -{ - use super::*; - - /// Provides a reflection of an instance that implements the `Instance` trait. - /// - /// This function is required to distinguish between instances of a type and references to an instance - /// in contexts where `self` is used. Without this function, associated trait functions would not differentiate - /// between `i32` and `&i32`, treating both identically. - /// - /// # Arguments - /// - /// * `src` - A reference to an instance that implements the `Instance` trait. - /// - /// # Returns - /// - /// Returns an entity descriptor that implements the `Entity` trait, providing - /// runtime reflection capabilities for the given instance. - pub fn reflect( src : &impl Instance ) -> impl Entity - { - src._reflect() - } - - /// - /// Trait indicating that an entity is a container. - /// - /// Implementors of `IsContainer` are considered to be container types, - /// which can hold zero or more elements. This trait is typically used in - /// conjunction with reflection mechanisms to dynamically inspect, access, - /// or modify the contents of a container at runtime. - pub trait IsContainer : Instance - { - } - - /// - /// Trait indicating that an entity is a scalar value. - /// - /// Implementors of `IsScalar` are considered to be scalar types, - /// representing single, indivisible values as opposed to composite entities - /// like arrays or structs. This distinction can be useful in reflection-based - /// APIs or generic programming to treat scalar values differently from containers - /// or other complex types. - pub trait IsScalar : Instance - { - } - - /// - /// Represents a trait for enabling runtime reflection of entities. - /// - /// This trait is designed to equip implementing structs with the ability to introspect - /// their properties, type names, and any contained elements. It facilitates runtime inspection - /// and manipulation of entities in a dynamic manner. - /// - pub trait Instance - { - /// The entity descriptor associated with this instance. - type Entity : Entity; - /// Returns a descriptor for the current instance. - /// - /// Don't use manually. - fn _reflect( &self ) -> Self::Entity - { - Self::Reflect() - } - /// Returns a descriptor for the type of the instance. - #[ allow( non_snake_case ) ] - fn Reflect() -> Self::Entity; - } - - impl< T > Instance for T - where - EntityDescriptor< T > : Entity, - T : InstanceMarker, - { - type Entity = EntityDescriptor::< Self >; - #[ inline( always ) ] - fn Reflect() -> Self::Entity - { - EntityDescriptor::< Self >::new() - } - } - - /// - /// The `Entity` trait defines a common interface for entities within a system, enabling - /// runtime reflection, inspection, and manipulation of their properties and elements. It - /// serves as a foundational component for dynamic entity handling, where entities can - /// represent data structures, components, or other logical units with introspectable - /// and manipulable state. - /// - /// ## Usage - /// - /// Implementing the `Entity` trait allows a type to be integrated into systems that require - /// dynamic type inspection and manipulation, such as serialization frameworks, object-relational - /// mapping (ORM) systems, or generic containers and algorithms that operate on heterogeneous - /// entity collections. - /// - /// ## Key Concepts - /// - /// - **Containment**: Entities can act as containers for other entities, enabling hierarchical - /// or composite data models. 
- /// - /// - **Ordering**: The trait distinguishes between ordered and unordered entities, affecting - /// how their elements are iterated over or accessed. - /// - /// - **Reflection**: Through type metadata and element access methods, entities support - /// reflection, allowing programmatic querying and manipulation of their structure and state. - /// - /// ## Implementing `Entity` - /// - /// To implement the `Entity` trait, a type must provide implementations for all non-default - /// methods (`type_name`, `type_id`). The default method implementations assume non-container - /// entities with no elements and predictable ordering. Implementers should override these - /// defaults as appropriate to accurately reflect their specific semantics and behavior. - /// - /// ## Example - /// - /// ``` - /// # use derive_tools::reflect::Entity; - /// - /// #[ derive(Debug)] - /// struct MyEntity - /// { - /// // Entity fields - /// } - /// - /// impl Entity for MyEntity - /// { - /// - /// #[ inline ] - /// fn type_name( &self ) -> &'static str - /// { - /// "MyEntity" - /// } - /// - /// #[ inline ] - /// fn type_id(&self) -> core::any::TypeId - /// { - /// core::any::TypeId::of::< MyEntity >() - /// } - /// - /// // Additional method implementations as necessary... - /// } - /// ``` - /// - /// This trait is designed to be flexible and extensible, accommodating a wide variety of entity - /// types and use cases. Implementers are encouraged to leverage Rust's type system and trait - /// mechanisms to provide rich, dynamic behavior in a type-safe manner. - /// - pub trait Entity : core::fmt::Debug - { - - /// Determines if the entity acts as a container for other entities. - /// - /// # Returns - /// - /// Returns `true` if the entity can contain other entities (like a struct, vector, etc.), - /// otherwise `false`. - /// - /// By default, this method returns `false`, assuming that the entity does not act as a container. - #[ inline( always ) ] - fn is_container( &self ) -> bool - { - false - } - - /// Determines if the elements of the container are maintained in a specific order. - /// - /// This method indicates whether the container preserves a specific order of its elements. - /// The concept of "order" can refer to: - /// - **Sorted Order**: Where elements are arranged based on a sorting criterion, typically - /// through comparison operations. - /// - **Insertion Order**: Where elements retain the order in which they were added to the container. - /// - /// It is important to distinguish this property in collections to understand how iteration over - /// the elements will proceed and what expectations can be held about the sequence of elements - /// when accessed. - /// - /// # Returns - /// - /// - `true` if the container maintains its elements in a predictable order. This is typically - /// true for data structures like arrays, slices, and vectors, where elements are accessed - /// sequentially or are sorted based on inherent or specified criteria. - /// - `false` for collections where the arrangement of elements does not follow a predictable - /// sequence from the perspective of an observer, such as sets and maps implemented via hashing. - /// In these structures, the order of elements is determined by their hash and internal state, - /// rather than the order of insertion or sorting. - /// - /// By default, this method returns `true`, assuming that the entity behaves like an array, slice, - /// or vector, where the order of elements is consistent and predictable. 
Implementers should override - /// this behavior for collections where element order is not maintained or is irrelevant. - #[ inline( always ) ] - fn is_ordered( &self ) -> bool - { - true - } - - /// Returns the number of elements contained in the entity. - /// - /// # Returns - /// - /// Returns the count of elements if the entity is a container, otherwise `0`. - /// - /// This method is particularly useful for collections or composite entities. - /// By default, this method returns `0`, assuming the entity contains no elements. - #[ inline( always ) ] - fn len( &self ) -> usize - { - 0 - } - - /// Retrieves the type name. - /// - /// # Returns - /// - /// Returns the type name of the implementing entity as a static string slice. - /// - /// This method leverages Rust's `type_name` function to provide the name at runtime, - /// aiding in debugging and logging purposes. - fn type_name( &self ) -> &'static str; - - /// Retrives the typ id. - fn type_id( &self ) -> core::any::TypeId; - - /// Provides an iterator over the elements contained within the entity, if any. - /// - /// # Returns - /// - /// Returns a boxed iterator over `KeyVal` pairs representing the key-value mappings - /// of the entity's elements. For non-container entities, an empty iterator is returned. - /// - /// This method is crucial for traversing composite entities or collections at runtime, - /// allowing for dynamic inspection and manipulation. - #[ inline( always ) ] - fn elements( &self ) -> Box< dyn Iterator< Item = KeyVal > > - { - Box::new( [].into_iter() ) - } - - /// Returns a descriptor for the type of the instance. - /// - /// # Returns - /// - /// Returns an entity descriptor that implements the `Entity` trait. - #[ inline( always ) ] - fn element( &self, i : usize ) -> KeyVal - { - debug_assert!( i < self.len() ); - self.elements().skip( i ).next().unwrap() - } - - } - - /// - /// Type descriptor - /// - #[ derive( PartialEq, Default, Clone ) ] - pub struct EntityDescriptor< I : Instance > - { - _phantom : core::marker::PhantomData< I >, - } - - impl< I : Instance > EntityDescriptor< I > - { - /// Constructor of the descriptor. - #[ inline( always ) ] - pub fn new() -> Self - { - let _phantom = core::marker::PhantomData::< I >; - Self { _phantom } - } - } - - /// - /// Dynamically sized collection descriptor - /// - #[ derive( PartialEq, Default, Clone ) ] - pub struct CollectionDescriptor< I : Instance > - { - /// Container length. - pub len : usize, - _phantom : core::marker::PhantomData< I >, - } - - impl< I : Instance > CollectionDescriptor< I > - { - /// Constructor of the descriptor of container type. - pub fn new( size : usize ) -> Self - { - let _phantom = core::marker::PhantomData::< I >; - Self - { - _phantom, - len : size, - } - } - } - - /// - /// Dynamically sized key-value collection descriptor - /// - #[ derive( PartialEq, Default, Clone ) ] - pub struct KeyedCollectionDescriptor< I : Instance > - { - /// Container length. - pub len : usize, - /// Container keys. - pub keys : Vec< primitive::Primitive >, - _phantom : core::marker::PhantomData< I >, - } - - impl< I : Instance > KeyedCollectionDescriptor< I > - { - /// Constructor of the descriptor of container type. - pub fn new( size : usize, keys : Vec< primitive::Primitive > ) -> Self - { - let _phantom = core::marker::PhantomData::< I >; - Self - { - _phantom, - len : size, - keys, - } - } - } - - /// Auto-implement descriptor for this type. 
- trait InstanceMarker {} - - impl< T > Entity for EntityDescriptor< T > - where - T : InstanceMarker + 'static, - { - #[ inline( always ) ] - fn type_name( &self ) -> &'static str - { - core::any::type_name::< T >() - } - #[ inline( always ) ] - fn type_id( &self ) -> core::any::TypeId - { - core::any::TypeId::of::< T >() - } - } - - impl< T > std::fmt::Debug for EntityDescriptor< T > - where - T : Instance + 'static, - EntityDescriptor< T > : Entity, - { - fn fmt( &self, f: &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result - { - f - .write_str( &format!( "{}#{:?}", self.type_name(), self.type_id() ) ) - } - } - - impl< T > std::fmt::Debug for CollectionDescriptor< T > - where - T : Instance + 'static, - CollectionDescriptor< T > : Entity, - { - fn fmt( &self, f: &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result - { - f - .write_str( &format!( "{}#{:?}", self.type_name(), self.type_id() ) ) - } - } - - impl< T > std::fmt::Debug for KeyedCollectionDescriptor< T > - where - T : Instance + 'static, - KeyedCollectionDescriptor< T > : Entity, - { - fn fmt( &self, f: &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result - { - f - .write_str( &format!( "{}#{:?}", self.type_name(), self.type_id() ) ) - } - } - - /// Represents a key-value pair where the key is a static string slice - /// and the value is a boxed entity that implements the `AnyEntity` trait. - /// - /// This struct is typically used in the context of reflecting over the properties - /// or members of a container entity, allowing for dynamic access and inspection - /// of its contents. - /// - // #[ derive( PartialEq, Debug ) ] - // #[ derive( Default ) ] - pub struct KeyVal - { - /// The key associated with the value in the key-value pair. - pub key : primitive::Primitive, - // pub key : &'static str, - /// The value associated with the key in the key-value pair. 
- pub val : Box< dyn Entity >, - } - - impl Default for KeyVal - { - fn default() -> Self - { - Self - { - key : primitive::Primitive::default(), - val : Box::new( EntityDescriptor::< i8 >::new() ) as Box::< dyn Entity >, - } - } - } - - impl std::fmt::Debug for KeyVal - { - fn fmt( &self, f: &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result - { - f - .debug_struct( "KeyVal" ) - .field( "key", &self.key ) - .field( "val", &format_args!( "{:?}", &self.val ) ) - .finish() - } - } - - // qqq aaa: added comparison by val - impl PartialEq for KeyVal - { - fn eq( &self, other : &Self ) -> bool - { - let mut equal = self.key == other.key - && self.val.type_id() == other.val.type_id() - && self.val.type_name() == other.val.type_name() - && self.val.len() == other.val.len(); - - if equal - { - for i in 0..self.val.len() - { - equal = equal && ( self.val.element( i ) == other.val.element( i ) ) - } - } - equal - } - } - - impl InstanceMarker for i8 {} - impl InstanceMarker for i16 {} - impl InstanceMarker for i32 {} - impl InstanceMarker for i64 {} - impl InstanceMarker for u8 {} - impl InstanceMarker for u16 {} - impl InstanceMarker for u32 {} - impl InstanceMarker for u64 {} - impl InstanceMarker for f32 {} - impl InstanceMarker for f64 {} - impl InstanceMarker for String {} - impl InstanceMarker for &'static str {} - - impl< T > InstanceMarker for &T - where T : InstanceMarker - {} - - impl IsScalar for i8 {} - impl IsScalar for i16 {} - impl IsScalar for i32 {} - impl IsScalar for i64 {} - impl IsScalar for u8 {} - impl IsScalar for u16 {} - impl IsScalar for u32 {} - impl IsScalar for u64 {} - impl IsScalar for f32 {} - impl IsScalar for f64 {} - impl IsScalar for String {} - impl IsScalar for &'static str {} - - impl< T : Instance + 'static, const N : usize > IsContainer for [ T ; N ] {} - // qqq : aaa : added implementation for slice - impl< T : Instance > IsContainer for &'static [ T ] {} - // qqq : aaa : added implementation for Vec - impl< T : Instance + 'static > IsContainer for Vec< T > {} - // qqq : aaa : added implementation for HashMap - impl< K : IsScalar + Clone + 'static, V : Instance + 'static > IsContainer for std::collections::HashMap< K, V > - where primitive::Primitive : From< K > {} - // qqq : aaa : added implementation for HashSet - impl< V : Instance + 'static > IsContainer for std::collections::HashSet< V > {} - -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::private:: - { - // reflect, - IsContainer, - IsScalar, - Instance, - // InstanceMarker, - Entity, - EntityDescriptor, - CollectionDescriptor, - KeyedCollectionDescriptor, - KeyVal, - }; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::private:: - { - reflect, - }; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. 
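The deleted `axiomatic.rs` above documents the `Instance`/`Entity` pair at length, but its doc example is marked `ignore`. For the record, a minimal sketch of how the removed API was used for a scalar; this compiles only against the pre-removal crate with the `derive_reflect` feature enabled, which this diff disables.

```rust
use derive_tools::reflect::{ reflect, Entity };

fn main()
{
  let x : i32 = 1;
  // `reflect` turns a reference to any `Instance` into an entity descriptor.
  let entity = reflect( &x );
  assert_eq!( entity.type_name(), "i32" );
  // Scalars are not containers and report zero elements by default.
  assert!( !entity.is_container() );
  assert_eq!( entity.len(), 0 );
}
```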
-pub mod prelude -{ -} diff --git a/module/core/derive_tools/src/reflect/entity_array.rs b/module/core/derive_tools/src/reflect/entity_array.rs deleted file mode 100644 index 5c171783e4..0000000000 --- a/module/core/derive_tools/src/reflect/entity_array.rs +++ /dev/null @@ -1,114 +0,0 @@ -//! -//! Implementation of Entity for an array. -//! - -use super::*; - -/// Internal namespace. -pub mod private -{ - use super::*; - - impl< T, const N : usize > Instance for [ T ; N ] - where - EntityDescriptor< [ T ; N ] > : Entity, - { - type Entity = EntityDescriptor::< Self >; - #[ inline( always ) ] - fn Reflect() -> Self::Entity - { - EntityDescriptor::< Self >::new() - } - } - - impl< T, const N : usize > Entity for EntityDescriptor< [ T ; N ] > - where - T : 'static + Instance, - { - - #[ inline( always ) ] - fn is_container( &self ) -> bool - { - true - } - - #[ inline( always ) ] - fn len( &self ) -> usize - { - N - } - - #[ inline( always ) ] - fn type_name( &self ) -> &'static str - { - core::any::type_name::< [ T ; N ] >() - } - - #[ inline( always ) ] - fn type_id( &self ) -> core::any::TypeId - { - core::any::TypeId::of::< [ T ; N ] >() - } - - #[ inline( always ) ] - fn elements( &self ) -> Box< dyn Iterator< Item = KeyVal > > - { - - // qqq : write optimal implementation - // let mut result : [ KeyVal ; N ] = [ KeyVal::default() ; N ]; -// -// for i in 0..N -// { -// result[ i ] = KeyVal { key : "x", val : Box::new( < T as Instance >::Reflect() ) } -// } - - let result : Vec< KeyVal > = ( 0 .. N ) - .map( | k | KeyVal { key : Primitive::usize( k ), val : Box::new( < T as Instance >::Reflect() ) } ) - .collect(); - - Box::new( result.into_iter() ) - } - - } - -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; - // pub use super::private:: - // { - // }; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. -pub mod prelude -{ -} diff --git a/module/core/derive_tools/src/reflect/entity_hashmap.rs b/module/core/derive_tools/src/reflect/entity_hashmap.rs deleted file mode 100644 index 696f644db5..0000000000 --- a/module/core/derive_tools/src/reflect/entity_hashmap.rs +++ /dev/null @@ -1,121 +0,0 @@ -//! -//! Implementation of Entity for a HashMap. -//! - -use super::*; - -/// Internal namespace. 
-pub mod private -{ - use super::*; - // qqq : xxx : implement for HashMap - // aaa : added implementation of Instance trait for HashMap - use std::collections::HashMap; - impl< K, V > Instance for HashMap< K, V > - where - KeyedCollectionDescriptor< HashMap< K, V > > : Entity, - primitive::Primitive : From< K >, - K : Clone, - { - type Entity = KeyedCollectionDescriptor::< HashMap< K, V > >; - fn _reflect( &self ) -> Self::Entity - { - KeyedCollectionDescriptor::< Self >::new - ( - self.len(), - self.keys().into_iter().map( | k | primitive::Primitive::from( k.clone() ) ).collect::< Vec< _ > >(), - ) - } - #[ inline( always ) ] - fn Reflect() -> Self::Entity - { - KeyedCollectionDescriptor::< Self >::new( 0, Vec::new() ) - } - } - - impl< K, V > Entity for KeyedCollectionDescriptor< HashMap< K, V > > - where - K : 'static + Instance + IsScalar + Clone, - primitive::Primitive : From< K >, - V : 'static + Instance, - { - #[ inline( always ) ] - fn is_container( &self ) -> bool - { - true - } - - #[ inline( always ) ] - fn len( &self ) -> usize - { - self.len - } - - #[ inline( always ) ] - fn type_name( &self ) -> &'static str - { - core::any::type_name::< HashMap< K, V > >() - } - - #[ inline( always ) ] - fn type_id( &self ) -> core::any::TypeId - { - core::any::TypeId::of::< HashMap< K, V > >() - } - - #[ inline( always ) ] - fn elements( &self ) -> Box< dyn Iterator< Item = KeyVal > > - { - let mut result : Vec< KeyVal > = ( 0 .. self.len() ) - .map( | k | KeyVal { key : Primitive::usize( k ), val : Box::new( < V as Instance >::Reflect() ) } ) - .collect(); - - for i in 0..self.len() - { - result[ i ] = KeyVal { key : self.keys[ i ].clone(), val : Box::new( < V as Instance >::Reflect() ) } - } - - Box::new( result.into_iter() ) - } - } -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; - // pub use super::private:: - // { - // }; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. -pub mod prelude -{ -} diff --git a/module/core/derive_tools/src/reflect/entity_hashset.rs b/module/core/derive_tools/src/reflect/entity_hashset.rs deleted file mode 100644 index d51fda1030..0000000000 --- a/module/core/derive_tools/src/reflect/entity_hashset.rs +++ /dev/null @@ -1,110 +0,0 @@ -//! -//! Implementation of Entity for a HashSet. -//! - -use super::*; - -/// Internal namespace. 
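The keyed-container case removed just above differs from the positional ones: `KeyedCollectionDescriptor` records the map's actual keys as `Primitive` values, so `elements()` pairs each real key with a descriptor of the value type. A sketch of that behaviour, again meaningful only against the pre-removal API:

```rust
use std::collections::HashMap;
use derive_tools::reflect::{ reflect, Entity };

fn main()
{
  let mut map : HashMap< &'static str, i32 > = HashMap::new();
  map.insert( "a", 1 );
  map.insert( "b", 2 );
  let entity = reflect( &map );
  assert!( entity.is_container() );
  assert_eq!( entity.len(), 2 );
  for kv in entity.elements()
  {
    // Keys come back as `Primitive::str( .. )`; values are `i32` descriptors.
    println!( "{:?} -> {}", kv.key, kv.val.type_name() );
  }
}
```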
-pub mod private -{ - use super::*; - - // qqq : xxx : implement for HashSet - // aaa : added implementation of Instance trait for HashSet - use std::collections::HashSet; - impl< T > Instance for HashSet< T > - where - CollectionDescriptor< HashSet< T > > : Entity, - { - type Entity = CollectionDescriptor::< HashSet< T > >; - fn _reflect( &self ) -> Self::Entity - { - CollectionDescriptor::< Self >::new( self.len() ) - } - #[ inline( always ) ] - fn Reflect() -> Self::Entity - { - CollectionDescriptor::< Self >::new( 0 ) - } - } - - impl< T > Entity for CollectionDescriptor< HashSet< T > > - where - T : 'static + Instance, - { - - #[ inline( always ) ] - fn is_container( &self ) -> bool - { - true - } - - #[ inline( always ) ] - fn len( &self ) -> usize - { - self.len - } - - #[ inline( always ) ] - fn type_name( &self ) -> &'static str - { - core::any::type_name::< HashSet< T > >() - } - - #[ inline( always ) ] - fn type_id( &self ) -> core::any::TypeId - { - core::any::TypeId::of::< HashSet< T > >() - } - - #[ inline( always ) ] - fn elements( &self ) -> Box< dyn Iterator< Item = KeyVal > > - { - let result : Vec< KeyVal > = ( 0..self.len() ) - .map( | k | KeyVal { key : Primitive::usize( k ), val : Box::new( < T as Instance >::Reflect() ) } ) - .collect(); - - Box::new( result.into_iter() ) - } - } -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; - // pub use super::private:: - // { - // }; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. -pub mod prelude -{ -} diff --git a/module/core/derive_tools/src/reflect/entity_slice.rs b/module/core/derive_tools/src/reflect/entity_slice.rs deleted file mode 100644 index 90416afcbc..0000000000 --- a/module/core/derive_tools/src/reflect/entity_slice.rs +++ /dev/null @@ -1,110 +0,0 @@ -//! -//! Implementation of Entity for a slice. -//! - -use super::*; - -/// Internal namespace. -pub mod private -{ - use super::*; - - // qqq : xxx : implement for slice - // aaa : added implementation of Instance trait for slice - impl< T > Instance for &'static [ T ] - where - CollectionDescriptor< &'static [ T ] > : Entity, - { - type Entity = CollectionDescriptor::< &'static [ T ] >; - fn _reflect( &self ) -> Self::Entity - { - CollectionDescriptor::< Self >::new( self.len() ) - } - #[ inline( always ) ] - fn Reflect() -> Self::Entity - { - CollectionDescriptor::< Self >::new( 1 ) - } - } - - impl< T > Entity for CollectionDescriptor< &'static [ T ] > - where - T : 'static + Instance, - { - - #[ inline( always ) ] - fn is_container( &self ) -> bool - { - true - } - - #[ inline( always ) ] - fn len( &self ) -> usize - { - self.len - } - - #[ inline( always ) ] - fn type_name( &self ) -> &'static str - { - core::any::type_name::< &'static [ T ] >() - } - - #[ inline( always ) ] - fn type_id( &self ) -> core::any::TypeId - { - core::any::TypeId::of::< &'static [ T ] >() - } - - #[ inline( always ) ] - fn elements( &self ) -> Box< dyn Iterator< Item = KeyVal > > - { - - let result : Vec< KeyVal > = ( 0 .. 
self.len() ) - .map( | k | KeyVal { key : Primitive::usize( k ), val : Box::new( < T as Instance >::Reflect() ) } ) - .collect(); - - Box::new( result.into_iter() ) - } - } -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; - // pub use super::private:: - // { - // }; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. -pub mod prelude -{ -} \ No newline at end of file diff --git a/module/core/derive_tools/src/reflect/entity_vec.rs b/module/core/derive_tools/src/reflect/entity_vec.rs deleted file mode 100644 index 997e32b18c..0000000000 --- a/module/core/derive_tools/src/reflect/entity_vec.rs +++ /dev/null @@ -1,109 +0,0 @@ -//! -//! Implementation of Entity for a Vec. -//! - -use super::*; - -/// Internal namespace. -pub mod private -{ - use super::*; - - // qqq : xxx : implement for Vec - // aaa : added implementation of Instance trait for Vec - impl< T > Instance for Vec< T > - where - CollectionDescriptor< Vec< T > > : Entity, - { - type Entity = CollectionDescriptor::< Vec< T > >; - fn _reflect( &self ) -> Self::Entity - { - CollectionDescriptor::< Self >::new( self.len() ) - } - #[ inline( always ) ] - fn Reflect() -> Self::Entity - { - CollectionDescriptor::< Self >::new( 0 ) - } - } - - impl< T > Entity for CollectionDescriptor< Vec< T > > - where - T : 'static + Instance, - { - - #[ inline( always ) ] - fn is_container( &self ) -> bool - { - true - } - - #[ inline( always ) ] - fn len( &self ) -> usize - { - self.len - } - - #[ inline( always ) ] - fn type_name( &self ) -> &'static str - { - core::any::type_name::< Vec< T > >() - } - - #[ inline( always ) ] - fn type_id( &self ) -> core::any::TypeId - { - core::any::TypeId::of::< Vec< T > >() - } - - #[ inline( always ) ] - fn elements( &self ) -> Box< dyn Iterator< Item = KeyVal > > - { - let result : Vec< KeyVal > = ( 0 .. self.len() ) - .map( | k | KeyVal { key : Primitive::usize( k ), val : Box::new( < T as Instance >::Reflect() ) } ) - .collect(); - - Box::new( result.into_iter() ) - } - } -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; - // pub use super::private:: - // { - // }; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. -pub mod prelude -{ -} diff --git a/module/core/derive_tools/src/reflect/primitive.rs b/module/core/derive_tools/src/reflect/primitive.rs deleted file mode 100644 index a059dd5f99..0000000000 --- a/module/core/derive_tools/src/reflect/primitive.rs +++ /dev/null @@ -1,264 +0,0 @@ -//! -//! Define primitive and data types. -//! 
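For the positional containers removed above ( arrays, slices, `Vec` ), `elements()` pairs each index with a descriptor of the element type rather than with the element's value. A sketch against the deleted `entity_vec.rs`, valid only for the pre-removal API:

```rust
use derive_tools::reflect::{ reflect, Entity };

fn main()
{
  let vec = vec![ 1i32, 2, 3 ];
  let entity = reflect( &vec );
  assert!( entity.is_container() );
  assert_eq!( entity.len(), 3 );
  for kv in entity.elements()
  {
    // Keys are positional `Primitive::usize` indices; values describe `i32`.
    println!( "{:?} -> {}", kv.key, kv.val.type_name() );
  }
}
```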
- -/// Internal namespace. -pub( crate ) mod private -{ - - /// Represents a general-purpose data container that can hold various primitive types - /// and strings. This enum is designed to encapsulate common data types in a unified - /// format, simplifying the handling of different types of data in generic contexts. - /// - /// # Variants - /// - /// - `i8`, `i16`, `i32`, `i64`, `isize`: Signed integer types. - /// - `u8`, `u16`, `u32`, `u64`, `usize`: Unsigned integer types. - /// - `f32`, `f64`: Floating-point types. - /// - `String`: A heap-allocated string (`String`). - /// - `str`: A borrowed string slice (`&'static str`), typically used for string literals. - /// - `binary`: A borrowed slice of bytes (`&'static [u8]`), useful for binary data. - /// - /// # Example - /// - /// Creating a `Primitive` instance with an integer: - /// - /// ``` - /// # use derive_tools::reflect::Primitive; - /// let num = Primitive::i32( 42 ); - /// ``` - /// - /// Creating a `Primitive` instance with a string: - /// - /// ``` - /// # use derive_tools::reflect::Primitive; - /// let greeting = Primitive::String( "Hello, world!".to_string() ); - /// ``` - /// - /// Creating a `Primitive` instance with a binary slice: - /// - /// ``` - /// # use derive_tools::reflect::Primitive; - /// let bytes = Primitive::binary( &[ 0xde, 0xad, 0xbe, 0xef ] ); - /// ``` - /// - #[ allow( non_camel_case_types ) ] - #[ derive( Debug, PartialEq, Default, Clone ) ] - pub enum Primitive - { - /// None - #[ default ] - None, - /// Represents a signed 8-bit integer. - i8( i8 ), - /// Represents a signed 16-bit integer. - i16( i16 ), - /// Represents a signed 32-bit integer. - i32( i32 ), - /// Represents a signed 64-bit integer. - i64( i64 ), - /// Represents a machine-sized signed integer. - isize( isize ), - /// Represents an unsigned 8-bit integer. - u8( u8 ), - /// Represents an unsigned 16-bit integer. - u16( u16 ), - /// Represents an unsigned 32-bit integer. - u32( u32 ), - /// Represents an unsigned 64-bit integer. - u64( u64 ), - /// Represents a machine-sized unsigned integer. - usize( usize ), - /// Represents a 32-bit floating-point number. - f32( f32 ), - /// Represents a 64-bit floating-point number. - f64( f64 ), - /// Represents a dynamically allocated string. - String( String ), - /// Represents a statically allocated string slice. - str( &'static str ), - /// Represents a statically allocated slice of bytes. 
- binary( &'static [ u8 ] ), - } - - impl From< i8 > for Primitive - { - fn from( value: i8 ) -> Self - { - Self::i8( value ) - } - } - - impl From< i16 > for Primitive - { - fn from( value: i16 ) -> Self - { - Self::i16( value ) - } - } - - impl From< i32 > for Primitive - { - fn from( value: i32 ) -> Self - { - Self::i32( value ) - } - } - - impl From< i64 > for Primitive - { - fn from( value: i64 ) -> Self - { - Self::i64( value ) - } - } - - impl From< isize > for Primitive - { - fn from( value: isize ) -> Self - { - Self::isize( value ) - } - } - - impl From< u8 > for Primitive - { - fn from( value: u8 ) -> Self - { - Self::u8( value ) - } - } - - impl From< u16 > for Primitive - { - fn from( value: u16 ) -> Self - { - Self::u16( value ) - } - } - - impl From< u32 > for Primitive - { - fn from( value: u32 ) -> Self - { - Self::u32( value ) - } - } - - impl From< u64 > for Primitive - { - fn from( value: u64 ) -> Self - { - Self::u64( value ) - } - } - - impl From< usize > for Primitive - { - fn from( value: usize ) -> Self - { - Self::usize( value ) - } - } - - impl From< f32 > for Primitive - { - fn from( value: f32 ) -> Self - { - Self::f32( value ) - } - } - - impl From< f64 > for Primitive - { - fn from( value: f64 ) -> Self - { - Self::f64( value ) - } - } - - impl From< &'static str > for Primitive - { - fn from( value: &'static str ) -> Self - { - Self::str( value ) - } - } - - impl From< String > for Primitive - { - fn from( value: String ) -> Self - { - Self::String( value ) - } - } - - impl From< &'static [ u8 ] > for Primitive - { - fn from( value: &'static [ u8 ] ) -> Self - { - Self::binary( value ) - } - } - - #[ allow( non_camel_case_types ) ] - #[ derive( Debug, PartialEq ) ] - pub enum Data< const N : usize = 0 > - { - /// None - Primitive( Primitive ), - // /// Array - // array( &'a [ Data ; N ] ), - } - - impl< const N : usize > Default for Data< N > - { - fn default() -> Self - { - Data::Primitive( Primitive::None ) - } - } - -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Orphan namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; - pub use super::private:: - { - Primitive, - // Data, - }; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use exposed::*; - -/// Prelude to use essentials: `use my_module::prelude::*`. 
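The long list of `From` impls above gives `Primitive` a uniform entry point for every supported scalar. A few conversions as they behaved before this removal:

```rust
use derive_tools::reflect::Primitive;

fn main()
{
  // Each scalar type maps onto the variant of the same name.
  assert_eq!( Primitive::from( 42i32 ), Primitive::i32( 42 ) );
  assert_eq!( Primitive::from( 1.5f64 ), Primitive::f64( 1.5 ) );
  assert_eq!( Primitive::from( "abc" ), Primitive::str( "abc" ) );
  // The default variant is `None`.
  assert_eq!( Primitive::default(), Primitive::None );
}
```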
-pub mod prelude -{ -} diff --git a/module/core/derive_tools_meta/src/implementation/mod.rs b/module/core/derive_tools_meta/src/derive.rs similarity index 80% rename from module/core/derive_tools_meta/src/implementation/mod.rs rename to module/core/derive_tools_meta/src/derive.rs index ec9ed0b65a..a72164ce1f 100644 --- a/module/core/derive_tools_meta/src/implementation/mod.rs +++ b/module/core/derive_tools_meta/src/derive.rs @@ -5,12 +5,9 @@ #[ allow( unused_imports ) ] use macro_tools::prelude::*; -pub use macro_tools::{ Result, Many }; +// pub use macro_tools::{ Result, Many }; pub use iter_tools as iter; -pub mod input; -#[ allow( unused_imports ) ] -use input::*; #[ cfg( feature = "derive_as_mut" ) ] pub mod as_mut; #[ cfg( feature = "derive_as_ref" ) ] @@ -20,7 +17,7 @@ pub mod deref; #[ cfg( feature = "derive_deref_mut" ) ] pub mod deref_mut; #[ cfg( feature = "derive_from" ) ] -pub mod from_inner; +pub mod from; #[ cfg( feature = "derive_inner_from" ) ] pub mod inner_from; #[ cfg( feature = "derive_variadic_from" ) ] diff --git a/module/core/derive_tools_meta/src/implementation/as_mut.rs b/module/core/derive_tools_meta/src/derive/as_mut.rs similarity index 73% rename from module/core/derive_tools_meta/src/implementation/as_mut.rs rename to module/core/derive_tools_meta/src/derive/as_mut.rs index 8628f00d4a..9f1a61553d 100644 --- a/module/core/derive_tools_meta/src/implementation/as_mut.rs +++ b/module/core/derive_tools_meta/src/derive/as_mut.rs @@ -1,9 +1,10 @@ use super::*; +use macro_tools::{ type_struct, Result }; pub fn as_mut( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; let field_type = parsed.first_field_type()?; let item_name = parsed.item_name; diff --git a/module/core/derive_tools_meta/src/implementation/as_ref.rs b/module/core/derive_tools_meta/src/derive/as_ref.rs similarity index 72% rename from module/core/derive_tools_meta/src/implementation/as_ref.rs rename to module/core/derive_tools_meta/src/derive/as_ref.rs index 3d9a8e12bc..4edc30dad9 100644 --- a/module/core/derive_tools_meta/src/implementation/as_ref.rs +++ b/module/core/derive_tools_meta/src/derive/as_ref.rs @@ -1,11 +1,12 @@ use super::*; +use macro_tools::{ type_struct, Result }; // pub fn as_ref( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; let field_type = parsed.first_field_type()?; let item_name = parsed.item_name; diff --git a/module/core/derive_tools_meta/src/implementation/deref.rs b/module/core/derive_tools_meta/src/derive/deref.rs similarity index 75% rename from module/core/derive_tools_meta/src/implementation/deref.rs rename to module/core/derive_tools_meta/src/derive/deref.rs index 57b025f3b6..b5ff8873bc 100644 --- a/module/core/derive_tools_meta/src/implementation/deref.rs +++ b/module/core/derive_tools_meta/src/derive/deref.rs @@ -1,9 +1,10 @@ use super::*; +use macro_tools::{ type_struct, Result }; pub fn deref( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; let field_type = parsed.first_field_type()?; let item_name = parsed.item_name; diff --git a/module/core/derive_tools_meta/src/implementation/deref_mut.rs 
b/module/core/derive_tools_meta/src/derive/deref_mut.rs similarity index 72% rename from module/core/derive_tools_meta/src/implementation/deref_mut.rs rename to module/core/derive_tools_meta/src/derive/deref_mut.rs index d2977a94c5..14b506c2b4 100644 --- a/module/core/derive_tools_meta/src/implementation/deref_mut.rs +++ b/module/core/derive_tools_meta/src/derive/deref_mut.rs @@ -1,12 +1,12 @@ use super::*; +use macro_tools::{ type_struct, Result }; // pub fn deref_mut( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; - // let field_type = parsed.first_field_type()?; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; let item_name = parsed.item_name; let result = qt! diff --git a/module/core/derive_tools_meta/src/implementation/from_inner.rs b/module/core/derive_tools_meta/src/derive/from.rs similarity index 60% rename from module/core/derive_tools_meta/src/implementation/from_inner.rs rename to module/core/derive_tools_meta/src/derive/from.rs index dbaca5156a..4de3720481 100644 --- a/module/core/derive_tools_meta/src/implementation/from_inner.rs +++ b/module/core/derive_tools_meta/src/derive/from.rs @@ -1,17 +1,15 @@ - -use macro_tools::proc_macro2::TokenStream; - use super::*; +use macro_tools::{ type_struct, Result }; // -pub fn from_inner( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > +pub fn from( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; - let field_types = parsed.field_types; - let field_names = parsed.field_names; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; + let field_types = parsed.field_types(); + let field_names = parsed.field_names(); let item_name = parsed.item_name; - let result = + let result = match ( field_types.len(), field_names ) { ( 0, _ ) => { generate_unit(item_name) }, @@ -24,17 +22,17 @@ pub fn from_inner( input : proc_macro::TokenStream ) -> Result< proc_macro2::Tok Ok( result ) } -fn generate_from_single_field_named( field_type: &syn::Type, field_name: &syn::Ident, item_name: syn::Ident ) -> TokenStream +fn generate_from_single_field_named( field_type: &syn::Type, field_name: &syn::Ident, item_name: syn::Ident ) -> proc_macro2::TokenStream { qt! { #[ automatically_derived ] // impl From < i32 > for MyStruct - impl From< #field_type > for #item_name + impl From< #field_type > for #item_name { #[ inline( always ) ] // fn from( src: i32 ) -> Self - fn from( src: #field_type ) -> Self + fn from( src: #field_type ) -> Self { // Self { a: src } Self { #field_name: src } @@ -43,17 +41,17 @@ fn generate_from_single_field_named( field_type: &syn::Type, field_name: &syn::I } } -fn generate_from_single_field( field_type: &syn::Type, item_name: syn::Ident ) -> TokenStream +fn generate_from_single_field( field_type: &syn::Type, item_name: syn::Ident ) -> proc_macro2::TokenStream { qt! 
{ #[automatically_derived] // impl From< bool > for IsTransparent - impl From< #field_type > for #item_name + impl From< #field_type > for #item_name { #[ inline( always ) ] // fn from( src: bool ) -> Self - fn from( src: #field_type ) -> Self + fn from( src: #field_type ) -> Self { // Self(src) Self(src) @@ -62,26 +60,26 @@ fn generate_from_single_field( field_type: &syn::Type, item_name: syn::Ident ) - } } -fn generate_from_multiple_fields_named( field_types: &Vec< syn::Type >, field_names: &Vec< syn::Ident >, item_name: syn::Ident) -> TokenStream +fn generate_from_multiple_fields_named( field_types: &Vec< syn::Type >, field_names: &Vec< syn::Ident >, item_name: syn::Ident) -> proc_macro2::TokenStream { - let params: Vec< TokenStream > = field_names + let params: Vec< proc_macro2::TokenStream > = field_names .iter() .enumerate() - .map(| ( index, field_name ) | + .map(| ( index, field_name ) | { - let index = index.to_string().parse::< TokenStream >().unwrap(); + let index = index.to_string().parse::< proc_macro2::TokenStream >().unwrap(); qt! { #field_name : src.#index } }) .collect(); - qt! + qt! { // impl From< (i32, bool) > for StructNamedFields - impl From< (#(#field_types), *) > for #item_name + impl From< (#(#field_types), *) > for #item_name { #[ inline( always ) ] // fn from( src: (i32, bool) ) -> Self - fn from( src: (#(#field_types), *) ) -> Self + fn from( src: (#(#field_types), *) ) -> Self { // StructNamedFields{ a: src.0, b: src.1 } #item_name { #(#params), * } @@ -90,24 +88,24 @@ fn generate_from_multiple_fields_named( field_types: &Vec< syn::Type >, field_na } } -fn generate_from_multiple_fields( field_types: &Vec< syn::Type >, item_name: syn::Ident ) -> TokenStream +fn generate_from_multiple_fields( field_types: &Vec< syn::Type >, item_name: syn::Ident ) -> proc_macro2::TokenStream { - let params: Vec = ( 0..field_types.len() ) + let params: Vec< proc_macro2::TokenStream > = ( 0..field_types.len() ) .map( | index | { - let index = index.to_string().parse::< TokenStream >().unwrap(); + let index = index.to_string().parse::< proc_macro2::TokenStream >().unwrap(); qt!( src.#index ) } ) .collect(); - qt! + qt! { // impl From< (i32, bool) > for StructWithManyFields - impl From< (#(#field_types), *) > for #item_name + impl From< (#(#field_types), *) > for #item_name { #[ inline( always ) ] // fn from( src: (i32, bool) ) -> Self - fn from( src: (#(#field_types), *) ) -> Self + fn from( src: (#(#field_types), *) ) -> Self { // StructWithManyFields( src.0, src.1 ) #item_name( #(#params), *) @@ -116,7 +114,7 @@ fn generate_from_multiple_fields( field_types: &Vec< syn::Type >, item_name: syn } } -fn generate_unit( item_name: syn::Ident ) -> TokenStream +fn generate_unit( item_name: syn::Ident ) -> proc_macro2::TokenStream { qt! 
{ diff --git a/module/core/derive_tools_meta/src/implementation/inner_from.rs b/module/core/derive_tools_meta/src/derive/inner_from.rs similarity index 62% rename from module/core/derive_tools_meta/src/implementation/inner_from.rs rename to module/core/derive_tools_meta/src/derive/inner_from.rs index 749615bb02..a82d4880c7 100644 --- a/module/core/derive_tools_meta/src/implementation/inner_from.rs +++ b/module/core/derive_tools_meta/src/derive/inner_from.rs @@ -1,54 +1,53 @@ -use macro_tools::proc_macro2::TokenStream; - use super::*; +use macro_tools::{ type_struct, Result }; // pub fn inner_from( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; - let field_types = parsed.field_types; - let field_names = parsed.field_names; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; + let field_types = parsed.field_types(); + let field_names = parsed.field_names(); let item_name = parsed.item_name; let result = match ( field_types.len(), field_names ) { - ( 0, _ ) => generate_unit( item_name ), + ( 0, _ ) => unit( item_name ), ( 1, Some( field_names ) ) => { let field_name = field_names.get( 0 ).unwrap(); let field_type = field_types.get( 0 ).unwrap(); - generate_from_impl_named( item_name, field_type, field_name ) + from_impl_named( item_name, field_type, field_name ) } ( 1, None ) => { let field_type = field_types.get( 0 ).unwrap(); - generate_from_impl( item_name, field_type ) + from_impl( item_name, field_type ) } ( _, Some( field_names ) ) => { - let params: Vec< TokenStream > = field_names.iter() + let params : Vec< proc_macro2::TokenStream > = field_names.iter() .map( | field_name | qt! { src.#field_name } ) .collect(); - generate_from_impl_multiple_fields( item_name, &field_types, ¶ms ) + from_impl_multiple_fields( item_name, &field_types, ¶ms ) } ( _, None ) => { - let params: Vec< TokenStream > = ( 0..field_types.len() ) + let params : Vec< proc_macro2::TokenStream > = ( 0..field_types.len() ) .map( | index | { - let index: TokenStream = index.to_string().parse().unwrap(); + let index : proc_macro2::TokenStream = index.to_string().parse().unwrap(); qt! { src.#index } }) .collect(); - generate_from_impl_multiple_fields( item_name, &field_types, ¶ms ) + from_impl_multiple_fields( item_name, &field_types, ¶ms ) } }; Ok( result ) } -fn generate_from_impl_named( item_name: syn::Ident, field_type: &syn::Type, field_name: &syn::Ident ) -> TokenStream +fn from_impl_named( item_name: syn::Ident, field_type: &syn::Type, field_name: &syn::Ident ) -> proc_macro2::TokenStream { qt! { @@ -68,7 +67,7 @@ fn generate_from_impl_named( item_name: syn::Ident, field_type: &syn::Type, fiel } } -fn generate_from_impl( item_name: syn::Ident, field_type: &syn::Type ) -> TokenStream +fn from_impl( item_name: syn::Ident, field_type: &syn::Type ) -> proc_macro2::TokenStream { qt! { @@ -87,7 +86,12 @@ fn generate_from_impl( item_name: syn::Ident, field_type: &syn::Type ) -> TokenS } } -fn generate_from_impl_multiple_fields ( item_name: syn::Ident, field_types: &Vec< syn::Type >, params: &Vec< TokenStream > ) -> TokenStream +fn from_impl_multiple_fields +( + item_name : syn::Ident, + field_types : &Vec< syn::Type >, + params : &Vec< proc_macro2::TokenStream >, +) -> proc_macro2::TokenStream { qt! 
{ @@ -107,7 +111,7 @@ fn generate_from_impl_multiple_fields ( item_name: syn::Ident, field_types: &Vec } } -fn generate_unit( item_name: syn::Ident ) -> TokenStream +fn unit( item_name : syn::Ident ) -> proc_macro2::TokenStream { qt! { diff --git a/module/core/derive_tools_meta/src/implementation/variadic_from.rs b/module/core/derive_tools_meta/src/derive/variadic_from.rs similarity index 93% rename from module/core/derive_tools_meta/src/implementation/variadic_from.rs rename to module/core/derive_tools_meta/src/derive/variadic_from.rs index 207648eb44..e268a5dc11 100644 --- a/module/core/derive_tools_meta/src/implementation/variadic_from.rs +++ b/module/core/derive_tools_meta/src/derive/variadic_from.rs @@ -1,5 +1,6 @@ use super::*; +use macro_tools::{ type_struct, Result }; use iter::{ IterExt, Itertools }; // @@ -7,7 +8,7 @@ use iter::{ IterExt, Itertools }; pub fn variadic_from( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; let item_name = parsed.item_name; let result = match &parsed.fields diff --git a/module/core/derive_tools_meta/src/implementation/input.rs b/module/core/derive_tools_meta/src/implementation/input.rs deleted file mode 100644 index ee01e7466b..0000000000 --- a/module/core/derive_tools_meta/src/implementation/input.rs +++ /dev/null @@ -1,130 +0,0 @@ - -use super::*; - -// - -pub struct InputParsed -{ - pub item : syn::ItemStruct, - pub item_name : syn::Ident, - pub fields : syn::Fields, - pub fields_many : Many< syn::Field >, - pub field_types: Vec< syn::Type >, - pub field_names: Option< Vec< syn::Ident > >, - // pub field_type : syn::Type, -} - -impl InputParsed -{ - #[ allow( dead_code ) ] - pub fn first_field_type( &self ) -> Result< syn::Type > - { - let maybe_field = match self.fields - { - syn::Fields::Named( ref fields ) => fields.named.first(), - syn::Fields::Unnamed( ref fields ) => fields.unnamed.first(), - _ => return Err( syn_err!( self.fields.span(), "Expects fields" ) ), - }; - - // let maybe_field = self.fields.0.first(); - // let maybe_field = self.fields; - - if let Some( field ) = maybe_field - { - return Ok( field.ty.clone() ) - } - - return Err( syn_err!( self.item.span(), "Expects type for fields" ) ); - } - - #[ allow( dead_code ) ] - pub fn first_field_name( &self ) -> Result< Option< syn::Ident > > - { - let maybe_field = match self.fields - { - syn::Fields::Named( ref fields ) => fields.named.first(), - syn::Fields::Unnamed( ref fields ) => fields.unnamed.first(), - _ => return Err( syn_err!( self.fields.span(), "Expects fields" ) ), - }; - - if let Some( field ) = maybe_field - { - return Ok( field.ident.clone() ) - } - - return Err( syn_err!( self.item.span(), "Expects type for fields" ) ); - } -} - -// - -impl syn::parse::Parse for InputParsed -{ - fn parse( input : ParseStream< '_ > ) -> Result< Self > - { - let item : syn::ItemStruct = input.parse()?; - - // # example of input - // - // pub struct IsTransparent( bool ); - // - - let item_name = item.ident.clone(); - let fields = item.fields.clone(); - let fields_many : Vec< syn::Field > = match item.fields - { - syn::Fields::Unnamed( ref fields ) => { fields.unnamed.iter().cloned().collect() }, - syn::Fields::Named( ref fields ) => { fields.named.iter().cloned().collect() }, - _ => return Ok( Self { item, item_name, fields, fields_many: Many(vec![]), field_types: vec![], field_names: None } ), - }; - - // if fields.len() != 1 - // { - // 
return Err( syn_err!( fields.span(), "Expects exactly one field, not implemented for {}.", fields.len() ) ); - // } - // let field = fields.first().cloned().unwrap(); - // let field_type = field.ty.clone(); - let fields_many = fields_many.into(); - let field_types = field_types( &fields_many )?; - let field_names = field_names( &fields_many )?; - Ok( Self { item, item_name, fields, fields_many, field_types, field_names } ) - } -} - -// - -impl quote::ToTokens for InputParsed -{ - fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream ) - { - self.item.to_tokens( tokens ); - } -} - - -fn field_types ( fields: &Many< syn::Field > ) -> Result< Vec< syn::Type> > -{ - let mut field_types: Vec< syn::Type > = vec![]; - for elem in fields - { - field_types.push( elem.ty.clone() ); - } - Ok( field_types ) -} - -fn field_names( fields: &Many< syn::Field > ) -> Result< Option< Vec< syn::Ident > > > -{ - let mut field_names: Vec< syn::Ident > = vec![]; - for elem in fields - { - if let Some( ident ) = &elem.ident - { - field_names.push( ident.clone() ); - } - else - { - return Ok( None ); - } - } - Ok( Some( field_names ) ) -} \ No newline at end of file diff --git a/module/core/derive_tools_meta/src/lib.rs b/module/core/derive_tools_meta/src/lib.rs index 13ee0cf8de..97cb37042f 100644 --- a/module/core/derive_tools_meta/src/lib.rs +++ b/module/core/derive_tools_meta/src/lib.rs @@ -2,18 +2,8 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/clone_dyn_meta/latest/clone_dyn_meta/" ) ] -// #![ allow( non_snake_case ) ] -// #![ allow( non_upper_case_globals ) ] - -//! -//! Derive to clone dyn structures. -//! - #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] -// #[ cfg( feature = "enabled" ) ] -// use macro_tools::prelude::*; - #[ cfg ( any @@ -28,7 +18,7 @@ ) )] #[ cfg( feature = "enabled" ) ] -mod implementation; +mod derive; #[ cfg ( any @@ -42,26 +32,23 @@ mod implementation; feature = "derive_variadic_from", ) )] -#[ cfg( feature = "enabled" ) ] -use implementation::*; +// #[ cfg( feature = "enabled" ) ] +// use derive::*; + /// -/// Derive macro to implement From converting inner type into outer when-ever it's possible to do automatically. +/// Provides an automatic `From` implementation for struct wrapping a single value. /// -/// ### Sample :: struct instead of macro. +/// This macro simplifies the conversion of an inner type to an outer struct type +/// when the outer type is a simple wrapper around the inner type. /// -/// Write this -/// -/// ```rust -/// # use derive_tools_meta::*; -/// #[ derive( From ) ] -/// pub struct IsTransparent( bool ); -/// ``` +/// ## Example Usage /// -/// Instead of this +/// Instead of manually implementing `From< bool >` for `IsTransparent`: /// /// ```rust /// pub struct IsTransparent( bool ); +/// /// impl From< bool > for IsTransparent /// { /// #[ inline( always ) ] @@ -71,13 +58,24 @@ use implementation::*; /// } /// } /// ``` +/// +/// Use `#[ derive( From ) ]` to automatically generate the implementation: +/// +/// ```rust +/// # use derive_tools_meta::*; +/// #[ derive( From ) ] +/// pub struct IsTransparent( bool ); +/// ``` +/// +/// The macro facilitates the conversion without additional boilerplate code. 
+/// #[ cfg( feature = "enabled" ) ] #[ cfg( feature = "derive_from" ) ] #[ proc_macro_derive( From ) ] pub fn from( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = from_inner::from_inner( input ); + let result = derive::from::from( input ); match result { Ok( stream ) => stream.into(), @@ -86,22 +84,18 @@ pub fn from( input : proc_macro::TokenStream ) -> proc_macro::TokenStream } /// -/// Derive macro to implement From converting inner type into outer when-ever it's possible to do automatically. +/// Alias for derive `From`. Provides an automatic `From` implementation for struct wrapping a single value. /// -/// ### Sample :: struct instead of macro. +/// This macro simplifies the conversion of an inner type to an outer struct type +/// when the outer type is a simple wrapper around the inner type. /// -/// Write this +/// ## Example Usage /// -/// ```rust -/// # use derive_tools_meta::*; -/// #[ derive( FromInner ) ] -/// pub struct IsTransparent( bool ); -/// ``` -/// -/// Instead of this +/// Instead of manually implementing `From< bool >` for `IsTransparent`: /// /// ```rust /// pub struct IsTransparent( bool ); +/// /// impl From< bool > for IsTransparent /// { /// #[ inline( always ) ] @@ -111,13 +105,24 @@ pub fn from( input : proc_macro::TokenStream ) -> proc_macro::TokenStream /// } /// } /// ``` +/// +/// Use `#[ derive( FromInner ) ]` to automatically generate the implementation: +/// +/// ```rust +/// # use derive_tools_meta::*; +/// #[ derive( FromInner ) ] +/// pub struct IsTransparent( bool ); +/// ``` +/// +/// The macro facilitates the conversion without additional boilerplate code. +/// #[ cfg( feature = "enabled" ) ] #[ cfg( feature = "derive_from" ) ] #[ proc_macro_derive( FromInner ) ] pub fn from_inner( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = from_inner::from_inner( input ); + let result = derive::from::from( input ); match result { Ok( stream ) => stream.into(), @@ -157,7 +162,7 @@ pub fn from_inner( input : proc_macro::TokenStream ) -> proc_macro::TokenStream #[ proc_macro_derive( InnerFrom ) ] pub fn inner_from( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = inner_from::inner_from( input ); + let result = derive::inner_from::inner_from( input ); match result { Ok( stream ) => stream.into(), @@ -198,7 +203,7 @@ pub fn inner_from( input : proc_macro::TokenStream ) -> proc_macro::TokenStream #[ proc_macro_derive( Deref ) ] pub fn deref( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = deref::deref( input ); + let result = derive::deref::deref( input ); match result { Ok( stream ) => stream.into(), @@ -248,7 +253,7 @@ pub fn deref( input : proc_macro::TokenStream ) -> proc_macro::TokenStream #[ proc_macro_derive( DerefMut ) ] pub fn deref_mut( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = deref_mut::deref_mut( input ); + let result = derive::deref_mut::deref_mut( input ); match result { Ok( stream ) => stream.into(), @@ -287,7 +292,7 @@ pub fn deref_mut( input : proc_macro::TokenStream ) -> proc_macro::TokenStream #[ proc_macro_derive( AsRef ) ] pub fn as_ref( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = as_ref::as_ref( input ); + let result = derive::as_ref::as_ref( input ); match result { Ok( stream ) => stream.into(), @@ -327,7 +332,7 @@ pub fn as_ref( input : proc_macro::TokenStream ) -> proc_macro::TokenStream #[ proc_macro_derive( AsMut ) ] pub fn as_mut( input : 
proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = as_mut::as_mut( input ); + let result = derive::as_mut::as_mut( input ); match result { Ok( stream ) => stream.into(), @@ -369,35 +374,14 @@ pub fn as_mut( input : proc_macro::TokenStream ) -> proc_macro::TokenStream /// /// ``` -// qqq : xxx : why no run? +// qqq : xxx : why no run/ignore? fix #[ cfg( feature = "enabled" ) ] #[ cfg( feature = "derive_variadic_from" ) ] #[ proc_macro_derive( VariadicFrom ) ] pub fn derive_variadic_from( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = variadic_from::variadic_from( input ); - match result - { - Ok( stream ) => stream.into(), - Err( err ) => err.to_compile_error().into(), - } -} - -/// -/// Reflect structure of any kind. -/// -/// ### Sample :: trivial. -/// -/// qqq : write, please -/// - -#[ cfg( feature = "enabled" ) ] -#[ cfg( feature = "derive_reflect" ) ] -#[ proc_macro_derive( Reflect ) ] -pub fn derive_reflect( input : proc_macro::TokenStream ) -> proc_macro::TokenStream -{ - let result = reflect::reflect( input ); + let result = derive::variadic_from::variadic_from( input ); match result { Ok( stream ) => stream.into(), diff --git a/module/core/diagnostics_tools/src/diagnostics/cta.rs b/module/core/diagnostics_tools/src/diag/cta.rs similarity index 100% rename from module/core/diagnostics_tools/src/diagnostics/cta.rs rename to module/core/diagnostics_tools/src/diag/cta.rs diff --git a/module/core/diagnostics_tools/src/diagnostics/layout.rs b/module/core/diagnostics_tools/src/diag/layout.rs similarity index 100% rename from module/core/diagnostics_tools/src/diagnostics/layout.rs rename to module/core/diagnostics_tools/src/diag/layout.rs diff --git a/module/core/diagnostics_tools/src/diagnostics/mod.rs b/module/core/diagnostics_tools/src/diag/mod.rs similarity index 100% rename from module/core/diagnostics_tools/src/diagnostics/mod.rs rename to module/core/diagnostics_tools/src/diag/mod.rs diff --git a/module/core/diagnostics_tools/src/diagnostics/rta.rs b/module/core/diagnostics_tools/src/diag/rta.rs similarity index 100% rename from module/core/diagnostics_tools/src/diagnostics/rta.rs rename to module/core/diagnostics_tools/src/diag/rta.rs diff --git a/module/core/diagnostics_tools/src/lib.rs b/module/core/diagnostics_tools/src/lib.rs index 1963c499d7..55e416c0bc 100644 --- a/module/core/diagnostics_tools/src/lib.rs +++ b/module/core/diagnostics_tools/src/lib.rs @@ -2,18 +2,11 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/diagnostics_tools/latest/diagnostics_tools/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -//! -//! Diagnostics tools. -//! - #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] + #[ cfg( feature = "enabled" ) ] /// Compile-time asserting. -pub mod diagnostics; +pub mod diag; /// Dependencies. #[ cfg( feature = "enabled" ) ] @@ -37,7 +30,7 @@ pub mod protected pub use super::orphan::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::diagnostics::orphan::*; + pub use super::diag::orphan::*; } /// Orphan namespace of the module. 
@@ -58,7 +51,7 @@ pub mod exposed pub use super::prelude::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::diagnostics::exposed::*; + pub use super::diag::exposed::*; } /// Prelude to use essentials: `use my_module::prelude::*`. @@ -67,5 +60,5 @@ pub mod prelude { #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::diagnostics::prelude::*; + pub use super::diag::prelude::*; } diff --git a/module/core/former/Cargo.toml b/module/core/former/Cargo.toml index d76c4b6fc7..af70e3fb55 100644 --- a/module/core/former/Cargo.toml +++ b/module/core/former/Cargo.toml @@ -23,19 +23,25 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false - -exclude = [ "/tests", "/examples", "-*" ] +exclude = [ "/tests", "-*" ] +# exclude = [ "/tests", "/examples", "-*" ] +# xxx : check and replicate for all modules [features] -default = [ "enabled" ] -full = [ "enabled" ] + no_std = [] use_alloc = [] -enabled = [] + +default = [ "enabled", "derive_former", "derive_component_from", "derive_set_component" ] +full = [ "enabled", "derive_former", "derive_component_from", "derive_set_component" ] +enabled = [ "former_meta/enabled" ] + +derive_former = [ "former_meta/derive_former" ] +derive_component_from = [ "former_meta/derive_component_from" ] +derive_set_component = [ "former_meta/derive_set_component" ] [dependencies] -former_meta = { workspace = true, features = [ "default" ] } -# former_runtime = { workspace = true, features = [ "default" ] } +former_meta = { workspace = true } [dev-dependencies] -test_tools = { workspace = true, features = [ "default" ] } +test_tools = { workspace = true, features = [ "full" ] } diff --git a/module/core/former/Readme.md b/module/core/former/Readme.md index 6ed8348a8b..2cd42373b1 100644 --- a/module/core/former/Readme.md +++ b/module/core/former/Readme.md @@ -23,6 +23,9 @@ This approach abstracts away the need for manually implementing a builder for ea The provided code snippet illustrates a basic use-case of the Former crate in Rust, which is used to apply the builder pattern for structured and flexible object creation. Below is a detailed explanation of each part of the markdown chapter, aimed at clarifying how the Former trait simplifies struct instantiation. 
```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# { + use former::Former; #[ derive( Debug, PartialEq, Former ) ] @@ -47,6 +50,220 @@ dbg!( &profile ); // bio_optional: Some("Software Developer"), // } +# } +``` + +Code above is expanded into + +```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# #[ allow( dead_code ) ] +# { + + #[ derive( Debug, PartialEq ) ] + pub struct UserProfile + { + age : i32, + username : String, + bio_optional : Option< String >, // Fields could be optional + } + + impl UserProfile + { + #[ inline( always ) ] + pub fn former() -> UserProfileFormer< UserProfile, former::ReturnContainer > + { + UserProfileFormer::< UserProfile, former::ReturnContainer >::new() + } + } + + #[ derive( Debug, Default ) ] + pub struct UserProfileFormerContainer + { + age : Option< i32 >, + username : Option< String >, + bio_optional : Option< String >, + } + + pub struct UserProfileFormer + < + FormerContext = UserProfile, + FormerEnd = former::ReturnContainer, + > + where + FormerEnd : former::ToSuperFormer< UserProfile, FormerContext >, + { + container : UserProfileFormerContainer, + context : Option< FormerContext >, + on_end : Option< FormerEnd >, + } + + impl< FormerContext, FormerEnd > UserProfileFormer< FormerContext, FormerEnd > + where + FormerEnd : former::ToSuperFormer< UserProfile, FormerContext >, + { + #[ inline( always ) ] + pub fn form( mut self ) -> UserProfile + { + let age = if self.container.age.is_some() + { + self.container.age.take().unwrap() + } + else + { + let val : i32 = + { + trait NotDefault< T > + { + fn maybe_default( self : &Self ) -> T { panic!( "Field 'age' isn't initialized" ) } + } + trait WithDefault< T > + { + fn maybe_default( self : &Self ) -> T; + } + impl< T > NotDefault< T > for &::core::marker::PhantomData< T > {} + impl< T > WithDefault< T > for ::core::marker::PhantomData< T > + where + T : ::core::default::Default, + { + fn maybe_default( self : &Self ) -> T + { + T::default() + } + } + ( &::core::marker::PhantomData::< i32 > ).maybe_default() + }; + val + }; + let username = if self.container.username.is_some() + { + self.container.username.take().unwrap() + } + else + { + let val : String = + { + trait NotDefault< T > + { + fn maybe_default( self : &Self ) -> T { panic!( "Field 'username' isn't initialized" ) } + } + trait WithDefault< T > + { + fn maybe_default( self : &Self ) -> T; + } + impl< T > NotDefault< T > for &::core::marker::PhantomData< T > {} + impl< T > WithDefault< T > for ::core::marker::PhantomData< T > + where + T : ::core::default::Default, + { + fn maybe_default( self : &Self ) -> T + { + T::default() + } + } + ( &::core::marker::PhantomData::< String > ).maybe_default() + }; + val + }; + let bio_optional = if self.container.bio_optional.is_some() + { + Option::Some( self.container.bio_optional.take().unwrap() ) + } + else + { + Option::None + }; + let result = UserProfile + { + age, + username, + bio_optional, + }; + return result; + } + + #[ inline( always ) ] + pub fn perform( self ) -> UserProfile + { + let result = self.form(); + return result; + } + + #[ inline( always ) ] + pub fn new() -> UserProfileFormer< UserProfile, former::ReturnContainer > + { + UserProfileFormer::< UserProfile, former::ReturnContainer >::begin( None, former::ReturnContainer ) + } + + #[ inline( always ) ] + pub fn begin( + context : Option< FormerContext >, + on_end : FormerEnd, + ) -> Self + { + Self + { + container : core::default::Default::default(), + context : context, + 
on_end : Option::Some( on_end ), + } + } + + #[ inline( always ) ] + pub fn end( mut self ) -> FormerContext + { + let on_end = self.on_end.take().unwrap(); + let context = self.context.take(); + let container = self.form(); + on_end.call( container, context ) + } + + #[ inline ] + pub fn age< Src >( mut self, src : Src ) -> Self + where + Src : Into< i32 >, + { + debug_assert!( self.container.age.is_none() ); + self.container.age = Option::Some( src.into() ); + self + } + + #[ inline ] + pub fn username< Src >( mut self, src : Src ) -> Self + where + Src : Into< String >, + { + debug_assert!( self.container.username.is_none() ); + self.container.username = Option::Some( src.into() ); + self + } + + #[ inline ] + pub fn bio_optional< Src >( mut self, src : Src ) -> Self + where + Src : Into< String >, + { + debug_assert!( self.container.bio_optional.is_none() ); + self.container.bio_optional = Option::Some( src.into() ); + self + } + } + + let profile = UserProfile::former() + .age( 30 ) + .username( "JohnDoe".to_string() ) + .bio_optional( "Software Developer".to_string() ) + .form(); + + dbg!( &profile ); + // Expected output: + // &profile = UserProfile { + // age: 30, + // username: "JohnDoe", + // bio_optional: Some("Software Developer"), + // } + +} ``` ### Custom and Alternative Setters @@ -54,6 +271,9 @@ dbg!( &profile ); With help of `Former`, it is possible to define multiple versions of a setter for a single field, providing the flexibility to include custom logic within the setter methods. This feature is particularly useful when you need to preprocess data or enforce specific constraints before assigning values to fields. Custom setters should have unique names to differentiate them from the default setters generated by `Former`, allowing for specialized behavior while maintaining clarity in your code. ```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# { + use former::Former; /// Structure with a custom setter. @@ -85,6 +305,8 @@ let example = StructWithCustomSetters::former() .word_exclaimed( "Hello" ) .form(); assert_eq!( example.word, "Hello!".to_string() ); + +# } ``` In the example above showcases a custom alternative setter, `word_exclaimed`, which appends an exclamation mark to the input string before storing it. This approach allows for additional processing or validation of the input data without compromising the simplicity of the builder pattern. @@ -94,6 +316,9 @@ In the example above showcases a custom alternative setter, `word_exclaimed`, wh But it's also possible to completely override setter and write its own from scratch. For that use attribe `[ setter( false ) ]` to disable setter. ```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# { + use former::Former; /// Structure with a custom setter. @@ -121,6 +346,7 @@ let example = StructWithCustomSetters::former() .word( "Hello" ) .form(); assert_eq!( example.word, "Hello!".to_string() ); +# } ``` In the example above, the default setter for `word` is disabled, and a custom setter is defined to automatically append an exclamation mark to the string. This method allows for complete control over the data assignment process, enabling the inclusion of any necessary logic or validation steps. @@ -130,6 +356,9 @@ In the example above, the default setter for `word` is disabled, and a custom se The `Former` crate enhances struct initialization in Rust by allowing the specification of custom default values for fields through the `default` attribute. 
This feature not only provides a way to set initial values for struct fields without relying on the `Default` trait but also adds flexibility in handling cases where a field's type does not implement `Default`, or a non-standard default value is desired. ```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# { + use former::Former; /// Structure with default attributes. @@ -164,6 +393,7 @@ dbg!( &instance ); // > 30, // > ], // > } +# } ``` The above code snippet showcases the `Former` crate's ability to initialize struct fields with custom default values: @@ -182,6 +412,10 @@ Subformers are specialized builders used within the `Former` framework to constr The following example illustrates how to use a `VectorSubformer` to construct a `Vec` field within a struct. The subformer enables adding elements to the vector with a fluent interface, streamlining the process of populating collection fields within structs. ```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# #[ cfg( not( feature = "no_std" ) ) ] +# { + #[ derive( Debug, PartialEq, former::Former ) ] pub struct StructWithVec { @@ -197,6 +431,7 @@ let instance = StructWithVec::former() .form(); assert_eq!( instance, StructWithVec { vec: vec![ "apple", "banana" ] } ); +# } ``` ### Subformer example: Building a Hashmap @@ -204,6 +439,10 @@ assert_eq!( instance, StructWithVec { vec: vec![ "apple", "banana" ] } ); This example demonstrates the use of a `HashMapSubformer` to build a hash map within a struct. The subformer provides a concise way to insert key-value pairs into the map, making it easier to manage and construct hash map fields. ```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# #[ cfg( not( feature = "no_std" ) ) ] +# { + use test_tools::exposed::*; #[ derive( Debug, PartialEq, former::Former ) ] @@ -221,6 +460,7 @@ let struct1 = StructWithMap::former() .form() ; assert_eq!( struct1, StructWithMap { map : hmap!{ "a" => "b", "c" => "d" } } ); +# } ``` ### Subformer example: Building a Hashset @@ -228,6 +468,10 @@ assert_eq!( struct1, StructWithMap { map : hmap!{ "a" => "b", "c" => "d" } } ); In the following example, a `HashSetSubformer` is utilized to construct a hash set within a struct. This illustrates the convenience of adding elements to a set using the builder pattern facilitated by subformers. ```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# #[ cfg( not( feature = "no_std" ) ) ] +# { + use test_tools::exposed::*; #[ derive( Debug, PartialEq, former::Former ) ] @@ -245,6 +489,7 @@ let instance = StructWithSet::former() .form(); assert_eq!(instance, StructWithSet { set : hset![ "apple", "banana" ] }); +# } ``` ### Custom Subformer @@ -257,7 +502,10 @@ The example below illustrates how to incorporate the builder pattern of one stru example of how to use former of another structure as subformer of former of current one function `command` integrate `CommandFormer` into `AggregatorFormer`. -``` rust +```rust +# #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +# { + fn main() { use std::collections::HashMap; @@ -332,6 +580,7 @@ fn main() // > }, // > } } +# } ``` In this example, the `Aggregator` struct functions as a container for multiple `Command` structs, each identified by a unique command name. The `AggregatorFormer` implements a custom method `command`, which serves as a subformer for adding `Command` instances into the `Aggregator`. 
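Editor's note: the Readme changes above stop short of documenting the component-oriented API that the rest of this change set adds (the `derive_component_from` and `derive_set_component` features in `former/Cargo.toml`, the new `SetComponent` trait in `component.rs`, and the `ComponentFrom` derive). Below is a minimal sketch of how these pieces are expected to combine, mirroring the new `former_component_from.rs` example and the `components_composite` tests that appear later in this diff; the struct and field names are illustrative, and the sketch assumes the new features are enabled, as they are in the new default feature set.

```rust
// Sketch only : mirrors the new example and test files added in this diff.
// Assumes the `derive_component_from` and `derive_set_component` features.
fn main()
{
  #[ derive( Debug, Default, PartialEq, former::ComponentFrom, former::SetComponent ) ]
  struct Options
  {
    field1 : i32,
    field2 : String,
  }

  let mut opts = Options::default();

  // `SetComponent` assigns a field selected by its type; the fully qualified
  // form distinguishes the `i32` component from the `String` component.
  former::SetComponent::< i32, _ >::set( &mut opts, 42 );
  former::SetComponent::< String, _ >::set( &mut opts, "answer" );

  // `ComponentFrom` generates `From< &Options >` for each field type.
  let field1 : i32 = From::from( &opts );
  let field2 : String = From::from( &opts );
  assert_eq!( field1, 42 );
  assert_eq!( field2, "answer".to_string() );
}
```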
diff --git a/module/core/former/examples/former_component_from.rs b/module/core/former/examples/former_component_from.rs new file mode 100644 index 0000000000..e7cadbb335 --- /dev/null +++ b/module/core/former/examples/former_component_from.rs @@ -0,0 +1,40 @@ +//! +//! Macro to implement `From` for each component (field) of a structure. +//! This macro simplifies the creation of `From` trait implementations for struct fields, +//! enabling easy conversion from a struct reference to its field types. +//! +//! # Features +//! +//! - Requires the `derive_component_from` feature to be enabled for use. +//! - The `ComponentFrom` derive macro can be applied to structs to automatically generate +//! `From` implementations for each field. +//! +//! # Attributes +//! +//! - `debug` : Optional attribute to enable debug-level output during the macro expansion process. +//! + +#[ cfg( not( feature = "derive_component_from" ) ) ] +fn main() {} + +#[ cfg( feature = "derive_component_from" ) ] +fn main() +{ + + #[ derive( former::ComponentFrom ) ] + struct MyStruct + { + pub field1 : i32, + pub field2 : String, + } + + // Generated implementations allow for the following conversions : + let my_struct = MyStruct { field1 : 10, field2 : "Hello".into() }; + let field1 : i32 = From::from( &my_struct ); + let field2 : String = From::from( &my_struct ); + dbg!( field1 ); + dbg!( field2 ); + // > field1 = 10 + // > field2 = "Hello" + +} diff --git a/module/core/former/examples/former_custom_default.rs b/module/core/former/examples/former_custom_default.rs index 0db7078b56..963856d0f3 100644 --- a/module/core/former/examples/former_custom_default.rs +++ b/module/core/former/examples/former_custom_default.rs @@ -9,6 +9,10 @@ //! This approach significantly simplifies struct construction, particularly for complex types or where defaults beyond the `Default` trait's capability are required. By utilizing the `default` attribute, developers can ensure their structs are initialized safely and predictably, enhancing code clarity and maintainability. //! +#[ cfg( not( feature = "derive_former" ) ) ] +fn main() {} + +#[ cfg( feature = "derive_former" ) ] fn main() { use former::Former; diff --git a/module/core/former/examples/former_custom_setter.rs b/module/core/former/examples/former_custom_setter.rs index 10b57a25d7..43ca0eea85 100644 --- a/module/core/former/examples/former_custom_setter.rs +++ b/module/core/former/examples/former_custom_setter.rs @@ -4,6 +4,10 @@ //! In the example showcases a custom alternative setter, `word_exclaimed`, which appends an exclamation mark to the input string before storing it. This approach allows for additional processing or validation of the input data without compromising the simplicity of the builder pattern. //! +#[ cfg( not( feature = "derive_former" ) ) ] +fn main() {} + +#[ cfg( feature = "derive_former" ) ] fn main() { use former::Former; diff --git a/module/core/former/examples/former_custom_setter_overriden.rs b/module/core/former/examples/former_custom_setter_overriden.rs index 2b50efb097..15e8012c68 100644 --- a/module/core/former/examples/former_custom_setter_overriden.rs +++ b/module/core/former/examples/former_custom_setter_overriden.rs @@ -3,6 +3,10 @@ //! For that use attribe `[ setter( false ) ]` to disable setter. In the example, the default setter for `word` is disabled, and a custom setter is defined to automatically append an exclamation mark to the string. 
This method allows for complete control over the data assignment process, enabling the inclusion of any necessary logic or validation steps. //! +#[ cfg( not( feature = "derive_former" ) ) ] +fn main() {} + +#[ cfg( feature = "derive_former" ) ] fn main() { use former::Former; diff --git a/module/core/former/examples/former_custom_subformer.rs b/module/core/former/examples/former_custom_subformer.rs index 18295925ee..247a718533 100644 --- a/module/core/former/examples/former_custom_subformer.rs +++ b/module/core/former/examples/former_custom_subformer.rs @@ -1,6 +1,10 @@ //! example of how to use former of another structure as subformer of former of current one //! function `command` integrate `CommandFormer` into `AggregatorFormer`. +#[ cfg( not( feature = "derive_former" ) ) ] +fn main() {} + +#[ cfg( feature = "derive_former" ) ] fn main() { use std::collections::HashMap; diff --git a/module/core/former/examples/former_many_fields.rs b/module/core/former/examples/former_many_fields.rs index ee88752424..5bca4a54b1 100644 --- a/module/core/former/examples/former_many_fields.rs +++ b/module/core/former/examples/former_many_fields.rs @@ -17,6 +17,10 @@ //! //! The `dbg!` macro is utilized to print the constructed `Structure1` instance, confirming that all fields are correctly assigned, including the handling of optional fields and collections. This example underscores the power and convenience of using `Former` for struct initialization in Rust projects. +#[ cfg( not( feature = "derive_former" ) ) ] +fn main() {} + +#[ cfg( feature = "derive_former" ) ] fn main() { use former::Former; diff --git a/module/core/former/examples/former_subformer_hashmap.rs b/module/core/former/examples/former_subformer_hashmap.rs index 4e6d69c241..0cfb6dff30 100644 --- a/module/core/former/examples/former_subformer_hashmap.rs +++ b/module/core/former/examples/former_subformer_hashmap.rs @@ -3,6 +3,10 @@ //! Demonstrates how to use `HashMapSubformer` with the `HashMapLike` trait to build a `std::collections::HashMap`: //! +#[ cfg( not( all( feature = "derive_former", not( feature = "no_std" ) ) ) ) ] +fn main() {} + +#[ cfg( all( feature = "derive_former", not( feature = "no_std" ) ) ) ] fn main() { use test_tools::exposed::*; diff --git a/module/core/former/examples/former_subformer_hashset.rs b/module/core/former/examples/former_subformer_hashset.rs index 505f283db8..7ce1d3a365 100644 --- a/module/core/former/examples/former_subformer_hashset.rs +++ b/module/core/former/examples/former_subformer_hashset.rs @@ -3,6 +3,10 @@ //! Demonstrates how to use `HashMapSubformer` with the `HashMapLike` trait to build a `std::collections::HashMap`: //! +#[ cfg( not( all( feature = "derive_former", not( feature = "no_std" ) ) ) ) ] +fn main() {} + +#[ cfg( all( feature = "derive_former", not( feature = "no_std" ) ) ) ] fn main() { use test_tools::exposed::*; diff --git a/module/core/former/examples/former_subformer_vector.rs b/module/core/former/examples/former_subformer_vector.rs index 7c52148c3e..9d7b22bdc0 100644 --- a/module/core/former/examples/former_subformer_vector.rs +++ b/module/core/former/examples/former_subformer_vector.rs @@ -3,6 +3,10 @@ //! Demonstrates how to use `HashMapSubformer` with the `HashMapLike` trait to build a `std::collections::HashMap`: //! 
+#[ cfg( not( all( feature = "derive_former", not( feature = "no_std" ) ) ) ) ] +fn main() {} + +#[ cfg( all( feature = "derive_former", not( feature = "no_std" ) ) ) ] fn main() { diff --git a/module/core/former/examples/former_trivial.rs b/module/core/former/examples/former_trivial.rs index 2d44909326..db6cc6572f 100644 --- a/module/core/former/examples/former_trivial.rs +++ b/module/core/former/examples/former_trivial.rs @@ -16,6 +16,10 @@ //! This approach abstracts away the need for manually implementing a builder for each struct, making code more readable and maintainable. //! +#[ cfg( not( feature = "derive_former" ) ) ] +fn main() {} + +#[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] fn main() { use former::Former; @@ -25,7 +29,7 @@ fn main() { age : i32, username : String, - bio_optional : Option, // Fields could be optional + bio_optional : Option< String >, // Fields could be optional } let profile = UserProfile::former() diff --git a/module/core/former/examples/former_trivial_expaned.rs b/module/core/former/examples/former_trivial_expaned.rs new file mode 100644 index 0000000000..cdcb3fc995 --- /dev/null +++ b/module/core/former/examples/former_trivial_expaned.rs @@ -0,0 +1,231 @@ +//! # Builder Pattern Implementation with Former +//! +//! This module demonstrates the use of the `Former` trait to apply the builder pattern for Rust structs. +//! The `Former` trait simplifies the instantiation of structs by enabling a fluent, method-chaining approach +//! to set fields before finalizing the instance with `.form()`. It is particularly useful for structs with optional fields +//! or when a clear and concise way to instantiate complex data structures is needed. +//! +//! ## How Former Works +//! +//! - **Trait Derivation** : By deriving `Former` on a struct, you automatically generate builder methods for each field. +//! - **Fluent Interface** : Each field's builder method allows for setting the value of that field and returns a mutable reference to the builder, +//! enabling method chaining. +//! - **Optional Fields** : Optional fields can be easily handled without needing to explicitly set them to `None`. +//! - **Finalization** : The `.form()` method finalizes the building process and returns the constructed struct instance. +//! +//! This approach abstracts away the need for manually implementing a builder for each struct, making code more readable and maintainable. +//! 
+ +#[ cfg( not( feature = "enabled" ) ) ] +#[ allow( dead_code ) ] +fn main(){} + +#[ cfg( feature = "enabled" ) ] +#[ allow( dead_code ) ] +fn main() +{ + + #[ derive( Debug, PartialEq ) ] + pub struct UserProfile + { + age : i32, + username : String, + bio_optional : Option< String >, // Fields could be optional + } + + impl UserProfile + { + #[ inline( always ) ] + pub fn former() -> UserProfileFormer< UserProfile, former::ReturnContainer > + { + UserProfileFormer::< UserProfile, former::ReturnContainer >::new() + } + } + + #[ derive( Debug, Default ) ] + pub struct UserProfileFormerContainer + { + age : Option< i32 >, + username : Option< String >, + bio_optional : Option< String >, + } + + pub struct UserProfileFormer + < + FormerContext = UserProfile, + FormerEnd = former::ReturnContainer, + > + where + FormerEnd : former::ToSuperFormer< UserProfile, FormerContext >, + { + container : UserProfileFormerContainer, + context : Option< FormerContext >, + on_end : Option< FormerEnd >, + } + + impl< FormerContext, FormerEnd > UserProfileFormer< FormerContext, FormerEnd > + where + FormerEnd : former::ToSuperFormer< UserProfile, FormerContext >, + { + #[ inline( always ) ] + pub fn form( mut self ) -> UserProfile + { + let age = if self.container.age.is_some() + { + self.container.age.take().unwrap() + } + else + { + let val : i32 = + { + trait NotDefault< T > + { + fn maybe_default( self : &Self ) -> T { panic!( "Field 'age' isn't initialized" ) } + } + trait WithDefault< T > + { + fn maybe_default( self : &Self ) -> T; + } + impl< T > NotDefault< T > for &::core::marker::PhantomData< T > {} + impl< T > WithDefault< T > for ::core::marker::PhantomData< T > + where + T : ::core::default::Default, + { + fn maybe_default( self : &Self ) -> T + { + T::default() + } + } + ( &::core::marker::PhantomData::< i32 > ).maybe_default() + }; + val + }; + let username = if self.container.username.is_some() + { + self.container.username.take().unwrap() + } + else + { + let val : String = + { + trait NotDefault< T > + { + fn maybe_default( self : &Self ) -> T { panic!( "Field 'username' isn't initialized" ) } + } + trait WithDefault< T > + { + fn maybe_default( self : &Self ) -> T; + } + impl< T > NotDefault< T > for &::core::marker::PhantomData< T > {} + impl< T > WithDefault< T > for ::core::marker::PhantomData< T > + where + T : ::core::default::Default, + { + fn maybe_default( self : &Self ) -> T + { + T::default() + } + } + ( &::core::marker::PhantomData::< String > ).maybe_default() + }; + val + }; + let bio_optional = if self.container.bio_optional.is_some() + { + Option::Some( self.container.bio_optional.take().unwrap() ) + } + else + { + Option::None + }; + let result = UserProfile + { + age, + username, + bio_optional, + }; + return result; + } + + #[ inline( always ) ] + pub fn perform( self ) -> UserProfile + { + let result = self.form(); + return result; + } + + #[ inline( always ) ] + pub fn new() -> UserProfileFormer< UserProfile, former::ReturnContainer > + { + UserProfileFormer::< UserProfile, former::ReturnContainer >::begin( None, former::ReturnContainer ) + } + + #[ inline( always ) ] + pub fn begin( + context : Option< FormerContext >, + on_end : FormerEnd, + ) -> Self + { + Self + { + container : core::default::Default::default(), + context : context, + on_end : Option::Some( on_end ), + } + } + + #[ inline( always ) ] + pub fn end( mut self ) -> FormerContext + { + let on_end = self.on_end.take().unwrap(); + let context = self.context.take(); + let container = self.form(); + 
on_end.call( container, context ) + } + + #[ inline ] + pub fn age< Src >( mut self, src : Src ) -> Self + where + Src : Into< i32 >, + { + debug_assert!( self.container.age.is_none() ); + self.container.age = Option::Some( src.into() ); + self + } + + #[ inline ] + pub fn username< Src >( mut self, src : Src ) -> Self + where + Src : Into< String >, + { + debug_assert!( self.container.username.is_none() ); + self.container.username = Option::Some( src.into() ); + self + } + + #[ inline ] + pub fn bio_optional< Src >( mut self, src : Src ) -> Self + where + Src : Into< String >, + { + debug_assert!( self.container.bio_optional.is_none() ); + self.container.bio_optional = Option::Some( src.into() ); + self + } + } + + let profile = UserProfile::former() + .age( 30 ) + .username( "JohnDoe".to_string() ) + .bio_optional( "Software Developer".to_string() ) + .form(); + + dbg!( &profile ); + // Expected output: + // &profile = UserProfile { + // age: 30, + // username: "JohnDoe", + // bio_optional: Some("Software Developer"), + // } + +} diff --git a/module/core/former/src/axiomatic.rs b/module/core/former/src/axiomatic.rs index 0800170856..2319227480 100644 --- a/module/core/former/src/axiomatic.rs +++ b/module/core/former/src/axiomatic.rs @@ -23,6 +23,7 @@ pub trait ToSuperFormer< T, Context > /// /// # Returns /// Returns the transformed or original context based on the implementation. + #[ allow( dead_code ) ] fn call( &self, container : T, context : core::option::Option< Context > ) -> Context; } diff --git a/module/core/former/src/component.rs b/module/core/former/src/component.rs new file mode 100644 index 0000000000..1d0de919c0 --- /dev/null +++ b/module/core/former/src/component.rs @@ -0,0 +1,50 @@ +/// Provides a generic interface for setting a component of a certain type on an object. +/// +/// This trait abstracts the action of setting or replacing a component, where a component +/// can be any part or attribute of an object, such as a field value. It is designed to be +/// generic over the type of the component being set ( `T` ) and the type that can be converted +/// into the component ( `IntoT` ). This design allows for flexible implementations that can +/// accept various types that can then be converted into the required component type. +/// +/// # Type Parameters +/// +/// - `T` : The type of the component to be set on the implementing object. This type represents +/// the final form of the component as it should be stored or represented in the object. +/// - `IntoT` : The type that can be converted into `T`. This allows the `set` method to accept +/// different types that are capable of being transformed into the required component type `T`, +/// providing greater flexibility in setting the component. +/// +/// # Examples +/// +/// Implementing `SetComponent` to set a name string on a struct : +/// +/// ```rust +/// use former::SetComponent; +/// +/// struct MyStruct +/// { +/// name : String, +/// } +/// +/// impl SetComponent< String, &str > for MyStruct +/// { +/// fn set( &mut self, component : &str ) +/// { +/// self.name = component.into(); +/// } +/// } +/// +/// let mut obj = MyStruct { name : String::new() }; +/// obj.set( "New Name" ); +/// assert_eq!( obj.name, "New Name" ); +/// ``` +pub trait SetComponent< T, IntoT > +where + IntoT : Into< T >, +{ + /// Sets or replaces the component on the object with the given value. + /// + /// This method takes ownership of the given value ( `component` ), which is of type `IntoT`. 
+ /// `component` is then converted into type `T` and set as the component of the object. + fn set( &mut self, component : IntoT ); +} diff --git a/module/core/former/src/lib.rs b/module/core/former/src/lib.rs index 9066c3749c..e34e9c8e7d 100644 --- a/module/core/former/src/lib.rs +++ b/module/core/former/src/lib.rs @@ -4,21 +4,31 @@ #![ doc( html_root_url = "https://docs.rs/former/latest/former/" ) ] #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] -// /// Former - variation of builder pattern. Implementation of its runtime. -// pub mod runtime; - /// Axiomatic things. -#[ cfg( not( feature = "no_std" ) ) ] +#[ cfg( feature = "enabled" ) ] +// #[ cfg( not( feature = "no_std" ) ) ] +#[ cfg( feature = "derive_former" ) ] mod axiomatic; /// Former of a vector. +#[ cfg( feature = "enabled" ) ] #[ cfg( not( feature = "no_std" ) ) ] +#[ cfg( feature = "derive_former" ) ] mod vector; /// Former of a hash map. +#[ cfg( feature = "enabled" ) ] #[ cfg( not( feature = "no_std" ) ) ] +#[ cfg( feature = "derive_former" ) ] mod hash_map; /// Former of a hash set. +#[ cfg( feature = "enabled" ) ] #[ cfg( not( feature = "no_std" ) ) ] +#[ cfg( feature = "derive_former" ) ] mod hash_set; +/// Component-based forming. +#[ cfg( feature = "enabled" ) ] +#[ cfg( not( feature = "no_std" ) ) ] +#[ cfg( feature = "derive_component_from" ) ] +mod component; /// Namespace with dependencies. #[ cfg( feature = "enabled" ) ] @@ -27,28 +37,25 @@ pub mod dependency pub use former_meta; } +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +#[ cfg( feature = "enabled" ) ] +pub use protected::*; + /// Protected namespace of the module. +#[ cfg( feature = "enabled" ) ] pub mod protected { #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::orphan::*; - // #[ cfg( any( feature = "runtime", feature = "former_runtime" ) ) ] - // #[ doc( inline ) ] - // #[ allow( unused_imports ) ] - // use super::runtime; - // pub use former_runtime as runtime; - // #[ cfg( any( feature = "meta", feature = "former_meta" ) ) ] #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use former_meta as derive; } -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - /// Parented namespace of the module. +#[ cfg( feature = "enabled" ) ] pub mod orphan { #[ doc( inline ) ] @@ -57,45 +64,55 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ cfg( feature = "enabled" ) ] pub mod exposed { #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; - // #[ cfg( any( feature = "meta", feature = "former_meta" ) ) ] #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use former_meta::*; - // #[ doc( inline ) ] - // #[ allow( unused_imports ) ] - // pub use super::runtime::exposed::*; - #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] - #[ cfg( not( feature = "no_std" ) ) ] + #[ cfg( feature = "enabled" ) ] + #[ cfg( feature = "derive_former" ) ] pub use super::axiomatic::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] + #[ cfg( feature = "enabled" ) ] #[ cfg( not( feature = "no_std" ) ) ] + #[ cfg( feature = "derive_former" ) ] pub use super::vector::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] + #[ cfg( feature = "enabled" ) ] #[ cfg( not( feature = "no_std" ) ) ] + #[ cfg( feature = "derive_former" ) ] pub use super::hash_map::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] + #[ cfg( feature = "enabled" ) ] #[ cfg( not( feature = "no_std" ) ) ] + #[ cfg( feature = "derive_former" ) ] pub use super::hash_set::*; } /// Prelude to use essentials: `use my_module::prelude::*`. +#[ cfg( feature = "enabled" ) ] pub mod prelude { + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + #[ cfg( feature = "enabled" ) ] + #[ cfg( not( feature = "no_std" ) ) ] + #[ cfg( feature = "derive_component_from" ) ] + pub use super::component::*; } // qqq : check and improve quality of generated documentation diff --git a/module/core/former/src/runtime/mod.rs b/module/core/former/src/runtime/mod.rs deleted file mode 100644 index cf2edd896c..0000000000 --- a/module/core/former/src/runtime/mod.rs +++ /dev/null @@ -1,58 +0,0 @@ - -//! -//! Former - variation of builder pattern. Implementation of its runtime. -//! - -/// Axiomatic things. -#[ cfg( not( feature = "no_std" ) ) ] -mod axiomatic; -/// Former of a vector. -#[ cfg( not( feature = "no_std" ) ) ] -mod vector; -/// Former of a hash map. -#[ cfg( not( feature = "no_std" ) ) ] -mod hash_map; -/// Former of a hash set. -#[ cfg( not( feature = "no_std" ) ) ] -mod hash_set; - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - #[ cfg( not( feature = "no_std" ) ) ] - pub use super::axiomatic::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - #[ cfg( not( feature = "no_std" ) ) ] - pub use super::vector::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - #[ cfg( not( feature = "no_std" ) ) ] - pub use super::hash_map::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - #[ cfg( not( feature = "no_std" ) ) ] - pub use super::hash_set::*; -} - -/// Prelude to use essentials: `use my_module::prelude::*`. 
-pub mod prelude -{ -} diff --git a/module/core/former/tests/experimental.rs b/module/core/former/tests/experimental.rs index 9286216f0f..6713e6c17d 100644 --- a/module/core/former/tests/experimental.rs +++ b/module/core/former/tests/experimental.rs @@ -8,5 +8,5 @@ use test_tools::exposed::*; #[ allow( unused_imports ) ] use former as TheModule; -#[ path = "./inc/a_containers_without_runtime_test.rs" ] -mod experimental; +// #[ path = "./inc/a_containers_without_runtime_test.rs" ] +// mod experimental; diff --git a/module/core/former/tests/inc/components_component_from.rs b/module/core/former/tests/inc/components_component_from.rs new file mode 100644 index 0000000000..a35e8ce45c --- /dev/null +++ b/module/core/former/tests/inc/components_component_from.rs @@ -0,0 +1,20 @@ +#[ allow( unused_imports ) ] +use super::*; + +/// +/// Options1 +/// + +#[ derive( Debug, Default, PartialEq, TheModule::ComponentFrom ) ] +// #[ debug ] +// xxx : finish with debug, add test and sample +pub struct Options1 +{ + field1 : i32, + field2 : String, + field3 : f32, +} + +// + +include!( "only_test/components_from.rs" ); diff --git a/module/core/former/tests/inc/components_component_from_manual.rs b/module/core/former/tests/inc/components_component_from_manual.rs new file mode 100644 index 0000000000..cbe6da7b86 --- /dev/null +++ b/module/core/former/tests/inc/components_component_from_manual.rs @@ -0,0 +1,45 @@ +#[ allow( unused_imports ) ] +use super::*; + +/// +/// Options1 +/// + +#[ derive( Debug, Default, PartialEq ) ] +pub struct Options1 +{ + field1 : i32, + field2 : String, + field3 : f32, +} + +impl From< &Options1 > for i32 +{ + #[ inline( always ) ] + fn from( src : &Options1 ) -> Self + { + src.field1.clone() + } +} + +impl From< &Options1 > for String +{ + #[ inline( always ) ] + fn from( src : &Options1 ) -> Self + { + src.field2.clone() + } +} + +impl From< &Options1 > for f32 +{ + #[ inline( always ) ] + fn from( src : &Options1 ) -> Self + { + src.field3.clone() + } +} + +// + +include!( "only_test/components_from.rs" ); diff --git a/module/core/former/tests/inc/components_composite.rs b/module/core/former/tests/inc/components_composite.rs new file mode 100644 index 0000000000..3dc2fda5bc --- /dev/null +++ b/module/core/former/tests/inc/components_composite.rs @@ -0,0 +1,106 @@ +#[ allow( unused_imports ) ] +use super::*; +#[ allow( unused_imports ) ] +use former::SetComponent; + +/// +/// Options1 +/// + +#[ derive( Debug, Default, PartialEq, TheModule::ComponentFrom, TheModule::SetComponent ) ] +// qqq : make these traits working for generic struct, use `split_for_impl` +pub struct Options1 +{ + field1 : i32, + field2 : String, + field3 : f32, +} + +/// +/// Options2 +/// + +#[ derive( Debug, Default, PartialEq, TheModule::ComponentFrom, TheModule::SetComponent ) ] +pub struct Options2 +{ + field1 : i32, + field2 : String, +} + +/// +/// Options2SetComponents. 
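The `components_component_from.rs` / `components_component_from_manual.rs` pair above is the usual derive-vs-manual test layout: both files end by including the same `only_test/components_from.rs`, so the derived code is checked against a hand-written reference. Condensed into one runnable sketch, the behaviour `ComponentFrom` is expected to provide is one `From< &Struct >` impl per field type (this mirrors the manual file; nothing here is new API):

```rust
#[ derive( Debug, Default, PartialEq ) ]
pub struct Options1
{
  field1 : i32,
  field2 : String,
  field3 : f32,
}

// What `#[ derive( ComponentFrom ) ]` is expected to generate, per field.
impl From< &Options1 > for i32 { fn from( src : &Options1 ) -> Self { src.field1 } }
impl From< &Options1 > for String { fn from( src : &Options1 ) -> Self { src.field2.clone() } }
impl From< &Options1 > for f32 { fn from( src : &Options1 ) -> Self { src.field3 } }

fn main()
{
  let o1 = Options1 { field1 : 42, field2 : "Hello, world!".to_string(), field3 : 13.01 };
  // Each component is reachable through a plain `into` on a shared reference.
  let field1 : i32 = ( &o1 ).into();
  let field2 : String = ( &o1 ).into();
  let field3 : f32 = ( &o1 ).into();
  assert_eq!( ( field1, field2, field3 ), ( 42, "Hello, world!".to_string(), 13.01 ) );
}
```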
+/// + +pub trait Options2SetComponents< IntoT > +where + IntoT : Into< i32 >, + IntoT : Into< String >, + IntoT : Clone, +{ + fn components_set( &mut self, component : IntoT ); +} + +impl< T, IntoT > Options2SetComponents< IntoT > for T +where + T : former::SetComponent< i32, IntoT >, + T : former::SetComponent< String, IntoT >, + IntoT : Into< i32 >, + IntoT : Into< String >, + IntoT : Clone, +{ + #[ inline( always ) ] + fn components_set( &mut self, component : IntoT ) + { + former::SetComponent::< i32, _ >::set( self, component.clone() ); + former::SetComponent::< String, _ >::set( self, component.clone() ); + } +} + +impl< T > From< T > for Options2 +where + T : Into< i32 >, + T : Into< String >, + T : Clone, +{ + #[ inline( always ) ] + fn from( src : T ) -> Self + { + let field1 = Into::< i32 >::into( src.clone() ); + let field2 = Into::< String >::into( src.clone() ); + Options2 + { + field1, + field2, + } + } +} + +/// +/// Set with type. +/// + +pub trait SetWithType +{ + fn set_with_type< T, IntoT >( &mut self, component : IntoT ) + where + IntoT : Into< T >, + Self : former::SetComponent< T, IntoT >; +} + +impl SetWithType for Options2 +{ + + #[ inline( always ) ] + fn set_with_type< T, IntoT >( &mut self, component : IntoT ) + where + IntoT : Into< T >, + Self : former::SetComponent< T, IntoT >, + { + former::SetComponent::< T, IntoT >::set( self, component ); + } + +} + +// + +include!( "only_test/components_composite.rs" ); diff --git a/module/core/former/tests/inc/components_composite_manual.rs b/module/core/former/tests/inc/components_composite_manual.rs new file mode 100644 index 0000000000..840310c7a2 --- /dev/null +++ b/module/core/former/tests/inc/components_composite_manual.rs @@ -0,0 +1,205 @@ +#[ allow( unused_imports ) ] +use super::*; +#[ allow( unused_imports ) ] +use former::SetComponent; + +/// +/// Options1 +/// + +#[ derive( Debug, Default, PartialEq ) ] +pub struct Options1 +{ + field1 : i32, + field2 : String, + field3 : f32, +} + +impl From< &Options1 > for i32 +{ + #[ inline( always ) ] + fn from( src : &Options1 ) -> Self + { + src.field1.clone() + } +} + +impl From< &Options1 > for String +{ + #[ inline( always ) ] + fn from( src : &Options1 ) -> Self + { + src.field2.clone() + } +} + +impl From< &Options1 > for f32 +{ + #[ inline( always ) ] + fn from( src : &Options1 ) -> Self + { + src.field3.clone() + } +} + +impl< IntoT > former::SetComponent< i32, IntoT > for Options1 +where + IntoT : Into< i32 >, +{ + #[ inline( always ) ] + fn set( &mut self, component : IntoT ) + { + self.field1 = component.into().clone(); + } +} + +impl< IntoT > former::SetComponent< String, IntoT > for Options1 +where + IntoT : Into< String >, +{ + #[ inline( always ) ] + fn set( &mut self, component : IntoT ) + { + self.field2 = component.into().clone(); + } +} + +impl< IntoT > former::SetComponent< f32, IntoT > for Options1 +where + IntoT : Into< f32 >, +{ + #[ inline( always ) ] + fn set( &mut self, component : IntoT ) + { + self.field3 = component.into().clone(); + } +} + +/// +/// Options2 +/// + +#[ derive( Debug, Default, PartialEq ) ] +pub struct Options2 +{ + field1 : i32, + field2 : String, +} + +impl From< &Options2 > for i32 +{ + #[ inline( always ) ] + fn from( src : &Options2 ) -> Self + { + src.field1.clone() + } +} + +impl From< &Options2 > for String +{ + #[ inline( always ) ] + fn from( src : &Options2 ) -> Self + { + src.field2.clone() + } +} + +impl< IntoT > former::SetComponent< i32, IntoT > for Options2 +where + IntoT : Into< i32 >, +{ + #[ inline( 
always ) ] + fn set( &mut self, component : IntoT ) + { + self.field1 = component.into().clone(); + } +} + +impl< IntoT > former::SetComponent< String, IntoT > for Options2 +where + IntoT : Into< String >, +{ + #[ inline( always ) ] + fn set( &mut self, component : IntoT ) + { + self.field2 = component.into().clone(); + } +} + +/// +/// Options2SetComponents. +/// + +pub trait Options2SetComponents< IntoT > +where + IntoT : Into< i32 >, + IntoT : Into< String >, + IntoT : Clone, +{ + fn components_set( &mut self, component : IntoT ); +} + +impl< T, IntoT > Options2SetComponents< IntoT > for T +where + T : former::SetComponent< i32, IntoT >, + T : former::SetComponent< String, IntoT >, + IntoT : Into< i32 >, + IntoT : Into< String >, + IntoT : Clone, +{ + #[ inline( always ) ] + fn components_set( &mut self, component : IntoT ) + { + former::SetComponent::< i32, _ >::set( self, component.clone() ); + former::SetComponent::< String, _ >::set( self, component.clone() ); + } +} + +impl< T > From< T > for Options2 +where + T : Into< i32 >, + T : Into< String >, + T : Clone, +{ + #[ inline( always ) ] + fn from( src : T ) -> Self + { + let field1 = Into::< i32 >::into( src.clone() ); + let field2 = Into::< String >::into( src.clone() ); + Options2 + { + field1, + field2, + } + } +} + +/// +/// Set with type. +/// + +pub trait SetWithType +{ + fn set_with_type< T, IntoT >( &mut self, component : IntoT ) + where + IntoT : Into< T >, + Self : former::SetComponent< T, IntoT >; +} + +impl SetWithType for Options2 +{ + + #[ inline( always ) ] + fn set_with_type< T, IntoT >( &mut self, component : IntoT ) + where + IntoT : Into< T >, + Self : former::SetComponent< T, IntoT >, + { + former::SetComponent::< T, IntoT >::set( self, component ); + } + +} + +// + +include!( "only_test/components_composite.rs" ); diff --git a/module/core/former/tests/inc/mod.rs b/module/core/former/tests/inc/mod.rs index c7ba8d9395..4283ee2382 100644 --- a/module/core/former/tests/inc/mod.rs +++ b/module/core/former/tests/inc/mod.rs @@ -1,48 +1,93 @@ use super::*; +#[ cfg( feature = "derive_former" ) ] mod a_primitives_manual_test; +#[ cfg( feature = "derive_former" ) ] mod a_containers_without_runtime_manual_test; +#[ cfg( feature = "derive_former" ) ] mod a_containers_without_runtime_test; +#[ cfg( feature = "derive_former" ) ] +#[ cfg( not( feature = "no_std" ) ) ] mod a_containers_with_runtime_manual_test; -mod a_containers_with_runtime_test; +#[ cfg( feature = "derive_former" ) ] +#[ cfg( not( feature = "no_std" ) ) ] +mod a_containers_with_runtime_test ; +#[ cfg( feature = "derive_former" ) ] mod attribute_default_container; +#[ cfg( feature = "derive_former" ) ] mod attribute_default_primitive; +#[ cfg( feature = "derive_former" ) ] mod former_hashmap_without_parameter; +#[ cfg( feature = "derive_former" ) ] mod former_vector_without_parameter; +#[ cfg( feature = "derive_former" ) ] mod string_slice_manual_test; +#[ cfg( feature = "derive_former" ) ] mod string_slice_test; +#[ cfg( feature = "derive_former" ) ] mod default_user_type; +#[ cfg( feature = "derive_former" ) ] mod user_type_no_default; +#[ cfg( feature = "derive_former" ) ] mod user_type_no_debug; +#[ cfg( feature = "derive_former" ) ] mod alias_test; +#[ cfg( feature = "derive_former" ) ] mod name_collisions; +#[ cfg( feature = "derive_former" ) ] mod name_collision_context; +#[ cfg( feature = "derive_former" ) ] mod name_collision_end; +#[ cfg( feature = "derive_former" ) ] mod name_collision_on_end; +#[ cfg( feature = "derive_former" ) ] mod 
unsigned_primitive_types; +#[ cfg( feature = "derive_former" ) ] mod attribute_perform; +#[ cfg( feature = "derive_former" ) ] mod attribute_setter; +#[ cfg( feature = "derive_former" ) ] +#[ cfg( not( feature = "no_std" ) ) ] mod parametrized_struct_manual; +#[ cfg( feature = "derive_former" ) ] +#[ cfg( not( feature = "no_std" ) ) ] mod parametrized_struct_imm; +#[ cfg( feature = "derive_former" ) ] +#[ cfg( not( feature = "no_std" ) ) ] mod parametrized_struct_where; +#[ cfg( feature = "derive_former" ) ] +#[ cfg( not( feature = "no_std" ) ) ] mod subformer_basic_manual; +#[ cfg( feature = "derive_former" ) ] +#[ cfg( not( feature = "no_std" ) ) ] mod subformer_basic; +#[ cfg( feature = "derive_component_from" ) ] +mod components_component_from_manual; +#[ cfg( feature = "derive_component_from" ) ] +mod components_component_from; + +#[ cfg( all( feature = "derive_component_from", feature = "derive_set_component" ) ) ] +mod components_composite_manual; +#[ cfg( all( feature = "derive_component_from", feature = "derive_set_component" ) ) ] +mod components_composite; + only_for_terminal_module! { // stable have different information about error // that's why these tests are active only for nightly #[ test_tools::nightly ] + #[ cfg( feature = "derive_former" ) ] #[ test ] - fn trybuild_tests() + fn former_trybuild() { println!( "current_dir : {:?}", std::env::current_dir().unwrap() ); diff --git a/module/core/former/tests/inc/only_test/components_composite.rs b/module/core/former/tests/inc/only_test/components_composite.rs new file mode 100644 index 0000000000..4e30fa3cfa --- /dev/null +++ b/module/core/former/tests/inc/only_test/components_composite.rs @@ -0,0 +1,84 @@ + + +#[ test ] +fn component_set() +{ + + let mut o1 = Options1::default(); + o1.set( 42 ); + o1.set( "Hello, world!" ); + o1.set( 13.01 ); + println!( "field1: {}, field2: {}", o1.field1, o1.field2 ); + let exp = Options1 { field1 : 42, field2 : "Hello, world!".to_string(), field3 : 13.01 }; + assert_eq!( o1, exp ); + +} + +#[ test ] +fn component_set_with_composite() +{ + + // set( Into::< i32 >::into( &o1 ) ) + + let mut o1 = Options1::default(); + o1.set( 42 ); + o1.set( "Hello, world!" ); + o1.set( 13.01 ); + let mut o2 = Options2::default(); + o2.set( Into::< i32 >::into( &o1 ) ); + o2.set( Into::< String >::into( &o1 ) ); + let exp = Options2 { field1 : 42, field2 : "Hello, world!".to_string() }; + assert_eq!( o2, exp ); + + // set_with_type + + let mut o1 = Options1::default(); + o1.set( 42 ); + o1.set( "Hello, world!" ); + o1.set( 13.01 ); + let mut o2 = Options2::default(); + o2.set_with_type::< i32, _ >( &o1 ); + o2.set_with_type::< String, _ >( &o1 ); + let exp = Options2 { field1 : 42, field2 : "Hello, world!".to_string() }; + assert_eq!( o2, exp ); + +} + +#[ test ] +fn components_set() +{ + + // o2.components_set( &o1 ) + + let mut o1 = Options1::default(); + o1.set( 42 ); + o1.set( "Hello, world!" ); + o1.set( 13.01 ); + let mut o2 = Options2::default(); + o2.components_set( &o1 ); + let exp = Options2 { field1 : 42, field2 : "Hello, world!".to_string() }; + assert_eq!( o2, exp ); + + // o1.components_set( &o2 ) + + let mut o2 = Options2::default(); + o2.set( 42 ); + o2.set( "Hello, world!" ); + let mut o1 = Options1::default(); + o1.components_set( &o2 ); + let exp = Options1 { field1 : 42, field2 : "Hello, world!".to_string(), field3 : 0.0 }; + assert_eq!( o1, exp ); + + // o2 : Options2 = o1.into() + + let mut o1 = Options1::default(); + o1.set( 42 ); + o1.set( "Hello, world!" 
); + o1.set( 13.01 ); + let o2 : Options2 = Into::< Options2 >::into( &o1 ); + let exp = Options2 { field1 : 42, field2 : "Hello, world!".to_string() }; + assert_eq!( o2, exp ); + let o2 : Options2 = (&o1).into(); + assert_eq!( o2, exp ); + +} diff --git a/module/core/former/tests/inc/only_test/components_from.rs b/module/core/former/tests/inc/only_test/components_from.rs new file mode 100644 index 0000000000..18fbe15011 --- /dev/null +++ b/module/core/former/tests/inc/only_test/components_from.rs @@ -0,0 +1,18 @@ + + +#[ test ] +fn component_set() +{ + + let o1 = Options1 { field1 : 42, field2 : "Hello, world!".to_string(), field3 : 13.01 }; + + let field1 : i32 = ( &o1 ).into(); + assert_eq!( field1, 42 ); + + let field2 : String = ( &o1 ).into(); + assert_eq!( field2, "Hello, world!".to_string() ); + + let field3 : f32 = ( &o1 ).into(); + assert_eq!( field3, 13.01 ); + +} diff --git a/module/core/former/tests/inc/string_slice_manual_test.rs b/module/core/former/tests/inc/string_slice_manual_test.rs index 0c2681faf8..98988cb6cc 100644 --- a/module/core/former/tests/inc/string_slice_manual_test.rs +++ b/module/core/former/tests/inc/string_slice_manual_test.rs @@ -9,7 +9,7 @@ pub struct Struct1< 'a > impl< 'a > Struct1< 'a > { - #[inline] + #[ inline ] pub fn former() -> Struct1Former< 'a > { Struct1Former @@ -26,7 +26,7 @@ pub struct Struct1Former< 'a > impl< 'a > Struct1Former< 'a > { - #[inline] + #[ inline ] pub fn form( mut self ) -> Struct1< 'a > { let string_slice_1 = if self.string_slice_1.is_some() @@ -41,7 +41,7 @@ impl< 'a > Struct1Former< 'a > Struct1 { string_slice_1 } } - #[inline] + #[ inline ] pub fn string_slice_1< Src >( mut self, src : Src ) -> Self where Src : ::core::convert::Into< &'a str >, diff --git a/module/core/former_meta/Cargo.toml b/module/core/former_meta/Cargo.toml index eebb731b05..a617ffbf85 100644 --- a/module/core/former_meta/Cargo.toml +++ b/module/core/former_meta/Cargo.toml @@ -26,9 +26,14 @@ all-features = false exclude = [ "/tests", "/examples", "-*" ] [features] -default = [ "enabled" ] -full = [ "enabled" ] -enabled = [] + +default = [ "enabled", "derive_former", "derive_component_from", "derive_set_component" ] +full = [ "enabled", "derive_former", "derive_component_from", "derive_set_component" ] +enabled = [ "former/enabled" ] + +derive_former = [ "former/derive_former" ] +derive_component_from = [ "former/derive_component_from" ] +derive_set_component = [ "former/derive_set_component" ] [lib] proc-macro = true @@ -37,7 +42,8 @@ proc-macro = true macro_tools = { workspace = true, features = [ "default" ] } iter_tools = { workspace = true, features = [ "default" ] } -# zzz : optimize features set +# xxx : optimize features set [dev-dependencies] -test_tools = { workspace = true, features = [ "default" ] } +test_tools = { workspace = true, features = [ "full" ] } +former = { workspace = true } diff --git a/module/core/former_meta/src/derive.rs b/module/core/former_meta/src/derive.rs new file mode 100644 index 0000000000..e2f7129f6c --- /dev/null +++ b/module/core/former_meta/src/derive.rs @@ -0,0 +1,16 @@ + +//! +//! Implement couple of derives of general-purpose. +//! 
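The composite tests above hinge on two bounds that are easy to miss: `components_set` can only exist when the target implements `SetComponent` for every listed field type, and the source must be `Clone` because each per-field `set` consumes one converted copy of it. A self-contained stand-in for `former::SetComponent` showing just that fan-out shape (names mirror the tests; `Source` is hypothetical and plays the role `&Options1` plays there):

```rust
// Local stand-in for `former::SetComponent`, so the sketch runs on its own.
trait SetComponent< T, IntoT >
where
  IntoT : Into< T >,
{
  fn set( &mut self, component : IntoT );
}

#[ derive( Default, Debug, PartialEq ) ]
struct Options2
{
  field1 : i32,
  field2 : String,
}

impl< IntoT > SetComponent< i32, IntoT > for Options2
where
  IntoT : Into< i32 >,
{
  fn set( &mut self, component : IntoT ) { self.field1 = component.into(); }
}

impl< IntoT > SetComponent< String, IntoT > for Options2
where
  IntoT : Into< String >,
{
  fn set( &mut self, component : IntoT ) { self.field2 = component.into(); }
}

// A hypothetical source able to provide both components.
struct Source { a : i32, s : &'static str }
impl From< &Source > for i32 { fn from( src : &Source ) -> Self { src.a } }
impl From< &Source > for String { fn from( src : &Source ) -> Self { src.s.to_string() } }

// One call sets every component the source can provide. The `Clone` bound is required
// because each per-field `set` consumes one converted copy of the source.
fn components_set< T, IntoT >( target : &mut T, component : IntoT )
where
  T : SetComponent< i32, IntoT > + SetComponent< String, IntoT >,
  IntoT : Into< i32 > + Into< String > + Clone,
{
  SetComponent::< i32, _ >::set( target, component.clone() );
  SetComponent::< String, _ >::set( target, component.clone() );
}

fn main()
{
  let mut o2 = Options2::default();
  components_set( &mut o2, &Source { a : 42, s : "Hello, world!" } );
  assert_eq!( o2, Options2 { field1 : 42, field2 : "Hello, world!".to_string() } );
}
```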
+ +#[ allow( unused_imports ) ] +use macro_tools::prelude::*; +// pub use macro_tools::{ Result, Many }; +// pub use iter_tools as iter; + +#[ cfg( feature = "derive_former" ) ] +pub mod former; +#[ cfg( feature = "derive_component_from" ) ] +pub mod component_from; +#[ cfg( feature = "derive_set_component" ) ] +pub mod set_component; diff --git a/module/core/former_meta/src/derive/component_from.rs b/module/core/former_meta/src/derive/component_from.rs new file mode 100644 index 0000000000..d984b476ee --- /dev/null +++ b/module/core/former_meta/src/derive/component_from.rs @@ -0,0 +1,71 @@ + +use super::*; +use macro_tools::{ attr, diag, type_struct, Result }; + +/// Generates `From` implementations for each unique component (field) of the structure. +pub fn component_from( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > +{ + let original_input = input.clone(); + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; + let has_debug = attr::has_debug( parsed.item.attrs.iter() )?; + + let for_field = parsed.fields_many().iter().map( | field | + { + for_each_field( field, &parsed.item_name ) + }) + .collect::< Result< Vec< _ > > >()?; + + let result = qt! + { + #( #for_field )* + }; + + if has_debug + { + diag::debug_report_print( original_input, &result ); + } + + Ok( result ) +} + +/// Generates a `From` implementation for a specific field of a struct. +/// +/// # Arguments +/// +/// * `field` - A reference to the field for which to generate the `From` implementation. +/// * `item_name` - The name of the structure containing the field. +/// +/// # Example of generated code +/// +/// If you have a structure `Person` with a field `name: String`, the generated code would look something like this: +/// +/// ```rust, ignore +/// impl From< &Person > for String +/// { +/// #[ inline( always ) ] +/// fn from( src : &Person ) -> Self +/// { +/// src.name.clone() +/// } +/// } +/// + +fn for_each_field( field : &syn::Field, item_name : &syn::Ident ) -> Result< proc_macro2::TokenStream > +{ + let field_name = field.ident.as_ref() + .ok_or_else( || syn::Error::new( field.span(), "Field without a name" ) )?; + let field_type = &field.ty; + + Ok( qt! + { + #[ allow( non_local_definitions ) ] + impl From< &#item_name > for #field_type + { + #[ inline( always ) ] + fn from( src : &#item_name ) -> Self + { + src.#field_name.clone() + } + } + }) +} diff --git a/module/core/former_meta/src/former_impl.rs b/module/core/former_meta/src/derive/former.rs similarity index 78% rename from module/core/former_meta/src/former_impl.rs rename to module/core/former_meta/src/derive/former.rs index a127760148..73f7f5ee55 100644 --- a/module/core/former_meta/src/former_impl.rs +++ b/module/core/former_meta/src/derive/former.rs @@ -1,9 +1,8 @@ +use super::*; use iter_tools::{ Itertools, process_results }; -use macro_tools::*; - -pub type Result< T > = std::result::Result< T, syn::Error >; - +use macro_tools::{ typ, generics, container_kind, Result }; +use proc_macro2::TokenStream; /// /// Descripotr of a field. 
/// @@ -18,7 +17,7 @@ struct FormerField< 'a > pub ty : &'a syn::Type, pub non_optional_ty : &'a syn::Type, pub is_optional : bool, - pub type_container_kind : macro_tools::ContainerKind, + pub of_type : container_kind::ContainerKind, } /// @@ -43,30 +42,60 @@ impl Attributes let mut alias = None; for attr in attributes { - let key_ident = attr.path.get_ident() - .ok_or_else( || syn_err!( attr, "Expects simple key of an attirbute, but got:\n {}", qt!{ #attr } ) )?; + let key_ident = attr.path().get_ident() + .ok_or_else( || syn_err!( attr, "Expects an attirbute of format #[ attribute( val ) ], but got:\n {}", qt!{ #attr } ) )?; let key_str = format!( "{}", key_ident ); match key_str.as_ref() { "default" => { - let attr_default = syn::parse2::< AttributeDefault >( attr.tokens.clone() )?; - default.replace( attr_default ); + match attr.meta + { + syn::Meta::List( ref meta_list ) => + { + default.replace( syn::parse2::< AttributeDefault >( meta_list.tokens.clone() )? ); + }, + _ => return_syn_err!( attr, "Expects an attirbute of format #[ attribute( val ) ], but got:\n {}", qt!{ #attr } ), + } } "setter" => { - let attr_setter = syn::parse2::< AttributeSetter >( attr.tokens.clone() )?; - setter.replace( attr_setter ); + match attr.meta + { + syn::Meta::List( ref meta_list ) => + { + setter.replace( syn::parse2::< AttributeSetter >( meta_list.tokens.clone() )? ); + }, + _ => return_syn_err!( attr, "Expects an attirbute of format #[ attribute( val ) ], but got:\n {}", qt!{ #attr } ), + } + // let attr_setter = syn::parse2::< AttributeSetter >( attr.tokens.clone() )?; + // setter.replace( attr_setter ); } "subformer" => { - let attr_former = syn::parse2::< AttributeFormer >( attr.tokens.clone() )?; - subformer.replace( attr_former ); + match attr.meta + { + syn::Meta::List( ref meta_list ) => + { + subformer.replace( syn::parse2::< AttributeFormer >( meta_list.tokens.clone() )? ); + }, + _ => return_syn_err!( attr, "Expects an attirbute of format #[ attribute( val ) ], but got:\n {}", qt!{ #attr } ), + } + // let attr_former = syn::parse2::< AttributeFormer >( attr.tokens.clone() )?; + // subformer.replace( attr_former ); } "alias" => { - let attr_alias = syn::parse2::< AttributeAlias >( attr.tokens.clone() )?; - alias.replace( attr_alias ); + match attr.meta + { + syn::Meta::List( ref meta_list ) => + { + alias.replace( syn::parse2::< AttributeAlias >( meta_list.tokens.clone() )? ); + }, + _ => return_syn_err!( attr, "Expects an attirbute of format #[ attribute( val ) ], but got:\n {}", qt!{ #attr } ), + } + // let attr_alias = syn::parse2::< AttributeAlias >( attr.tokens.clone() )?; + // alias.replace( attr_alias ); } "doc" => { @@ -91,7 +120,7 @@ impl Attributes #[ allow( dead_code ) ] struct AttributeFormAfter { - paren_token : syn::token::Paren, + // paren_token : syn::token::Paren, signature : syn::Signature, } @@ -99,11 +128,12 @@ impl syn::parse::Parse for AttributeFormAfter { fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > { - let input2; + // let input2; Ok( Self { - paren_token : syn::parenthesized!( input2 in input ), - signature : input2.parse()?, + // paren_token : syn::parenthesized!( input2 in input ), + // signature : input2.parse()?, + signature : input.parse()?, }) } } @@ -111,14 +141,14 @@ impl syn::parse::Parse for AttributeFormAfter /// /// Attribute to hold information about default value. 
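The attribute handling above is the heart of the syn `1.0` to `2.0` migration in this file: `attr.path` becomes the `attr.path()` accessor, and the removed `attr.tokens` field is replaced by matching on `attr.meta`, with the argument tokens living inside the `syn::Meta::List` variant. A minimal sketch of that access pattern, using the `default` attribute as the example and assuming `syn 2` and `quote` as direct dependencies, which the Cargo hunks in this patch pin (the error wording is illustrative):

```rust
// syn 2 access pattern used repeatedly above :
// for `#[ default( 31 ) ]` the argument tokens sit inside `Meta::List`.
fn default_expr( attr : &syn::Attribute ) -> syn::Result< syn::Expr >
{
  match &attr.meta
  {
    syn::Meta::List( meta_list ) => syn::parse2::< syn::Expr >( meta_list.tokens.clone() ),
    _ => Err( syn::Error::new_spanned( attr, "expected an attribute of format #[ default( val ) ]" ) ),
  }
}

fn main() -> syn::Result< () >
{
  let attr : syn::Attribute = syn::parse_quote!( #[ default( 31 ) ] );
  let expr = default_expr( &attr )?;
  assert_eq!( quote::quote!( #expr ).to_string(), "31" );
  Ok( () )
}
```

The same `match attr.meta { syn::Meta::List( .. ) => .. }` shape is what each of the `default`, `setter`, `subformer` and `alias` branches above now uses.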
/// -/// `#[ default = 13 ]` +/// `#[ default( 13 ) ]` /// #[ allow( dead_code ) ] struct AttributeDefault { // eq_token : syn::Token!{ = }, - paren_token : syn::token::Paren, + // paren_token : syn::token::Paren, expr : syn::Expr, } @@ -126,12 +156,13 @@ impl syn::parse::Parse for AttributeDefault { fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > { - let input2; + // let input2; Ok( Self { - paren_token : syn::parenthesized!( input2 in input ), + // paren_token : syn::parenthesized!( input2 in input ), // eq_token : input.parse()?, - expr : input2.parse()?, + // expr : input2.parse()?, + expr : input.parse()?, }) } } @@ -146,7 +177,7 @@ impl syn::parse::Parse for AttributeDefault #[ allow( dead_code ) ] struct AttributeSetter { - paren_token : syn::token::Paren, + // paren_token : syn::token::Paren, condition : syn::LitBool, } @@ -154,11 +185,12 @@ impl syn::parse::Parse for AttributeSetter { fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > { - let input2; + // let input2; Ok( Self { - paren_token : syn::parenthesized!( input2 in input ), - condition : input2.parse()?, + // paren_token : syn::parenthesized!( input2 in input ), + // condition : input2.parse()?, + condition : input.parse()?, }) } } @@ -172,7 +204,7 @@ impl syn::parse::Parse for AttributeSetter #[ allow( dead_code ) ] struct AttributeFormer { - paren_token : syn::token::Paren, + // paren_token : syn::token::Paren, expr : syn::Type, } @@ -180,11 +212,12 @@ impl syn::parse::Parse for AttributeFormer { fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > { - let input2; + // let input2; Ok( Self { - paren_token : syn::parenthesized!( input2 in input ), - expr : input2.parse()?, + // paren_token : syn::parenthesized!( input2 in input ), + // expr : input2.parse()?, + expr : input.parse()?, }) } } @@ -198,7 +231,7 @@ impl syn::parse::Parse for AttributeFormer #[ allow( dead_code ) ] struct AttributeAlias { - paren_token : syn::token::Paren, + // paren_token : syn::token::Paren, alias : syn::Ident, } @@ -206,11 +239,12 @@ impl syn::parse::Parse for AttributeAlias { fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > { - let input2; + // let input2; Ok( Self { - paren_token : syn::parenthesized!( input2 in input ), - alias : input2.parse()?, + // paren_token : syn::parenthesized!( input2 in input ), + // alias : input2.parse()?, + alias : input.parse()?, }) } } @@ -221,7 +255,7 @@ impl syn::parse::Parse for AttributeAlias fn is_optional( ty : &syn::Type ) -> bool { - macro_tools::type_rightmost( ty ) == Some( "Option".to_string() ) + typ::type_rightmost( ty ) == Some( "Option".to_string() ) } /// @@ -230,7 +264,7 @@ fn is_optional( ty : &syn::Type ) -> bool fn parameter_internal_first( ty : &syn::Type ) -> Result< &syn::Type > { - macro_tools::type_parameters( ty, 0 ..= 0 ) + typ::type_parameters( ty, 0 ..= 0 ) .first() .copied() .ok_or_else( || syn_err!( ty, "Expects at least one parameter here:\n {}", qt!{ #ty } ) ) @@ -249,7 +283,7 @@ fn parameter_internal_first( ty : &syn::Type ) -> Result< &syn::Type > /// #[ inline( always ) ] -fn field_none_map( field : &FormerField< '_ > ) -> proc_macro2::TokenStream +fn field_none_map( field : &FormerField< '_ > ) -> TokenStream { let ident = Some( field.ident.clone() ); let tokens = qt! 
{ ::core::option::Option::None }; @@ -275,7 +309,7 @@ fn field_none_map( field : &FormerField< '_ > ) -> proc_macro2::TokenStream /// #[ inline( always ) ] -fn field_optional_map( field : &FormerField< '_ > ) -> proc_macro2::TokenStream +fn field_optional_map( field : &FormerField< '_ > ) -> TokenStream { let ident = Some( field.ident.clone() ); let ty = field.ty.clone(); @@ -316,7 +350,7 @@ fn field_optional_map( field : &FormerField< '_ > ) -> proc_macro2::TokenStream /// #[ inline( always ) ] -fn field_form_map( field : &FormerField< '_ > ) -> Result< proc_macro2::TokenStream > +fn field_form_map( field : &FormerField< '_ > ) -> Result< TokenStream > { let ident = field.ident; let ty = field.ty; @@ -439,7 +473,7 @@ fn field_name_map( field : &FormerField< '_ > ) -> syn::Ident /// # Example of output /// ```ignore /// #[ doc = "Setter for the '#field_ident' field." ] -/// #[inline] +/// #[ inline ] /// pub fn int_1< Src >( mut self, src : Src ) -> Self /// where /// Src : ::core::convert::Into< i32 >, @@ -451,7 +485,7 @@ fn field_name_map( field : &FormerField< '_ > ) -> syn::Ident /// ``` #[ inline ] -fn field_setter_map( field : &FormerField< '_ > ) -> Result< proc_macro2::TokenStream > +fn field_setter_map( field : &FormerField< '_ > ) -> Result< TokenStream > { let ident = &field.ident; @@ -505,7 +539,7 @@ fn field_setter setter_name : &syn::Ident, non_optional_type : &syn::Type, ) --> proc_macro2::TokenStream +-> TokenStream { qt! { @@ -554,7 +588,7 @@ fn subformer_field_setter non_optional_type : &syn::Type, subformer_type : &syn::Type, ) --> proc_macro2::TokenStream +-> TokenStream { let doc = format! ( @@ -564,7 +598,7 @@ fn subformer_field_setter // tree_print!( non_optional_type ); // code_print!( non_optional_type ); - let params = type_parameters( &non_optional_type, .. ); + let params = typ::type_parameters( &non_optional_type, .. ); // params.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); qt! @@ -634,7 +668,65 @@ For specifing custom default value use attribute `default`. For example: // -pub fn former( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > +pub fn performer< 'a > +( + name_ident : &syn::Ident, + generics_ty : &syn::TypeGenerics< '_ >, + attrs : impl Iterator< Item = &'a syn::Attribute >, +) +-> Result< ( TokenStream, TokenStream, TokenStream ) > +{ + + let mut perform = qt! + { + return result; + }; + let mut perform_output = qt!{ #name_ident #generics_ty }; + let mut perform_generics = qt!{}; + for attr in attrs + { + if let Some( ident ) = attr.path().get_ident() + { + let ident_string = format!( "{}", ident ); + if ident_string == "perform" + { + match attr.meta + { + syn::Meta::List( ref meta_list ) => + { + // default.replace( syn::parse2::< AttributeDefault >( meta_list.tokens.clone() )? ); + // let attr_perform = syn::parse2::< AttributeFormAfter >( attr.tokens.clone() )?; + let attr_perform = syn::parse2::< AttributeFormAfter >( meta_list.tokens.clone() )?; + let signature = &attr_perform.signature; + let generics = &signature.generics; + perform_generics = qt!{ #generics }; + let perform_ident = &signature.ident; + let output = &signature.output; + if let syn::ReturnType::Type( _, boxed_type ) = output + { + perform_output = qt!{ #boxed_type }; + } + perform = qt! 
+ { + return result.#perform_ident(); + }; + }, + _ => return_syn_err!( attr, "Expects an attirbute of format #[ attribute( val ) ], but got:\n {}", qt!{ #attr } ), + } + } + } + else + { + return_syn_err!( "Unknown structure attribute:\n{}", qt!{ attr } ); + } + } + + Ok( ( perform, perform_output, perform_generics ) ) +} + +// + +pub fn former( input : proc_macro::TokenStream ) -> Result< TokenStream > { let ast = match syn::parse::< syn::DeriveInput >( input ) @@ -668,59 +760,21 @@ pub fn former( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenSt // add embedded generic parameters let mut extra_generics : syn::Generics = parse_quote!{ < __FormerContext = #name_ident #generics_ty, __FormerEnd = former::ReturnContainer > }; extra_generics.where_clause = parse_quote!{ where __FormerEnd : former::ToSuperFormer< #name_ident #generics_ty, __FormerContext >, }; + // xxx : write helper to fix the bug let generics_of_former = generics::merge( &generics, &extra_generics ); let ( generics_of_former_impl, generics_of_former_ty, generics_of_former_where ) = generics_of_former.split_for_impl(); let generics_of_former_with_defaults = generics_of_former.params.clone(); // macro_tools::code_print!( generics_of_former_with_defaults ); // macro_tools::code_print!( extra_generics ); - // pub struct CommandFormer< K, __FormerContext = Command< K >, __FormerEnd = former::ReturnContainer > - // where - // K : core::hash::Hash + std::cmp::Eq, - // __FormerEnd : former::ToSuperFormer< Command< K >, __FormerContext >, - // { - // name : core::option::Option< String >, - // properties : core::option::Option< std::collections::HashMap< K, Property< K > > >, - // context : core::option::Option< __FormerContext >, - // on_end : core::option::Option< __FormerEnd >, - // } - /* structure attribute */ - let mut perform = qt! - { - return result; - }; - let mut perform_output = qt!{ #name_ident #generics_ty }; - let mut perform_generics = qt!{}; - for attr in ast.attrs.iter() - { - if let Some( ident ) = attr.path.get_ident() - { - let ident_string = format!( "{}", ident ); - if ident_string == "perform" - { - let attr_perform = syn::parse2::< AttributeFormAfter >( attr.tokens.clone() )?; - let signature = &attr_perform.signature; - let generics = &signature.generics; - perform_generics = qt!{ #generics }; - let perform_ident = &signature.ident; - let output = &signature.output; - if let syn::ReturnType::Type( _, boxed_type ) = output - { - perform_output = qt!{ #boxed_type }; - } - perform = qt! - { - return result.#perform_ident(); - }; - } - } - else - { - return Err( syn_err!( "Unknown structure attribute:\n{}", qt!{ attr } ) ); - } - } + let ( perform, perform_output, perform_generics ) = performer + ( + &name_ident, + &generics_ty, + ast.attrs.iter(), + )?; /* */ @@ -746,9 +800,9 @@ pub fn former( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenSt let colon_token = &field.colon_token; let ty = &field.ty; let is_optional = is_optional( ty ); - let type_container_kind = macro_tools::type_optional_container_kind( ty ).0; + let of_type = container_kind::of_optional( ty ).0; let non_optional_ty : &syn::Type = if is_optional { parameter_internal_first( ty )? 
} else { ty }; - let former_field = FormerField { attrs, vis, ident, colon_token, ty, non_optional_ty, is_optional, type_container_kind }; + let former_field = FormerField { attrs, vis, ident, colon_token, ty, non_optional_ty, is_optional, of_type }; Ok( former_field ) }).collect(); diff --git a/module/core/former_meta/src/derive/set_component.rs b/module/core/former_meta/src/derive/set_component.rs new file mode 100644 index 0000000000..95d145dce1 --- /dev/null +++ b/module/core/former_meta/src/derive/set_component.rs @@ -0,0 +1,71 @@ +use super::*; +use macro_tools::{ type_struct, Result }; + +/// +/// Generates implementations of the `SetComponent` trait for each field of a struct. +/// +pub fn set_component( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > +{ + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; + + let for_field = parsed.fields_many().iter().map( | field | + { + for_each_field( field, &parsed.item_name ) + }) + .collect::< Result< Vec< _ > > >()?; + + let result = qt! + { + #( #for_field )* + }; + + Ok( result ) +} + +/// Generates an implementation of the `SetComponent` trait for a specific field of a struct. +/// +/// This function creates the trait implementation that enables setting a struct's field value +/// with a type that can be converted into the field's type. It dynamically generates code +/// during the macro execution to provide `SetComponent` trait implementations for each field +/// of the struct, facilitating an ergonomic API for modifying struct instances. +/// +/// # Parameters +/// +/// - `field`: Reference to the struct field's metadata. +/// - `item_name`: The name of the struct. +/// +/// # Example of generated code +/// +/// ```rust, ignore +/// impl< IntoT > former::SetComponent< i32, IntoT > for Options1 +/// where +/// IntoT : Into< i32 >, +/// { +/// #[ inline( always ) ] +/// fn set( &mut self, component : IntoT ) +/// { +/// self.field1 = component.into().clone(); +/// } +/// } +/// ``` +fn for_each_field( field : &syn::Field, item_name : &syn::Ident ) -> Result< proc_macro2::TokenStream > +{ + let field_name = field.ident.as_ref() + .ok_or_else( || syn::Error::new( field.span(), "Field without a name" ) )?; + let field_type = &field.ty; + + Ok( qt! + { + #[ allow( non_snake_case ) ] + impl< IntoT > SetComponent< #field_type, IntoT > for #item_name + where + IntoT : Into< #field_type >, + { + #[ inline( always ) ] + fn set( &mut self, component : IntoT ) + { + self.#field_name = component.into(); + } + } + }) +} diff --git a/module/core/former_meta/src/lib.rs b/module/core/former_meta/src/lib.rs index 135a3f946a..8d3e3959b2 100644 --- a/module/core/former_meta/src/lib.rs +++ b/module/core/former_meta/src/lib.rs @@ -3,16 +3,363 @@ #![ doc( html_root_url = "https://docs.rs/former_derive_meta/latest/former_derive_meta/" ) ] #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] -mod former_impl; +#[ cfg( feature = "enabled" ) ] +mod derive; /// /// Derive macro to generate former for a structure. Former is variation of Builder Pattern. /// +/// Derives a 'Former' for a struct, implementing a variation of the Builder Pattern. +/// +/// This macro simplifies the creation of builder patterns for structs by automatically +/// generating a 'former' (builder) struct and implementation. It supports customization +/// through attributes to control default values, setter generation, subformer inclusion, +/// and field aliases. 
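The new `performer` helper above centralises handling of the structure-level `perform` attribute: `AttributeFormAfter` parses the attribute body as a `syn::Signature`, and the derive then returns `result.<ident>()` from `perform()` instead of the freshly built struct. A hedged usage sketch inferred from that code (the struct, method and expected output are illustrative, not taken from the crate's own tests or documentation):

```rust
// Inferred usage of `#[ perform( .. ) ]` : the attribute body is a function signature,
// and `.perform()` calls that method on the built value, returning its output.
#[ derive( former::Former ) ]
#[ perform( fn greet( &self ) -> String ) ]
pub struct Greeter
{
  name : String,
}

impl Greeter
{
  fn greet( &self ) -> String
  {
    format!( "hello, {}", self.name )
  }
}

fn main()
{
  // `.form()` would still return a `Greeter`; `.perform()` forwards to `greet`.
  let greeting : String = Greeter::former().name( "former" ).perform();
  assert_eq!( greeting, "hello, former" );
}
```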
+/// +/// # Attributes : +/// - `perform` : Specifies a method to call on the built object immediately after its construction. +/// - `default` : Sets a default value for a field. +/// - `setter` : Enables or disables the generation of a setter method for a field. +/// - `subformer` : Defines a sub-former for complex field types, allowing nested builders. +/// - `alias` : Creates an alias for a field setter. +/// - `doc` : Adds documentation to the generated setter methods. +/// +/// # Input Example : +/// +/// ```rust +/// #[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +/// fn main() +/// { +/// use former::Former; +/// +/// #[ derive( Debug, PartialEq, Former ) ] +/// pub struct UserProfile +/// { +/// age : i32, +/// username : String, +/// bio_optional : Option< String >, // Fields could be optional +/// } +/// +/// let profile = UserProfile::former() +/// .age( 30 ) +/// .username( "JohnDoe".to_string() ) +/// .bio_optional( "Software Developer".to_string() ) // Optionally provide a bio +/// .form(); +/// +/// dbg!( &profile ); +/// // Expected output: +/// // &profile = UserProfile { +/// // age: 30, +/// // username: "JohnDoe", +/// // bio_optional: Some("Software Developer"), +/// // } +/// +/// } +/// ``` +/// +/// # Generated Code Example : +/// +/// Assuming the struct above, the macro generates something like this : +/// +/// ```rust +/// # #[ cfg( feature = "enabled" ) ] +/// # #[ allow( dead_code ) ] +/// # fn main() +/// # { +/// +/// #[ derive( Debug, PartialEq ) ] +/// pub struct UserProfile +/// { +/// age : i32, +/// username : String, +/// bio_optional : Option< String >, // Fields could be optional +/// } +/// +/// impl UserProfile +/// { +/// #[ inline( always ) ] +/// pub fn former() -> UserProfileFormer< UserProfile, former::ReturnContainer > +/// { +/// UserProfileFormer::< UserProfile, former::ReturnContainer >::new() +/// } +/// } +/// +/// #[ derive( Debug, Default ) ] +/// pub struct UserProfileFormerContainer +/// { +/// age : Option< i32 >, +/// username : Option< String >, +/// bio_optional : Option< String >, +/// } +/// +/// pub struct UserProfileFormer +/// < +/// FormerContext = UserProfile, +/// FormerEnd = former::ReturnContainer, +/// > +/// where +/// FormerEnd : former::ToSuperFormer< UserProfile, FormerContext >, +/// { +/// container : UserProfileFormerContainer, +/// context : Option< FormerContext >, +/// on_end : Option< FormerEnd >, +/// } +/// +/// impl< FormerContext, FormerEnd > UserProfileFormer< FormerContext, FormerEnd > +/// where +/// FormerEnd : former::ToSuperFormer< UserProfile, FormerContext >, +/// { +/// #[ inline( always ) ] +/// pub fn form( mut self ) -> UserProfile +/// { +/// let age = self.container.age.take().unwrap_or_else( || +/// { +/// default_for_field::< i32 >( "age" ) +/// } ); +/// let username = self.container.username.take().unwrap_or_else( || +/// { +/// default_for_field::< String >( "username" ) +/// } ); +/// let bio_optional = self.container.bio_optional.take(); +/// UserProfile { age, username, bio_optional } +/// } +/// +/// #[ inline( always ) ] +/// pub fn perform( self ) -> UserProfile +/// { +/// self.form() +/// } +/// +/// #[ inline( always ) ] +/// pub fn new() -> UserProfileFormer< UserProfile, former::ReturnContainer > +/// { +/// UserProfileFormer::< UserProfile, former::ReturnContainer >::begin( None, former::ReturnContainer ) +/// } +/// +/// #[ inline( always ) ] +/// pub fn begin( context : Option< FormerContext >, on_end : FormerEnd ) -> Self +/// { +/// Self +/// { +/// 
container : Default::default(), +/// context, +/// on_end : Some( on_end ), +/// } +/// } +/// +/// #[ inline( always ) ] +/// pub fn end( mut self ) -> FormerContext +/// { +/// let on_end = self.on_end.take().unwrap(); +/// let context = self.context.take(); +/// let container = self.form(); +/// on_end.call( container, context ) +/// } +/// +/// #[ inline ] +/// pub fn age< Src >( mut self, src : Src ) -> Self +/// where +/// Src : Into< i32 >, +/// { +/// self.container.age = Some( src.into() ); +/// self +/// } +/// +/// #[ inline ] +/// pub fn username< Src >( mut self, src : Src ) -> Self +/// where +/// Src : Into< String >, +/// { +/// self.container.username = Some( src.into() ); +/// self +/// } +/// +/// #[ inline ] +/// pub fn bio_optional< Src >( mut self, src : Src ) -> Self +/// where +/// Src : Into< String >, +/// { +/// self.container.bio_optional = Some( src.into() ); +/// self +/// } +/// } +/// +/// fn default_for_field(field_name: &str) -> T { +/// eprintln!("Field '{}' isn't initialized, using default value.", field_name); +/// T::default() +/// } +/// +/// let profile = UserProfile::former() +/// .age( 30 ) +/// .username( "JohnDoe".to_string() ) +/// .bio_optional( "Software Developer".to_string() ) +/// .form(); +/// +/// dbg!( &profile ); +/// // Expected output: +/// // &profile = UserProfile { +/// // age: 30, +/// // username: "JohnDoe", +/// // bio_optional: Some("Software Developer"), +/// // } +/// # } +/// ``` +/// +/// This generated code allows building an instance of `MyStruct` fluently, with optional customization for each field. + +#[ cfg( feature = "enabled" ) ] +#[ cfg( feature = "derive_former" ) ] #[ proc_macro_derive( Former, attributes( perform, default, setter, subformer, alias, doc ) ) ] pub fn former( input : proc_macro::TokenStream ) -> proc_macro::TokenStream { - let result = former_impl::former( input ); + let result = derive::former::former( input ); + match result + { + Ok( stream ) => stream.into(), + Err( err ) => err.to_compile_error().into(), + } +} + +/// +/// Macro to implement `From` for each component (field) of a structure. +/// This macro simplifies the creation of `From` trait implementations for struct fields, +/// enabling easy conversion from a struct reference to its field types. +/// +/// # Features +/// +/// - Requires the `derive_component_from` feature to be enabled for use. +/// - The `ComponentFrom` derive macro can be applied to structs to automatically generate +/// `From` implementations for each field. +/// +/// # Attributes +/// +/// - `debug` : Optional attribute to enable debug-level output during the macro expansion process. 
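The `debug` helper attribute mentioned above is wired through two pieces of this patch: `attr::has_debug` detects it on the annotated struct, and the derive then calls `diag::debug_report_print( original_input, &result )` to print the original and generated token streams at expansion time. The accompanying `qqq`/`xxx` notes mark this as still being finished, so the sketch below shows the intended use only (`Person` is illustrative):

```rust
// Intended use of the `debug` helper attribute : no effect on the generated impls,
// only a compile-time report of input and output token streams.
#[ derive( former::ComponentFrom ) ]
#[ debug ]
struct Person
{
  pub age : i32,
  pub name : String,
}

fn main()
{
  let person = Person { age : 13, name : "John".to_string() };
  let age : i32 = ( &person ).into();
  assert_eq!( age, 13 );
}
```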
+/// +/// # Examples +/// +/// Assuming the `derive_component_from` feature is enabled in your `Cargo.toml`, you can use the macro as follows : +/// +/// ```rust +/// # fn main() +/// # { +/// #[ derive( former::ComponentFrom ) ] +/// struct Person +/// { +/// pub age : i32, +/// pub name : String, +/// } +/// +/// let my_struct = Person { age : 10, name : "Hello".into() }; +/// let age : i32 = From::from( &my_struct ); +/// let name : String = From::from( &my_struct ); +/// dbg!( age ); +/// dbg!( name ); +/// // > age = 10 +/// // > name = "Hello" +/// # } +/// ``` +/// + +// qqq : xxx : implement debug +#[ cfg( feature = "enabled" ) ] +#[ cfg( feature = "derive_component_from" ) ] +#[ proc_macro_derive( ComponentFrom, attributes( debug ) ) ] +pub fn component_from( input : proc_macro::TokenStream ) -> proc_macro::TokenStream +{ + let result = derive::component_from::component_from( input ); + match result + { + Ok( stream ) => stream.into(), + Err( err ) => err.to_compile_error().into(), + } +} + +/// Derives the `SetComponent` trait for struct fields, allowing each field to be set +/// with a value that can be converted into the field's type. +/// +/// This macro facilitates the automatic implementation of the `SetComponent` trait for all +/// fields within a struct, leveraging the power of Rust's type system to ensure type safety +/// and conversion logic. It is particularly useful for builder patterns or mutating instances +/// of data structures in a fluent and ergonomic manner. +/// +/// # Attributes +/// +/// - `debug` : An optional attribute to enable debugging of the trait derivation process. +/// +/// # Conditions +/// +/// - This macro is only enabled when the `derive_set_component` feature is active in your `Cargo.toml`. +/// +/// # Input Code Example +/// +/// Given a struct definition annotated with `#[ derive( SetComponent ) ]` : +/// +/// ```rust +/// use former::SetComponent; +/// +/// #[ derive( Default, PartialEq, Debug, former::SetComponent ) ] +/// struct Person +/// { +/// age : i32, +/// name : String, +/// } +/// +/// let mut person : Person = Default::default(); +/// person.set( 13 ); +/// person.set( "John" ); +/// assert_eq!( person, Person { age : 13, name : "John".to_string() } ); +/// ``` +/// +/// # Generated Code Example +/// +/// The procedural macro generates the following implementations for `Person` : +/// +/// ```rust +/// use former::SetComponent; +/// +/// #[ derive( Default, PartialEq, Debug ) ] +/// struct Person +/// { +/// age : i32, +/// name : String, +/// } +/// +/// impl< IntoT > SetComponent< i32, IntoT > for Person +/// where +/// IntoT : Into< i32 >, +/// { +/// fn set( &mut self, component : IntoT ) +/// { +/// self.age = component.into(); +/// } +/// } +/// +/// impl< IntoT > SetComponent< String, IntoT > for Person +/// where +/// IntoT : Into< String >, +/// { +/// fn set( &mut self, component : IntoT ) +/// { +/// self.name = component.into(); +/// } +/// } +/// +/// let mut person : Person = Default::default(); +/// person.set( 13 ); +/// person.set( "John" ); +/// assert_eq!( person, Person { age : 13, name : "John".to_string() } ); +/// ``` +/// This allows any type that can be converted into an `i32` or `String` to be set as +/// the value of the `age` or `name` fields of `Person` instances, respectively. 
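One practical note on the `set` calls in the example above: they compile because each argument type converts into exactly one field type, so inference can pick the right `SetComponent< T, _ >` impl. When that is not the case, the trait can be called with an explicit turbofish, exactly as the generated and manual impls in this patch do internally:

```rust
use former::SetComponent;

#[ derive( Default, PartialEq, Debug, former::SetComponent ) ]
struct Person
{
  age : i32,
  name : String,
}

fn main()
{
  let mut person : Person = Default::default();
  // Inferred : `13` only converts into `i32`, `"John"` only into `String`.
  person.set( 13 );
  person.set( "John" );
  // Explicit forms, useful when the component type cannot be inferred.
  SetComponent::< i32, _ >::set( &mut person, 14 );
  SetComponent::< String, _ >::set( &mut person, "Jane" );
  assert_eq!( person, Person { age : 14, name : "Jane".to_string() } );
}
```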
+ +// qqq : xxx : implement debug +#[ cfg( feature = "enabled" ) ] +#[ cfg( feature = "derive_set_component" ) ] +#[ proc_macro_derive( SetComponent, attributes( debug ) ) ] +pub fn set_component( input : proc_macro::TokenStream ) -> proc_macro::TokenStream +{ + let result = derive::set_component::set_component( input ); match result { Ok( stream ) => stream.into(), diff --git a/module/core/impls_index_meta/Cargo.toml b/module/core/impls_index_meta/Cargo.toml index 76db79c7cf..6751aee7d4 100644 --- a/module/core/impls_index_meta/Cargo.toml +++ b/module/core/impls_index_meta/Cargo.toml @@ -23,7 +23,6 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false - exclude = [ "/tests", "/examples", "-*" ] [features] diff --git a/module/core/impls_index_meta/src/impls.rs b/module/core/impls_index_meta/src/impls.rs index 7d1654e3f0..821cd58fea 100644 --- a/module/core/impls_index_meta/src/impls.rs +++ b/module/core/impls_index_meta/src/impls.rs @@ -1,13 +1,6 @@ -#[ allow( unused_imports ) ] -use quote::quote; -#[ allow( unused_imports ) ] -use syn::parse_quote; -#[ allow( unused_imports ) ] +use macro_tools::{ Result, Many }; use macro_tools::prelude::*; -#[ allow( unused_imports ) ] -// use macro_tools::{ Result, Items }; -use macro_tools::{ Result, Many, syn }; /// /// Module-specific item. @@ -75,7 +68,7 @@ impl quote::ToTokens for Items2 { let func = &e.func; - let declare_aliased = quote! + let declare_aliased = qt! { ( as $Name2 : ident ) => { @@ -90,14 +83,14 @@ impl quote::ToTokens for Items2 }; }; - let mut mandatory = quote! + let mut mandatory = qt! { #[ allow( unused_macros ) ] }; if e.optional.is_none() { - mandatory = quote! + mandatory = qt! { #[ deny( unused_macros ) ] } @@ -105,7 +98,7 @@ impl quote::ToTokens for Items2 let name_str = func.name(); let name_ident = syn::Ident::new( &name_str[ .. ], proc_macro2::Span::call_site() ); - let result = quote! + let result = qt! { #mandatory macro_rules! #name_ident @@ -129,7 +122,7 @@ pub fn impls( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStr { let items2 = syn::parse::< Items2 >( input )?; - let result = quote! + let result = qt! { #items2 }; diff --git a/module/core/impls_index_meta/src/lib.rs b/module/core/impls_index_meta/src/lib.rs index efab9f5d87..8b1f3394da 100644 --- a/module/core/impls_index_meta/src/lib.rs +++ b/module/core/impls_index_meta/src/lib.rs @@ -2,17 +2,6 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/impls_index_meta/latest/impls_index_meta/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -// #![ feature( type_name_of_val ) ] -// #![ feature( trace_macros ) ] - -//! -//! Several of macros to put each function under a named macro to index every function in a class. -//! 
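The `quote!` to `qt!` swaps in `impls.rs` above are a spelling change only, on the assumption that `qt!` is macro_tools' re-export of `quote::quote!` (that is how the Readme and example hunks below use it). A tiny sketch of the call through the prelude, also assuming `proc-macro2` is a direct dependency as it is for the crates in this patch:

```rust
use macro_tools::prelude::*;

// `qt!` accepts the same input as `quote!` and produces a `proc_macro2::TokenStream`.
fn sample() -> proc_macro2::TokenStream
{
  let greeting = "hello";
  qt!
  {
    fn greeting() -> &'static str { #greeting }
  }
}

fn main()
{
  // Prints the generated item, token for token.
  println!( "{}", sample() );
}
```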
- #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] #[ cfg( feature = "enabled" ) ] diff --git a/module/core/interval_adapter/Cargo.toml b/module/core/interval_adapter/Cargo.toml index e6dc44a949..0e81fa0e05 100644 --- a/module/core/interval_adapter/Cargo.toml +++ b/module/core/interval_adapter/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "interval_adapter" -version = "0.8.0" +version = "0.9.0" edition = "2021" authors = [ "Kostiantyn Wandalen ", diff --git a/module/core/iter_tools/Cargo.toml b/module/core/iter_tools/Cargo.toml index d60ba502f1..c2ca258656 100644 --- a/module/core/iter_tools/Cargo.toml +++ b/module/core/iter_tools/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "iter_tools" -version = "0.7.0" +version = "0.8.0" edition = "2021" authors = [ "Kostiantyn Wandalen ", diff --git a/module/core/macro_tools/Cargo.toml b/module/core/macro_tools/Cargo.toml index 2e897f1a9a..165d36bab8 100644 --- a/module/core/macro_tools/Cargo.toml +++ b/module/core/macro_tools/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "macro_tools" -version = "0.8.0" +version = "0.9.0" edition = "2021" authors = [ "Kostiantyn Wandalen ", @@ -32,20 +32,18 @@ full = [ "enabled" ] no_std = [] use_alloc = [] enabled = [] -# qqq2 : introduce feature enabled [dependencies] ## external -proc-macro2 = { version = "~1.0", features = [] } -quote = { version = "~1.0", features = [] } -syn = { version = "~1.0", features = [ "full", "extra-traits" ] } +proc-macro2 = { version = "~1.0.78", features = [] } +quote = { version = "~1.0.35", features = [] } +syn = { version = "~2.0.52", features = [ "full", "extra-traits" ] } +# syn = { version = "~1.0", features = [ "full", "extra-traits" ] } ## internal interval_adapter = { workspace = true, features = [ "default" ] } -# derive_tools = { workspace = true, features = [ "default" ] } -# type_constructor = { workspace = true, features = [ "default" ] } +# strs_tools = { workspace = true, features = [ "default" ] } [dev-dependencies] -# trybuild = { version = "~1.0", features = [ "diff" ] } test_tools = { workspace = true } diff --git a/module/core/macro_tools/Readme.md b/module/core/macro_tools/Readme.md index cd20d19038..9a09ee1c72 100644 --- a/module/core/macro_tools/Readme.md +++ b/module/core/macro_tools/Readme.md @@ -13,11 +13,11 @@ Tools for writing procedural macros. ```rust #[ cfg( not( feature = "no_std" ) ) ] { - use macro_tools::*; + use macro_tools::exposed::*; let code = qt!( core::option::Option< i8, i16, i32, i64 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = type_parameters( &tree_type, 0..=2 ); + let got = typ::type_parameters( &tree_type, 0..=2 ); got.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); /* print : i8 diff --git a/module/core/macro_tools/examples/macro_tools_trivial.rs b/module/core/macro_tools/examples/macro_tools_trivial.rs index 64c2523ab1..a77a98720e 100644 --- a/module/core/macro_tools/examples/macro_tools_trivial.rs +++ b/module/core/macro_tools/examples/macro_tools_trivial.rs @@ -1,19 +1,20 @@ //! 
example +#[ cfg( feature = "no_std" ) ] +fn main(){} + +#[ cfg( not( feature = "no_std" ) ) ] fn main() { - #[ cfg( not( feature = "no_std" ) ) ] - { - use macro_tools::*; + use macro_tools::{ typ, qt }; - let code = qt!( core::option::Option< i8, i16, i32, i64 > ); - let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = type_parameters( &tree_type, 0..=2 ); - got.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); - /* print : - i8 - i16 - i32 - */ - } -} + let code = qt!( core::option::Option< i8, i16, i32, i64 > ); + let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); + let got = typ::type_parameters( &tree_type, 0..=2 ); + got.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); + /* print : + i8 + i16 + i32 + */ +} \ No newline at end of file diff --git a/module/core/macro_tools/src/attr.rs b/module/core/macro_tools/src/attr.rs index b8fae834f5..51a3fbe10d 100644 --- a/module/core/macro_tools/src/attr.rs +++ b/module/core/macro_tools/src/attr.rs @@ -12,48 +12,89 @@ pub( crate ) mod private /// as well as syn::Meta as the last element of result tuple. /// /// ### Basic use-case. - /// ``` + /// ```rust + /// use macro_tools::exposed::*; /// let attr : syn::Attribute = syn::parse_quote!( #[ former( default = 31 ) ] ); - /// let ( key, val, _meta ) = macro_tools::attr::eq_pair( &attr ).unwrap(); - /// assert_eq!( key, "default" ); - /// assert_eq!( val, syn::Lit::Int( syn::LitInt::new( "31", proc_macro2::Span::call_site() ) ) ); + /// // tree_print!( attr ); + /// let got = equation( &attr ).unwrap(); + /// assert_eq!( code_to_str!( got ), "default = 31".to_string() ); /// ``` - pub fn eq_pair( attr : &syn::Attribute ) -> Result< ( String, syn::Lit, syn::Meta ) > + pub fn equation( attr : &syn::Attribute ) -> Result< tokens::Equation > { - // use syn::spanned::Spanned; - let meta = attr.parse_meta()?; - - // zzz : try to use helper from toolbox - let ( key, val ); - match meta + let meta = &attr.meta; + return match meta { syn::Meta::List( ref meta_list ) => - match meta_list.nested.first() { - Some( nested_meta ) => match nested_meta - { - syn::NestedMeta::Meta( meta2 ) => match meta2 - { - syn::Meta::NameValue( name_value ) => // match &name_value.lit - { - if meta_list.nested.len() != 1 - { - return Err( syn::Error::new( attr.span(), format!( "Expected single element of the list, but got {}", meta_list.nested.len() ) ) ); - } - key = name_value.path.get_ident().unwrap().to_string(); - val = name_value.lit.clone(); - }, - _ => return Err( syn::Error::new( attr.span(), "Unknown format of attribute, expected syn::Meta::NameValue( name_value )" ) ), - }, - _ => return Err( syn::Error::new( attr.span(), "Unknown format of attribute, expected syn::NestedMeta::Meta( meta2 )" ) ), - }, - _ => return Err( syn::Error::new( attr.span(), "Unknown format of attribute, expected Some( nested_meta )" ) ), - }, + let eq : tokens::Equation = syn::parse2( meta_list.tokens.clone() )?; + Ok( eq ) + } _ => return Err( syn::Error::new( attr.span(), "Unknown format of attribute, expected syn::Meta::List( meta_list )" ) ), }; + } - Ok( ( key, val, meta ) ) + /// Checks if the given iterator of attributes contains an attribute named `debug`. + /// + /// This function iterates over an input sequence of `syn::Attribute`, typically associated with a struct, + /// enum, or other item in a Rust Abstract Syntax Tree ( AST ), and determines whether any of the attributes + /// is exactly named `debug`. + /// + /// # Parameters + /// - `attrs` : An iterator over `syn::Attribute`. 
This could be obtained from parsing Rust code + /// with the `syn` crate, where the iterator represents attributes applied to a Rust item ( like a struct or function ). + /// + /// # Returns + /// - `Ok( true )` if the `debug` attribute is present. + /// - `Ok( false )` if the `debug` attribute is not found. + /// - `Err( syn::Error )` if an unknown or improperly formatted attribute is encountered. + /// + /// # Example + /// + /// Suppose you have the following struct definition in a procedural macro input: + /// + /// ```rust, ignore + /// #[ derive( SomeDerive ) ] + /// #[ debug ] + /// struct MyStruct + /// { + /// field : i32, + /// } + /// ``` + /// + /// You can use `has_debug` to check for the presence of the `debug` attribute: + /// + /// ```rust + /// use macro_tools::exposed::*; + /// + /// // Example struct attribute + /// let attrs : Vec< syn::Attribute > = vec![ syn::parse_quote!( #[ debug ] ) ]; + /// + /// // Checking for 'debug' attribute + /// let contains_debug = attr::has_debug( ( &attrs ).into_iter() ).unwrap(); + /// + /// assert!( contains_debug, "Expected to find 'debug' attribute" ); + /// ``` + /// + + pub fn has_debug< 'a >( attrs : impl Iterator< Item = &'a syn::Attribute > ) -> Result< bool > + { + for attr in attrs + { + if let Some( ident ) = attr.path().get_ident() + { + let ident_string = format!( "{}", ident ); + if ident_string == "debug" + { + return Ok( true ) + } + } + else + { + return_syn_err!( "Unknown structure attribute:\n{}", qt!{ attr } ); + } + } + return Ok( false ) } /// @@ -111,8 +152,9 @@ pub( crate ) mod private pound_token : input.parse()?, style : syn::AttrStyle::Inner( input.parse()? ), bracket_token : bracketed!( input2 in input ), - path : input2.call( syn::Path::parse_mod_style )?, - tokens : input2.parse()?, + // path : input2.call( syn::Path::parse_mod_style )?, + // tokens : input2.parse()?, + meta : input2.parse()?, }; result.0.push( element ); } @@ -195,8 +237,9 @@ pub( crate ) mod private pound_token : input.parse()?, style : syn::AttrStyle::Outer, bracket_token : bracketed!( input2 in input ), - path : input2.call( syn::Path::parse_mod_style )?, - tokens : input2.parse()?, + // path : input2.call( syn::Path::parse_mod_style )?, + // tokens : input2.parse()?, + meta : input2.parse()?, }; result.0.push( element ); } @@ -263,6 +306,7 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as attr; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; @@ -270,7 +314,8 @@ pub mod exposed #[ allow( unused_imports ) ] pub use super::private:: { - eq_pair, + equation, + has_debug, AttributesInner, AttributesOuter, AttributedIdent, diff --git a/module/core/macro_tools/src/container_kind.rs b/module/core/macro_tools/src/container_kind.rs index 4af3490a3a..a516594e47 100644 --- a/module/core/macro_tools/src/container_kind.rs +++ b/module/core/macro_tools/src/container_kind.rs @@ -32,16 +32,15 @@ pub( crate ) mod private /// /// ### Basic use-case. 
/// ``` - /// use macro_tools::*; - /// use quote::quote; + /// use macro_tools::exposed::*; /// - /// let code = quote!( std::collections::HashMap< i32, i32 > ); + /// let code = qt!( std::collections::HashMap< i32, i32 > ); /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - /// let kind = type_container_kind( &tree_type ); - /// assert_eq!( kind, ContainerKind::HashMap ); + /// let kind = container_kind::of_type( &tree_type ); + /// assert_eq!( kind, container_kind::ContainerKind::HashMap ); /// ``` - pub fn type_container_kind( ty : &syn::Type ) -> ContainerKind + pub fn of_type( ty : &syn::Type ) -> ContainerKind { if let syn::Type::Path( path ) = ty @@ -62,40 +61,37 @@ pub( crate ) mod private ContainerKind::No } - /// Return kind of container specified by type. Unlike [type_container_kind] it also understand optional types. + /// Return kind of container specified by type. Unlike [of_type] it also understand optional types. /// /// Good to verify `Option< alloc::vec::Vec< i32 > >` is optional vector. /// /// ### Basic use-case. /// ``` - /// use macro_tools::*; - /// use quote::quote; + /// use macro_tools::exposed::*; /// - /// let code = quote!( Option< std::collections::HashMap< i32, i32 > > ); + /// let code = qt!( Option< std::collections::HashMap< i32, i32 > > ); /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - /// let ( kind, optional ) = type_optional_container_kind( &tree_type ); - /// assert_eq!( kind, ContainerKind::HashMap ); + /// let ( kind, optional ) = container_kind::of_optional( &tree_type ); + /// assert_eq!( kind, container_kind::ContainerKind::HashMap ); /// assert_eq!( optional, true ); /// ``` - pub fn type_optional_container_kind( ty : &syn::Type ) -> ( ContainerKind, bool ) + pub fn of_optional( ty : &syn::Type ) -> ( ContainerKind, bool ) { - // use inspect_type::*; - - if type_rightmost( ty ) == Some( "Option".to_string() ) + if typ::type_rightmost( ty ) == Some( "Option".to_string() ) { - let ty2 = type_parameters( ty, 0 ..= 0 ).first().copied(); + let ty2 = typ::type_parameters( ty, 0 ..= 0 ).first().copied(); // inspect_type::inspect_type_of!( ty2 ); if ty2.is_none() { return ( ContainerKind::No, false ) } let ty2 = ty2.unwrap(); - return ( type_container_kind( ty2 ), true ) + return ( of_type( ty2 ), true ) } - ( type_container_kind( ty ), false ) + ( of_type( ty ), false ) } } @@ -110,6 +106,16 @@ pub mod protected #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::orphan::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + ContainerKind, + of_type, + of_optional, + }; + } /// Orphan namespace of the module. @@ -123,19 +129,11 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; + pub use super::protected as container_kind; #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::private:: - { - ContainerKind, - type_container_kind, - type_optional_container_kind, - }; - + pub use super::prelude::*; } /// Prelude to use essentials: `use my_module::prelude::*`. diff --git a/module/core/macro_tools/src/diag.rs b/module/core/macro_tools/src/diag.rs new file mode 100644 index 0000000000..5e03a6bed5 --- /dev/null +++ b/module/core/macro_tools/src/diag.rs @@ -0,0 +1,432 @@ +//! +//! Macro helpers. +//! + +/// Internal namespace. +pub( crate ) mod private +{ + use super::super::*; + + /// + /// Result with syn::Error. 
+ /// + + pub type Result< T > = std::result::Result< T, syn::Error >; + + /// Adds indentation and optional prefix/postfix to each line of the given string. + /// + /// This function iterates over each line in the input string and applies the specified + /// prefix and postfix to it, effectively indenting the string and optionally wrapping + /// each line with additional content. + /// + /// # Parameters + /// - `prefix` : The string to prepend to each line, typically used for indentation. + /// - `src` : The source string to be indented and modified. + /// - `postfix` : The string to append to each line, can be used for line terminators or other suffixes. + /// + /// # Type Parameters + /// - `Prefix` : A type that can be referenced as a string slice, for the prefix. + /// - `Src` : A type that can be referenced as a string slice, for the source string. + /// - `Postfix` : A type that can be referenced as a string slice, for the postfix. + /// + /// # Returns + /// A `String` that represents the original `src` string with `prefix` and `postfix` applied to each line. + /// + /// # Example + /// ``` + /// use macro_tools::diag; + /// + /// let input = "Line 1\nLine 2\nLine 3"; + /// let indented = diag::indentation( " ", input, ";" ); + /// assert_eq!( indented, " Line 1;\n Line 2;\n Line 3;" ); + /// + /// // Demonstrating the function's handling of trailing newlines + /// let input_with_newline = "Line 1\nLine 2\nLine 3\n"; + /// let indented_with_newline = diag::indentation( " ", input_with_newline, ";" ); + /// assert_eq!( indented_with_newline, " Line 1;\n Line 2;\n Line 3;\n ;" ); + /// ``` + /// + /// In the example above, `indentation` is used to add two spaces before each line + /// and a semicolon at the end of each line. The function also demonstrates handling + /// of input strings that end with a newline character by appending an additional line + /// consisting only of the prefix and postfix. + + pub fn indentation< Prefix, Src, Postfix >( prefix : Prefix, src : Src, postfix : Postfix ) -> String + where + Prefix : AsRef< str >, + Src : AsRef< str >, + Postfix : AsRef< str >, + { + let prefix = prefix.as_ref(); + let postfix = postfix.as_ref(); + let src = src.as_ref(); + + let mut result = src + .lines() + .enumerate() + .fold( String::new(), | mut a, b | + { + if b.0 > 0 + { + a.push_str( "\n" ); + } + a.push_str( prefix ); + a.push_str( &b.1 ); + a.push_str( postfix ); + a + }); + + if src.ends_with( "\n" ) || src.ends_with( "\n\r" ) || src.ends_with( "\r\n" ) + { + result.push_str( "\n" ); + result.push_str( prefix ); + result.push_str( postfix ); + } + + result + } + + /// Formats a debugging report for a pair of token streams, showing the original and generated code. + /// + /// This function takes two inputs: the original code as an `IntoTokens` (which can be converted into a `proc_macro2::TokenStream`), + /// and the generated code as a `proc_macro2::TokenStream`. It formats both inputs with indentation for better readability, + /// labeling them as "original" and "generated" respectively. + /// + /// Ensure the correct conversion of `proc_macro::TokenStream` to `proc_macro2::TokenStream` where necessary, + /// especially when interfacing with procedural macros' `input` parameter + /// + /// # Parameters + /// - `input`: The original input code that can be converted into a `proc_macro2::TokenStream`. + /// - `output`: The generated code as a `proc_macro2::TokenStream`. + /// + /// # Returns + /// A `String` containing the formatted debug report. 
+ /// + /// # Type Parameters + /// - `IntoTokens`: A type that can be converted into a `proc_macro2::TokenStream`. + /// + /// # Examples + /// ``` + /// use macro_tools::exposed::*; + /// + /// let original_input : proc_macro2::TokenStream = qt! + /// { + /// #[ derive( Debug, PartialEq ) ] + /// pub struct MyStruct + /// { + /// pub field : i32, + /// } + /// }; + /// + /// let generated_code : proc_macro2::TokenStream = qt! + /// { + /// impl MyStruct + /// { + /// pub fn new( field : i32 ) -> Self + /// { + /// MyStruct { field } + /// } + /// } + /// }; + /// + /// // Format the debug report for printing or logging + /// let formatted_report = debug_report_format( original_input, &generated_code ); + /// println!( "{}", formatted_report ); + /// ``` + /// + /// This will output a formatted report showing the original input code and the generated code side by side, + /// each line indented for clarity. + /// + pub fn debug_report_format< IntoTokens > + ( + input : IntoTokens, output : &proc_macro2::TokenStream + ) -> String + where + IntoTokens : Into< proc_macro2::TokenStream >, + { + format!( "\n" ) + + &format!( " = original\n\n{}\n\n", indentation( " ", input.into().to_string(), "" ) ) + + &format!( " = generated\n\n{}\n", indentation( " ", qt!{ #output }.to_string(), "" ) ) + } + + /// Prints a debugging report for a pair of token streams to the standard output. + /// + /// This convenience function wraps `debug_report_format`, directly printing the formatted report to stdout. + /// It serves as a utility for debugging procedural macros, providing a clear comparison between original + /// and generated code. + /// + /// # Parameters and Type Parameters + /// - Same as `debug_report_format`. + /// + /// # Examples + /// + /// ``` + /// use macro_tools::exposed::*; + /// + /// let original_input : proc_macro2::TokenStream = qt! + /// { + /// #[ derive( Debug, PartialEq ) ] + /// pub struct MyStruct + /// { + /// pub field : i32, + /// } + /// }; + /// + /// let generated_code : proc_macro2::TokenStream = qt! + /// { + /// impl MyStruct + /// { + /// pub fn new( field : i32 ) -> Self + /// { + /// MyStruct { field } + /// } + /// } + /// }; + /// + /// // Directly print the debug report + /// debug_report_print( original_input, &generated_code ); + /// ``` + /// + /// This will output a formatted report showing the original input code and the generated code side by side, + /// each line indented for clarity. + + pub fn debug_report_print< IntoTokens > + ( + input : IntoTokens, output : &proc_macro2::TokenStream + ) + where + IntoTokens : Into< proc_macro2::TokenStream >, + { + println!( "{}", debug_report_format( input, output ) ); + } + + /// + /// Macro for diagnostics purpose to print both syntax tree and source code behind it with syntax tree. + /// + /// ### Basic use-case. + /// ``` + /// use macro_tools::prelude::*; + /// + /// let code = qt!( std::collections::HashMap< i32, i32 > ); + /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); + /// tree_print!( tree_type ); + /// ``` + /// + + #[ macro_export ] + macro_rules! tree_print + { + ( $src:expr ) => + {{ + let result = $crate::tree_diagnostics_str!( $src ); + println!( "{}", result ); + result + }}; + ( $( $src:expr ),+ $(,)? ) => + {{ + $( $crate::tree_print!( $src ) );+ + }}; + } + + /// + /// Macro for diagnostics purpose to print both syntax tree and source code behind it without syntax tree. + /// + /// ### Basic use-case. 
+ /// ``` + /// use macro_tools::prelude::*; + /// + /// let code = qt!( std::collections::HashMap< i32, i32 > ); + /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); + /// tree_print!( tree_type ); + /// ``` + /// + + #[ macro_export ] + macro_rules! code_print + { + ( $src:expr ) => + {{ + let result = $crate::code_diagnostics_str!( $src ); + println!( "{}", result ); + result + }}; + ( $( $src:expr ),+ $(,)? ) => + {{ + $( $crate::code_print!( $src ) );+ + }}; + } + + /// + /// Macro for diagnostics purpose to export both syntax tree and source code behind it into a string. + /// + + #[ macro_export ] + macro_rules! tree_diagnostics_str + { + ( $src:expr ) => + {{ + let src2 = &$src; + format!( "{} : {} :\n{:#?}", stringify!( $src ), $crate::qt!{ #src2 }, $src ) + }}; + } + + /// + /// Macro for diagnostics purpose to diagnose source code behind it and export it into a string. + /// + + #[ macro_export ] + macro_rules! code_diagnostics_str + { + ( $src:expr ) => + {{ + let src2 = &$src; + format!( "{} : {}", stringify!( $src ), $crate::qt!{ #src2 } ) + }}; + } + + /// + /// Macro to export source code behind a syntax tree into a string. + /// + + #[ macro_export ] + macro_rules! code_to_str + { + ( $src:expr ) => + {{ + let src2 = &$src; + format!( "{}", $crate::qt!{ #src2 } ) + }}; + } + + /// + /// Macro to generate syn error either with span of a syntax tree element or with default one `proc_macro2::Span::call_site()`. + /// + /// ### Basic use-case. + /// ``` + /// # use macro_tools::exposed::*; + /// syn_err!( "No attr" ); + /// # () + /// ``` + /// + + #[ macro_export ] + macro_rules! syn_err + { + + ( $msg:expr $(,)? ) => + { + $crate::syn::Error::new( proc_macro2::Span::call_site(), $msg ) + }; + ( _, $msg:expr $(,)? ) => + { + $crate::syn::Error::new( proc_macro2::Span::call_site(), $msg ) + }; + ( $span:expr, $msg:expr $(,)? ) => + { + $crate::syn::Error::new( syn::spanned::Spanned::span( &( $span ) ), $msg ) + }; + ( $span:expr, $msg:expr, $( $arg:expr ),+ $(,)? ) => + { + $crate::syn::Error::new( syn::spanned::Spanned::span( &( $span ) ), format!( $msg, $( $arg ),+ ) ) + }; + ( _, $msg:expr, $( $arg:expr ),+ $(,)? ) => + { + $crate::syn::Error::new( proc_macro2::Span::call_site(), format!( $msg, $( $arg ),+ ) ) + }; + + } + + /// + /// Macro to generate syn error either with span of a syntax tree element or with default one `proc_macro2::Span::call_site()`. + /// + /// ### Basic use-case. + /// ``` + /// # use macro_tools::exposed::*; + /// syn_err!( "No attr" ); + /// # () + /// ``` + /// + + #[ macro_export ] + macro_rules! return_syn_err + { + ( $( $Arg : tt )* ) => + { + return Result::Err( $crate::syn_err!( $( $Arg )* ) ) + }; + } + + pub use + { + tree_print, + code_print, + tree_diagnostics_str, + code_diagnostics_str, + code_to_str, + syn_err, + return_syn_err, + }; + +} + +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. +pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; +} + +/// Parented namespace of the module. +pub mod orphan +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; +} + +/// Exposed namespace of the module. 
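
As a quick orientation for the diagnostics helpers introduced above (`code_print!`, `syn_err!`, `return_syn_err!` and the `Result` alias), here is a minimal usage sketch. It assumes the re-exports land as shown in this patch; `check` is an illustrative helper, not part of the crate.

```rust
use macro_tools::exposed::*;

// Illustrative helper: reject attributes whose path is not a single identifier.
fn check( attr : &syn::Attribute ) -> Result< () >
{
  // Prints the expression name and the source code behind it, e.g. `attr : #[ debug ]`.
  code_print!( attr );
  if attr.path().get_ident().is_none()
  {
    // `return_syn_err!` now returns the error itself, so no explicit `return Err( .. )` is needed.
    return_syn_err!( attr, "Expected a single-identifier attribute, got:\n{}", qt!{ #attr } );
  }
  Ok( () )
}

fn main()
{
  let attr : syn::Attribute = syn::parse_quote!( #[ debug ] );
  check( &attr ).unwrap();
}
```
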
+pub mod exposed +{ + pub use super::protected as diag; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + Result, + indentation, + debug_report_format, + debug_report_print, + }; + +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + tree_print, + code_print, + tree_diagnostics_str, + code_diagnostics_str, + code_to_str, + syn_err, + return_syn_err, + }; + + // #[ doc( inline ) ] + // pub use super::private::Result; +} diff --git a/module/core/macro_tools/src/diagnostics.rs b/module/core/macro_tools/src/diagnostics.rs deleted file mode 100644 index 4b595f7f7a..0000000000 --- a/module/core/macro_tools/src/diagnostics.rs +++ /dev/null @@ -1,242 +0,0 @@ -//! -//! Macro helpers. -//! - -/// Internal namespace. -pub( crate ) mod private -{ - // pub use winterval::exposed::*; - - /// - /// Result with syn::Error. - /// - - pub type Result< T > = std::result::Result< T, syn::Error >; - - /// - /// Macro for diagnostics purpose to print both syntax tree and source code behind it with syntax tree. - /// - /// ### Basic use-case. - /// ``` - /// use macro_tools::prelude::*; - /// - /// let code = qt!( std::collections::HashMap< i32, i32 > ); - /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - /// tree_print!( tree_type ); - /// ``` - /// - - #[ macro_export ] - macro_rules! tree_print - { - ( $src:expr ) => - {{ - let result = $crate::tree_diagnostics_str!( $src ); - println!( "{}", result ); - result - }}; - ( $( $src:expr ),+ $(,)? ) => - {{ - $( $crate::tree_print!( $src ) );+ - }}; - } - - /// - /// Macro for diagnostics purpose to print both syntax tree and source code behind it without syntax tree. - /// - /// ### Basic use-case. - /// ``` - /// use macro_tools::prelude::*; - /// - /// let code = qt!( std::collections::HashMap< i32, i32 > ); - /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - /// tree_print!( tree_type ); - /// ``` - /// - - #[ macro_export ] - macro_rules! code_print - { - ( $src:expr ) => - {{ - let result = $crate::code_diagnostics_str!( $src ); - println!( "{}", result ); - result - }}; - ( $( $src:expr ),+ $(,)? ) => - {{ - $( $crate::code_print!( $src ) );+ - }}; - } - - /// - /// Macro for diagnostics purpose to export both syntax tree and source code behind it into a string. - /// - - #[ macro_export ] - macro_rules! tree_diagnostics_str - { - ( $src:expr ) => - {{ - let src2 = &$src; - format!( "{} : {} :\n{:#?}", stringify!( $src ), $crate::qt!{ #src2 }, $src ) - }}; - } - - /// - /// Macro for diagnostics purpose to diagnose source code behind it and export it into a string. - /// - - #[ macro_export ] - macro_rules! code_diagnostics_str - { - ( $src:expr ) => - {{ - let src2 = &$src; - format!( "{} : {}", stringify!( $src ), $crate::qt!{ #src2 } ) - }}; - } - - /// - /// Macro to export source code behind a syntax tree into a string. - /// - - #[ macro_export ] - macro_rules! code_to_str - { - ( $src:expr ) => - {{ - let src2 = &$src; - format!( "{}", $crate::qt!{ #src2 } ) - }}; - } - - /// - /// Macro to generate syn error either with span of a syntax tree element or with default one `proc_macro2::Span::call_site()`. - /// - /// ### Basic use-case. - /// ``` - /// # use macro_tools::*; - /// syn_err!( "No attr" ); - /// # () - /// ``` - /// - - #[ macro_export ] - macro_rules! 
syn_err - { - - ( $msg:expr $(,)? ) => - { - $crate::syn::Error::new( proc_macro2::Span::call_site(), $msg ) - }; - ( _, $msg:expr $(,)? ) => - { - $crate::syn::Error::new( proc_macro2::Span::call_site(), $msg ) - }; - ( $span:expr, $msg:expr $(,)? ) => - { - $crate::syn::Error::new( syn::spanned::Spanned::span( &( $span ) ), $msg ) - }; - ( $span:expr, $msg:expr, $( $arg:expr ),+ $(,)? ) => - { - $crate::syn::Error::new( syn::spanned::Spanned::span( &( $span ) ), format!( $msg, $( $arg ),+ ) ) - }; - ( _, $msg:expr, $( $arg:expr ),+ $(,)? ) => - { - $crate::syn::Error::new( proc_macro2::Span::call_site(), format!( $msg, $( $arg ),+ ) ) - }; - - } - - /// - /// Macro to generate syn error either with span of a syntax tree element or with default one `proc_macro2::Span::call_site()`. - /// - /// ### Basic use-case. - /// ``` - /// # use macro_tools::*; - /// syn_err!( "No attr" ); - /// # () - /// ``` - /// - - #[ macro_export ] - macro_rules! return_syn_err - { - ( $( $Arg : tt )* ) => - { - $crate::syn_err!( $( $Arg )* ) - }; - } - - pub use - { - tree_print, - code_print, - tree_diagnostics_str, - code_diagnostics_str, - code_to_str, - syn_err, - }; - -} - -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -pub use protected::*; - -/// Protected namespace of the module. -pub mod protected -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::orphan::*; -} - -/// Parented namespace of the module. -pub mod orphan -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; -} - -/// Exposed namespace of the module. -pub mod exposed -{ - - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; - - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::private:: - { - Result, - // type_rightmost, - // type_parameters, - // eq_pair, - }; - -} - -/// Prelude to use essentials: `use my_module::prelude::*`. -pub mod prelude -{ - - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::private:: - { - tree_print, - code_print, - tree_diagnostics_str, - code_diagnostics_str, - code_to_str, - syn_err, - }; - - // #[ doc( inline ) ] - // pub use super::private::Result; -} diff --git a/module/core/macro_tools/src/generic_analyze.rs b/module/core/macro_tools/src/generic_analyze.rs index 27235a4eac..0ab68918ae 100644 --- a/module/core/macro_tools/src/generic_analyze.rs +++ b/module/core/macro_tools/src/generic_analyze.rs @@ -85,6 +85,7 @@ pub mod exposed prelude::*, private::GenericsAnalysis, }; + pub use super::protected as generic_analyze; } /// Prelude to use essentials: `use my_module::prelude::*`. diff --git a/module/core/macro_tools/src/generics.rs b/module/core/macro_tools/src/generics.rs index 7c170551f5..63f8496495 100644 --- a/module/core/macro_tools/src/generics.rs +++ b/module/core/macro_tools/src/generics.rs @@ -141,7 +141,8 @@ pub( crate ) mod private pub fn params_names( generics : &syn::Generics ) -> syn::Generics { - use syn::{ Generics, GenericParam, LifetimeDef, TypeParam, ConstParam }; + // use syn::{ Generics, GenericParam, LifetimeDef, TypeParam, ConstParam }; + use syn::{ Generics, GenericParam, LifetimeParam, TypeParam, ConstParam }; let result = Generics { @@ -156,7 +157,7 @@ pub( crate ) mod private eq_token : None, default : None, }), - GenericParam::Lifetime( LifetimeDef { lifetime, .. } ) => GenericParam::Lifetime( LifetimeDef + GenericParam::Lifetime( LifetimeParam { lifetime, .. 
} ) => GenericParam::Lifetime( LifetimeParam { attrs : Vec::new(), lifetime : lifetime.clone(), @@ -182,7 +183,6 @@ pub( crate ) mod private result } - } #[ doc( inline ) ] @@ -214,6 +214,7 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as generics; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/macro_tools/src/lib.rs b/module/core/macro_tools/src/lib.rs index f1ab422585..6bf4f43554 100644 --- a/module/core/macro_tools/src/lib.rs +++ b/module/core/macro_tools/src/lib.rs @@ -3,14 +3,26 @@ #![ doc( html_root_url = "https://docs.rs/proc_macro_tools/latest/proc_macro_tools/" ) ] #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] +#[ cfg( feature = "enabled" ) ] pub mod attr; +#[ cfg( feature = "enabled" ) ] pub mod container_kind; -pub mod diagnostics; +#[ cfg( feature = "enabled" ) ] +pub mod diag; +#[ cfg( feature = "enabled" ) ] pub mod generic_analyze; +#[ cfg( feature = "enabled" ) ] pub mod generics; +#[ cfg( feature = "enabled" ) ] pub mod name; +#[ cfg( feature = "enabled" ) ] pub mod quantifier; +#[ cfg( feature = "enabled" ) ] +pub mod tokens; +#[ cfg( feature = "enabled" ) ] pub mod typ; +#[ cfg( feature = "enabled" ) ] +pub mod type_struct; /// /// Dependencies of the module. @@ -28,9 +40,11 @@ pub mod dependency #[ doc( inline ) ] #[ allow( unused_imports ) ] +#[ cfg( feature = "enabled" ) ] pub use protected::*; /// Protected namespace of the module. +#[ cfg( feature = "enabled" ) ] pub mod protected { #[ doc( inline ) ] @@ -40,16 +54,19 @@ pub mod protected orphan::*, attr::orphan::*, container_kind::orphan::*, + diag::orphan::*, generic_analyze::orphan::*, generics::orphan::*, - diagnostics::orphan::*, name::orphan::*, quantifier::orphan::*, + tokens::orphan::*, typ::orphan::*, + type_struct::orphan::*, }; } /// Parented namespace of the module. +#[ cfg( feature = "enabled" ) ] pub mod orphan { #[ doc( inline ) ] @@ -58,6 +75,7 @@ pub mod orphan } /// Exposed namespace of the module. +#[ cfg( feature = "enabled" ) ] pub mod exposed { #[ doc( inline ) ] @@ -75,23 +93,26 @@ pub mod exposed prelude::*, attr::exposed::*, container_kind::exposed::*, + diag::exposed::*, generic_analyze::exposed::*, generics::exposed::*, - diagnostics::exposed::*, name::exposed::*, quantifier::exposed::*, + tokens::exposed::*, typ::exposed::*, + type_struct::exposed::*, }; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::quantifier:: - { - Pair, - Many, - }; + // #[ doc( inline ) ] + // #[ allow( unused_imports ) ] + // pub use super::quantifier:: + // { + // Pair, + // Many, + // }; } /// Prelude to use essentials: `use my_module::prelude::*`. 
+#[ cfg( feature = "enabled" ) ] pub mod prelude { @@ -140,12 +161,14 @@ pub mod prelude { attr::prelude::*, container_kind::prelude::*, + diag::prelude::*, generic_analyze::prelude::*, generics::prelude::*, - diagnostics::prelude::*, name::prelude::*, quantifier::prelude::*, + tokens::prelude::*, typ::prelude::*, + type_struct::prelude::*, }; } diff --git a/module/core/macro_tools/src/name.rs b/module/core/macro_tools/src/name.rs index c984f48d64..b88fc03f0a 100644 --- a/module/core/macro_tools/src/name.rs +++ b/module/core/macro_tools/src/name.rs @@ -29,7 +29,7 @@ pub( crate ) mod private // syn::Item::ForeignMod( item ) => item.name(), syn::Item::Impl( item ) => item.name(), syn::Item::Macro( item ) => item.name(), - syn::Item::Macro2( item ) => item.name(), + // syn::Item::Macro2( item ) => item.name(), syn::Item::Mod( item ) => item.name(), syn::Item::Static( item ) => item.name(), syn::Item::Struct( item ) => item.name(), @@ -124,13 +124,13 @@ pub( crate ) mod private } } - impl Name for syn::ItemMacro2 - { - fn name( &self ) -> String - { - self.ident.to_string() - } - } + // impl Name for syn::ItemMacro2 + // { + // fn name( &self ) -> String + // { + // self.ident.to_string() + // } + // } impl Name for syn::ItemMod { @@ -247,6 +247,7 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as name; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/quantifier.rs b/module/core/macro_tools/src/quantifier.rs index d6a74dfec8..d880ee9eb2 100644 --- a/module/core/macro_tools/src/quantifier.rs +++ b/module/core/macro_tools/src/quantifier.rs @@ -323,6 +323,7 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as quantifier; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/tokens.rs b/module/core/macro_tools/src/tokens.rs new file mode 100644 index 0000000000..b1740ad332 --- /dev/null +++ b/module/core/macro_tools/src/tokens.rs @@ -0,0 +1,192 @@ +//! +//! Attributes analyzys and manipulation. +//! + +/// Internal namespace. +pub( crate ) mod private +{ + use super::super::*; + use std::fmt; + + /// `Tokens` is a wrapper around `proc_macro2::TokenStream`. + /// It is designed to facilitate the parsing and manipulation of token streams + /// within procedural macros. + /// + /// # Examples + /// + /// Creating a new `Tokens` instance from a token stream : + /// + /// ```rust + /// use macro_tools::exposed::*; + /// + /// let ts : proc_macro2::TokenStream = qt! { let x = 10; }; + /// let tokens = tokens::Tokens::new( ts ); + /// ``` + #[ derive( Default ) ] + pub struct Tokens + { + /// `proc_macro2::TokenStream` + pub inner : proc_macro2::TokenStream, + } + + impl Tokens + { + /// Constructor from `proc_macro2::TokenStream`. 
+ pub fn new( inner : proc_macro2::TokenStream ) -> Self + { + Tokens { inner } + } + } + + impl syn::parse::Parse for Tokens + { + fn parse( input : syn::parse::ParseStream< '_ > ) -> syn::Result< Self > + { + let inner : proc_macro2::TokenStream = input.parse()?; + Ok( Tokens::new( inner ) ) + } + } + + impl quote::ToTokens for Tokens + { + fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream ) + { + self.inner.to_tokens( tokens ); + } + } + + impl fmt::Debug for Tokens + { + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> fmt::Result + { + write!( f, "{}", self.inner.to_string() ) + } + } + + impl std::fmt::Display for Tokens + { + fn fmt( &self, f : &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result + { + write!( f, "{}", self.inner.to_string() ) + } + } + + /// Represents an equation parsed from a procedural macro input. + /// + /// This struct models an equation consisting of a left-hand side, an operator, + /// and a right-hand side. The `Equation` is typically constructed during the + /// parsing process of macro input, where the `left` and `op` fields are expected + /// to be syntactically represented by `syn::Path` and `syn::BinOp` respectively, + /// indicating the variable and operation involved. The `right` field is a + /// `proc_macro2::TokenStream`, which can represent more complex expressions + /// including, but not limited to, literals, function calls, or further operations. + /// + /// # Fields + /// - `left`: The left-hand side of the equation, represented as a path. + /// This could be a variable or a more complex path in the code being + /// processed by the macro. + /// + /// - `op`: The binary operator used in the equation, such as addition, + /// subtraction, multiplication, etc. + /// + /// - `right`: The right-hand side of the equation. Given the potential + /// complexity of expressions on this side, it is represented as a + /// `proc_macro2::TokenStream` to accommodate any valid Rust expression. + /// + /// # Examples + /// + /// Parsing an equation from macro input: + /// + /// ```rust + /// use macro_tools::exposed::*; + /// let got : tokens::Equation = syn::parse_quote!( default = 31 ); + /// tree_print!( got ); + /// assert_eq!( code_to_str!( got ), "default = 31".to_string() ); + /// ``` + #[ derive( Debug ) ] + pub struct Equation + { + /// The LHS of the equation, represented by a syntactic path. + pub left : syn::Path, + // /// The binary operator (e.g., +, -, *, /) of the equation. + // pub op : syn::BinOp, + /// Equality token. + pub op : syn::Token![ = ], + /// The RHS of the equation, capable of holding complex expressions. 
+ pub right : proc_macro2::TokenStream, + } + + impl syn::parse::Parse for Equation + { + fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > + { + let left : syn::Path = input.parse()?; + let op : syn::Token![ = ] = input.parse()?; + let right : proc_macro2::TokenStream = input.parse()?; + Ok( Equation { left, op, right } ) + } + } + + impl quote::ToTokens for Equation + { + fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream ) + { + self.left.to_tokens( tokens ); + self.op.to_tokens( tokens ); + self.right.to_tokens( tokens ); + } + } + + // impl std::fmt::Display for Equation + // { + // fn fmt( &self, f : &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result + // { + // write!( f, "{}", self.left.to_string() ); + // write!( f, "{}", self.op.to_string() ); + // write!( f, "{}", self.right.to_string() ) + // } + // } + +} + +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. +pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; +} + +/// Orphan namespace of the module. +pub mod orphan +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; +} + +/// Exposed namespace of the module. +pub mod exposed +{ + pub use super::protected as tokens; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + Tokens, + Equation, + }; +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ +} + diff --git a/module/core/macro_tools/src/typ.rs b/module/core/macro_tools/src/typ.rs index 8019538c57..81b48b675b 100644 --- a/module/core/macro_tools/src/typ.rs +++ b/module/core/macro_tools/src/typ.rs @@ -7,8 +7,6 @@ pub( crate ) mod private { use super::super::*; use interval_adapter::BoundExt; - // use crate::exposed::{ Pair, Many }; - // use crate::Result; /// Check is the rightmost item of path refering a type is specified type. /// @@ -16,12 +14,12 @@ pub( crate ) mod private /// Good to verify `alloc::vec::Vec< i32 >` is vector. /// /// ### Basic use-case. - /// ``` - /// use macro_tools::*; + /// ```rust + /// use macro_tools::exposed::*; /// /// let code = qt!( core::option::Option< i32 > ); /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - /// let got = type_rightmost( &tree_type ); + /// let got = typ::type_rightmost( &tree_type ); /// assert_eq!( got, Some( "Option".to_string() ) ); /// ``` @@ -45,11 +43,11 @@ pub( crate ) mod private /// /// ### Basic use-case. /// ``` - /// use macro_tools::*; + /// use macro_tools::{ typ, qt }; /// /// let code = qt!( core::option::Option< i8, i16, i32, i64 > ); /// let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - /// let got = type_parameters( &tree_type, 0..=2 ); + /// let got = typ::type_parameters( &tree_type, 0..=2 ); /// got.iter().for_each( | e | println!( "{}", qt!( #e ) ) ); /// // < i8 /// // < i16 @@ -104,6 +102,14 @@ pub mod protected #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::orphan::*; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + type_rightmost, + type_parameters, + // xxx : rename + }; } /// Orphan namespace of the module. @@ -117,16 +123,10 @@ pub mod orphan /// Exposed namespace of the module. 
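
To show how the new `Equation` type connects with `attr::equation` from earlier in this patch, here is a small sketch. It assumes the re-exports shown above; parsing the right-hand side further into a `syn::LitInt` is illustrative, not prescribed by the crate.

```rust
use macro_tools::exposed::*;

fn main()
{
  let attr : syn::Attribute = syn::parse_quote!( #[ former( default = 31 ) ] );
  // `equation` parses the attribute body into a `tokens::Equation`.
  let eq = equation( &attr ).unwrap();
  assert_eq!( code_to_str!( eq.left ), "default".to_string() );
  // The right-hand side stays a raw token stream and can be parsed further as needed.
  let lit : syn::LitInt = syn::parse2( eq.right.clone() ).unwrap();
  assert_eq!( lit.base10_parse::< i32 >().unwrap(), 31 );
}
```
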
pub mod exposed { + pub use super::protected as typ; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::private:: - { - type_rightmost, - type_parameters, - }; } /// Prelude to use essentials: `use my_module::prelude::*`. diff --git a/module/core/macro_tools/src/type_struct.rs b/module/core/macro_tools/src/type_struct.rs new file mode 100644 index 0000000000..0120ac9e6e --- /dev/null +++ b/module/core/macro_tools/src/type_struct.rs @@ -0,0 +1,223 @@ +//! +//! Parse structures, like `struct { a : i32 }`. +//! + +/// Internal namespace. +pub( crate ) mod private +{ + use super::super::*; + // use interval_adapter::BoundExt; + + // xxx : raname to Parsed + + /// Represents the outcome of parsing a Rust `struct` definition. + /// + /// This structure encapsulates details extracted from a structure definition, + /// such as the structure itself, its name, and its fields. It provides a comprehensive + /// view of a parsed structure, facilitating further processing or analysis of its components. + #[ derive( Debug ) ] + pub struct TypeStructParsed + { + /// The parsed structure item, encompassing the entire `struct`. + pub item : syn::ItemStruct, + /// Identifier of the struct, useful for referencing in generated code. + pub item_name : syn::Ident, + /// Collection of struct's fields, including visibility, attributes, and types. + pub fields : syn::Fields, + + // // xxx : rid off fields below. them are deduced from fields and should be implemented with function + // /// Collection of fields for convenient iteration. Planned for deprecation. + // pub fields_many : Many< syn::Field >, + // /// Types of each field in a vector for easy access. Planned for deprecation. + // pub field_types: Vec< syn::Type >, + // /// Names of each field if available, otherwise `None`. Planned for deprecation. + // pub field_names: Option< Vec< syn::Ident > >, + } + + impl TypeStructParsed + { + + /// Returns a vector of the struct's fields for iteration. + pub fn fields_many( &self ) -> Vec< syn::Field > + { + match &self.fields + { + syn::Fields::Unnamed( fields ) => fields.unnamed.iter().cloned().collect(), + syn::Fields::Named( fields ) => fields.named.iter().cloned().collect(), + syn::Fields::Unit => Vec::new(), + } + } + + /// Extracts the types of each field into a vector. + pub fn field_types( &self ) -> Vec< syn::Type > + { + self.fields_many().iter().map( |field| field.ty.clone() ).collect() + } + + /// Retrieves the names of each field, if they exist. + pub fn field_names( &self ) -> Option< Vec< syn::Ident > > + { + let names: Vec< Option< syn::Ident > > = self.fields_many().iter().map( |field| field.ident.clone() ).collect(); + if names.iter().any( Option::is_none ) + { + None + } + else + { + Some( names.into_iter().filter_map( core::convert::identity ).collect() ) + } + } + + /// Retrieves the type of the first field of the struct. + /// + /// Returns the type if the struct has at least one field, otherwise returns an error. 
+ pub fn first_field_type( &self ) -> Result< syn::Type > + { + let maybe_field = match self.fields + { + syn::Fields::Named( ref fields ) => fields.named.first(), + syn::Fields::Unnamed( ref fields ) => fields.unnamed.first(), + _ => return Err( syn_err!( self.fields.span(), "Expects fields" ) ), + }; + + // let maybe_field = self.fields.0.first(); + // let maybe_field = self.fields; + + if let Some( field ) = maybe_field + { + return Ok( field.ty.clone() ) + } + + return Err( syn_err!( self.item.span(), "Expects type for fields" ) ); + } + + /// Retrieves the name of the first field of the struct, if available. + /// + /// Returns `Some` with the field identifier for named fields, or `None` for unnamed fields. + /// Returns an error if the struct has no fields + pub fn first_field_name( &self ) -> Result< Option< syn::Ident > > + { + let maybe_field = match self.fields + { + syn::Fields::Named( ref fields ) => fields.named.first(), + syn::Fields::Unnamed( ref fields ) => fields.unnamed.first(), + _ => return Err( syn_err!( self.fields.span(), "Expects fields" ) ), + }; + + if let Some( field ) = maybe_field + { + return Ok( field.ident.clone() ) + } + + return Err( syn_err!( self.item.span(), "Expects type for fields" ) ); + } + } + + // + + impl syn::parse::Parse for TypeStructParsed + { + // qqq : write proper documentation with examples of input + + // # example of input + // + // pub struct IsTransparent( bool ); + // + fn parse( input : ParseStream< '_ > ) -> Result< Self > + { + let item : syn::ItemStruct = input.parse()?; + + let item_name = item.ident.clone(); + let fields = item.fields.clone(); + +// let fields_many : Vec< syn::Field > = match item.fields +// { +// syn::Fields::Unnamed( ref fields ) => { fields.unnamed.iter().cloned().collect() }, +// syn::Fields::Named( ref fields ) => { fields.named.iter().cloned().collect() }, +// _ => return Ok( Self { item, item_name, fields, fields_many: Many(vec![]), field_types: vec![], field_names: None } ), +// }; +// +// let fields_many = fields_many.into(); +// let field_types = field_types( &fields_many )?; +// let field_names = field_names( &fields_many )?; +// Ok( Self { item, item_name, fields, fields_many, field_types, field_names } ) + + Ok( Self { item, item_name, fields } ) + } + } + + // + + impl quote::ToTokens for TypeStructParsed + { + fn to_tokens( &self, tokens : &mut proc_macro2::TokenStream ) + { + self.item.to_tokens( tokens ); + } + } + +// fn field_types( fields : &Many< syn::Field > ) -> Result< Vec< syn::Type> > +// { +// let mut field_types : Vec< syn::Type > = vec![]; +// for elem in fields +// { +// field_types.push( elem.ty.clone() ); +// } +// Ok( field_types ) +// } +// +// fn field_names( fields : &Many< syn::Field > ) -> Result< Option< Vec< syn::Ident > > > +// { +// let mut field_names : Vec< syn::Ident > = vec![]; +// for elem in fields +// { +// if let Some( ident ) = &elem.ident +// { +// field_names.push( ident.clone() ); +// } +// else +// { +// return Ok( None ); +// } +// } +// Ok( Some( field_names ) ) +// } + +} + +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. +pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private::TypeStructParsed; +} + +/// Orphan namespace of the module. 
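
Here is a hedged usage sketch for `TypeStructParsed`, built around the tuple-struct input hinted at in the comments above (`pub struct IsTransparent( bool );`); the access paths assume the re-exports introduced by this patch.

```rust
use macro_tools::exposed::*;

fn main()
{
  let parsed : type_struct::TypeStructParsed = syn::parse_quote!
  {
    pub struct IsTransparent( bool );
  };
  assert_eq!( parsed.item_name.to_string(), "IsTransparent" );
  // One unnamed field of type `bool`, so there are no field names to return.
  assert_eq!( parsed.field_types().len(), 1 );
  assert!( parsed.field_names().is_none() );
  let first = parsed.first_field_type().unwrap();
  println!( "{}", qt!( #first ) ); // bool
}
```

For a struct with named fields the same calls apply, with `field_names()` returning `Some` of the identifiers instead of `None`.
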
+pub mod orphan +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; +} + +/// Exposed namespace of the module. +pub mod exposed +{ + pub use super::protected as type_struct; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ +} diff --git a/module/core/macro_tools/tests/inc/attr_test.rs b/module/core/macro_tools/tests/inc/attr_test.rs index 0d1543dcf5..942289e7b3 100644 --- a/module/core/macro_tools/tests/inc/attr_test.rs +++ b/module/core/macro_tools/tests/inc/attr_test.rs @@ -7,9 +7,18 @@ use super::*; fn basic() { + let attr : syn::Attribute = syn::parse_quote!( #[ default( 31 ) ] ); + tree_print!( attr ); + + let attr : syn::Attribute = syn::parse_quote!( #[ default[ 31 ] ] ); + tree_print!( attr ); + let attr : syn::Attribute = syn::parse_quote!( #[ former( default = 31 ) ] ); - let ( key, val, _meta ) = attr::eq_pair( &attr ).unwrap(); - assert_eq!( key, "default" ); - assert_eq!( val, syn::Lit::Int( syn::LitInt::new( "31", proc_macro2::Span::call_site() ) ) ); + // tree_print!( attr ); + let got = equation( &attr ).unwrap(); + a_id!( code_to_str!( got ), "default = 31".to_string() ); + a_id!( got.left, syn::parse_quote!( default ) ); + a_id!( got.op, syn::token::Eq::default() ); + a_id!( code_to_str!( got.right ), "31".to_string() ); } diff --git a/module/core/macro_tools/tests/inc/basic_test.rs b/module/core/macro_tools/tests/inc/basic_test.rs index 947843e78f..0da1743b07 100644 --- a/module/core/macro_tools/tests/inc/basic_test.rs +++ b/module/core/macro_tools/tests/inc/basic_test.rs @@ -126,72 +126,73 @@ TokenStream [ fn type_container_kind_basic() { + use TheModule::exposed::container_kind; // test.case( "core::option::Option< i32 >" ); let code = qt!( core::option::Option< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::No ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::No ); // test.case( "core::option::Option< Vec >" ); let code = qt!( core::option::Option< Vec > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::No ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::No ); // test.case( "alloc::vec::Vec< i32 >" ); let code = qt!( alloc::vec::Vec< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::Vector ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::Vector ); // test.case( "alloc::vec::Vec" ); let code = qt!( alloc::vec::Vec ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::Vector ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::Vector ); // test.case( "std::vec::Vec< i32 >" ); let code = qt!( std::vec::Vec< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::Vector ); + let got = container_kind::of_type( &tree_type ); + 
a_id!( got, TheModule::container_kind::ContainerKind::Vector ); // test.case( "std::vec::Vec" ); let code = qt!( std::vec::Vec ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::Vector ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::Vector ); // test.case( "std::Vec< i32 >" ); let code = qt!( std::Vec< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::Vector ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::Vector ); // test.case( "std::Vec" ); let code = qt!( std::Vec ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::Vector ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::Vector ); // test.case( "not vector" ); let code = qt!( std::SomeVector< i32, i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::No ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::No ); // test.case( "hash map" ); let code = qt!( std::collections::HashMap< i32, i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::HashMap ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::HashMap ); // test.case( "hash set" ); let code = qt!( std::collections::HashSet< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_container_kind( &tree_type ); - a_id!( got, TheModule::ContainerKind::HashSet ); + let got = container_kind::of_type( &tree_type ); + a_id!( got, TheModule::container_kind::ContainerKind::HashSet ); } @@ -203,77 +204,77 @@ TokenStream [ // test.case( "non optional not container" ); let code = qt!( i32 ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::No, false ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::No, false ) ); // test.case( "optional not container" ); let code = qt!( core::option::Option< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::No, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::No, true ) ); // test.case( "optional not container" ); let code = qt!( Option< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::No, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::No, true ) ); // test.case( "optional vector" ); let code = qt!( core::option::Option< Vec > ); let tree_type = 
syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::Vector, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::Vector, true ) ); // test.case( "optional vector" ); let code = qt!( Option< Vec > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::Vector, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::Vector, true ) ); // test.case( "non optional vector" ); let code = qt!( std::Vec< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::Vector, false ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::Vector, false ) ); // test.case( "optional vector" ); let code = qt!( core::option::Option< std::collections::HashMap< i32, i32 > > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::HashMap, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::HashMap, true ) ); // test.case( "optional vector" ); let code = qt!( Option< HashMap > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::HashMap, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::HashMap, true ) ); // test.case( "non optional vector" ); let code = qt!( HashMap< i32, i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::HashMap, false ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::HashMap, false ) ); // test.case( "optional vector" ); let code = qt!( core::option::Option< std::collections::HashSet< i32, i32 > > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::HashSet, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::HashSet, true ) ); // test.case( "optional vector" ); let code = qt!( Option< HashSet > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::HashSet, true ) ); + let got = TheModule::container_kind::of_optional( &tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::HashSet, true ) ); // test.case( "non optional vector" ); let code = qt!( HashSet< i32, i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_optional_container_kind( &tree_type ); - a_id!( got, ( TheModule::ContainerKind::HashSet, false ) ); + let got = TheModule::container_kind::of_optional( 
&tree_type ); + a_id!( got, ( TheModule::container_kind::ContainerKind::HashSet, false ) ); } @@ -285,7 +286,7 @@ TokenStream [ // test.case( "core::option::Option< i32 >" ); let code = qt!( core::option::Option< i32 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got = TheModule::type_rightmost( &tree_type ); + let got = TheModule::typ::type_rightmost( &tree_type ); a_id!( got, Some( "Option".to_string() ) ); } @@ -307,36 +308,36 @@ TokenStream [ let code = qt!( core::option::Option< i8, i16, i32, i64 > ); let tree_type = syn::parse2::< syn::Type >( code ).unwrap(); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, 0..=0 ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, 0..=0 ).into_iter().cloned().collect(); let exp = vec![ q!( i8 ) ]; a_id!( got, exp ); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, 0..=1 ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, 0..=1 ).into_iter().cloned().collect(); let exp = vec![ q!( i8 ), q!( i16 ) ]; a_id!( got, exp ); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, 0..=2 ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, 0..=2 ).into_iter().cloned().collect(); let exp = vec![ q!( i8 ), q!( i16 ), q!( i32 ) ]; a_id!( got, exp ); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, 0..0 ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, 0..0 ).into_iter().cloned().collect(); let exp : Vec< syn::Type > = vec![]; a_id!( got, exp ); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, 0..1 ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, 0..1 ).into_iter().cloned().collect(); let exp = vec![ q!( i8 ) ]; a_id!( got, exp ); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, 0..2 ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, 0..2 ).into_iter().cloned().collect(); let exp = vec![ q!( i8 ), q!( i16 ) ]; a_id!( got, exp ); // unbound - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, .. ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, .. ).into_iter().cloned().collect(); let exp = vec![ q!( i8 ), q!( i16 ), q!( i32 ), q!( i64 ) ]; a_id!( got, exp ); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, .. ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, .. ).into_iter().cloned().collect(); let exp = vec![ q!( i8 ), q!( i16 ), q!( i32 ), q!( i64 ) ]; a_id!( got, exp ); - let got : Vec< syn::Type > = TheModule::type_parameters( &tree_type, .. ).into_iter().cloned().collect(); + let got : Vec< syn::Type > = TheModule::typ::type_parameters( &tree_type, .. 
).into_iter().cloned().collect(); let exp = vec![ q!( i8 ), q!( i16 ), q!( i32 ), q!( i64 ) ]; a_id!( got, exp ); @@ -344,7 +345,7 @@ TokenStream [ // - // fn eq_pair( attr : &syn::Attribute ) -> Result< ( String, syn::Lit, syn::Meta ), syn::Error > + // fn equation( attr : &syn::Attribute ) -> Result< ( String, syn::Lit, syn::Meta ), syn::Error > // qqq : xxx : fix // #[test] @@ -384,7 +385,7 @@ TokenStream [ // // let attr = fields.first().ok_or_else( || err( "No field" ) )?.attrs.first().ok_or_else( || err( "No attr" ) )?; // - // let ( key, val, meta ) = TheModule::eq_pair( &attr )?; + // let ( key, val, meta ) = TheModule::equation( &attr )?; // a_id!( key, "default".to_string() ); // a_id!( qt!( #val ).to_string(), "31".to_string() ); // let is = match meta diff --git a/module/core/macro_tools/tests/inc/mod.rs b/module/core/macro_tools/tests/inc/mod.rs index c910532cc9..c49284e219 100644 --- a/module/core/macro_tools/tests/inc/mod.rs +++ b/module/core/macro_tools/tests/inc/mod.rs @@ -5,9 +5,9 @@ use super::*; use test_tools::exposed::*; #[ allow( unused_imports ) ] -use TheModule::prelude::*; -#[ allow( unused_imports ) ] -use TheModule::{ qt, Result }; +use TheModule::exposed::*; +// #[ allow( unused_imports ) ] +// use TheModule::{ qt, Result }; mod attr_test; #[ cfg( not( feature = "no_std" ) ) ] @@ -15,3 +15,4 @@ mod basic_test; mod generics_test; mod quantifier_test; mod syntax_test; +mod tokens_test; diff --git a/module/core/macro_tools/tests/inc/tokens_test.rs b/module/core/macro_tools/tests/inc/tokens_test.rs new file mode 100644 index 0000000000..cf8b8b5797 --- /dev/null +++ b/module/core/macro_tools/tests/inc/tokens_test.rs @@ -0,0 +1,34 @@ + +use super::*; + +// + +#[ test ] +fn tokens() +{ + + let got : TheModule::Tokens = syn::parse_quote!( a = b ); + // tree_print!( got ); + a_id!( got.to_string(), "a = b".to_string() ); + + let got : TheModule::Tokens = syn::parse_quote!( #[ former( default = 31 ) ] ); + // tree_print!( got ); + a_id!( got.to_string(), "# [former (default = 31)]".to_string() ); + +} + +// + +#[ test ] +fn equation() +{ + + let got : TheModule::Equation = syn::parse_quote!( default = 31 ); + tree_print!( got ); + a_id!( code_to_str!( got ), "default = 31".to_string() ); + + a_id!( got.left, syn::parse_quote!( default ) ); + a_id!( got.op, syn::token::Eq::default() ); + a_id!( code_to_str!( got.right ), "31".to_string() ); + +} diff --git a/module/core/macro_tools/tests/tests.rs b/module/core/macro_tools/tests/tests.rs index 7759ff07d4..c9e40f82e3 100644 --- a/module/core/macro_tools/tests/tests.rs +++ b/module/core/macro_tools/tests/tests.rs @@ -1,6 +1,6 @@ use macro_tools as TheModule; #[ allow( unused_imports ) ] -use macro_tools::*; +use macro_tools::exposed::*; #[ allow( unused_imports ) ] use test_tools::exposed::*; diff --git a/module/core/mod_interface/Cargo.toml b/module/core/mod_interface/Cargo.toml index d50c3efe07..239bbfb69f 100644 --- a/module/core/mod_interface/Cargo.toml +++ b/module/core/mod_interface/Cargo.toml @@ -23,7 +23,6 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false - exclude = [ "/tests", "/examples", "-*" ] [features] @@ -36,16 +35,15 @@ enabled = [ "mod_interface_meta/enabled" ] # keep these examples in directories [[example]] -name = "mod_interface_trivial_sample" -path = "examples/mod_interface_trivial_sample/src/main.rs" +name = "mod_interface_trivial" +path = "examples/mod_interface_trivial/src/main.rs" [[example]] -name = "mod_interface_with_debug_sample" -path = 
"examples/mod_interface_with_debug_sample/src/main.rs" +name = "mod_interface_debug" +path = "examples/mod_interface_debug/src/main.rs" [dependencies] mod_interface_meta = { workspace = true } -# mod_interface_runtime = { workspace = true } [dev-dependencies] test_tools = { workspace = true } diff --git a/module/core/mod_interface/examples/mod_interface_trivial_sample/Cargo.toml b/module/core/mod_interface/examples/mod_interface_debug/Cargo.toml similarity index 51% rename from module/core/mod_interface/examples/mod_interface_trivial_sample/Cargo.toml rename to module/core/mod_interface/examples/mod_interface_debug/Cargo.toml index 0de84e9b7a..cc123adbf0 100644 --- a/module/core/mod_interface/examples/mod_interface_trivial_sample/Cargo.toml +++ b/module/core/mod_interface/examples/mod_interface_debug/Cargo.toml @@ -1,12 +1,8 @@ [package] -name = "mod_interface_trivial_sample" +name = "mod_interface_debug" version = "0.0.0" edition = "2021" publish = false -[[bin]] -name = "mod_interface_trivial_sample" -path = "src/main.rs" - [dependencies] mod_interface = { workspace = true } diff --git a/module/core/mod_interface/examples/mod_interface_with_debug_sample/Readme.md b/module/core/mod_interface/examples/mod_interface_debug/Readme.md similarity index 100% rename from module/core/mod_interface/examples/mod_interface_with_debug_sample/Readme.md rename to module/core/mod_interface/examples/mod_interface_debug/Readme.md diff --git a/module/core/mod_interface/examples/mod_interface_trivial_sample/src/inner.rs b/module/core/mod_interface/examples/mod_interface_debug/src/inner.rs similarity index 100% rename from module/core/mod_interface/examples/mod_interface_trivial_sample/src/inner.rs rename to module/core/mod_interface/examples/mod_interface_debug/src/inner.rs diff --git a/module/core/mod_interface/examples/mod_interface_with_debug_sample/src/main.rs b/module/core/mod_interface/examples/mod_interface_debug/src/main.rs similarity index 100% rename from module/core/mod_interface/examples/mod_interface_with_debug_sample/src/main.rs rename to module/core/mod_interface/examples/mod_interface_debug/src/main.rs diff --git a/module/core/mod_interface/examples/mod_interface_with_debug_sample/Cargo.toml b/module/core/mod_interface/examples/mod_interface_trivial/Cargo.toml similarity index 51% rename from module/core/mod_interface/examples/mod_interface_with_debug_sample/Cargo.toml rename to module/core/mod_interface/examples/mod_interface_trivial/Cargo.toml index cab7c792a9..81fc70675c 100644 --- a/module/core/mod_interface/examples/mod_interface_with_debug_sample/Cargo.toml +++ b/module/core/mod_interface/examples/mod_interface_trivial/Cargo.toml @@ -1,12 +1,8 @@ [package] -name = "mod_interface_with_debug_sample" +name = "mod_interface_trivial" version = "0.0.0" edition = "2021" publish = false -[[bin]] -name = "mod_interface_trivial_sample" -path = "src/main.rs" - [dependencies] mod_interface = { workspace = true } diff --git a/module/core/mod_interface/examples/mod_interface_trivial_sample/Readme.md b/module/core/mod_interface/examples/mod_interface_trivial/Readme.md similarity index 100% rename from module/core/mod_interface/examples/mod_interface_trivial_sample/Readme.md rename to module/core/mod_interface/examples/mod_interface_trivial/Readme.md diff --git a/module/core/mod_interface/examples/mod_interface_with_debug_sample/src/inner.rs b/module/core/mod_interface/examples/mod_interface_trivial/src/inner.rs similarity index 100% rename from 
module/core/mod_interface/examples/mod_interface_with_debug_sample/src/inner.rs rename to module/core/mod_interface/examples/mod_interface_trivial/src/inner.rs diff --git a/module/core/mod_interface/examples/mod_interface_trivial_sample/src/main.rs b/module/core/mod_interface/examples/mod_interface_trivial/src/main.rs similarity index 100% rename from module/core/mod_interface/examples/mod_interface_trivial_sample/src/main.rs rename to module/core/mod_interface/examples/mod_interface_trivial/src/main.rs diff --git a/module/core/mod_interface/src/lib.rs b/module/core/mod_interface/src/lib.rs index f5b4f39cb4..582ad8e8c9 100644 --- a/module/core/mod_interface/src/lib.rs +++ b/module/core/mod_interface/src/lib.rs @@ -2,17 +2,6 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/mod_interface/latest/mod_interface/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -// #![ feature( type_name_of_val ) ] -// #![ feature( trace_macros ) ] - -//! -//! Protocol of modularity unifying interface of a module and introducing layers. -//! - #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] /// Namespace with dependencies. @@ -23,6 +12,11 @@ pub mod dependency pub use mod_interface_meta; } +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +#[ cfg( feature = "enabled" ) ] +pub use protected::*; + /// Protected namespace of the module. #[ cfg( feature = "enabled" ) ] pub mod protected @@ -31,20 +25,12 @@ pub mod protected #[ allow( unused_imports ) ] pub use super::orphan::*; - // #[ doc( inline ) ] -#[ allow( unused_imports ) ] - // pub use mod_interface_runtime as runtime; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use mod_interface_meta as meta; } -#[ doc( inline ) ] -#[ allow( unused_imports ) ] -#[ cfg( feature = "enabled" ) ] -pub use protected::*; - /// Orphan namespace of the module. 
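The lib.rs hunk above reorders mod_interface's own namespaces so that `pub use protected::*` sits ahead of the namespace definitions. For readers unfamiliar with the pattern, here is a reduced, hand-written sketch of the protected/orphan/exposed/prelude layering; the module contents and the `routine` item are placeholders, not part of the crate.

```rust
// Reduced sketch of the namespace layering; contents are placeholders.
pub use protected::*;

/// Protected namespace : everything the module owns.
pub mod protected
{
  pub use super::orphan::*;
}

/// Orphan namespace : items a parent layer should re-export.
pub mod orphan
{
  pub use super::exposed::*;
}

/// Exposed namespace : items available via `exposed::*`.
pub mod exposed
{
  pub use super::prelude::*;
}

/// Prelude : essentials only.
pub mod prelude
{
  pub fn routine() {}
}

fn main()
{
  // each layer re-exports the next, so `routine` is reachable from the root
  routine();
  prelude::routine();
}
```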
#[ cfg( feature = "enabled" ) ] pub mod orphan diff --git a/module/core/mod_interface/tests/inc/derive/mod.rs b/module/core/mod_interface/tests/inc/derive/mod.rs deleted file mode 100644 index 7ca2f7ea81..0000000000 --- a/module/core/mod_interface/tests/inc/derive/mod.rs +++ /dev/null @@ -1,29 +0,0 @@ - -use super::*; - -// xxx : uncomment - -// micro module -mod micro_modules; -mod micro_modules_two; -mod micro_modules_two_joined; - -// layer -mod layer; -mod layer_have_layer; -mod layer_have_layer_separate_use; -mod layer_have_layer_separate_use_two; -mod layer_have_layer_cfg; -mod layer_have_mod_cfg; -mod layer_use_cfg; -mod layer_use_macro; - -mod use_layer; -mod use_basic; -#[ path = "./use_as/derive.rs" ] -mod use_as_derive; -#[ path = "./use_as/manual.rs" ] -mod use_as_manual; - -// attr -mod attr_debug; diff --git a/module/core/mod_interface/tests/inc/manual/mod.rs b/module/core/mod_interface/tests/inc/manual/mod.rs deleted file mode 100644 index c079ea2955..0000000000 --- a/module/core/mod_interface/tests/inc/manual/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -use super::*; - -mod micro_modules; -mod micro_modules_two; -mod layer; -mod layer_use; diff --git a/module/core/mod_interface/tests/inc/mod.rs b/module/core/mod_interface/tests/inc/mod.rs index 7408098435..5d8aaa7045 100644 --- a/module/core/mod_interface/tests/inc/mod.rs +++ b/module/core/mod_interface/tests/inc/mod.rs @@ -1,7 +1,48 @@ #[ allow( unused_imports ) ] use super::*; -// xxx : uncomment -mod manual; -mod derive; +mod manual +{ + + use super::*; + + mod micro_modules; + mod micro_modules_two; + mod layer; + mod layer_use; + +} + +mod derive +{ + + use super::*; + + // micro module + mod micro_modules; + mod micro_modules_two; + mod micro_modules_two_joined; + + // layer + mod layer; + mod layer_have_layer; + mod layer_have_layer_separate_use; + mod layer_have_layer_separate_use_two; + mod layer_have_layer_cfg; + mod layer_have_mod_cfg; + mod layer_use_cfg; + mod layer_use_macro; + + mod use_layer; + mod use_basic; + #[ path = "./use_as/derive.rs" ] + mod use_as_derive; + #[ path = "./use_as/manual.rs" ] + mod use_as_manual; + + // attr + mod attr_debug; + +} + mod trybuild_test; diff --git a/module/core/mod_interface/tests/inc/trybuild_test.rs b/module/core/mod_interface/tests/inc/trybuild_test.rs index 931af49ce2..ba2083ed62 100644 --- a/module/core/mod_interface/tests/inc/trybuild_test.rs +++ b/module/core/mod_interface/tests/inc/trybuild_test.rs @@ -5,70 +5,59 @@ use super::*; // #[ cfg_attr( feature = "enabled", module_mod_interface ) ] -// only_for_terminal_module! -// { - - // #[ cfg( module_mod_interface ) ] - // #[ cfg( module_is_terminal ) ] - #[ test_tools::nightly ] - // #[ cfg( RUSTC_IS_NIGHTLY ) ] - tests_impls! 
- { - - fn trybuild_tests() - { - // use test_tools::dependency::trybuild; - println!( "current_dir : {:?}", std::env::current_dir().unwrap() ); - // let t = trybuild::TestCases::new(); - let t = test_tools::compiletime::TestCases::new(); - - let current_exe_path = std::env::current_exe().expect( "No such file or directory" ); +// #[ cfg( module_mod_interface ) ] +// #[ cfg( module_is_terminal ) ] +#[ test_tools::nightly ] +#[ test ] +fn trybuild_tests() +{ + // use test_tools::dependency::trybuild; + println!( "current_dir : {:?}", std::env::current_dir().unwrap() ); + // let t = trybuild::TestCases::new(); + let t = test_tools::compiletime::TestCases::new(); - let exe_directory = current_exe_path.parent().expect( "No such file or directory" ); - fn find_workspace_root( start_path : &std::path::Path ) -> Option< &std::path::Path > - { - start_path - .ancestors() - .find( |path| path.join( "Cargo.toml" ).exists() ) - } + let current_exe_path = std::env::current_exe().expect( "No such file or directory" ); - let workspace_root = find_workspace_root( exe_directory ).expect( "No such file or directory" ); - let current_dir = workspace_root.join( "module/core/mod_interface" ); + let exe_directory = current_exe_path.parent().expect( "No such file or directory" ); + fn find_workspace_root( start_path : &std::path::Path ) -> Option< &std::path::Path > + { + start_path + .ancestors() + .find( |path| path.join( "Cargo.toml" ).exists() ) + } - // micro module + let workspace_root = find_workspace_root( exe_directory ).expect( "No such file or directory" ); + let current_dir = workspace_root.join( "module/core/mod_interface" ); - t.pass( current_dir.join( "tests/inc/derive/micro_modules/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/micro_modules_two/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/micro_modules_two_joined/trybuild.rs" ) ); + // micro module - // layer + t.pass( current_dir.join( "tests/inc/derive/micro_modules/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/micro_modules_two/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/micro_modules_two_joined/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer_have_layer/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer_have_layer_separate_use/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer_have_layer_separate_use_two/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer_have_layer_cfg/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer_use_cfg/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer_have_mod_cfg/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/layer_use_macro/trybuild.rs" ) ); + // layer - // use + t.pass( current_dir.join( "tests/inc/derive/layer/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/layer_have_layer/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/layer_have_layer_separate_use/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/layer_have_layer_separate_use_two/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/layer_have_layer_cfg/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/layer_use_cfg/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/layer_have_mod_cfg/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/layer_use_macro/trybuild.rs" ) ); - t.pass( 
current_dir.join( "tests/inc/derive/use_basic/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/use_layer/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/use_as/trybuild.rs" ) ); + // use - // attr + t.pass( current_dir.join( "tests/inc/derive/use_basic/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/use_layer/trybuild.rs" ) ); + t.pass( current_dir.join( "tests/inc/derive/use_as/trybuild.rs" ) ); - t.pass( current_dir.join( "tests/inc/derive/attr_debug/trybuild.rs" ) ); + // attr - // - } + t.pass( current_dir.join( "tests/inc/derive/attr_debug/trybuild.rs" ) ); - } - -// #[ path="../../../../../module/step/meta/src/module/aggregating.rs" ] -// mod aggregating; + // +} use crate::only_for_terminal_module; @@ -82,11 +71,11 @@ only_for_terminal_module! println!( "current_dir : {:?}", std::env::current_dir().unwrap() ); // let t = trybuild::TestCases::new(); let t = test_tools::compiletime::TestCases::new(); - + let current_exe_path = std::env::current_exe().expect( "No such file or directory" ); let exe_directory = current_exe_path.parent().expect( "No such file or directory" ); - fn find_workspace_root( start_path : &std::path::Path ) -> Option< &std::path::Path > + fn find_workspace_root( start_path : &std::path::Path ) -> Option< &std::path::Path > { start_path .ancestors() @@ -104,14 +93,3 @@ only_for_terminal_module! t.compile_fail( current_dir.join( "tests/inc/derive/use_unknown_vis/trybuild.rs" ) ); } } - -// #[ cfg( module_mod_interface ) ] -// #[ cfg( module_is_terminal ) ] -// #[ cfg( RUSTC_IS_NIGHTLY ) ] -#[ test_tools::nightly ] -tests_index! -{ - trybuild_tests, -} - -// } \ No newline at end of file diff --git a/module/core/mod_interface/tests/mod_interface_tests.rs b/module/core/mod_interface/tests/tests.rs similarity index 71% rename from module/core/mod_interface/tests/mod_interface_tests.rs rename to module/core/mod_interface/tests/tests.rs index 1624d2accb..01e7549089 100644 --- a/module/core/mod_interface/tests/mod_interface_tests.rs +++ b/module/core/mod_interface/tests/tests.rs @@ -1,6 +1,3 @@ -// #![ cfg_attr( all(), feature( module_is_terminal ) ) ] -// #![ feature( trace_macros ) ] -// #![ feature( type_name_of_val ) ] /// A struct for testing purpose. #[ derive( Debug, PartialEq ) ] diff --git a/module/core/mod_interface_meta/src/impls.rs b/module/core/mod_interface_meta/src/impls.rs index deadb24dd6..66e92d1236 100644 --- a/module/core/mod_interface_meta/src/impls.rs +++ b/module/core/mod_interface_meta/src/impls.rs @@ -3,7 +3,9 @@ pub( crate ) mod private { use crate::*; // use visibility::ClauseKind; + // use macro_tools::exposed::*; use macro_tools::exposed::*; + // use macro_tools::diag; use std::collections::HashMap; // = use @@ -90,21 +92,6 @@ pub( crate ) mod private // exposed mod { mod_exposed1, mod_exposed2 }; // prelude mod { mod_prelude1, mod_prelude2 }; -// /// -// /// Get vector of a clause. -// /// -// -// macro_rules! clause -// { -// ( -// $ClauseMap:ident, -// $( $Key:tt )+ -// ) -// => -// { -// $ClauseMap.get_mut( &$( $Key )+() ).unwrap() -// }; -// } // zzz : clause should not expect the first argument /// Context for handlign a record. Cotnains clauses map and debug attribute. @@ -161,26 +148,6 @@ pub( crate ) mod private }); } - // use syn::UseTree::*; - // match &path.tree - // { - // Rename( e ) => - // { - // let rename = &e.rename; - // c.clauses_map.get_mut( &ClauseImmediates::Kind() ).unwrap().push( qt! 
- // { - // use #path as #rename; - // }); - // }, - // Glob( _e ) => - // { - // return Err( syn_err!( "Complex glob uses like `use module1::*` are not supported." ) ); - // }, - // _ => {} - // }; - - // clauses_map.get_mut( &VisProtected::Kind() ).unwrap().push( qt! - // clause!( clauses_map, VisProtected::Kind ).push( qt! c.clauses_map.get_mut( &VisProtected::Kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -189,8 +156,6 @@ pub( crate ) mod private pub use #adjsuted_path::orphan::*; }); - // clauses_map.get_mut( &VisExposed::Kind() ).unwrap().push( qt! - // clause!( clauses_map, VisExposed::Kind ).push( qt! c.clauses_map.get_mut( &VisExposed::Kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -199,8 +164,6 @@ pub( crate ) mod private pub use #adjsuted_path::exposed::*; }); - // clauses_map.get_mut( &VisPrelude::Kind() ).unwrap().push( qt! - // clause!( clauses_map, VisPrelude::Kind ).push( qt! c.clauses_map.get_mut( &VisPrelude::Kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -240,15 +203,6 @@ pub( crate ) mod private )); } - // let path2 = if path.prefix_is_needed() - // { - // qt!{ super::private::#path } - // } - // else - // { - // qt!{ #path } - // }; - let adjsuted_path = path.adjsuted_explicit_path(); let vis2 = if vis.restriction().is_some() @@ -260,8 +214,6 @@ pub( crate ) mod private qt!{ pub } }; - // clauses_map.get_mut( &vis.kind() ).unwrap().push( qt! - // clause!( clauses_map, vis.kind ).push( qt! c.clauses_map.get_mut( &vis.kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -282,7 +234,6 @@ pub( crate ) mod private record : &Record, element : &Pair< AttributesOuter, syn::Path >, c : &'_ mut RecordContext< '_ >, - // clauses_map : &mut HashMap< u32, Vec< proc_macro2::TokenStream > >, ) -> Result< () > @@ -291,8 +242,6 @@ pub( crate ) mod private let attrs2 = &element.0; let path = &element.1; - // clauses_map.get_mut( &ClauseImmediates::Kind() ).unwrap().push( qt! - // clause!( clauses_map, ClauseImmediates::Kind ).push( qt! c.clauses_map.get_mut( &ClauseImmediates::Kind() ).unwrap().push( qt! { #attrs1 @@ -311,10 +260,6 @@ pub( crate ) mod private )); } - // println!( "clauses_map.contains_key( {} ) : {}", record.vis.kind(), clauses_map.contains_key( &record.vis.kind() ) ); - // let fixes_list = clauses_map.get_mut( &record.vis.kind() ).ok_or_else( || syn_err!( "Error!" ) )?; - // clauses_map.get_mut( &record.vis.kind() ).unwrap().push( qt! - // clause!( clauses_map, record.vis.kind ).push( qt! c.clauses_map.get_mut( &record.vis.kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -335,7 +280,6 @@ pub( crate ) mod private ( record : &Record, element : &Pair< AttributesOuter, syn::Path >, - // clauses_map : &mut HashMap< u32, Vec< proc_macro2::TokenStream > >, c : &'_ mut RecordContext< '_ >, ) -> @@ -355,8 +299,6 @@ pub( crate ) mod private )); } - // clauses_map.get_mut( &ClauseImmediates::Kind() ).unwrap().push( qt! - // clause!( clauses_map, ClauseImmediates::Kind ).push( qt! c.clauses_map.get_mut( &ClauseImmediates::Kind() ).unwrap().push( qt! { #attrs1 @@ -364,8 +306,6 @@ pub( crate ) mod private pub mod #path; }); - // clauses_map.get_mut( &VisProtected::Kind() ).unwrap().push( qt! - // clause!( clauses_map, VisProtected::Kind ).push( qt! c.clauses_map.get_mut( &VisProtected::Kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -375,8 +315,6 @@ pub( crate ) mod private pub use super::#path::orphan::*; }); - // clauses_map.get_mut( &VisExposed::Kind() ).unwrap().push( qt! - // clause!( clauses_map, VisExposed::Kind ).push( qt! 
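In the hunk above, the commented-out `clause!` helper is dropped in favour of pushing generated fragments straight into `clauses_map`. Below is a minimal standalone sketch of that accumulation pattern; the kind constants and the `inner` path are invented for illustration and are not the real `Kind()` values or the crate's API.

```rust
use std::collections::HashMap;
use proc_macro2::TokenStream;
use quote::quote;

// stand-ins for VisProtected::Kind(), VisExposed::Kind(), ...
const KIND_PROTECTED : u32 = 1;
const KIND_EXPOSED : u32 = 2;

fn main()
{
  let mut clauses_map : HashMap< u32, Vec< TokenStream > > = HashMap::new();
  clauses_map.insert( KIND_PROTECTED, Vec::new() );
  clauses_map.insert( KIND_EXPOSED, Vec::new() );

  // each record handler appends the code it wants emitted for a given clause
  clauses_map.get_mut( &KIND_PROTECTED ).unwrap().push( quote!
  {
    #[ doc( inline ) ]
    pub use super::inner::orphan::*;
  });

  // at the end the fragments of every clause are spliced into one stream
  let protected = &clauses_map[ &KIND_PROTECTED ];
  let result = quote!( #( #protected )* );
  println!( "{}", result );
}
```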
c.clauses_map.get_mut( &VisExposed::Kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -386,8 +324,6 @@ pub( crate ) mod private pub use super::#path::exposed::*; }); - // clauses_map.get_mut( &VisPrelude::Kind() ).unwrap().push( qt! - // clause!( clauses_map, VisPrelude::Kind ).push( qt! c.clauses_map.get_mut( &VisPrelude::Kind() ).unwrap().push( qt! { #[ doc( inline ) ] @@ -527,17 +463,7 @@ pub( crate ) mod private if has_debug { - - // zzz : implement maybe - // let sections = Sections::new - // ( vec![ - // ( "original", original_input ), - // ( "result", qt!{ #result } ), - // ]); - // println!( "{}", sections ); - - println!( "\n = original : \n\n{}\n", original_input ); - println!( "\n = result : \n\n{}\n", qt!{ #result } ); + diag::debug_report_print( original_input, &result ); } Ok( result ) diff --git a/module/core/mod_interface_meta/src/lib.rs b/module/core/mod_interface_meta/src/lib.rs index 645291bcf1..653267ef33 100644 --- a/module/core/mod_interface_meta/src/lib.rs +++ b/module/core/mod_interface_meta/src/lib.rs @@ -1,23 +1,11 @@ -// #![ cfg_attr( feature = "no_std", no_std ) ] #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/mod_interface_meta/latest/mod_interface_meta/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] #![ deny( dead_code ) ] - -// #![ feature( type_name_of_val ) ] -// #![ feature( trace_macros ) ] +#![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] // xxx : write good description and the main use-case -//! -//! Protocol of modularity unifying interface of a module and introducing layers. -//! - -#![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] - mod impls; #[ allow( unused_imports ) ] use impls::exposed::*; @@ -44,49 +32,6 @@ pub fn mod_interface( input : proc_macro::TokenStream ) -> proc_macro::TokenStre } } -// /// Protected namespace of the module. -// pub mod protected -// { -// pub use super::orphan::*; -// pub use super:: -// { -// impls::orphan::*, -// record::orphan::*, -// visibility::orphan::*, -// }; -// } -// -// pub use protected::*; -// -// /// Parented namespace of the module. -// pub mod orphan -// { -// pub use super::exposed::*; -// } -// -// /// Exposed namespace of the module. -// pub mod exposed -// { -// pub use super::prelude::*; -// pub use super:: -// { -// impls::exposed::*, -// record::exposed::*, -// visibility::exposed::*, -// }; -// } -// -// /// Prelude to use essentials: `use my_module::prelude::*`. -// pub mod prelude -// { -// pub use super:: -// { -// impls::prelude::*, -// record::prelude::*, -// visibility::prelude::*, -// }; -// } - /* mod_interface! 
diff --git a/module/core/mod_interface_meta/src/record.rs b/module/core/mod_interface_meta/src/record.rs index 3a732514e6..b078dcf6c6 100644 --- a/module/core/mod_interface_meta/src/record.rs +++ b/module/core/mod_interface_meta/src/record.rs @@ -191,12 +191,13 @@ pub( crate ) mod private { self.head.iter().try_for_each( | attr | { - // code_print!( attr.path ); - // code_print!( attr.tokens ); + // code_print!( attr ); + // code_print!( attr.path() ); + // code_print!( attr.meta ); let good = true - && code_to_str!( attr.path ) == "debug" - && code_to_str!( attr.tokens ).is_empty() + && code_to_str!( attr.path() ) == "debug" + // && code_to_str!( attr.meta ).is_empty() ; if !good @@ -219,7 +220,7 @@ pub( crate ) mod private { self.head.iter().any( | attr | { - code_to_str!( attr.path ) == "debug" + code_to_str!( attr.path() ) == "debug" }) } } diff --git a/module/core/mod_interface_meta/src/use_tree.rs b/module/core/mod_interface_meta/src/use_tree.rs index 565dc7997c..a7c1397e96 100644 --- a/module/core/mod_interface_meta/src/use_tree.rs +++ b/module/core/mod_interface_meta/src/use_tree.rs @@ -8,7 +8,7 @@ pub( crate ) mod private #[ derive( Debug, PartialEq, Eq, Clone ) ] pub struct UseTree { - pub leading_colon : Option< syn::token::Colon2 >, + pub leading_colon : Option< syn::token::PathSep >, pub tree : syn::UseTree, pub rename : Option< syn::Ident >, pub glob : bool, diff --git a/module/core/mod_interface_meta/src/visibility.rs b/module/core/mod_interface_meta/src/visibility.rs index edf7e111b6..10d8a68c48 100644 --- a/module/core/mod_interface_meta/src/visibility.rs +++ b/module/core/mod_interface_meta/src/visibility.rs @@ -19,6 +19,9 @@ pub( crate ) mod private syn::custom_keyword!( orphan ); syn::custom_keyword!( exposed ); syn::custom_keyword!( prelude ); + + pub use syn::token::Pub as public; + } /// @@ -218,7 +221,11 @@ pub( crate ) mod private Vis!( Orphan, VisOrphan, orphan, Orphan ); Vis!( Exposed, VisExposed, exposed, Exposed ); Vis!( Prelude, VisPrelude, prelude, Prelude ); - HasClauseKind!( syn::VisPublic, Public ); + + Vis!( Public, VisPublic, public, Public ); + // Vis!( Restricted, VisRestricted, restricted, Restricted ); + + // HasClauseKind!( syn::Visibility::Public, Public ); HasClauseKind!( syn::VisRestricted, Restricted ); Clause!( ClauseImmediates, Immadiate ); @@ -227,8 +234,10 @@ pub( crate ) mod private impl_valid_sub_namespace!( VisOrphan, true ); impl_valid_sub_namespace!( VisExposed, true ); impl_valid_sub_namespace!( VisPrelude, true ); - impl_valid_sub_namespace!( syn::VisPublic, false ); + impl_valid_sub_namespace!( VisPublic, false ); impl_valid_sub_namespace!( syn::VisRestricted, false ); + // impl_valid_sub_namespace!( syn::Visibility::Public, false ); + // impl_valid_sub_namespace!( syn::VisRestricted, false ); /// /// Restriction, for example `pub( crate )`. @@ -278,7 +287,8 @@ pub( crate ) mod private Orphan( VisOrphan ), Exposed( VisExposed ), Prelude( VisPrelude ), - Public( syn::VisPublic ), + Public( VisPublic ), + // Public( syn::VisPublic ), // Crate( syn::VisCrate ), // Restricted( syn::VisRestricted ), #[ default ] @@ -310,9 +320,14 @@ pub( crate ) mod private fn parse_pub( input : ParseStream< '_ > ) -> Result< Self > { - Ok( Visibility::Public( syn::VisPublic { pub_token : input.parse()? } ) ) + Self::_parse_vis::< VisPublic >( input ) } + // fn parse_pub( input : ParseStream< '_ > ) -> Result< Self > + // { + // Ok( Visibility::Public( syn::VisPublic { pub_token : input.parse()? 
} ) ) + // } + fn _parse_vis< Vis >( input : ParseStream< '_ > ) -> Result< Self > where Vis : Into< Visibility > + VisibilityInterface, diff --git a/module/core/reflect_tools_meta/src/implementation/reflect.rs b/module/core/reflect_tools_meta/src/implementation/reflect.rs index e4187fbf80..7dc8d32790 100644 --- a/module/core/reflect_tools_meta/src/implementation/reflect.rs +++ b/module/core/reflect_tools_meta/src/implementation/reflect.rs @@ -6,10 +6,7 @@ use super::*; pub fn reflect( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > { - let parsed = syn::parse::< InputParsed >( input )?; - // let field_types = parsed.field_types; - // let field_names = parsed.field_names; - // let item_name = parsed.item_name; + let parsed = syn::parse::< type_struct::TypeStructParsed >( input )?; let result = qt! { diff --git a/module/core/strs_tools/src/lib.rs b/module/core/strs_tools/src/lib.rs index 2fd1127fc8..1c5bc9300e 100644 --- a/module/core/strs_tools/src/lib.rs +++ b/module/core/strs_tools/src/lib.rs @@ -2,25 +2,12 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/strs_tools/latest/strs_tools/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -//! -//! Tools to manipulate strings. -//! - #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] /// String tools. #[ cfg( feature = "enabled" ) ] pub mod string; -// #[ doc( inline ) ] -// #[ allow( unused_imports ) ] -#[ cfg( feature = "enabled" ) ] -pub use string::*; - #[ doc( inline ) ] #[ allow( unused_imports ) ] #[ cfg( feature = "enabled" ) ] diff --git a/module/core/strs_tools/src/string/indentation.rs b/module/core/strs_tools/src/string/indentation.rs index 839625e010..16f7208663 100644 --- a/module/core/strs_tools/src/string/indentation.rs +++ b/module/core/strs_tools/src/string/indentation.rs @@ -2,9 +2,43 @@ pub( crate ) mod private { + /// Adds indentation and optional prefix/postfix to each line of the given string. /// - /// Add indentation to each line. + /// This function iterates over each line in the input string and applies the specified + /// prefix and postfix to it, effectively indenting the string and optionally wrapping + /// each line with additional content. /// + /// # Parameters + /// - `prefix` : The string to prepend to each line, typically used for indentation. + /// - `src` : The source string to be indented and modified. + /// - `postfix` : The string to append to each line, can be used for line terminators or other suffixes. + /// + /// # Type Parameters + /// - `Prefix` : A type that can be referenced as a string slice, for the prefix. + /// - `Src` : A type that can be referenced as a string slice, for the source string. + /// - `Postfix` : A type that can be referenced as a string slice, for the postfix. + /// + /// # Returns + /// A `String` that represents the original `src` string with `prefix` and `postfix` applied to each line. 
+ /// + /// # Example + /// ``` + /// use strs_tools::exposed::*; + /// + /// let input = "Line 1\nLine 2\nLine 3"; + /// let indented = indentation( " ", input, ";" ); + /// assert_eq!( indented, " Line 1;\n Line 2;\n Line 3;" ); + /// + /// // Demonstrating the function's handling of trailing newlines + /// let input_with_newline = "Line 1\nLine 2\nLine 3\n"; + /// let indented_with_newline = indentation( " ", input_with_newline, ";" ); + /// assert_eq!( indented_with_newline, " Line 1;\n Line 2;\n Line 3;\n ;" ); + /// ``` + /// + /// In the example above, `indentation` is used to add two spaces before each line + /// and a semicolon at the end of each line. The function also demonstrates handling + /// of input strings that end with a newline character by appending an additional line + /// consisting only of the prefix and postfix. pub fn indentation< Prefix, Src, Postfix >( prefix : Prefix, src : Src, postfix : Postfix ) -> String where @@ -14,22 +48,31 @@ pub( crate ) mod private { let prefix = prefix.as_ref(); let postfix = postfix.as_ref(); - let splits = src - .as_ref() - .split( '\n' ) - ; + let src = src.as_ref(); - splits - .map( | e | prefix.to_owned() + e + postfix ) + let mut result = src + .lines() .enumerate() - // intersperse is unstable - // .intersperse( '\n' ) .fold( String::new(), | mut a, b | { - a.push_str( if b.0 > 0 { "\n" } else { "" } ); + if b.0 > 0 + { + a.push_str( "\n" ); + } + a.push_str( prefix ); a.push_str( &b.1 ); + a.push_str( postfix ); a - }) + }); + + if src.ends_with( "\n" ) || src.ends_with( "\n\r" ) || src.ends_with( "\r\n" ) + { + result.push_str( "\n" ); + result.push_str( prefix ); + result.push_str( postfix ); + } + + result } } @@ -56,16 +99,18 @@ pub mod orphan #[ allow( unused_imports ) ] pub use super::private:: { - indentation, }; } /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as indentation; + #[ allow( unused_imports ) ] pub use super::private:: { + indentation, }; } diff --git a/module/core/strs_tools/src/string/isolate.rs b/module/core/strs_tools/src/string/isolate.rs index 6dbb9e2387..abe3ddc13b 100644 --- a/module/core/strs_tools/src/string/isolate.rs +++ b/module/core/strs_tools/src/string/isolate.rs @@ -197,6 +197,8 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as isolate; + use super::private as i; pub use i::IsolateOptionsAdapter; diff --git a/module/core/strs_tools/src/string/number.rs b/module/core/strs_tools/src/string/number.rs index e5ceebf263..29da7a5520 100644 --- a/module/core/strs_tools/src/string/number.rs +++ b/module/core/strs_tools/src/string/number.rs @@ -36,6 +36,8 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as number; + #[ allow( unused_imports ) ] pub use super::private:: { diff --git a/module/core/strs_tools/src/string/parse_request.rs b/module/core/strs_tools/src/string/parse_request.rs index bb505e31c0..62f8674f6b 100644 --- a/module/core/strs_tools/src/string/parse_request.rs +++ b/module/core/strs_tools/src/string/parse_request.rs @@ -1,8 +1,12 @@ /// Internal namespace. pub( crate ) mod private { - use crate::string::split::*; - use crate::string::isolate::isolate_right; + use crate::*; + use string:: + { + split::*, + // isolate::isolate_right, + }; use std::collections::HashMap; /// @@ -10,17 +14,17 @@ pub( crate ) mod private /// #[ derive( Debug, Clone, PartialEq, Eq ) ] - pub enum OpType + pub enum OpType< T > { - /// Wrapper over single element of type . 
+ /// Wrapper over single element of type < T >. Primitive( T ), - /// Wrapper over vector of elements of type . - Vector( Vec ), - /// Wrapper over hash map of elements of type . + /// Wrapper over vector of elements of type < T >. + Vector( Vec< T > ), + /// Wrapper over hash map of elements of type < T >. Map( HashMap ), } - impl Default for OpType + impl Default for OpType< T > { fn default() -> Self { @@ -28,7 +32,7 @@ pub( crate ) mod private } } - impl From for OpType + impl< T > From< T > for OpType< T > { fn from( value: T ) -> Self { @@ -36,17 +40,17 @@ pub( crate ) mod private } } - impl From> for OpType + impl< T > From> for OpType< T > { - fn from( value: Vec ) -> Self + fn from( value: Vec< T > ) -> Self { OpType::Vector( value ) } } - impl Into > for OpType + impl< T > Into > for OpType< T > { - fn into( self ) -> Vec + fn into( self ) -> Vec< T > { match self { @@ -56,11 +60,11 @@ pub( crate ) mod private } } - impl OpType + impl OpType< T > { /// Append item of OpType to current value. If current type is `Primitive`, then it will be converted to /// `Vector`. - pub fn append( mut self, item : OpType ) -> OpType + pub fn append( mut self, item : OpType< T > ) -> OpType< T > { let mut mut_item = item; match self @@ -104,7 +108,7 @@ pub( crate ) mod private } /// Unwrap primitive value. Consumes self. - pub fn primitive( self ) -> Option + pub fn primitive( self ) -> Option< T > { match self { @@ -114,7 +118,7 @@ pub( crate ) mod private } /// Unwrap vector value. Consumes self. - pub fn vector( self ) -> Option> + pub fn vector( self ) -> Option> { match self { @@ -504,6 +508,8 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as parse_request; + pub use super::private:: { ParseOptionsAdapter, diff --git a/module/core/strs_tools/src/string/split.rs b/module/core/strs_tools/src/string/split.rs index f3e75f8334..cb5dac93ca 100644 --- a/module/core/strs_tools/src/string/split.rs +++ b/module/core/strs_tools/src/string/split.rs @@ -672,6 +672,8 @@ pub mod orphan /// Exposed namespace of the module. pub mod exposed { + pub use super::protected as split; + pub use super::private:: { SplitOptionsAdapter, diff --git a/module/core/strs_tools/tests/inc/indentation_test.rs b/module/core/strs_tools/tests/inc/indentation_test.rs index 580a0f0b50..133e68cf04 100644 --- a/module/core/strs_tools/tests/inc/indentation_test.rs +++ b/module/core/strs_tools/tests/inc/indentation_test.rs @@ -1,47 +1,63 @@ -// use test_tools::exposed::*; use super::*; // #[ cfg( not( feature = "no_std" ) ) ] -tests_impls! 
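The parse_request.rs hunk above mostly restores the spaced `< T >` generics on `OpType`, whose angle brackets are partly lost in this rendering of the diff. As a sanity check, here is a small self-contained stand-in showing the behaviour its `append` method documents (a `Primitive` promoted to a `Vector`); it is a simplification, not the crate's actual definition.

```rust
#[ derive( Debug, Clone, PartialEq ) ]
enum OpType< T >
{
  Primitive( T ),
  Vector( Vec< T > ),
}

impl< T > OpType< T >
{
  // appending to a Primitive promotes it to a Vector
  fn append( self, item : OpType< T > ) -> OpType< T >
  {
    let mut vec = match self
    {
      OpType::Primitive( value ) => vec![ value ],
      OpType::Vector( vec ) => vec,
    };
    match item
    {
      OpType::Primitive( value ) => vec.push( value ),
      OpType::Vector( mut other ) => vec.append( &mut other ),
    }
    OpType::Vector( vec )
  }
}

fn main()
{
  let got = OpType::Primitive( 1 ).append( OpType::Primitive( 2 ) );
  assert_eq!( got, OpType::Vector( vec![ 1, 2 ] ) );
}
```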
+#[ test ] +fn basic() { - fn basic() + use TheModule::string::indentation; + + /* test.case( "basic" ) */ { - use TheModule::string::indentation; - - /* test.case( "basic" ) */ - { - let src = "a\nbc"; - let exp = "---a\n---bc"; - let got = indentation( "---", src, "" ); - a_id!( got, exp ); - } - - /* test.case( "empty string" ) */ - { - let src = ""; - let exp = "---"; - let got = indentation( "---", src, "" ); - a_id!( got, exp ); - } - - /* test.case( "two empty string" ) */ - { - let src = "\n"; - let exp = "---\n---"; - let got = indentation( "---", src, "" ); - a_id!( got, exp ); - } + let src = "a\nbc"; + let exp = "---a\n---bc"; + let got = indentation( "---", src, "" ); + a_id!( got, exp ); + } + /* test.case( "empty string" ) */ + { + let src = ""; + let exp = ""; + let got = indentation( "---", src, "" ); + a_id!( got, exp ); } -} -// + /* test.case( "two strings" ) */ + { + let src = "a\nb"; + let exp = "---a+++\n---b+++"; + let got = indentation( "---", src, "+++" ); + a_id!( got, exp ); + } + + /* test.case( "last empty" ) */ + { + let src = "a\n"; + let exp = "---a+++\n---+++"; + let got = indentation( "---", src, "+++" ); + // println!( "got : '{}'", got ); + a_id!( got, exp ); + } + + /* test.case( "first empty" ) */ + { + let src = "\nb"; + let exp = "---+++\n---b+++"; + let got = indentation( "---", src, "+++" ); + // println!( "got : '{}'", got ); + a_id!( got, exp ); + } + + /* test.case( "two empty string" ) */ + { + let src = "\n"; + let exp = "---+++\n---+++"; + let got = indentation( "---", src, "+++" ); + // println!( "got : '{}'", got ); + a_id!( got, exp ); + } -#[ cfg( not( feature = "no_std" ) ) ] -tests_index! -{ - basic, } diff --git a/module/core/type_constructor/tests/inc/many/many_parameter_main_manual_test.rs b/module/core/type_constructor/tests/inc/many/many_parameter_main_manual_test.rs index 5198aa1176..65b0f47ff1 100644 --- a/module/core/type_constructor/tests/inc/many/many_parameter_main_manual_test.rs +++ b/module/core/type_constructor/tests/inc/many/many_parameter_main_manual_test.rs @@ -17,7 +17,7 @@ struct Many< T > ( pub TheModule::_Vec < T > ); impl< T > core::ops::Deref for Many< T > { type Target = TheModule::_Vec < T >; - #[inline] + #[ inline ] fn deref( &self) -> & Self::Target { &self.0 @@ -26,7 +26,7 @@ impl< T > core::ops::Deref for Many< T > impl< T > core::ops::DerefMut for Many< T > { - #[inline] + #[ inline ] fn deref_mut( &mut self) -> & mut Self::Target { &mut self.0 @@ -49,7 +49,7 @@ where // impl< T > From < T > for Many< T > // { -// #[inline] +// #[ inline ] // fn from( src : T ) -> Self // { // Self( TheModule::_vec![ src ] ) @@ -59,7 +59,7 @@ where // impl < T > From < & T > for Many< T > // where T : Clone, // { -// #[inline] +// #[ inline ] // fn from( src : &T ) -> Self // { // Self( TheModule::_vec![ src.clone() ] ) @@ -68,7 +68,7 @@ where // // impl< T > From < ( T, ) > for Many< T > // { -// #[inline] +// #[ inline ] // fn from( src : ( T, ) ) -> Self // { // Self( TheModule::_vec![ src.0 ] ) @@ -77,7 +77,7 @@ where // // impl < T, const N : usize > From < [T ; N] > for Many< T > // { -// #[inline] +// #[ inline ] // fn from( src : [ T ; N ] ) -> Self // { // Self( TheModule::_Vec::from( src ) ) @@ -86,7 +86,7 @@ where // // impl< T > From < &[ T ] > for Many< T > where T : Clone, // { -// #[inline] +// #[ inline ] // fn from( src : &[ T ] ) -> Self // { // Self( TheModule::_Vec::from( src ) ) @@ -95,7 +95,7 @@ where impl< T > TheModule::AsSlice< T > for Many< T > { - #[inline] fn as_slice(& self) -> &[ T ] + #[ 
inline ] fn as_slice(& self) -> &[ T ] { &self[ .. ] } @@ -106,7 +106,7 @@ TheModule::_if_from! // impl< T > TheModule::From_0 for Many< T > // { - // #[inline] + // #[ inline ] // fn from_0() -> Self // { // Self( TheModule::_Vec::new() ) @@ -115,7 +115,7 @@ TheModule::_if_from! impl< T > TheModule::From_1 < T > for Many< T > { - #[inline] + #[ inline ] fn from_1(_0 : T) -> Self { Self(TheModule::_vec! [_0]) @@ -124,7 +124,7 @@ TheModule::_if_from! impl< T > TheModule::From_2 < T, T > for Many< T > { - #[inline] + #[ inline ] fn from_2(_0 : T, _1 : T) -> Self { Self( TheModule::_vec![ _0, _1 ] ) @@ -133,7 +133,7 @@ TheModule::_if_from! impl< T > TheModule::From_3 < T, T, T > for Many< T > { - #[inline] fn from_3(_0 : T, _1 : T, _2 : T) -> Self + #[ inline ] fn from_3(_0 : T, _1 : T, _2 : T) -> Self { Self( TheModule::_vec![ _0, _1, _2 ] ) } diff --git a/module/core/type_constructor/tests/inc/many/many_parametrized_test.rs b/module/core/type_constructor/tests/inc/many/many_parametrized_test.rs index 69b4807b19..dc05a74904 100644 --- a/module/core/type_constructor/tests/inc/many/many_parametrized_test.rs +++ b/module/core/type_constructor/tests/inc/many/many_parametrized_test.rs @@ -162,29 +162,29 @@ tests_impls! // // impl core :: ops :: Deref for Structs // { -// type Target = TheModule :: _Vec < Struct > ; #[inline] fn deref(& self) -> & +// type Target = TheModule :: _Vec < Struct > ; #[ inline ] fn deref(& self) -> & // Self :: Target { & self.0 } // } // // impl core :: ops :: DerefMut for Structs // { -// #[inline] fn deref_mut(& mut self) -> & mut Self :: Target +// #[ inline ] fn deref_mut(& mut self) -> & mut Self :: Target // { & mut self.0 } // } // // impl From < Struct > for Structs -// { #[inline] fn from(src : Struct) -> Self { Self(TheModule :: _vec! [src]) } } +// { #[ inline ] fn from(src : Struct) -> Self { Self(TheModule :: _vec! [src]) } } // // impl < __FromRef > From < & __FromRef > for Structs where __FromRef : Clone, // Self : From < __FromRef >, // { -// #[inline] fn from(src : & __FromRef) -> Self +// #[ inline ] fn from(src : & __FromRef) -> Self // { From :: from((* src).clone()) } // } // // impl From < (Struct,) > for Structs // { -// #[inline] fn from(src : (Struct,)) -> Self +// #[ inline ] fn from(src : (Struct,)) -> Self // { Self(TheModule :: _vec! [src.0]) } // } // @@ -192,43 +192,43 @@ tests_impls! // for Structs // // where Struct : Clone, // { -// #[inline] fn from(src : [Struct ; N]) -> Self +// #[ inline ] fn from(src : [Struct ; N]) -> Self // { Self(TheModule :: _Vec :: from(src)) } // } // // impl From < & [Struct] > for Structs // where Struct : Clone, // { -// // #[inline] +// // #[ inline ] // fn from(src : & [Struct]) -> Self // { Self(TheModule :: _Vec :: from(src)) } // } // // impl TheModule :: AsSlice < Struct > for Structs // // where Struct : Clone, -// { #[inline] fn as_slice(& self) -> & [Struct] { & self [..] } } +// { #[ inline ] fn as_slice(& self) -> & [Struct] { & self [..] } } // // impl TheModule :: From_0 for Structs // { -// #[inline] fn from_0() -> Self +// #[ inline ] fn from_0() -> Self // { Self(TheModule :: _Vec :: < Struct > :: new()) } // } // // impl TheModule :: From_1 < Struct > for Structs // { -// #[inline] fn from_1(_0 : Struct,) -> Self +// #[ inline ] fn from_1(_0 : Struct,) -> Self // { Self(TheModule :: _vec! 
[_0]) } // } // // impl TheModule :: From_2 < Struct, Struct, > for Structs // { -// #[inline] fn from_2(_0 : Struct, _1 : Struct,) -> Self +// #[ inline ] fn from_2(_0 : Struct, _1 : Struct,) -> Self // { Self(TheModule :: _vec! [_0, _1]) } // } // // impl TheModule :: From_3 < Struct, Struct, Struct, > for Structs // { -// #[inline] fn from_3(_0 : Struct, _1 : Struct, _2 : Struct,) -> Self +// #[ inline ] fn from_3(_0 : Struct, _1 : Struct, _2 : Struct,) -> Self // { Self(TheModule :: _vec! [_0, _1, _2]) } // } // diff --git a/module/core/variadic_from/src/wtools/from.rs b/module/core/variadic_from/src/wtools/from.rs index 7e1163a104..7a2b539324 100644 --- a/module/core/variadic_from/src/wtools/from.rs +++ b/module/core/variadic_from/src/wtools/from.rs @@ -95,7 +95,7 @@ pub( crate ) mod private where F : From_1< All >, { - #[inline] + #[ inline ] fn to( self ) -> F { F::from_1( self ) @@ -107,7 +107,7 @@ pub( crate ) mod private // F : From_1< F >, // F : From< All >, // { - // #[inline] + // #[ inline ] // fn to( self ) -> F // { // F::from_1( From::from( self ) ) diff --git a/module/core/wtools/src/lib.rs b/module/core/wtools/src/lib.rs index 2d9d6e190f..1658658578 100644 --- a/module/core/wtools/src/lib.rs +++ b/module/core/wtools/src/lib.rs @@ -134,7 +134,7 @@ pub mod exposed #[ cfg( feature = "typing" ) ] pub use super::typing::exposed::*; #[ cfg( feature = "diagnostics" ) ] - pub use super::diagnostics::exposed::*; + pub use super::diag::exposed::*; #[ cfg( any( feature = "dt", feature = "data_type" ) ) ] pub use super::dt::exposed::*; #[ cfg( feature = "time" ) ] @@ -175,7 +175,7 @@ pub mod prelude #[ cfg( feature = "diagnostics" ) ] #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::diagnostics::prelude::*; + pub use super::diag::prelude::*; #[ cfg( any( feature = "dt", feature = "data_type" ) ) ] #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/wtools/tests/wtools_tests.rs b/module/core/wtools/tests/wtools_tests.rs index 3c9b956f7a..a9d5f3c3e9 100644 --- a/module/core/wtools/tests/wtools_tests.rs +++ b/module/core/wtools/tests/wtools_tests.rs @@ -51,7 +51,7 @@ mod data_type; #[ cfg( feature = "diagnostics_tools" ) ] #[ cfg( not( feature = "meta_tools" ) ) ] #[ path = "../../../core/diagnostics_tools/tests/diagnostics_tests.rs" ] -mod diagnostics_tools; +mod diag_tools; #[ cfg( feature = "meta_tools" ) ] diff --git a/module/move/optimization_tools/Cargo.toml b/module/move/optimization_tools/Cargo.toml index e66513316a..79f8029589 100644 --- a/module/move/optimization_tools/Cargo.toml +++ b/module/move/optimization_tools/Cargo.toml @@ -32,15 +32,12 @@ full = [ ] enabled = [] rapidity_6 = [] # to enable slow tests -rapidity_7 = [ "rapidity_6" ] # to enable slow tests -rapidity_8 = [ "rapidity_7" ] # to enable slow tests -rapidity_9 = [ "rapidity_8" ] # to enable slow tests static_plot = [] dynamic_plot = [ "static_plot", "plotters-backend", "piston_window" ] lp_parse = [ "exmex" ] [dependencies] -derive_tools = { workspace = true, features = ["default"] } +derive_tools = { workspace = true, features = [ "derive_more", "full" ] } deterministic_rand = { workspace = true, features = [ "default" ] } iter_tools = { workspace = true, features = [ "default" ] } meta_tools = { workspace = true, features = [ "meta_constructors" ] } diff --git a/module/move/optimization_tools/src/hybrid_optimizer/sim_anneal.rs b/module/move/optimization_tools/src/hybrid_optimizer/sim_anneal.rs index e13872f036..112760b289 100644 --- 
a/module/move/optimization_tools/src/hybrid_optimizer/sim_anneal.rs +++ b/module/move/optimization_tools/src/hybrid_optimizer/sim_anneal.rs @@ -1,6 +1,8 @@ +//! Implementation of Simulated Annealing for Hybrid Optimizer. + +use derive_tools::{ FromInner, InnerFrom, Display }; /// Represents temperature of SA process. #[ derive( Default, Debug, Display, Clone, Copy, PartialEq, PartialOrd, FromInner, InnerFrom ) ] -#[ derive( Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign ) ] pub struct Temperature( f64 ); impl Temperature @@ -21,12 +23,12 @@ impl From< f32 > for Temperature Self( src as f64 ) } } -use derive_tools::{ FromInner, InnerFrom, Display }; -use derive_tools::{ Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign }; + +// use derive_tools::{ Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign }; /// Struct that represents coefficient to change temperature value. #[ derive( Debug, Display, Clone, Copy, PartialEq, PartialOrd, FromInner, InnerFrom ) ] -#[ derive( Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign ) ] +// #[ derive( Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign ) ] pub struct TemperatureFactor( pub f64 ); impl TemperatureFactor @@ -83,11 +85,11 @@ impl TemperatureSchedule for LinearTempSchedule { fn calculate_next_temp( &self, prev_temp : Temperature ) -> Temperature { - Temperature::from( prev_temp.unwrap() * self.coefficient.unwrap() ) + self.constant + Temperature::from( prev_temp.unwrap() * self.coefficient.unwrap() + self.constant.unwrap() ) } fn reset_temperature( &self, prev_temp : Temperature ) -> Temperature { - prev_temp + self.reset_increase_value + Temperature( prev_temp.unwrap() + self.reset_increase_value.unwrap() ) } } \ No newline at end of file diff --git a/module/move/optimization_tools/src/optimal_params_search/mod.rs b/module/move/optimization_tools/src/optimal_params_search/mod.rs index 00b4ccd694..76b84b8492 100644 --- a/module/move/optimization_tools/src/optimal_params_search/mod.rs +++ b/module/move/optimization_tools/src/optimal_params_search/mod.rs @@ -5,6 +5,7 @@ pub mod nelder_mead; pub mod sim_annealing; use std::ops::RangeBounds; use iter_tools::Itertools; +use ordered_float::OrderedFloat; use crate::hybrid_optimizer::*; use results_serialize::read_results; @@ -30,7 +31,7 @@ impl Default for OptimalParamsConfig { improvement_threshold : 0.005, max_no_improvement_steps : 10, - max_iterations : 10, + max_iterations : 100, } } } @@ -128,19 +129,19 @@ where R : RangeBounds< f64 > + Sync, log::info! ( "temp_decrease_coefficient : {:.4?}, max_mutations_per_dynasty: {}, mutation_rate: {:.2}, crossover_rate: {:.2};", - case.coords[ 0 ].into_inner(), case.coords[ 1 ].into_inner() as usize, case.coords[ 2 ], case.coords[ 3 ] + case.coords[ 0 ], case.coords[ 1 ] as usize, case.coords[ 2 ], case.coords[ 3 ] ); log::info! 
( "max_stale_iterations : {:?}, population_size: {}, dynasties_limit: {};", - case.coords[ 4 ].into_inner() as usize, case.coords[ 5 ].into_inner() as usize, case.coords[ 6 ].into_inner() as usize + case.coords[ 4 ] as usize, case.coords[ 5 ] as usize, case.coords[ 6 ] as usize ); let temp_schedule = LinearTempSchedule { constant : 0.0.into(), - coefficient : case.coords[ 0 ].into_inner().into(), + coefficient : case.coords[ 0 ].into(), reset_increase_value : 1.0.into(), }; @@ -154,16 +155,16 @@ where R : RangeBounds< f64 > + Sync, }; let props = crate::hybrid_optimizer::PopulationModificationProportions::new() - .set_crossover_rate( case.coords[ 3 ].into_inner() ) - .set_mutation_rate( case.coords[ 2 ].into_inner() ) + .set_crossover_rate( case.coords[ 3 ] ) + .set_mutation_rate( case.coords[ 2 ] ) ; let optimizer = HybridOptimizer::new( Config::default(), h_problem ) - .set_sa_max_mutations_per_dynasty( case.coords[ 1 ].into_inner() as usize ) + .set_sa_max_mutations_per_dynasty( case.coords[ 1 ] as usize ) .set_population_proportions( props ) - .set_max_stale_iterations( case.coords[ 4 ].into_inner() as usize ) - .set_population_size( case.coords[ 5 ].into_inner() as usize ) - .set_dynasties_limit( case.coords[ 6 ].into_inner() as usize ) + .set_max_stale_iterations( case.coords[ 4 ] as usize ) + .set_population_size( case.coords[ 5 ] as usize ) + .set_dynasties_limit( case.coords[ 6 ] as usize ) ; let ( _reason, _solution ) = optimizer.optimize(); }; @@ -252,3 +253,57 @@ pub enum Error #[ error( "starting value is out of bounds" ) ] OutOfBoundsError, } + +#[ derive( Debug, Clone, PartialEq, Hash, Eq ) ] +pub struct Point( ( OrderedFloat< f64 >, usize, OrderedFloat< f64 >, OrderedFloat< f64 >, usize, usize, usize ) ); + +impl From< nelder_mead::Point > for Point +{ + fn from( value: nelder_mead::Point ) -> Self + { + Self + ( ( + OrderedFloat( value.coords[ 0 ] ), + value.coords[ 1 ] as usize, + OrderedFloat( value.coords[ 2 ] ), + OrderedFloat( value.coords[ 3 ] ), + value.coords[ 4 ] as usize, + value.coords[ 5 ] as usize, + value.coords[ 6 ] as usize, + ) ) + } +} + +impl From< ( f64, u32, f64, f64, u32, u32, u32 ) > for Point +{ + fn from( value: ( f64, u32, f64, f64, u32, u32, u32 ) ) -> Self + { + Self + ( ( + OrderedFloat( value.0 ), + value.1 as usize, + OrderedFloat( value.2 ), + OrderedFloat( value.3 ), + value.4 as usize, + value.5 as usize, + value.6 as usize, + ) ) + } +} + +impl From< Point > for ( f64, u32, f64, f64, u32, u32, u32 ) +{ + fn from( value: Point ) -> Self + { + let coords = value.0; + ( + coords.0.into_inner(), + coords.1.try_into().unwrap(), + coords.2.into_inner(), + coords.3.into_inner(), + coords.4.try_into().unwrap(), + coords.5.try_into().unwrap(), + coords.6.try_into().unwrap(), + ) + } +} diff --git a/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs b/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs index 90b10b45f8..90c329dba4 100644 --- a/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs +++ b/module/move/optimization_tools/src/optimal_params_search/nelder_mead.rs @@ -11,17 +11,16 @@ use std:: }; use deterministic_rand::{ Hrng, Seed, Rng }; use iter_tools::Itertools; -use ordered_float::OrderedFloat; use rayon::iter::{ IntoParallelIterator, ParallelIterator }; use super::results_serialize::save_result; /// Represents point in multidimensional space where optimization is performed. 
-#[ derive( Debug, Clone, PartialEq, Hash, Eq ) ] +#[ derive( Debug, Clone ) ] pub struct Point { /// Coordinates of the point. - pub coords : Vec< OrderedFloat< f64 > >, + pub coords : Vec< f64 >, } impl Point @@ -31,12 +30,6 @@ impl Point { Self { coords : coords.into_iter().map( | elem | elem.into() ).collect_vec() } } - - /// Create new point from given coordinates. - pub fn new_from_ordered( coords : Vec< OrderedFloat< f64 > > ) -> Self - { - Self { coords } - } } /// Represents geometric shape formed by a set of n+1 points in a multidimensional space, where n is a number of dimensions. @@ -76,6 +69,7 @@ pub struct Stats pub starting_point : Point, pub differences : Vec< Vec< f64 > >, pub positive_change : Vec< usize >, + pub cached_points : ( usize, usize ), } impl Stats @@ -83,7 +77,13 @@ impl Stats pub fn new( starting_point : Point) -> Self { let dimensions = starting_point.coords.len(); - Self { starting_point, differences : vec![ Vec::new(); dimensions ], positive_change : vec![ 0; dimensions ] } + Self + { + starting_point, + differences : vec![ Vec::new(); dimensions ], + positive_change : vec![ 0; dimensions ], + cached_points : ( 0, 0 ), + } } pub fn record_diff( &mut self, start_point : &Point, point : &Point ) @@ -141,7 +141,7 @@ pub struct Optimizer< R, F > /// Shrinking involves reducing the distance between the vertices of the simplex, making it smaller. pub sigma : f64, /// Values of objective function calculated in previous executions. - pub calculated_results : Option< HashMap< Point, f64 > >, + pub calculated_results : Option< HashMap< super::Point, f64 > >, /// File for saving values of objective function during optimization process. pub save_results_file : Option< Arc< Mutex< File > > >, /// Additional constraint for coordinates of function. @@ -175,7 +175,7 @@ where R : RangeBounds< f64 > + Sync, } /// Add set of previosly calculated values of objective function. - pub fn set_calculated_results( &mut self, res : HashMap< Point, f64 > ) + pub fn set_calculated_results( &mut self, res : HashMap< super::Point, f64 > ) { self.calculated_results = Some( res ); } @@ -203,7 +203,7 @@ where R : RangeBounds< f64 > + Sync, } /// Calculate value of objective function at given point or get previously calculated value if such exists. 
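`evaluate_point` now threads a `Stats` value through so cache hits and misses can be counted in the new `cached_points` pair. The sketch below mirrors that flow with simplified, assumed types (a `u64` key in place of a real `Point`, and a stripped-down cache); it is illustrative only.

```rust
use std::collections::HashMap;

struct Cache
{
  results : HashMap< u64, f64 >,
  cached_points : ( usize, usize ), // ( hits, misses )
}

impl Cache
{
  fn evaluate( &mut self, key : u64, objective : impl Fn( u64 ) -> f64 ) -> f64
  {
    if let Some( value ) = self.results.get( &key )
    {
      self.cached_points.0 += 1; // cache hit : reuse the stored value
      return *value;
    }
    let value = objective( key ); // cache miss : compute and remember
    self.cached_points.1 += 1;
    self.results.insert( key, value );
    value
  }
}
```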
- pub fn evaluate_point( &self, p : &Point ) -> f64 + pub fn evaluate_point( &self, p : &Point, stats : &mut Stats ) -> f64 { if let Constraints::WithConstraints( constraint_vec ) = &self.constraints { @@ -216,18 +216,20 @@ where R : RangeBounds< f64 > + Sync, if let Some( points ) = &self.calculated_results { - if let Some( value ) = points.get( &p ) + if let Some( value ) = points.get( &p.clone().into() ) { + stats.cached_points.0 += 1; return *value; } } let result = ( self.objective_function )( p ); + stats.cached_points.1 += 1; if let Some( file ) = &self.save_results_file { _ = save_result ( - p.coords.clone().into_iter().map( | val | val.into_inner() ).collect_vec(), + p.clone().into(), result, file.clone(), ); @@ -266,7 +268,7 @@ where R : RangeBounds< f64 > + Sync, } else { - self.start_point.coords = vec![ OrderedFloat( 0.0 ); size.len() ]; + self.start_point.coords = vec![ 0.0; size.len() ]; } } @@ -351,7 +353,7 @@ where R : RangeBounds< f64 > + Sync, } } } - Point::new_from_ordered( coords ) + Point::new( coords ) } fn calculate_regular_simplex( &mut self ) @@ -380,7 +382,7 @@ where R : RangeBounds< f64 > + Sync, } } - points.push( Point::new_from_ordered( coords ) ) + points.push( Point::new( coords ) ) } self.initial_simplex = Simplex { points } } @@ -485,17 +487,17 @@ where R : RangeBounds< f64 > + Sync, let results = points.into_par_iter().map( | point | { + let mut stats = Stats::new( point.clone() ); let x0 = point.clone(); let dimensions = x0.coords.len(); - let mut prev_best = self.evaluate_point( &x0 ); + let mut prev_best = self.evaluate_point( &x0, &mut stats ); let mut steps_with_no_improv = 0; let mut res = vec![ ( x0.clone(), prev_best ) ]; - let mut stats = Stats::new( point.clone() ); for i in 1..=dimensions { let x = self.initial_simplex.points[ i ].clone(); - let score = self.evaluate_point( &x ); + let score = self.evaluate_point( &x, &mut stats ); res.push( ( x, score ) ); } let mut iterations = 0; @@ -540,7 +542,7 @@ where R : RangeBounds< f64 > + Sync, } //centroid - let mut x0_center = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x0_center = vec![ 0.0; dimensions ]; for ( point, _ ) in res.iter().take( res.len() - 1 ) { for ( i, coordinate ) in point.coords.iter().enumerate() @@ -551,69 +553,74 @@ where R : RangeBounds< f64 > + Sync, //reflection let worst_dir = res.last().clone().unwrap(); - let mut x_ref = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_ref = vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_ref[ i ] = x0_center[ i ] + OrderedFloat( self.alpha ) * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); + x_ref[ i ] = x0_center[ i ] + self.alpha * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); } // check if point left the domain, if so, perform projection - let x_ref = self.check_bounds( Point::new_from_ordered( x_ref ) ); + let x_ref = self.check_bounds( Point::new( x_ref ) ); stats.record_diff( &self.start_point, &x_ref ); - let reflection_score = self.evaluate_point( &x_ref ); + let reflection_score = self.evaluate_point( &x_ref, &mut stats ); let second_worst = res[ res.len() - 2 ].1; if res[ 0 ].clone().1 <= reflection_score && reflection_score < second_worst { let prev_point = res.pop().unwrap().0; stats.record_positive_change( &prev_point, &x_ref ); res.push( ( x_ref, reflection_score ) ); + // log::info!("reflection"); continue; } //expansion if reflection_score < res[ 0 ].1 { - let mut x_exp = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_exp = vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_exp[ i ] = 
x0_center[ i ] + OrderedFloat( self.gamma ) * ( x_ref.coords[ i ] - x0_center[ i ] ); + x_exp[ i ] = x0_center[ i ] + self.gamma * ( x_ref.coords[ i ] - x0_center[ i ] ); } // check if point left the domain, if so, perform projection - let x_exp = self.check_bounds( Point::new_from_ordered( x_exp ) ); + let x_exp = self.check_bounds( Point::new( x_exp ) ); stats.record_diff( &self.start_point, &x_exp ); - let expansion_score = self.evaluate_point( &x_exp ); + let expansion_score = self.evaluate_point( &x_exp, &mut stats ); if expansion_score < reflection_score { let prev_point = res.pop().unwrap().0; stats.record_positive_change( &prev_point, &x_exp ); res.push( ( x_exp, expansion_score ) ); + // log::info!("expansion"); continue; + } else { let prev_point = res.pop().unwrap().0; stats.record_positive_change( &prev_point, &x_ref ); res.push( ( x_ref, reflection_score ) ); + // log::info!("expansion"); continue; } } //contraction - let mut x_con = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_con = vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_con[ i ] = x0_center[ i ] + OrderedFloat( self.rho ) * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); + x_con[ i ] = x0_center[ i ] + self.rho * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); } - let x_con = self.check_bounds( Point::new_from_ordered( x_con ) ); + let x_con = self.check_bounds( Point::new( x_con ) ); stats.record_diff( &self.start_point, &x_con ); - let contraction_score = self.evaluate_point( &x_con ); + let contraction_score = self.evaluate_point( &x_con, &mut stats ); if contraction_score < worst_dir.1 { let prev_point = res.pop().unwrap().0; stats.record_positive_change( &prev_point, &x_con ); res.push( ( x_con, contraction_score ) ); + // log::info!("contraction"); continue; } @@ -622,17 +629,17 @@ where R : RangeBounds< f64 > + Sync, let mut new_res = Vec::new(); for ( point, _ ) in res { - let mut x_shrink = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_shrink = vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_shrink[ i ] = x1.coords[ i ] + OrderedFloat( self.sigma ) * ( point.coords[ i ] - x1.coords[ i ] ); + x_shrink[ i ] = x1.coords[ i ] + self.sigma * ( point.coords[ i ] - x1.coords[ i ] ); } - let x_shrink = self.check_bounds( Point::new_from_ordered( x_shrink ) ); + let x_shrink = self.check_bounds( Point::new( x_shrink ) ); stats.record_diff( &self.start_point, &x_shrink ); - let score = self.evaluate_point( &x_shrink ); + let score = self.evaluate_point( &x_shrink, &mut stats ); new_res.push( ( x_shrink, score ) ); } - + // log::info!("shrink"); res = new_res; } } ).collect::< Vec<_> >(); @@ -645,6 +652,7 @@ where R : RangeBounds< f64 > + Sync, /// Optimize provided objective function with using initialized configuration. 
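With `OrderedFloat` removed from `Point`, every simplex step above operates directly on `f64`. A worked example of the reflection formula, `x_ref[ i ] = x0_center[ i ] + alpha * ( x0_center[ i ] - worst[ i ] )`, written as plain functions with assumed names:

```rust
// reflect the worst vertex through the centroid of the remaining vertices
fn reflect( centroid : &[ f64 ], worst : &[ f64 ], alpha : f64 ) -> Vec< f64 >
{
  centroid.iter().zip( worst ).map( | ( c, w ) | c + alpha * ( c - w ) ).collect()
}

fn main()
{
  let centroid = [ 1.0, 2.0 ];
  let worst = [ 3.0, 0.0 ];
  // with alpha = 1.0 the worst vertex is mirrored through the centroid
  assert_eq!( reflect( &centroid, &worst, 1.0 ), vec![ -1.0, 4.0 ] );
}
```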
pub fn optimize( &mut self ) -> Result< Solution, Error > { + let mut stats = Stats::new( self.start_point.clone() ); if self.start_point.coords.len() == 0 { self.calculate_start_point(); @@ -663,14 +671,14 @@ where R : RangeBounds< f64 > + Sync, let x0 = self.start_point.clone(); let dimensions = x0.coords.len(); - let mut prev_best = self.evaluate_point( &x0 ); + let mut prev_best = self.evaluate_point( &x0, &mut stats ); let mut steps_with_no_improv = 0; let mut res = vec![ ( x0.clone(), prev_best ) ]; for i in 1..=dimensions { let x = self.initial_simplex.points[ i ].clone(); - let score = self.evaluate_point( &x ); + let score = self.evaluate_point( &x, &mut stats ); res.push( ( x, score ) ); } let mut iterations = 0; @@ -715,7 +723,7 @@ where R : RangeBounds< f64 > + Sync, } //centroid - let mut x0_center = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x0_center = vec![ 0.0; dimensions ]; for ( point, _ ) in res.iter().take( res.len() - 1 ) { for ( i, coordinate ) in point.coords.iter().enumerate() @@ -726,15 +734,15 @@ where R : RangeBounds< f64 > + Sync, //reflection let worst_dir = res.last().clone().unwrap(); - let mut x_ref = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_ref = vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_ref[ i ] = x0_center[ i ] + OrderedFloat( self.alpha ) * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); + x_ref[ i ] = x0_center[ i ] + self.alpha * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); } // check if point left the domain, if so, perform projection - let x_ref = self.check_bounds( Point::new_from_ordered( x_ref ) ); + let x_ref = self.check_bounds( Point::new( x_ref ) ); - let reflection_score = self.evaluate_point( &x_ref ); + let reflection_score = self.evaluate_point( &x_ref, &mut stats ); let second_worst = res[ res.len() - 2 ].1; if res[ 0 ].clone().1 <= reflection_score && reflection_score < second_worst { @@ -746,14 +754,14 @@ where R : RangeBounds< f64 > + Sync, //expansion if reflection_score < res[ 0 ].1 { - let mut x_exp = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_exp = vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_exp[ i ] = x0_center[ i ] + OrderedFloat( self.gamma ) * ( x_ref.coords[ i ] - x0_center[ i ] ); + x_exp[ i ] = x0_center[ i ] + self.gamma * ( x_ref.coords[ i ] - x0_center[ i ] ); } // check if point left the domain, if so, perform projection - let x_exp = self.check_bounds( Point::new_from_ordered( x_exp ) ); - let expansion_score = self.evaluate_point( &x_exp ); + let x_exp = self.check_bounds( Point::new( x_exp ) ); + let expansion_score = self.evaluate_point( &x_exp, &mut stats ); if expansion_score < reflection_score { @@ -770,13 +778,13 @@ where R : RangeBounds< f64 > + Sync, } //contraction - let mut x_con = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_con = vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_con[ i ] = x0_center[ i ] + OrderedFloat( self.rho ) * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); + x_con[ i ] = x0_center[ i ] + self.rho * ( x0_center[ i ] - worst_dir.0.coords[ i ] ); } - let x_con = self.check_bounds( Point::new_from_ordered( x_con ) ); - let contraction_score = self.evaluate_point( &x_con ); + let x_con = self.check_bounds( Point::new( x_con ) ); + let contraction_score = self.evaluate_point( &x_con, &mut stats ); if contraction_score < worst_dir.1 { @@ -790,13 +798,13 @@ where R : RangeBounds< f64 > + Sync, let mut new_res = Vec::new(); for ( point, _ ) in res { - let mut x_shrink = vec![ OrderedFloat( 0.0 ); dimensions ]; + let mut x_shrink = 
vec![ 0.0; dimensions ]; for i in 0..dimensions { - x_shrink[ i ] = x1.coords[ i ] + OrderedFloat( self.sigma ) * ( point.coords[ i ] - x1.coords[ i ] ); + x_shrink[ i ] = x1.coords[ i ] + self.sigma * ( point.coords[ i ] - x1.coords[ i ] ); } - let x_shrink = self.check_bounds( Point::new_from_ordered( x_shrink ) ); - let score = self.evaluate_point( &x_shrink ); + let x_shrink = self.check_bounds( Point::new( x_shrink ) ); + let score = self.evaluate_point( &x_shrink, &mut stats ); new_res.push( ( x_shrink, score ) ); } diff --git a/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs b/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs index 432774d6cd..746fd9919a 100644 --- a/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs +++ b/module/move/optimization_tools/src/optimal_params_search/results_serialize.rs @@ -8,7 +8,7 @@ use std:: sync::{ Arc, Mutex }, }; use rkyv::{ Archive, Deserialize, Serialize } ; -use crate::optimal_params_search::nelder_mead::Point; +// use crate::optimal_params_search::nelder_mead::Point; #[ derive( Archive, Deserialize, Serialize, Debug ) ] #[ archive @@ -20,14 +20,14 @@ use crate::optimal_params_search::nelder_mead::Point; #[ archive_attr( derive( Debug ) ) ] struct ObjectiveFunctionValue { - point : Vec< f64 >, + point : ( f64, u32, f64, f64, u32, u32, u32 ), value : f64, } /// Save results of optimal parameters search. -pub fn save_result( point : Vec< f64 >, value : f64, file : Arc< Mutex< File > > ) -> Result< (), Box< dyn std::error::Error > > +pub fn save_result( point : super::Point, value : f64, file : Arc< Mutex< File > > ) -> Result< (), Box< dyn std::error::Error > > { - let obj_value = ObjectiveFunctionValue{ point, value }; + let obj_value = ObjectiveFunctionValue{ point : point.into(), value }; let bytes = rkyv::to_bytes::< _, 256 >( &obj_value ).unwrap(); let mut file = file.lock().unwrap(); @@ -38,7 +38,7 @@ pub fn save_result( point : Vec< f64 >, value : f64, file : Arc< Mutex< File > > } /// Read results from previous execution. -pub fn read_results( file_path : &str ) -> Result< HashMap< Point, f64 >, Box< dyn std::error::Error > > +pub fn read_results( file_path : &str ) -> Result< HashMap< super::Point, f64 >, Box< dyn std::error::Error > > { let read_file = OpenOptions::new().read( true ).open( file_path )?; let mut reader = BufReader::new( read_file ); @@ -58,7 +58,7 @@ pub fn read_results( file_path : &str ) -> Result< HashMap< Point, f64 >, Box< d let deserialized: Result< ObjectiveFunctionValue, _ > = archived.deserialize( &mut rkyv::Infallible ); if let Ok( deserialized ) = deserialized { - data.insert( Point::new( deserialized.point ), deserialized.value ); + data.insert( super::Point::from( deserialized.point ), deserialized.value ); } } diff --git a/module/move/optimization_tools/src/problems/sudoku/cell_val.rs b/module/move/optimization_tools/src/problems/sudoku/cell_val.rs index d9fd537b7d..f5b5394b95 100644 --- a/module/move/optimization_tools/src/problems/sudoku/cell_val.rs +++ b/module/move/optimization_tools/src/problems/sudoku/cell_val.rs @@ -2,11 +2,9 @@ //! use derive_tools::Display; -use derive_tools::{ Add, Sub, Mul, Div }; /// Represents the value of a cell in Sudoku. It can have a value from 1 to 9 or 0 if the cell is not assigned. 
#[ derive( Default, Debug, Display, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash ) ] -#[ derive( Add, Sub, Mul, Div ) ] pub struct CellVal( u8 ); impl CellVal diff --git a/module/move/optimization_tools/src/problems/sudoku/sudoku.rs b/module/move/optimization_tools/src/problems/sudoku/sudoku.rs index 816c57f68a..e71e3bb1d6 100644 --- a/module/move/optimization_tools/src/problems/sudoku/sudoku.rs +++ b/module/move/optimization_tools/src/problems/sudoku/sudoku.rs @@ -5,7 +5,6 @@ use crate::hybrid_optimizer::*; use crate::problems::sudoku::*; use derive_tools::{ FromInner, InnerFrom, Display }; -use derive_tools::{ Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign }; use deterministic_rand::{ Hrng, Rng, seq::SliceRandom }; use iter_tools::Itertools; @@ -91,7 +90,6 @@ pub fn cells_pair_random_in_block( initial : &Board, block : BlockIndex, hrng : /// Represents number of errors in sudoku board. #[ derive( Default, Debug, Display, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, FromInner, InnerFrom ) ] -#[ derive( Add, Sub, Mul, Div, AddAssign, SubAssign, MulAssign, DivAssign ) ] pub struct SudokuCost( usize ); // xxx : derive, please @@ -175,9 +173,9 @@ impl SudokuPerson log::trace!( "cells_swap( {:?}, {:?} )", mutagen.cell1, mutagen.cell2 ); self.board.cells_swap( mutagen.cell1, mutagen.cell2 ); - self.cost -= old_cross_error.into(); - self.cost += self.board.cross_error( mutagen.cell1 ).into(); - self.cost += self.board.cross_error( mutagen.cell2 ).into(); + self.cost = SudokuCost( self.cost.unwrap() - old_cross_error ) ; + self.cost = SudokuCost( self.cost.unwrap() + self.board.cross_error( mutagen.cell1 ) ); + self.cost = SudokuCost( self.cost.unwrap() + self.board.cross_error( mutagen.cell2 ) ); } /// Create random mutagen and apply it current board. 
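The `SudokuCost` hunks in this file replace the removed arithmetic derives (`Add`, `Sub`, ...) with explicit unwrap-and-rewrap updates of the inner `usize`. Below is a minimal, self-contained sketch of that newtype pattern, assuming only what the hunks show (a `SudokuCost( usize )` wrapper, an `unwrap` accessor and a `From< usize >` conversion); everything else here is illustrative and not the crate's actual API.

```rust
#[ derive( Debug, Clone, Copy, PartialEq, Eq ) ]
pub struct SudokuCost( usize );

impl SudokuCost
{
  /// Accessor mirroring the `unwrap()` calls in the hunks above ( name assumed ).
  pub fn unwrap( self ) -> usize { self.0 }
}

impl From< usize > for SudokuCost
{
  fn from( src : usize ) -> Self { Self( src ) }
}

fn main()
{
  let old_cross_error = 2_usize;
  let new_cross_error = 1_usize;
  let mut cost = SudokuCost::from( 10 );

  // Without `Sub`/`Add` on the newtype, the cost is updated by unwrapping,
  // adjusting the inner `usize` and wrapping the result back through the tuple constructor.
  cost = SudokuCost( cost.unwrap() - old_cross_error );
  cost = SudokuCost( cost.unwrap() + new_cross_error );

  assert_eq!( cost.unwrap(), 9 );
}
```

Rewrapping through the tuple constructor keeps the wrapper type free of operator impls while leaving the cost arithmetic explicit at each call site.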
@@ -279,9 +277,9 @@ impl MutationOperator for RandomPairInBlockMutation log::trace!( "cells_swap( {:?}, {:?} )", mutagen.cell1, mutagen.cell2 ); person.board.cells_swap( mutagen.cell1, mutagen.cell2 ); - person.cost -= old_cross_error.into(); - person.cost += person.board.cross_error( mutagen.cell1 ).into(); - person.cost += person.board.cross_error( mutagen.cell2 ).into(); + person.cost = SudokuCost( person.cost.unwrap() - old_cross_error ); + person.cost = SudokuCost( person.cost.unwrap() + person.board.cross_error( mutagen.cell1 ) ); + person.cost = SudokuCost( person.cost.unwrap() + person.board.cross_error( mutagen.cell2 ) ); } } diff --git a/module/move/optimization_tools/sudoku_results.md b/module/move/optimization_tools/sudoku_results.md index 94726f89b1..e346a06069 100644 --- a/module/move/optimization_tools/sudoku_results.md +++ b/module/move/optimization_tools/sudoku_results.md @@ -2,47 +2,68 @@ ## For hybrid: - - execution time: 0.311s + - max number of iterations: 100 + + - max no improvement iterations : 10 + + - improvement threshold : 0.005s + + - calculated points: 19 from 48 + + - points from cache: 29 from 48 - level: Easy + - execution time: 0.154s + - parameters: ``` ┌─────────────┬────────┬────────┬─────────┬─────────────┬──────────┬─────────┬────────┐ │ │ start │ min │ max │ sum of diff │ expected │ changes │ final │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ temperature │ 0.8561 │ 0.00 │ 1.00 │ 0.93 │ 0.02 │ 6 │ 0.9787 │ +│ temperature │ 0.8561 │ 0.00 │ 1.00 │ 0.02 │ 0.00 │ 9 │ 0.9995 │ │ decrease │ │ │ │ │ │ │ │ │ coefficient │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 106 │ 10.00 │ 200.00 │ 318.95 │ 6.38 │ 6 │ 107 │ +│ max │ 106 │ 10.00 │ 200.00 │ 311.97 │ 7.43 │ 9 │ 108 │ │ mutations │ │ │ │ │ │ │ │ │ per │ │ │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ mutation │ 0.42 │ 0.00 │ 1.00 │ 2.60 │ 0.05 │ 6 │ 0.31 │ +│ mutation │ 0.42 │ 0.00 │ 1.00 │ 1.31 │ 0.03 │ 9 │ 0.23 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ crossover │ 0.66 │ 0.00 │ 1.00 │ 3.93 │ 0.08 │ 6 │ 0.58 │ +│ crossover │ 0.66 │ 0.00 │ 1.00 │ 1.70 │ 0.04 │ 9 │ 0.54 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ elitism │ -0.09 │ - │ - │ - │ - │ - │ 0.11 │ +│ elitism │ -0.09 │ - │ - │ - │ - │ - │ 0.23 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 81 │ 1.00 │ 100.00 │ 474.07 │ 9.48 │ 6 │ 38 │ +│ max │ 81 │ 1.00 │ 100.00 │ 1404.93 │ 33.45 │ 9 │ 62 │ │ stale │ │ │ │ │ │ │ │ │ iterations │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ population │ 116 │ 1.00 │ 1000.00 │ 9216.57 │ 184.33 │ 6 │ 77 │ +│ population │ 116 │ 1.00 │ 1000.00 │ 9233.07 │ 219.83 │ 9 │ 3 │ │ size │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ dynasties │ 249 │ 100.00 │ 2000.00 │ 2423.08 │ 48.46 │ 6 │ 984 │ +│ dynasties │ 249 │ 100.00 │ 2000.00 │ 19863.18 │ 472.93 │ 9 │ 1486 │ │ limit │ │ │ │ │ │ │ │ └─────────────┴────────┴────────┴─────────┴─────────────┴──────────┴─────────┴────────┘ ``` +#### List: + + + - `max number of iterations` : limit of total iterations of optimization process, termination condition 
+ - `max no improvement iterations` : max amount of steps performed without detected improvement, termination condition + - `improvement threshold` : minimal value detected as improvement in objective function result + - `calculated points` : new calculated points that were not found in cache + - `points from cache` : points calculated during previous optimizations and read from cache + - `level` : sudoku board difficulty level + - `execution time` : duration of shortest found hybrid optimization process using final parameters, measured in seconds +#### Table: - `start` : initial value of parameter in starting point - `min` : lower bound of parameter - `max` : upper bound of parameter @@ -52,21 +73,31 @@ - `final` : calculated value of parameter for which execution time was the lowest ## For SA: - - execution time: 0.034s + - max number of iterations: 100 + + - max no improvement iterations : 10 + + - improvement threshold : 0.005s + + - calculated points: 0 from 22 + + - points from cache: 22 from 22 - level: Easy + - execution time: 0.019s + - parameters: ``` ┌─────────────┬────────┬────────┬─────────┬─────────────┬──────────┬─────────┬────────┐ │ │ start │ min │ max │ sum of diff │ expected │ changes │ final │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ temperature │ 0.0660 │ 0.00 │ 1.00 │ 3.08 │ 0.06 │ 6 │ 0.9657 │ +│ temperature │ 0.8244 │ 0.00 │ 1.00 │ 0.48 │ 0.03 │ 12 │ 0.9554 │ │ decrease │ │ │ │ │ │ │ │ │ coefficient │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 108 │ 10.00 │ 200.00 │ 126.76 │ 2.49 │ 6 │ 102 │ +│ max │ 157 │ 10.00 │ 200.00 │ 261.00 │ 18.64 │ 12 │ 116 │ │ mutations │ │ │ │ │ │ │ │ │ per │ │ │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ @@ -74,25 +105,36 @@ │ mutation │ 1.00 │ 1.00 │ 1.00 │ 0.00 │ 0.00 │ 0 │ 1.00 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ crossover │ 0.00 │ 0.00 │ 0.00 │ 0.00 │ 0.00 │ 0 │ 0.00 │ +│ crossover │ 0.00 │ 0.00 │ 0.00 │ 0.00 │ 0.00 │ 1 │ 0.00 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ │ elitism │ -0.00 │ - │ - │ - │ - │ - │ 0.00 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 47 │ 1.00 │ 100.00 │ 89.91 │ 1.76 │ 6 │ 30 │ +│ max │ 67 │ 1.00 │ 100.00 │ 214.24 │ 15.30 │ 12 │ 39 │ │ stale │ │ │ │ │ │ │ │ │ iterations │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ │ population │ 1 │ 1.00 │ 1.00 │ 0.00 │ 0.00 │ 0 │ 1 │ │ size │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ dynasties │ 4974 │ 100.00 │ 5000.00 │ 21180.01 │ 415.29 │ 6 │ 1216 │ +│ dynasties │ 3455 │ 100.00 │ 5000.00 │ 13134.94 │ 938.21 │ 12 │ 1646 │ │ limit │ │ │ │ │ │ │ │ └─────────────┴────────┴────────┴─────────┴─────────────┴──────────┴─────────┴────────┘ ``` +#### List: + + + - `max number of iterations` : limit of total iterations of optimization process, termination condition + - `max no improvement iterations` : max amount of steps performed without detected improvement, termination condition + - `improvement threshold` : minimal value detected as improvement in objective function result + - `calculated points` : new calculated points that were not found in cache + - `points from cache` : points calculated during previous optimizations and read 
from cache + - `level` : sudoku board difficulty level + - `execution time` : duration of shortest found hybrid optimization process using final parameters, measured in seconds +#### Table: - `start` : initial value of parameter in starting point - `min` : lower bound of parameter - `max` : upper bound of parameter @@ -102,47 +144,68 @@ - `final` : calculated value of parameter for which execution time was the lowest ## For GA: - - execution time: 0.264s + - max number of iterations: 100 + + - max no improvement iterations : 10 + + - improvement threshold : 0.005s + + - calculated points: 81 from 120 + + - points from cache: 39 from 120 - level: Easy + - execution time: 0.263s + - parameters: ``` ┌─────────────┬────────┬────────┬─────────┬─────────────┬──────────┬─────────┬────────┐ │ │ start │ min │ max │ sum of diff │ expected │ changes │ final │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ temperature │ 0.4043 │ 0.00 │ 1.00 │ 0.51 │ 0.03 │ 10 │ 1.0000 │ +│ temperature │ 0.6847 │ 0.00 │ 1.00 │ 0.45 │ 0.00 │ 36 │ 0.9995 │ │ decrease │ │ │ │ │ │ │ │ │ coefficient │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 37 │ 10.00 │ 200.00 │ 335.93 │ 21.00 │ 10 │ 118 │ +│ max │ 174 │ 10.00 │ 200.00 │ 514.31 │ 4.40 │ 36 │ 97 │ │ mutations │ │ │ │ │ │ │ │ │ per │ │ │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ mutation │ 0.24 │ 0.10 │ 1.00 │ 0.56 │ 0.03 │ 10 │ 0.17 │ +│ mutation │ 0.78 │ 0.10 │ 1.00 │ 5.51 │ 0.05 │ 36 │ 0.22 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ crossover │ 0.94 │ 0.10 │ 1.00 │ 2.44 │ 0.15 │ 10 │ 0.74 │ +│ crossover │ 0.73 │ 0.10 │ 1.00 │ 2.09 │ 0.02 │ 36 │ 0.51 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ elitism │ -0.18 │ - │ - │ - │ - │ - │ 0.08 │ +│ elitism │ -0.52 │ - │ - │ - │ - │ - │ 0.26 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 30 │ 1.00 │ 100.00 │ 25.45 │ 1.59 │ 10 │ 32 │ +│ max │ 29 │ 1.00 │ 100.00 │ 134.61 │ 1.15 │ 36 │ 31 │ │ stale │ │ │ │ │ │ │ │ │ iterations │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ population │ 1102 │ 10.00 │ 2000.00 │ 8803.52 │ 550.22 │ 10 │ 77 │ +│ population │ 846 │ 10.00 │ 2000.00 │ 24289.87 │ 207.61 │ 36 │ 84 │ │ size │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ dynasties │ 439 │ 100.00 │ 2000.00 │ 3596.94 │ 224.81 │ 10 │ 1221 │ +│ dynasties │ 859 │ 100.00 │ 2000.00 │ 8440.12 │ 72.14 │ 36 │ 1075 │ │ limit │ │ │ │ │ │ │ │ └─────────────┴────────┴────────┴─────────┴─────────────┴──────────┴─────────┴────────┘ ``` +#### List: + + + - `max number of iterations` : limit of total iterations of optimization process, termination condition + - `max no improvement iterations` : max amount of steps performed without detected improvement, termination condition + - `improvement threshold` : minimal value detected as improvement in objective function result + - `calculated points` : new calculated points that were not found in cache + - `points from cache` : points calculated during previous optimizations and read from cache + - `level` : sudoku board difficulty level + - `execution time` : duration of shortest found 
hybrid optimization process using final parameters, measured in seconds +#### Table: - `start` : initial value of parameter in starting point - `min` : lower bound of parameter - `max` : upper bound of parameter @@ -158,11 +221,11 @@ │ │ coefficient │ per │ │ │ │ iterations │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ ├────────┼─────────────┼───────────┼──────────┼───────────┼─────────┼────────────┼────────────┼───────────┼───────────┤ -│ hybrid │ 0.9787 │ 107 │ 0.31 │ 0.58 │ 0.11 │ 38 │ 77 │ 984 │ 0.311s │ +│ hybrid │ 0.9995 │ 108 │ 0.23 │ 0.54 │ 0.23 │ 62 │ 3 │ 1486 │ 0.154s │ ├────────┼─────────────┼───────────┼──────────┼───────────┼─────────┼────────────┼────────────┼───────────┼───────────┤ -│ SA │ 0.9657 │ 102 │ 1.00 │ 0.00 │ 0.00 │ 30 │ 1 │ 1216 │ 0.034s │ +│ SA │ 0.9554 │ 116 │ 1.00 │ 0.00 │ 0.00 │ 39 │ 1 │ 1646 │ 0.019s │ ├────────┼─────────────┼───────────┼──────────┼───────────┼─────────┼────────────┼────────────┼───────────┼───────────┤ -│ GA │ 1.0000 │ 118 │ 0.17 │ 0.74 │ 0.08 │ 32 │ 77 │ 1221 │ 0.264s │ +│ GA │ 0.9995 │ 97 │ 0.22 │ 0.51 │ 0.26 │ 31 │ 84 │ 1075 │ 0.263s │ └────────┴─────────────┴───────────┴──────────┴───────────┴─────────┴────────────┴────────────┴───────────┴───────────┘ ``` @@ -176,3 +239,8 @@ - `population size` : number of individuals in population - `dynasties limit` : max number of dynasties of new solutions produced during optimization process, terminates if exceeded - `execution time` : time spent searching for optimal solution, measured in seconds +## To run: + - Sudoku problem: +`cargo test -- --ignored find_opt_params_sudoku` + - Traveling salesman problem: +`cargo test -- --ignored find_opt_params_tsp` diff --git a/module/move/optimization_tools/tests/board.rs b/module/move/optimization_tools/tests/board.rs index 8d011baefe..e3b62ac413 100644 --- a/module/move/optimization_tools/tests/board.rs +++ b/module/move/optimization_tools/tests/board.rs @@ -166,7 +166,7 @@ fn select_mut() let mut board = Board::default(); let indices = board.block_cells( ( 0, 0 ).into() ); - board.select_mut( indices ).for_each( | e | *e = *e + 1.into() ); + board.select_mut( indices ).for_each( | e | *e = CellVal::from( e.unwrap() + 1 ) ); let indices = board.block_cells( ( 0, 0 ).into() ); let got : Vec< CellVal > = board.select( indices ).collect(); let exp : Vec< CellVal > = each_into([ 4, 2, 1, 1, 1, 7, 1, 1, 1 ]).collect(); diff --git a/module/move/optimization_tools/tests/nelder_mead.rs b/module/move/optimization_tools/tests/nelder_mead.rs index f910a6e239..f913ec64c2 100644 --- a/module/move/optimization_tools/tests/nelder_mead.rs +++ b/module/move/optimization_tools/tests/nelder_mead.rs @@ -6,7 +6,7 @@ use optimal_params_search::nelder_mead; #[ test ] fn power_two() -> Result< (), nelder_mead::Error > { - let f = | x : &nelder_mead::Point | ( x.coords[ 0 ] * x.coords[ 0 ] ).into_inner(); + let f = | x : &nelder_mead::Point | ( x.coords[ 0 ] * x.coords[ 0 ] ); let mut optimizer = nelder_mead::Optimizer::new( f ); optimizer.bounds = vec![ Some( -1.0..=8.0 ), Some( 2.0..=4.0 ), Some( 3.0..=6.0 ) ]; optimizer.start_point = nelder_mead::Point::new( vec![ 3.0, 3.0, 3.0 ] ); @@ -27,7 +27,7 @@ fn sin_cos() -> Result< (), nelder_mead::Error > let res = optimizer.optimize()?; - assert!( ( -1.5808971014312196 - res.point.coords[ 0 ].into_inner() ).abs() < 10e-5 ); + assert!( ( -1.5808971014312196 - res.point.coords[ 0 ] ).abs() < 10e-5 ); assert!( ( -1.0 - res.objective ).abs() <= 10e-5 ); Ok( () ) @@ -36,56 +36,24 @@ fn sin_cos() -> Result< (), nelder_mead::Error > #[ test ] fn 
rosenbrock() -> Result< (), nelder_mead::Error > { - let f = | x : &nelder_mead::Point | ( 1.0 - x.coords[ 0 ].into_inner() ).powi( 2 ) + 100.0 * ( x.coords[ 1 ] - x.coords[ 0 ].powi( 2 )).powi( 2 ) ; + let f = | x : &nelder_mead::Point | ( 1.0 - x.coords[ 0 ] ).powi( 2 ) + 100.0 * ( x.coords[ 1 ] - x.coords[ 0 ].powi( 2 )).powi( 2 ) ; let mut optimizer: nelder_mead::Optimizer< Range< f64 >, _ > = nelder_mead::Optimizer::new( f ); optimizer.start_point = nelder_mead::Point::new( vec![ 0.0, 0.0 ] ); optimizer.set_simplex_size( vec![ Some( 0.1 ), Some( 0.1 ) ] ); let res = optimizer.optimize()?; - assert!( ( 1.0 - res.point.coords[ 0 ].into_inner() ).abs() < 10e-5 ); - assert!( ( 1.0 - res.point.coords[ 1 ].into_inner() ).abs() < 10e-5 ); + assert!( ( 1.0 - res.point.coords[ 0 ] ).abs() < 10e-5 ); + assert!( ( 1.0 - res.point.coords[ 1 ] ).abs() < 10e-5 ); assert!( res.objective < 10e-5 ); Ok( () ) } -// #[ test ] -// fn rosenbrock_extended() -> Result< (), nelder_mead::Error > -// { - -// let f = | x : &nelder_mead::Point | -// { -// let mut y = 0.0; -// for i in 0..30 -// { -// y += ( 1.0 - x.coords[ i ].into_inner() ).powi( 2 ) + 100.0 * ( x.coords[ i + 1 ] - x.coords[ i ].powi( 2 )).powi( 2 ) -// } -// y -// }; -// let mut optimizer: nelder_mead::Optimizer< Range< f64 >, _ > = nelder_mead::Optimizer::new( f ); -// optimizer.start_point = nelder_mead::Point::new( vec![ 10.0; 31 ] ); -// optimizer.set_simplex_size( vec![ Some( 0.1 ); 31 ] ); - -// let start1 = std::time::Instant::now(); -// let res1 = optimizer.optimize()?; -// let _elapsed1 = start1.elapsed(); - -// let start2 = std::time::Instant::now(); -// //let res2 = optimizer.optimize_parallel_by_direction()?; -// let _elapsed2 = start2.elapsed(); - -// //assert_eq!( elapsed1.as_nanos(), elapsed2.as_nanos() ); - -// assert_eq!( res1.objective, res2.objective ); - -// Ok( () ) -// } - #[ test ] fn himmelblau() -> Result< (), nelder_mead::Error > { - let f = | x : &nelder_mead::Point | ( x.coords[ 0 ].powi( 2 ) + x.coords[ 1 ].into_inner() - 11.0 ).powi( 2 ) + ( x.coords[ 0 ] + x.coords[ 1 ].powi( 2 ) - 7.0 ).powi( 2 ) ; + let f = | x : &nelder_mead::Point | ( x.coords[ 0 ].powi( 2 ) + x.coords[ 1 ] - 11.0 ).powi( 2 ) + ( x.coords[ 0 ] + x.coords[ 1 ].powi( 2 ) - 7.0 ).powi( 2 ) ; let mut optimizer: nelder_mead::Optimizer< Range< f64 >, _ > = nelder_mead::Optimizer::new( f ); optimizer.start_point = nelder_mead::Point::new( vec![ 0.0, 0.0 ] ); optimizer.set_simplex_size( vec![ Some( 0.1 ); 2 ] ); @@ -96,7 +64,7 @@ fn himmelblau() -> Result< (), nelder_mead::Error > for minima in [ ( 3.0, 2.0 ), ( -2.805118, 3.131312 ), ( -3.779310, -3.283186 ), ( 3.584428, -1.848126 ) ] { - if ( ( minima.0 - res.point.coords[ 0 ].into_inner() ).abs() < 10e-5 ) && ( ( minima.1 - res.point.coords[ 1 ].into_inner() ).abs() < 10e-5 ) + if ( ( minima.0 - res.point.coords[ 0 ] ).abs() < 10e-5 ) && ( ( minima.1 - res.point.coords[ 1 ] ).abs() < 10e-5 ) { is_one_of_minima_points = true; } diff --git a/module/move/optimization_tools/tests/opt_params.rs b/module/move/optimization_tools/tests/opt_params.rs index ba28eda2f2..ad4250ad9e 100644 --- a/module/move/optimization_tools/tests/opt_params.rs +++ b/module/move/optimization_tools/tests/opt_params.rs @@ -11,6 +11,24 @@ use tabled::{ builder::Builder, settings::Style }; mod tools; use tools::*; +pub struct Statistics +{ + pub table_params : Vec< Vec< String > >, + pub list_params : Vec< ( String, String ) >, +} + +impl Statistics +{ + pub fn new() -> Self + { + Self + { + table_params : Vec::new(), + 
list_params : Vec::new(), + } + } +} + fn named_results_list< R : RangeBounds< f64 > > ( params : Vec< f64 >, @@ -30,13 +48,13 @@ fn named_results_list< R : RangeBounds< f64 > > let mut start_params = Vec::new(); start_params.push( format!( "{:.4}", stats.starting_point.coords[ 0 ] ) ); - start_params.push( format!( "{:?}", stats.starting_point.coords[ 1 ].into_inner() as usize ) ); + start_params.push( format!( "{:?}", stats.starting_point.coords[ 1 ] as usize ) ); start_params.push( format!( "{:.2}", stats.starting_point.coords[ 2 ] ) ); start_params.push( format!( "{:.2}", stats.starting_point.coords[ 3 ] ) ); - start_params.push( format!( "{:.2}", ( 1.0 - stats.starting_point.coords[ 2 ].into_inner() - stats.starting_point.coords[ 3 ].into_inner() ) ) ); - start_params.push( format!( "{}", stats.starting_point.coords[ 4 ].into_inner() as usize ) ); - start_params.push( format!( "{}", stats.starting_point.coords[ 5 ].into_inner() as usize ) ); - start_params.push( format!( "{}", stats.starting_point.coords[ 6 ].into_inner() as usize ) ); + start_params.push( format!( "{:.2}", ( 1.0 - stats.starting_point.coords[ 2 ] - stats.starting_point.coords[ 3 ] ) ) ); + start_params.push( format!( "{}", stats.starting_point.coords[ 4 ] as usize ) ); + start_params.push( format!( "{}", stats.starting_point.coords[ 5 ] as usize ) ); + start_params.push( format!( "{}", stats.starting_point.coords[ 6 ] as usize ) ); let params_name = [ @@ -143,15 +161,13 @@ fn named_results_list< R : RangeBounds< f64 > > list } -type ResWithStats = Vec< Vec< String > >; - fn write_results ( filename : String, title : String, - mut hybrid_res : ResWithStats, - mut sa_res : ResWithStats, - mut ga_res : ResWithStats, + mut hybrid_res : Statistics, + mut sa_res : Statistics, + mut ga_res : Statistics, ) -> Result< (), std::io::Error > { let mut file = std::fs::File::create( format!( "{}.md", filename ) )?; @@ -160,14 +176,14 @@ fn write_results for ( mode, params ) in &mut [ ( "hybrid", &mut hybrid_res ), ( "SA", &mut sa_res ), ( "GA", &mut ga_res ) ] { std::io::Write::write(&mut file, format!( "## For {}:\n\n", mode ).as_bytes() )?; - let exec_time = params.last().unwrap(); - std::io::Write::write(&mut file, format!( " - {}: {}\n\n", exec_time[ 0 ], exec_time[ 1 ] ).as_bytes() )?; - let level = params[ params.len() - 2 ].clone(); - std::io::Write::write(&mut file, format!( " - {}: {}\n\n", level[ 0 ], level[ 1 ] ).as_bytes() )?; + for param in ¶ms.list_params + { + std::io::Write::write(&mut file, format!( " - {}: {}\n\n", param.0, param.1 ).as_bytes() )?; + } + std::io::Write::write(&mut file, format!( " - parameters: \n\n" ).as_bytes() )?; let mut builder = Builder::default(); - let head_row = [ "", "start", "min", "max", "sum of diff", "expected", "changes", "final" ] .into_iter() .map( str::to_owned ) @@ -176,20 +192,20 @@ fn write_results builder.push_record( head_row.clone() ); - for i in 0..params.len() - 2 + for i in 0..params.table_params.len() { let mut row = Vec::new(); if *mode == "SA" && [ 2, 3, 4, 6 ].contains( &i ) { - row.push( format!( "{}", params[ i ][ 0 ].clone().replace( " ", "\n") ) ); + row.push( format!( "{}", params.table_params[ i ][ 0 ].clone().replace( " ", "\n") ) ); } else { - row.push( params[ i ][ 0 ].clone().replace( " ", "\n") ); + row.push( params.table_params[ i ][ 0 ].clone().replace( " ", "\n") ); } - row.extend( params[ i ].iter().skip( 1 ).cloned() ); + row.extend( params.table_params[ i ].iter().skip( 1 ).cloned() ); builder.push_record( row ); } @@ -198,6 +214,30 @@ fn 
write_results std::io::Write::write( &mut file, format!( "```\n{}\n```", table ).as_bytes() )?; std::io::Write::write( &mut file, format!("\n\n\n" ).as_bytes() )?; + std::io::Write::write(&mut file, format!( "#### List:\n" ).as_bytes() )?; + let problem_level = if params.list_params[ params.list_params.len() - 2 ].0 == String::from( "level" ) + { + " - `level` : sudoku board difficulty level\n" + } + else + { + " - `number of nodes` : number of nodes in graph representing cities from traveling salesman problem\n" + }; + + let list_legend = concat! + ( + "\n\n", + " - `max number of iterations` : limit of total iterations of optimization process, termination condition\n", + " - `max no improvement iterations` : max amount of steps performed without detected improvement, termination condition\n", + " - `improvement threshold` : minimal value detected as improvement in objective function result\n", + " - `calculated points` : new calculated points that were not found in cache\n", + " - `points from cache` : points calculated during previous optimizations and read from cache\n", + ); + + std::io::Write::write(&mut file, list_legend.as_bytes() )?; + std::io::Write::write(&mut file, problem_level.as_bytes() )?; + std::io::Write::write(&mut file, b" - `execution time` : duration of shortest found hybrid optimization process using final parameters, measured in seconds\n" )?; + std::io::Write::write(&mut file, format!( "#### Table:\n" ).as_bytes() )?; let str_legend = concat! ( " - `start` : initial value of parameter in starting point\n", @@ -216,9 +256,9 @@ fn write_results std::io::Write::write(&mut file, format!( "## Summary:\n" ).as_bytes() )?; let mut builder = Builder::default(); let mut headers = vec![ String::from( "mode" ) ]; - for i in 0..hybrid_res.len() - 2 + for i in 0..hybrid_res.table_params.len() { - headers.push( hybrid_res[ i ][ 0 ].clone().replace( " ", "\n") ); + headers.push( hybrid_res.table_params[ i ][ 0 ].clone().replace( " ", "\n") ); } headers.push( String::from( "execution\ntime" ) ); @@ -227,7 +267,7 @@ fn write_results for ( mode, params ) in [ ( "hybrid", &hybrid_res ), ( "SA", &sa_res ), ( "GA", &ga_res ) ] { let mut row = Vec::new(); - for i in 0..params.len() - 1 + for i in 0..params.table_params.len() + 1 { if i == 0 { @@ -235,10 +275,10 @@ fn write_results } else { - row.push( params[ i - 1 ].last().unwrap().clone() ); + row.push( params.table_params[ i - 1 ].last().unwrap().clone() ); } } - row.push( params.last().unwrap()[ 1 ].clone() ); + row.push( params.list_params.last().unwrap().1.clone() ); builder.push_record( row ); } @@ -260,9 +300,14 @@ fn write_results " - `dynasties limit` : max number of dynasties of new solutions produced during optimization process, terminates if exceeded\n", " - `execution time` : time spent searching for optimal solution, measured in seconds\n", ); - std::io::Write::write( &mut file, final_legend.as_bytes() )?; + std::io::Write::write(&mut file, format!( "## To run:\n" ).as_bytes() )?; + std::io::Write::write( &mut file, b" - Sudoku problem:\n" )?; + std::io::Write::write( &mut file, b"`cargo test -- --ignored find_opt_params_sudoku`\n" )?; + std::io::Write::write( &mut file, b" - Traveling salesman problem:\n" )?; + std::io::Write::write( &mut file, b"`cargo test -- --ignored find_opt_params_tsp`\n" )?; + Ok( () ) } @@ -308,20 +353,32 @@ fn find_opt_params_sudoku() -> Result< (), Box< dyn std::error::Error > > ); assert!( res.is_ok() ); - let mut hybrid_res = Vec::new(); + let mut hybrid_res = Statistics::new(); if let Ok( 
solution ) = res { - hybrid_res = named_results_list - ( - solution.point.coords - .into_iter() - .map( | val | val.into_inner() ) - .collect_vec(), - solution.stats.unwrap(), - starting_params.bounds, - ); - hybrid_res.push( vec![ String::from( "level" ), format!( "{:?}", Board::from( easy ).calculate_level() ) ] ); - hybrid_res.push( vec![ String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ] ); + let cached = solution.stats.clone().unwrap().cached_points; + hybrid_res = Statistics + { + table_params : named_results_list + ( + solution.point.coords + .into_iter() + .map( | val | val ) + .collect_vec(), + solution.stats.unwrap(), + starting_params.bounds, + ), + list_params : vec! + [ + ( String::from( "max number of iterations" ), format!( "{}", config.max_iterations ) ), + ( String::from( "max no improvement iterations " ), format!( "{}", config.max_no_improvement_steps ) ), + ( String::from( "improvement threshold " ), format!( "{}s", config.improvement_threshold ) ), + ( String::from( "calculated points" ), format!( "{} from {}", cached.1, cached.1 + cached.0 ) ), + ( String::from( "points from cache" ), format!( "{} from {}", cached.0, cached.1 + cached.0 ) ), + ( String::from( "level" ), format!( "{:?}", Board::from( easy ).calculate_level() ) ), + ( String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ), + ] + } } // SA @@ -341,17 +398,32 @@ fn find_opt_params_sudoku() -> Result< (), Box< dyn std::error::Error > > ); assert!( res.is_ok() ); - let mut sa_res = Vec::new(); + let mut sa_res = Statistics::new(); if let Ok( solution ) = res { - sa_res = named_results_list - ( - solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), - solution.stats.unwrap(), - starting_params.bounds, - ); - sa_res.push( vec![ String::from( "level" ), format!( "{:?}", Board::from( easy ).calculate_level() ) ] ); - sa_res.push( vec![ String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ] ); + let cached = solution.stats.clone().unwrap().cached_points; + sa_res = Statistics + { + table_params : named_results_list + ( + solution.point.coords + .into_iter() + .map( | val | val ) + .collect_vec(), + solution.stats.unwrap(), + starting_params.bounds, + ), + list_params : vec! 
+ [ + ( String::from( "max number of iterations" ), format!( "{}", config.max_iterations ) ), + ( String::from( "max no improvement iterations " ), format!( "{}", config.max_no_improvement_steps ) ), + ( String::from( "improvement threshold " ), format!( "{}s", config.improvement_threshold ) ), + ( String::from( "calculated points" ), format!( "{} from {}", cached.1, cached.1 + cached.0 ) ), + ( String::from( "points from cache" ), format!( "{} from {}", cached.0, cached.1 + cached.0 ) ), + ( String::from( "level" ), format!( "{:?}", Board::from( easy ).calculate_level() ) ), + ( String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ), + ] + } } // GA @@ -363,24 +435,39 @@ fn find_opt_params_sudoku() -> Result< (), Box< dyn std::error::Error > > let starting_params = hybrid_optimizer::starting_params_for_ga()?; let res = optimal_params_search::find_hybrid_optimal_params ( - config, + config.clone(), starting_params.clone(), hybrid_problem, Some( path ), ); assert!( res.is_ok() ); - let mut ga_res = Vec::new(); + let mut ga_res = Statistics::new(); if let Ok( solution ) = res { - ga_res = named_results_list - ( - solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), - solution.stats.unwrap(), - starting_params.bounds, - ); - ga_res.push( vec![ String::from( "level" ), format!( "{:?}", Board::from( easy ).calculate_level() ) ] ); - ga_res.push( vec![ String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ] ); + let cached = solution.stats.clone().unwrap().cached_points; + ga_res = Statistics + { + table_params : named_results_list + ( + solution.point.coords + .into_iter() + .map( | val | val ) + .collect_vec(), + solution.stats.unwrap(), + starting_params.bounds, + ), + list_params : vec! + [ + ( String::from( "max number of iterations" ), format!( "{}", config.max_iterations ) ), + ( String::from( "max no improvement iterations " ), format!( "{}", config.max_no_improvement_steps ) ), + ( String::from( "improvement threshold " ), format!( "{}s", config.improvement_threshold ) ), + ( String::from( "calculated points" ), format!( "{} from {}", cached.1, cached.1 + cached.0 ) ), + ( String::from( "points from cache" ), format!( "{} from {}", cached.0, cached.1 + cached.0 ) ), + ( String::from( "level" ), format!( "{:?}", Board::from( easy ).calculate_level() ) ), + ( String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ), + ] + } } write_results( String::from( "sudoku_results" ), String::from( "Sudoku Problem" ), hybrid_res, sa_res, ga_res )?; Ok( () ) @@ -416,17 +503,32 @@ fn find_opt_params_tsp() -> Result< (), Box< dyn std::error::Error > > Some( path.clone() ), ); assert!( res.is_ok() ); - let mut hybrid_res = Vec::new(); + let mut hybrid_res = Statistics::new(); if let Ok( solution ) = res { - hybrid_res = named_results_list - ( - solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), - solution.stats.unwrap(), - starting_params.bounds, - ); - hybrid_res.push( vec![ String::from( "number of nodes" ), number_of_nodes.to_string() ] ); - hybrid_res.push( vec![ String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ] ); + let cached = solution.stats.clone().unwrap().cached_points; + hybrid_res = Statistics + { + table_params : named_results_list + ( + solution.point.coords + .into_iter() + .map( | val | val ) + .collect_vec(), + solution.stats.unwrap(), + starting_params.bounds, + ), + list_params : vec! 
+ [ + ( String::from( "max number of iterations" ), format!( "{}", config.max_iterations ) ), + ( String::from( "max no improvement iterations " ), format!( "{}", config.max_no_improvement_steps ) ), + ( String::from( "improvement threshold " ), format!( "{}s", config.improvement_threshold ) ), + ( String::from( "calculated points" ), format!( "{} from {}", cached.1, cached.1 + cached.0 ) ), + ( String::from( "points from cache" ), format!( "{} from {}", cached.0, cached.1 + cached.0 ) ), + ( String::from( "number of nodes" ), format!( "{}", number_of_nodes ) ), + ( String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ), + ] + } } // SA @@ -443,17 +545,32 @@ fn find_opt_params_tsp() -> Result< (), Box< dyn std::error::Error > > Some( path.clone() ), ); assert!( res.is_ok() ); - let mut sa_res = Vec::new(); + let mut sa_res = Statistics::new(); if let Ok( solution ) = res { - sa_res = named_results_list - ( - solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), - solution.stats.unwrap(), - starting_params.bounds, - ); - sa_res.push( vec![ String::from( "number of nodes" ), number_of_nodes.to_string() ] ); - sa_res.push( vec![ String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ] ); + let cached = solution.stats.clone().unwrap().cached_points; + sa_res = Statistics + { + table_params : named_results_list + ( + solution.point.coords + .into_iter() + .map( | val | val ) + .collect_vec(), + solution.stats.unwrap(), + starting_params.bounds, + ), + list_params : vec! + [ + ( String::from( "max number of iterations" ), format!( "{}", config.max_iterations ) ), + ( String::from( "max no improvement iterations " ), format!( "{}", config.max_no_improvement_steps ) ), + ( String::from( "improvement threshold " ), format!( "{}s", config.improvement_threshold ) ), + ( String::from( "calculated points" ), format!( "{} from {}", cached.1, cached.1 + cached.0 ) ), + ( String::from( "points from cache" ), format!( "{} from {}", cached.0, cached.1 + cached.0 ) ), + ( String::from( "number of nodes" ), format!( "{}", number_of_nodes ) ), + ( String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ), + ] + } } // GA @@ -464,23 +581,39 @@ fn find_opt_params_tsp() -> Result< (), Box< dyn std::error::Error > > ); let starting_params = hybrid_optimizer::starting_params_for_ga()?; let res = optimal_params_search::find_hybrid_optimal_params( - config, + config.clone(), starting_params.clone(), hybrid_problem, Some( path ), ); assert!( res.is_ok() ); - let mut ga_res = Vec::new(); + let mut ga_res = Statistics::new(); + if let Ok( solution ) = res { - ga_res = named_results_list - ( - solution.point.coords.into_iter().map( | val | val.into_inner() ).collect_vec(), - solution.stats.unwrap(), - starting_params.bounds, - ); - ga_res.push( vec![ String::from( "number of nodes" ), number_of_nodes.to_string() ] ); - ga_res.push( vec![ String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ] ); + let cached = solution.stats.clone().unwrap().cached_points; + ga_res = Statistics + { + table_params : named_results_list + ( + solution.point.coords + .into_iter() + .map( | val | val ) + .collect_vec(), + solution.stats.unwrap(), + starting_params.bounds, + ), + list_params : vec! 
+ [ + ( String::from( "max number of iterations" ), format!( "{}", config.max_iterations ) ), + ( String::from( "max no improvement iterations " ), format!( "{}", config.max_no_improvement_steps ) ), + ( String::from( "improvement threshold " ), format!( "{}s", config.improvement_threshold ) ), + ( String::from( "calculated points" ), format!( "{} from {}", cached.1, cached.1 + cached.0 ) ), + ( String::from( "points from cache" ), format!( "{} from {}", cached.0, cached.1 + cached.0 ) ), + ( String::from( "number of nodes" ), format!( "{}", number_of_nodes ) ), + ( String::from( "execution time" ), format!( "{:.3}s", solution.objective ) ), + ] + } } write_results( String::from( "tsp_results" ), String::from( "Traveling Salesman Problem" ), hybrid_res, sa_res, ga_res )?; diff --git a/module/move/optimization_tools/tsp_results.md b/module/move/optimization_tools/tsp_results.md index a9705cc0b9..78b5195456 100644 --- a/module/move/optimization_tools/tsp_results.md +++ b/module/move/optimization_tools/tsp_results.md @@ -2,47 +2,68 @@ ## For hybrid: - - execution time: 0.193s + - max number of iterations: 100 + + - max no improvement iterations : 10 + + - improvement threshold : 0.005s + + - calculated points: 124 from 133 + + - points from cache: 9 from 133 - number of nodes: 4 + - execution time: 0.008s + - parameters: ``` ┌─────────────┬────────┬────────┬─────────┬─────────────┬──────────┬─────────┬────────┐ │ │ start │ min │ max │ sum of diff │ expected │ changes │ final │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ temperature │ 0.1471 │ 0.00 │ 1.00 │ 0.65 │ 0.04 │ 10 │ 0.9999 │ +│ temperature │ 0.7726 │ 0.00 │ 1.00 │ 28.88 │ 0.21 │ 74 │ 0.7349 │ │ decrease │ │ │ │ │ │ │ │ │ coefficient │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 112 │ 10.00 │ 200.00 │ 91.21 │ 5.70 │ 10 │ 103 │ +│ max │ 14 │ 10.00 │ 200.00 │ 6917.13 │ 49.76 │ 74 │ 33 │ │ mutations │ │ │ │ │ │ │ │ │ per │ │ │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ mutation │ 0.83 │ 0.00 │ 1.00 │ 3.91 │ 0.24 │ 10 │ 0.08 │ +│ mutation │ 0.00 │ 0.00 │ 1.00 │ 23.18 │ 0.17 │ 74 │ 0.13 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ crossover │ 0.16 │ 0.00 │ 1.00 │ 2.56 │ 0.16 │ 10 │ 0.68 │ +│ crossover │ 0.63 │ 0.00 │ 1.00 │ 40.81 │ 0.29 │ 74 │ 0.86 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ elitism │ 0.01 │ - │ - │ - │ - │ - │ 0.23 │ +│ elitism │ 0.37 │ - │ - │ - │ - │ - │ 0.01 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 7 │ 1.00 │ 100.00 │ 148.60 │ 9.29 │ 10 │ 41 │ +│ max │ 58 │ 1.00 │ 100.00 │ 3695.03 │ 26.58 │ 74 │ 62 │ │ stale │ │ │ │ │ │ │ │ │ iterations │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ population │ 994 │ 1.00 │ 1000.00 │ 6105.97 │ 381.62 │ 10 │ 4 │ +│ population │ 674 │ 1.00 │ 1000.00 │ 46923.94 │ 337.58 │ 74 │ 1 │ │ size │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ dynasties │ 1315 │ 100.00 │ 2000.00 │ 1647.99 │ 103.00 │ 10 │ 997 │ +│ dynasties │ 824 │ 100.00 │ 2000.00 │ 79548.00 │ 572.29 │ 74 │ 138 │ │ limit │ │ │ │ │ │ │ │ 
└─────────────┴────────┴────────┴─────────┴─────────────┴──────────┴─────────┴────────┘ ``` +#### List: + + + - `max number of iterations` : limit of total iterations of optimization process, termination condition + - `max no improvement iterations` : max amount of steps performed without detected improvement, termination condition + - `improvement threshold` : minimal value detected as improvement in objective function result + - `calculated points` : new calculated points that were not found in cache + - `points from cache` : points calculated during previous optimizations and read from cache + - `number of nodes` : number of nodes in graph representing cities from traveling salesman problem + - `execution time` : duration of shortest found hybrid optimization process using final parameters, measured in seconds +#### Table: - `start` : initial value of parameter in starting point - `min` : lower bound of parameter - `max` : upper bound of parameter @@ -52,21 +73,31 @@ - `final` : calculated value of parameter for which execution time was the lowest ## For SA: - - execution time: 0.012s + - max number of iterations: 100 + + - max no improvement iterations : 10 + + - improvement threshold : 0.005s + + - calculated points: 16 from 26 + + - points from cache: 10 from 26 - number of nodes: 4 + - execution time: 0.007s + - parameters: ``` ┌─────────────┬────────┬────────┬─────────┬─────────────┬──────────┬─────────┬────────┐ │ │ start │ min │ max │ sum of diff │ expected │ changes │ final │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ temperature │ 0.5856 │ 0.00 │ 1.00 │ 0.22 │ 0.01 │ 10 │ 1.0000 │ +│ temperature │ 0.4533 │ 0.00 │ 1.00 │ 0.28 │ 0.01 │ 12 │ 0.9997 │ │ decrease │ │ │ │ │ │ │ │ │ coefficient │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 63 │ 10.00 │ 200.00 │ 375.07 │ 22.06 │ 10 │ 113 │ +│ max │ 54 │ 10.00 │ 200.00 │ 397.21 │ 20.91 │ 12 │ 120 │ │ mutations │ │ │ │ │ │ │ │ │ per │ │ │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ @@ -80,19 +111,30 @@ │ elitism │ -0.00 │ - │ - │ - │ - │ - │ 0.00 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 12 │ 1.00 │ 100.00 │ 180.15 │ 10.60 │ 10 │ 44 │ +│ max │ 91 │ 1.00 │ 100.00 │ 920.69 │ 48.46 │ 12 │ 87 │ │ stale │ │ │ │ │ │ │ │ │ iterations │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ │ population │ 1 │ 1.00 │ 1.00 │ 0.00 │ 0.00 │ 0 │ 1 │ │ size │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ dynasties │ 2185 │ 100.00 │ 5000.00 │ 26327.49 │ 1548.68 │ 10 │ 118 │ +│ dynasties │ 2849 │ 100.00 │ 5000.00 │ 35258.61 │ 1855.72 │ 12 │ 117 │ │ limit │ │ │ │ │ │ │ │ └─────────────┴────────┴────────┴─────────┴─────────────┴──────────┴─────────┴────────┘ ``` +#### List: + + + - `max number of iterations` : limit of total iterations of optimization process, termination condition + - `max no improvement iterations` : max amount of steps performed without detected improvement, termination condition + - `improvement threshold` : minimal value detected as improvement in objective function result + - `calculated points` : new calculated points that were not found in cache + - `points from cache` : points calculated during previous optimizations and read from cache + - `number of nodes` : number of nodes in graph representing cities from traveling salesman problem + - 
`execution time` : duration of shortest found hybrid optimization process using final parameters, measured in seconds +#### Table: - `start` : initial value of parameter in starting point - `min` : lower bound of parameter - `max` : upper bound of parameter @@ -102,47 +144,68 @@ - `final` : calculated value of parameter for which execution time was the lowest ## For GA: - - execution time: 0.072s + - max number of iterations: 100 + + - max no improvement iterations : 10 + + - improvement threshold : 0.005s + + - calculated points: 40 from 67 + + - points from cache: 27 from 67 - number of nodes: 4 + - execution time: 0.033s + - parameters: ``` ┌─────────────┬────────┬────────┬─────────┬─────────────┬──────────┬─────────┬────────┐ │ │ start │ min │ max │ sum of diff │ expected │ changes │ final │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ temperature │ 0.9963 │ 0.00 │ 1.00 │ 0.02 │ 0.00 │ 9 │ 1.0000 │ +│ temperature │ 0.9963 │ 0.00 │ 1.00 │ 0.05 │ 0.00 │ 35 │ 1.0000 │ │ decrease │ │ │ │ │ │ │ │ │ coefficient │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 170 │ 10.00 │ 200.00 │ 1133.26 │ 49.27 │ 9 │ 35 │ +│ max │ 170 │ 10.00 │ 200.00 │ 4452.25 │ 71.81 │ 35 │ 18 │ │ mutations │ │ │ │ │ │ │ │ │ per │ │ │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ mutation │ 0.39 │ 0.10 │ 1.00 │ 2.65 │ 0.12 │ 9 │ 0.13 │ +│ mutation │ 0.39 │ 0.10 │ 1.00 │ 7.29 │ 0.12 │ 35 │ 0.13 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ crossover │ 0.81 │ 0.10 │ 1.00 │ 3.95 │ 0.17 │ 9 │ 0.28 │ +│ crossover │ 0.81 │ 0.10 │ 1.00 │ 10.88 │ 0.18 │ 35 │ 0.29 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ elitism │ -0.20 │ - │ - │ - │ - │ - │ 0.59 │ +│ elitism │ -0.20 │ - │ - │ - │ - │ - │ 0.58 │ │ rate │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ max │ 58 │ 1.00 │ 100.00 │ 559.76 │ 24.34 │ 9 │ 30 │ +│ max │ 58 │ 1.00 │ 100.00 │ 1560.73 │ 25.17 │ 35 │ 28 │ │ stale │ │ │ │ │ │ │ │ │ iterations │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ population │ 572 │ 10.00 │ 2000.00 │ 11617.22 │ 505.10 │ 9 │ 37 │ +│ population │ 572 │ 10.00 │ 2000.00 │ 44693.82 │ 720.87 │ 35 │ 19 │ │ size │ │ │ │ │ │ │ │ ├─────────────┼────────┼────────┼─────────┼─────────────┼──────────┼─────────┼────────┤ -│ dynasties │ 1824 │ 100.00 │ 2000.00 │ 15481.88 │ 673.13 │ 9 │ 115 │ +│ dynasties │ 1824 │ 100.00 │ 2000.00 │ 43273.64 │ 697.96 │ 35 │ 123 │ │ limit │ │ │ │ │ │ │ │ └─────────────┴────────┴────────┴─────────┴─────────────┴──────────┴─────────┴────────┘ ``` +#### List: + + + - `max number of iterations` : limit of total iterations of optimization process, termination condition + - `max no improvement iterations` : max amount of steps performed without detected improvement, termination condition + - `improvement threshold` : minimal value detected as improvement in objective function result + - `calculated points` : new calculated points that were not found in cache + - `points from cache` : points calculated during previous optimizations and read from cache + - `number of nodes` : number of nodes in graph representing cities from traveling salesman problem + - `execution time` : duration of shortest 
found hybrid optimization process using final parameters, measured in seconds +#### Table: - `start` : initial value of parameter in starting point - `min` : lower bound of parameter - `max` : upper bound of parameter @@ -158,11 +221,11 @@ │ │ coefficient │ per │ │ │ │ iterations │ │ │ │ │ │ │ dynasty │ │ │ │ │ │ │ │ ├────────┼─────────────┼───────────┼──────────┼───────────┼─────────┼────────────┼────────────┼───────────┼───────────┤ -│ hybrid │ 0.9999 │ 103 │ 0.08 │ 0.68 │ 0.23 │ 41 │ 4 │ 997 │ 0.193s │ +│ hybrid │ 0.7349 │ 33 │ 0.13 │ 0.86 │ 0.01 │ 62 │ 1 │ 138 │ 0.008s │ ├────────┼─────────────┼───────────┼──────────┼───────────┼─────────┼────────────┼────────────┼───────────┼───────────┤ -│ SA │ 1.0000 │ 113 │ 1.00 │ 0.00 │ 0.00 │ 44 │ 1 │ 118 │ 0.012s │ +│ SA │ 0.9997 │ 120 │ 1.00 │ 0.00 │ 0.00 │ 87 │ 1 │ 117 │ 0.007s │ ├────────┼─────────────┼───────────┼──────────┼───────────┼─────────┼────────────┼────────────┼───────────┼───────────┤ -│ GA │ 1.0000 │ 35 │ 0.13 │ 0.28 │ 0.59 │ 30 │ 37 │ 115 │ 0.072s │ +│ GA │ 1.0000 │ 18 │ 0.13 │ 0.29 │ 0.58 │ 28 │ 19 │ 123 │ 0.033s │ └────────┴─────────────┴───────────┴──────────┴───────────┴─────────┴────────────┴────────────┴───────────┴───────────┘ ``` @@ -176,3 +239,8 @@ - `population size` : number of individuals in population - `dynasties limit` : max number of dynasties of new solutions produced during optimization process, terminates if exceeded - `execution time` : time spent searching for optimal solution, measured in seconds +## To run: + - Sudoku problem: +`cargo test -- --ignored find_opt_params_sudoku` + - Traveling salesman problem: +`cargo test -- --ignored find_opt_params_tsp` diff --git a/module/move/willbe/src/action/publish.rs b/module/move/willbe/src/action/publish.rs index 5ad8731028..4b6b98306e 100644 --- a/module/move/willbe/src/action/publish.rs +++ b/module/move/willbe/src/action/publish.rs @@ -5,6 +5,7 @@ mod private use std::collections::{ HashSet, HashMap }; use core::fmt::Formatter; + use std::{ env, fs }; use wtools::error::for_app::{ Error, anyhow }; use path::AbsolutePath; @@ -108,7 +109,7 @@ mod private /// Publish packages. /// - pub fn publish( patterns : Vec< String >, dry : bool ) -> Result< PublishReport, ( PublishReport, Error ) > + pub fn publish( patterns : Vec< String >, dry : bool, temp : bool ) -> Result< PublishReport, ( PublishReport, Error ) > { let mut report = PublishReport::default(); @@ -158,14 +159,41 @@ mod private let graph = metadata.graph(); let subgraph_wanted = graph::subgraph( &graph, &packages_to_publish ); let tmp = subgraph_wanted.map( | _, n | graph[ *n ].clone(), | _, e | graph[ *e ].clone() ); - let subgraph = graph::remove_not_required_to_publish( &package_map, &tmp, &packages_to_publish ); + + let mut unique_name = format!( "temp_dir_for_publish_command_{}", path::unique_folder_name_generate().err_with( || report.clone() )? ); + + let dir = if temp + { + let mut temp_dir = env::temp_dir().join( unique_name ); + + while temp_dir.exists() + { + unique_name = format!( "temp_dir_for_publish_command_{}", path::unique_folder_name_generate().err_with( || report.clone() )? 
);
+        temp_dir = env::temp_dir().join( unique_name );
+      }
+
+      fs::create_dir( &temp_dir ).err_with( || report.clone() )?;
+      Some( temp_dir )
+    }
+    else
+    {
+      None
+    };
+
+    let subgraph = graph::remove_not_required_to_publish( &package_map, &tmp, &packages_to_publish, dir.clone() );
     let subgraph = subgraph.map( | _, n | n, | _, e | e );
     let queue = graph::toposort( subgraph ).unwrap().into_iter().map( | n | package_map.get( &n ).unwrap() ).collect::< Vec< _ > >();
-
+
     for package in queue
     {
-      let current_report = package::publish_single( package, true, dry )
+      let args = package::PublishSingleOptions::former()
+      .package( package )
+      .force( true )
+      .option_base_temp_dir( &dir )
+      .dry( dry )
+      .form();
+      let current_report = package::publish_single( args )
       .map_err
       (
         | ( current_report, e ) |
@@ -176,9 +204,15 @@ mod private
       )?;
       report.packages.push(( package.crate_dir().absolute_path(), current_report ));
     }
-
+
+    if temp
+    {
+      fs::remove_dir_all( dir.unwrap() ).err_with( || report.clone() )?;
+    }
+
     Ok( report )
   }
+
   trait ErrWith< T, T1, E >
   {
diff --git a/module/move/willbe/src/action/test.rs b/module/move/willbe/src/action/test.rs
index 11ba32eabe..f6c6d2b8d5 100644
--- a/module/move/willbe/src/action/test.rs
+++ b/module/move/willbe/src/action/test.rs
@@ -26,7 +26,6 @@ mod private
     iter::Itertools,
   };

-
   /// Used to store arguments for running tests.
   ///
   /// - The `dir` field represents the directory of the crate under test.
@@ -121,7 +120,7 @@ mod private
       exclude_features,
       temp_path: None,
     };
-
+
     tests_run( &t_args, &packages, dry )
   }
 }
diff --git a/module/move/willbe/src/command/deploy_renew.rs b/module/move/willbe/src/command/deploy_renew.rs
index 854ac56461..df9e1ff9fb 100644
--- a/module/move/willbe/src/command/deploy_renew.rs
+++ b/module/move/willbe/src/command/deploy_renew.rs
@@ -19,6 +19,7 @@ mod private
     template.set_values( values );
     action::deploy_renew( &std::env::current_dir()?, template ).context( "Fail to create deploy template" )
   }
+
 }

 crate::mod_interface!
diff --git a/module/move/willbe/src/command/mod.rs b/module/move/willbe/src/command/mod.rs
index 33bf157049..3794abbed9 100644
--- a/module/move/willbe/src/command/mod.rs
+++ b/module/move/willbe/src/command/mod.rs
@@ -14,7 +14,7 @@ pub( crate ) mod private
     .command( "publish" )
       .hint( "publish the specified package to `crates.io`" )
-      .long_hint("used to publish the specified local package, which is located in the provided directory path, to the `crates.io` crate registry.")
+      .long_hint( "used to publish the specified local package, which is located in the provided directory path, to the `crates.io` crate registry." )
       .subject()
         .hint( "Provide path(s) to the package(s) that you want to publish.\n\t Each path should point to a directory that contains a `Cargo.toml` file.\n\t Paths should be separated by a comma." )
         .kind( Type::List( Type::String.into(), ',' ) )
@@ -24,7 +24,12 @@ pub( crate ) mod private
         .hint( "Enables 'dry run'. Does not publish, only simulates. Default is `true`." )
         .kind( Type::Bool )
         .optional( true )
-        .end()
+        .end()
+      .property( "temp" )
+        .hint( "If flag is `true`, packaging and publishing will be run in temporary directories. Default is `true`." )
+        .kind( Type::Bool )
+        .optional( true )
+        .end()
       // .property( "verbosity" ).hint( "Setup level of verbosity." ).kind( Type::String ).optional( true ).alias( "v" ).end()
       .routine( command::publish )
       .end()
diff --git a/module/move/willbe/src/command/publish.rs b/module/move/willbe/src/command/publish.rs
index e426946b29..d89cf05a6a 100644
--- a/module/move/willbe/src/command/publish.rs
+++ b/module/move/willbe/src/command/publish.rs
@@ -19,7 +19,11 @@ mod private
     .get_owned( "dry" )
     .unwrap_or( true );

-    match action::publish( patterns, dry )
+    let temp : bool = properties
+    .get_owned( "temp" )
+    .unwrap_or( true );
+
+    match action::publish( patterns, dry, temp )
     {
       Ok( report ) =>
       {
diff --git a/module/move/willbe/src/entity/package.rs b/module/move/willbe/src/entity/package.rs
index bf69e349ba..4a8688111c 100644
--- a/module/move/willbe/src/entity/package.rs
+++ b/module/move/willbe/src/entity/package.rs
@@ -9,12 +9,12 @@ mod private
   };
   use std::fmt::Formatter;
   use std::hash::Hash;
+  use std::path::PathBuf;
   use cargo_metadata::{ Dependency, DependencyKind, Package as PackageMetadata };
   use toml_edit::value;
   use tool::process;
   use manifest::{ Manifest, ManifestError };
-  // use { cargo, git, version, path, wtools }; // qqq : why is it required?
   use crates_tools::CrateArchive;
   use workspace::Workspace;
@@ -33,6 +33,7 @@ mod private
     }
   };
   use action::readme_health_table_renew::Stability;
+  use former::Former;

   ///
   #[ derive( Debug ) ]
@@ -380,6 +381,25 @@ mod private
     }
   }

+  /// Options for publishing a single package.
+  #[ derive( Debug, Former ) ]
+  pub struct PublishSingleOptions< 'a >
+  {
+    package : &'a Package,
+    force : bool,
+    base_temp_dir : &'a Option< PathBuf >,
+    dry : bool,
+  }
+
+  impl < 'a >PublishSingleOptionsFormer< 'a >
+  {
+    pub fn option_base_temp_dir( mut self, value : impl Into< &'a Option< PathBuf > > ) -> Self
+    {
+      self.container.base_temp_dir = Some( value.into() );
+      self
+    }
+  }
+
   /// Publishes a single package without publishing its dependencies.
   ///
   /// This function is designed to publish a single package. It does not publish any of the package's dependencies.
@@ -393,42 +413,56 @@ mod private
   ///
   /// Returns :
   /// Returns a result containing a report indicating the result of the operation.
-  pub fn publish_single( package : &Package, force : bool, dry : bool ) -> Result< PublishReport, ( PublishReport, wError ) >
+  pub fn publish_single< 'a >( args : PublishSingleOptions< 'a > ) -> Result< PublishReport, ( PublishReport, wError ) >
   {
     let mut report = PublishReport::default();
-    if package.local_is().map_err( | err | ( report.clone(), format_err!( err ) ) )?
+    if args.package.local_is().map_err( | err | ( report.clone(), format_err!( err ) ) )?
     {
       return Ok( report );
     }

-    let package_dir = &package.crate_dir();
-
-    let output = cargo::pack( &package_dir, dry ).context( "Take information about package" ).map_err( | e | ( report.clone(), e ) )?;
+    let package_dir = &args.package.crate_dir();
+    let temp_dir = args.base_temp_dir.as_ref().map
+    (
+      | p |
+      {
+        let path = p.join( package_dir.as_ref().file_name().unwrap() );
+        std::fs::create_dir_all( &path ).unwrap();
+        path
+      }
+    );
+
+    let pack_args = cargo::PackOptions::former()
+    .path( package_dir.absolute_path().as_ref().to_path_buf() )
+    .option_temp_path( temp_dir.clone() )
+    .dry( args.dry )
+    .form();
+    let output = cargo::pack( pack_args ).context( "Take information about package" ).map_err( | e | ( report.clone(), e ) )?;
     if output.err.contains( "not yet committed")
     {
       return Err(( report, format_err!( "Some changes weren't committed. Please commit or stash the changes and try again." ) ));
     }
     report.get_info = Some( output );

-    if force || publish_need( &package ).map_err( | err | ( report.clone(), format_err!( err ) ) )?
+    if args.force || publish_need( &args.package, temp_dir.clone() ).map_err( | err | ( report.clone(), format_err!( err ) ) )?
     {
       report.publish_required = true;
       let mut files_changed_for_bump = vec![];
-      let mut manifest = package.manifest().map_err( | err | ( report.clone(), format_err!( err ) ) )?;
+      let mut manifest = args.package.manifest().map_err( | err | ( report.clone(), format_err!( err ) ) )?;
       // bump a version in the package manifest
-      let bump_report = version::bump( &mut manifest, dry ).context( "Try to bump package version" ).map_err( | e | ( report.clone(), e ) )?;
-      files_changed_for_bump.push( package.manifest_path() );
+      let bump_report = version::bump( &mut manifest, args.dry ).context( "Try to bump package version" ).map_err( | e | ( report.clone(), e ) )?;
+      files_changed_for_bump.push( args.package.manifest_path() );
       let new_version = bump_report.new_version.clone().unwrap();

-      let package_name = package.name().map_err( | err | ( report.clone(), format_err!( err ) ) )?;
+      let package_name = args.package.name().map_err( | err | ( report.clone(), format_err!( err ) ) )?;

       // bump the package version in dependents (so far, only workspace)
-      let workspace_manifest_dir : AbsolutePath = Workspace::with_crate_dir( package.crate_dir() ).map_err( | err | ( report.clone(), err ) )?.workspace_root().map_err( | err | ( report.clone(), format_err!( err ) ) )?.try_into().unwrap();
+      let workspace_manifest_dir : AbsolutePath = Workspace::with_crate_dir( args.package.crate_dir() ).map_err( | err | ( report.clone(), err ) )?.workspace_root().map_err( | err | ( report.clone(), format_err!( err ) ) )?.try_into().unwrap();
       let workspace_manifest_path = workspace_manifest_dir.join( "Cargo.toml" );

       // qqq : should be refactored
-      if !dry
+      if !args.dry
       {
         let mut workspace_manifest = manifest::open( workspace_manifest_path.clone() ).map_err( | e | ( report.clone(), format_err!( e ) ) )?;
         let workspace_manifest_data = workspace_manifest.manifest_data.as_mut().ok_or_else( || ( report.clone(), format_err!( PackageError::Manifest( ManifestError::EmptyManifestData ) ) ) )?;
@@ -464,14 +498,22 @@ mod private
       report.bump = Some( ExtendedBumpReport { base : bump_report, changed_files : files_changed_for_bump.clone() } );

       let commit_message = format!( "{package_name}-v{new_version}" );
-      let res = git::add( workspace_manifest_dir, objects_to_add, dry ).map_err( | e | ( report.clone(), e ) )?;
+      let res = git::add( workspace_manifest_dir, objects_to_add, args.dry ).map_err( | e | ( report.clone(), e ) )?;
       report.add = Some( res );
-      let res = git::commit( package_dir, commit_message, dry ).map_err( | e | ( report.clone(), e ) )?;
+      let res = git::commit( package_dir, commit_message, args.dry ).map_err( | e | ( report.clone(), e ) )?;
       report.commit = Some( res );
-      let res = git::push( package_dir, dry ).map_err( | e | ( report.clone(), e ) )?;
+      let res = git::push( package_dir, args.dry ).map_err( | e | ( report.clone(), e ) )?;
       report.push = Some( res );
-
-      let res = cargo::publish( package_dir, dry ).map_err( | e | ( report.clone(), e ) )?;
+
+      let res = cargo::publish
+      (
+        cargo::PublishOptions::former()
+        .path( package_dir.absolute_path().as_ref().to_path_buf() )
+        .option_temp_path( temp_dir )
+        .dry( args.dry )
+        .form()
+      )
+      .map_err( | e | ( report.clone(), e ) )?;
       report.publish = Some( res );
     }
@@ -655,7 +697,7 @@ mod private
   ///
   /// Panics if the manifest is not loaded or the local package is not packed.
-  pub fn publish_need( package : &Package ) -> Result< bool, PackageError >
+  pub fn publish_need( package : &Package, path : Option< PathBuf > ) -> Result< bool, PackageError >
   {
     // These files are ignored because they can be safely changed without affecting functionality
     //
@@ -665,7 +707,9 @@ mod private
     let name = package.name()?;
     let version = package.version()?;
-    let local_package_path = packed_crate::local_path( &name, &version, package.crate_dir() ).map_err( | _ | PackageError::LocalPath )?;
+    let local_package_path = path
+    .map( | p | p.join( format!( "package/{0}-{1}.crate", name, version ) ) )
+    .unwrap_or( packed_crate::local_path( &name, &version, package.crate_dir() ).map_err( | _ | PackageError::LocalPath )? );

     // qqq : for Bohdan : bad, properly handle errors
     // aaa : return result instead of panic
@@ -710,9 +754,12 @@ crate::mod_interface!

   protected use PublishReport;
   protected use publish_single;
+  protected use PublishSingleOptions;
   protected use Package;
   protected use PackageError;
+
+  protected use publish_need;
+
   protected use CrateId;
   protected use DependenciesSort;
   protected use DependenciesOptions;
diff --git a/module/move/willbe/src/entity/test.rs b/module/move/willbe/src/entity/test.rs
index 1327037bd0..0a5c4078d2 100644
--- a/module/move/willbe/src/entity/test.rs
+++ b/module/move/willbe/src/entity/test.rs
@@ -66,7 +66,7 @@ mod private
   ///
   /// Returns a `Result` containing a `CmdReport` if the command is executed successfully,
   /// or an error if the command fails to execute.
-  pub fn _run< P >( path : P, options : SingleTestOptions, dry : bool ) -> Result< CmdReport >
+  pub fn _run< P >( path : P, options : SingleTestOptions, dry : bool ) -> Result< CmdReport, ( CmdReport, Error ) >
   where
     P : AsRef< Path >
   {
@@ -87,7 +87,7 @@ mod private
     }
     else
     {
-      process::process_run_with_param_and_joined_steams(program, options, path )
+      process::process_run_with_param_and_joined_steams( program, options, path )
     }
   }
@@ -136,7 +136,7 @@ mod private
     /// for which the tests were run, and the values are nested `BTreeMap` where the keys are
     /// feature names and the values are `CmdReport` structs representing the test results for
     /// the specific feature and channel.
-    pub tests : BTreeMap< channel::Channel, BTreeMap< String, CmdReport > >,
+    pub tests : BTreeMap< channel::Channel, BTreeMap< String, Result< CmdReport, CmdReport > > >,
   }

   impl std::fmt::Display for TestReport
@@ -160,19 +160,22 @@ mod private
       {
         for ( feature, result ) in features
         {
+          let feature = if feature.is_empty() { "no-features" } else { feature };
           // if tests failed or if build failed
-          if result.out.contains( "failures" ) || result.out.contains( "error" )
+          match result
           {
-            let mut out = result.out.replace( "\n", "\n      " );
-            out.push_str( "\n" );
-            failed += 1;
-            write!( f, "  [ {} | {} ]: ❌  failed\n \n{out}", channel, feature )?;
-          }
-          else
-          {
-            let feature = if feature.is_empty() { "no-features" } else { feature };
-            success += 1;
-            writeln!( f, "  [ {} | {} ]: ✅  successful", channel, feature )?;
+            Ok( _ ) =>
+            {
+              success += 1;
+              writeln!( f, "  [ {} | {} ]: ✅  successful", channel, feature )?;
+            }
+            Err( result ) =>
+            {
+              let mut out = result.out.replace( "\n", "\n      " );
+              out.push_str( "\n" );
+              failed += 1;
+              write!( f, "  [ {} | {} ]: ❌  failed\n \n{out}", channel, feature )?;
+            }
           }
         }
       }
@@ -299,8 +302,8 @@ mod private
           }
           // aaa : for Petro : bad. tooooo long line. cap on 100 ch
          // aaa : strip
-          let cmd_rep = _run( dir, args_t.form(), dry ).unwrap_or_else( | rep | rep.downcast().unwrap() );
-          r.lock().unwrap().tests.entry( channel ).or_default().insert( feature.iter().join( "," ), cmd_rep );
+          let cmd_rep = _run( dir, args_t.form(), dry );
+          r.lock().unwrap().tests.entry( channel ).or_default().insert( feature.iter().join( "," ), cmd_rep.map_err( | e | e.0 ) );
         }
       );
     }
@@ -310,7 +313,7 @@ mod private

     // unpack. all tasks must be completed until now
     let report = Mutex::into_inner( Arc::into_inner( report ).unwrap() ).unwrap();
-    let at_least_one_failed = report.tests.iter().flat_map( | ( _, v ) | v.iter().map( | ( _, v ) | v ) ).any( | r | r.out.contains( "failures" ) || r.out.contains( "error" ) );
+    let at_least_one_failed = report.tests.iter().flat_map( | ( _, v ) | v.iter().map( | ( _, v ) | v ) ).any( | r | r.is_err() );
     if at_least_one_failed { Err( ( report, format_err!( "Some tests failed" ) ) ) } else { Ok( report ) }
   }
diff --git a/module/move/willbe/src/tool/cargo.rs b/module/move/willbe/src/tool/cargo.rs
index 7d13daa39a..1f211c28d7 100644
--- a/module/move/willbe/src/tool/cargo.rs
+++ b/module/move/willbe/src/tool/cargo.rs
@@ -1,14 +1,41 @@
 mod private
 {
   use crate::*;
-
-  use std::
-  {
-    path::Path,
-  };
+
+  use std::path::PathBuf;
+  use former::Former;
   use process::CmdReport;
   use wtools::error::Result;

+  /// Represents pack options
+  #[ derive( Debug, Former ) ]
+  pub struct PackOptions
+  {
+    path : PathBuf,
+    temp_path : Option< PathBuf >,
+    dry : bool,
+  }
+
+  impl PackOptionsFormer
+  {
+    pub fn option_temp_path( mut self, value : impl Into< Option< PathBuf > > ) -> Self
+    {
+      self.container.temp_path = value.into();
+      self
+    }
+  }
+
+  impl PackOptions
+  {
+    fn to_pack_args( &self ) -> Vec< String >
+    {
+      [ "package".to_string() ]
+      .into_iter()
+      .chain( self.temp_path.clone().map( | p | vec![ "--target-dir".to_string(), p.to_string_lossy().into() ] ).into_iter().flatten() )
+      .collect()
+    }
+  }
+
   ///
   /// Assemble the local package into a distributable tarball.
   ///
@@ -16,20 +43,18 @@ mod private
   /// - `path` - path to the package directory
   /// - `dry` - a flag that indicates whether to execute the command or not
   ///
-  pub fn pack< P >( path : P, dry : bool ) -> Result< CmdReport >
-  where
-    P : AsRef< Path >
+  pub fn pack( args : PackOptions ) -> Result< CmdReport >
   {
-    let ( program, options ) = ( "cargo", [ "package" ] );
+    let ( program, options ) = ( "cargo", args.to_pack_args() );

-    if dry
+    if args.dry
    {
      Ok
      (
        CmdReport
        {
          command : format!( "{program} {}", options.join( " " ) ),
-          path : path.as_ref().to_path_buf(),
+          path : args.path.to_path_buf(),
          out : String::new(),
          err : String::new(),
        }
@@ -37,36 +62,61 @@ mod private
     }
     else
     {
-      process::run(program, options, path )
+      process::run(program, options, args.path )
+    }
+  }
+
+
+  /// Represents the arguments for publishing.
+  #[ derive( Debug, Former, Clone, Default ) ]
+  pub struct PublishOptions
+  {
+    path : PathBuf,
+    temp_path : Option< PathBuf >,
+    dry : bool,
+  }
+
+  impl PublishOptionsFormer
+  {
+    pub fn option_temp_path( mut self, value : impl Into< Option< PathBuf > > ) -> Self
+    {
+      self.container.temp_path = value.into();
+      self
+    }
+  }
+
+  impl PublishOptions
+  {
+    fn as_publish_args( &self ) -> Vec< String >
+    {
+      let target_dir = self.temp_path.clone().map( | p | vec![ "--target-dir".to_string(), p.to_string_lossy().into() ] );
+      [ "publish".to_string() ].into_iter().chain( target_dir.into_iter().flatten() ).collect::< Vec< String > >()
    }
  }

   /// Upload a package to the registry
-  pub fn publish< P >( path : P, dry : bool ) -> Result< CmdReport >
-  where
-    P : AsRef< Path >
+  pub fn publish( args : PublishOptions ) -> Result< CmdReport >
   {
-    let ( program, options ) = ( "cargo", [ "publish" ] );
+    let ( program, arguments) = ( "cargo", args.as_publish_args() );

-    if dry
+    if args.dry
     {
       Ok
-      (
-        CmdReport
-        {
-          command : format!( "{program} {}", options.join( " " ) ),
-          path : path.as_ref().to_path_buf(),
-          out : String::new(),
-          err : String::new(),
-        }
-      )
+      (
+        CmdReport
+        {
+          command : format!( "{program} {}", arguments.join( " " ) ),
+          path : args.path.to_path_buf(),
+          out : String::new(),
+          err : String::new(),
+        }
+      )
     }
     else
     {
-      process::run(program, options, path )
+      process::run(program, arguments, args.path )
     }
   }
-
 }

 //
@@ -76,4 +126,7 @@ crate::mod_interface!
   protected use pack;
   protected use publish;
+  protected use PublishOptions;
+  protected use PackOptions;
+
 }
diff --git a/module/move/willbe/src/tool/graph.rs b/module/move/willbe/src/tool/graph.rs
index ae63074ab5..845b826ad4 100644
--- a/module/move/willbe/src/tool/graph.rs
+++ b/module/move/willbe/src/tool/graph.rs
@@ -10,6 +10,7 @@ pub( crate ) mod private
     hash::Hash,
     collections::{ HashMap, HashSet }
   };
+  use std::path::PathBuf;
   use petgraph::
   {
     graph::Graph,
@@ -168,7 +169,14 @@ pub( crate ) mod private
   /// # Returns
   ///
   /// A new `Graph` with the nodes that are not required to be published removed.
-  pub fn remove_not_required_to_publish( package_map : &HashMap< String, Package >, graph : &Graph< String, String >, roots : &[ String ] ) -> Graph< String, String >
+  pub fn remove_not_required_to_publish
+  (
+    package_map : &HashMap< String, Package >,
+    graph : &Graph< String, String >,
+    roots : &[ String ],
+    temp_path : Option< PathBuf >,
+  )
+  -> Graph< String, String >
   {
     let mut nodes = HashSet::new();
     let mut cleared_graph = Graph::new();
@@ -188,8 +196,15 @@ pub( crate ) mod private
         }
       }
       let package = package_map.get( &graph[ n ] ).unwrap();
-      _ = cargo::pack( package.crate_dir(), false ).unwrap();
-      if publish_need( package ).unwrap()
+      _ = cargo::pack
+      (
+        cargo::PackOptions::former()
+        .path( package.crate_dir().absolute_path().as_ref().to_path_buf() )
+        .option_temp_path( temp_path.clone() )
+        .dry( false )
+        .form()
+      ).unwrap();
+      if publish_need( package, temp_path.clone() ).unwrap()
       {
         nodes.insert( n );
       }
diff --git a/module/move/willbe/src/tool/process.rs b/module/move/willbe/src/tool/process.rs
index 2895d41bdf..062ba29e32 100644
--- a/module/move/willbe/src/tool/process.rs
+++ b/module/move/willbe/src/tool/process.rs
@@ -10,6 +10,8 @@ pub( crate ) mod private
     process::{ Command, Stdio },
   };
   use duct::cmd;
+  use error_tools::err;
+  use error_tools::for_app::Error;
   use wtools::
   {
     iter::Itertools,
@@ -18,7 +20,7 @@ pub( crate ) mod private

   /// Process command output.
-  #[ derive( Debug, Clone ) ]
+  #[ derive( Debug, Clone, Default ) ]
   pub struct CmdReport
   {
     /// Command that was executed.
@@ -169,7 +171,7 @@ pub( crate ) mod private
     args : Args,
     path : P,
   )
-  -> Result< CmdReport >
+  -> Result< CmdReport, ( CmdReport, Error ) >
   where
     AP : AsRef< Path >,
     Args : IntoIterator< Item = Arg >,
@@ -183,12 +185,13 @@ pub( crate ) mod private
     .stderr_to_stdout()
     .stdout_capture()
     .unchecked()
-    .run()?;
+    .run()
+    .map_err( | e | ( Default::default(), e.into() ) )?;

     let report = CmdReport
     {
       command : format!( "{} {}", application.display(), args.iter().map( | a | a.to_string_lossy() ).join( " " ) ),
       path : path.to_path_buf(),
-      out : String::from_utf8( output.stdout ).context( "Found invalid UTF-8" )?,
+      out : String::from_utf8( output.stdout ).context( "Found invalid UTF-8" ).map_err( | e | ( Default::default(), e.into() ) )?,
       err : Default::default(),
     };
@@ -198,7 +201,7 @@ pub( crate ) mod private
     }
     else
     {
-      Err( format_err!( report ) )
+      Err( ( report, err!( "Process finished with error code : {}", output.status ) ) )
     }
   }
diff --git a/module/move/willbe/tests/inc/action/tests_run.rs b/module/move/willbe/tests/inc/action/tests_run.rs
index 0705e001c4..16732fab12 100644
--- a/module/move/willbe/tests/inc/action/tests_run.rs
+++ b/module/move/willbe/tests/inc/action/tests_run.rs
@@ -35,8 +35,8 @@ fn fail_test()
   let stable = rep.failure_reports[0].tests.get( &channel::Channel::Stable ).unwrap();
   let no_features = stable.get( "" ).unwrap();
-
-  assert!( no_features.out.contains( "failures" ) );
+  assert!( no_features.is_err() );
+  assert!( no_features.clone().unwrap_err().out.contains( "failures" ) );
 }

 #[ test ]
@@ -69,7 +69,7 @@ fn fail_build()
   let stable = rep.failure_reports[ 0 ].tests.get( &channel::Channel::Stable ).unwrap();
   let no_features = stable.get( "" ).unwrap();

-  assert!( no_features.out.contains( "error" ) && no_features.out.contains( "achtung" ) );
+  assert!( no_features.clone().unwrap_err().out.contains( "error" ) && no_features.clone().unwrap_err().out.contains( "achtung" ) );
 }

 #[ test ]
diff --git a/module/move/willbe/tests/inc/publish_need.rs b/module/move/willbe/tests/inc/publish_need.rs
index fdc25934de..5dbb2af275 100644
--- a/module/move/willbe/tests/inc/publish_need.rs
+++ b/module/move/willbe/tests/inc/publish_need.rs
@@ -27,7 +27,7 @@ fn package_path< P : AsRef< Path > >( path : P ) -> PathBuf
 fn package< P : AsRef< Path > >( path : P ) -> Package
 {
   let path = path.as_ref();
-  _ = cargo::pack( path, false ).expect( "Failed to package a package" );
+  _ = cargo::pack( cargo::PackOptions::former().path( path.to_path_buf() ).dry( false ).form() ).expect( "Failed to package a package" );
   let absolute = AbsolutePath::try_from( path ).unwrap();

   Package::try_from( absolute ).unwrap()
@@ -42,12 +42,12 @@ fn no_changes()
   // aaa : use `package_path` function
   let package_path = package_path( "c" );

-  _ = cargo::pack( &package_path, false ).expect( "Failed to package a package" );
+  _ = cargo::pack( cargo::PackOptions::former().path( package_path.clone() ).dry( false ).form() ).expect( "Failed to package a package" );
   let absolute = AbsolutePath::try_from( package_path ).unwrap();
   let package = Package::try_from( absolute ).unwrap();

   // Act
-  let publish_needed = publish_need( &package ).unwrap();
+  let publish_needed = publish_need( &package, None ).unwrap();

   // Assert
   assert!( !publish_needed );
@@ -67,13 +67,13 @@ fn with_changes()
   let mut manifest = manifest::open( absolute ).unwrap();
   version::bump( &mut manifest, false ).unwrap();

-  _ = cargo::pack( &temp, false ).expect( "Failed to package a package" );
+  _ = cargo::pack( cargo::PackOptions::former().path( temp.path().to_path_buf() ).dry( false ).form() ).expect( "Failed to package a package" );
   let absolute = AbsolutePath::try_from( temp.as_ref() ).unwrap();
   let package = Package::try_from( absolute ).unwrap();

   // Act
-  let publish_needed = publish_need( &package ).unwrap();
+  let publish_needed = publish_need( &package, None ).unwrap();

   // Assert
   assert!( publish_needed );
@@ -85,7 +85,7 @@ fn cascade_with_changes()
 {
   let abc = [ "a", "b", "c" ].into_iter().map( package_path ).map( package ).collect::< Vec< _ > >();
   let [ a, b, c ] = abc.as_slice() else { unreachable!() };
-  if ![ c, b, a ].into_iter().inspect( | x | { dbg!( x.name().unwrap() ); } ).map( publish_need ).inspect( | x | { dbg!(x); } ).all( | p | !p.expect( "There was an error verifying whether the package needs publishing or not" ) )
+  if ![ c, b, a ].into_iter().inspect( | x | { dbg!( x.name().unwrap() ); } ).map( | a | publish_need( a, None ) ).inspect( | x | { dbg!(x); } ).all( | p | !p.expect( "There was an error verifying whether the package needs publishing or not" ) )
   {
     panic!( "The packages must be up to date" );
   }
@@ -128,7 +128,7 @@ default-features = true
   let b_temp = package( b_temp_path );
   let a_temp = package( a_temp_path );

-  assert!( publish_need( &c_temp ).unwrap() );
-  assert!( publish_need( &b_temp ).unwrap() );
-  assert!( publish_need( &a_temp ).unwrap() );
+  assert!( publish_need( &c_temp, None ).unwrap() );
+  assert!( publish_need( &b_temp, None ).unwrap() );
+  assert!( publish_need( &a_temp, None ).unwrap() );
 }
diff --git a/module/template/layer/layer.rs b/module/template/layer/layer.rs
new file mode 100644
index 0000000000..fdb2480069
--- /dev/null
+++ b/module/template/layer/layer.rs
@@ -0,0 +1,57 @@
+/// Internal namespace.
+pub( crate ) mod private
+{
+  use super::super::*;
+
+  // ... all code should go here ...
+
+}
+
+#[ doc( inline ) ]
+#[ allow( unused_imports ) ]
+pub use protected::*;
+
+/// Protected namespace of the module.
+pub mod protected
+{
+  #[ doc( inline ) ]
+  #[ allow( unused_imports ) ]
+  pub use super::orphan::*;
+  #[ doc( inline ) ]
+  #[ allow( unused_imports ) ]
+  pub use super::private::
+  {
+    // ... list all items of private which should be visible outside
+  };
+}
+
+/// Orphan namespace of the module.
+pub mod orphan
+{
+  #[ doc( inline ) ]
+  #[ allow( unused_imports ) ]
+  pub use super::exposed::*;
+  #[ doc( inline ) ]
+  #[ allow( unused_imports ) ]
+  pub use super::private::
+  {
+  };
+}
+
+/// Exposed namespace of the module.
+pub mod exposed
+{
+  #[ doc( inline ) ]
+  #[ allow( unused_imports ) ]
+  pub use super::prelude::*;
+  #[ doc( inline ) ]
+  #[ allow( unused_imports ) ]
+  pub use super::private::
+  {
+  };
+}
+
+/// Prelude to use essentials: `use my_module::prelude::*`.
+pub mod prelude
+{
+}
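For orientation, here is a minimal sketch of how the former-built option structs introduced above fit together. It is not part of the patch: it assumes it lives inside willbe's `mod private` (so `use crate::*;` brings `cargo`, `package` and `publish_need` into scope), and the helper name `dry_publish_one` is invented for illustration; the individual calls mirror the call sites in `action::publish`, `entity::package` and `tool::graph` shown in the diff.

```rust
use crate::*;
use std::path::PathBuf;

/// Hypothetical helper : dry-runs packaging and publishing of one package,
/// optionally redirecting cargo's target dir into `base_temp_dir`.
fn dry_publish_one( pkg : &package::Package, base_temp_dir : Option< PathBuf > )
{
  // `cargo::pack` now takes a former-built `PackOptions`; `option_temp_path`
  // forwards the optional `--target-dir`, as `publish_single` does above.
  let pack_args = cargo::PackOptions::former()
  .path( pkg.crate_dir().absolute_path().as_ref().to_path_buf() )
  .option_temp_path( base_temp_dir.clone() )
  .dry( true )
  .form();
  let _pack_report = cargo::pack( pack_args );

  // `publish_need` additionally receives the directory that holds the packed `.crate`.
  let _need = publish_need( pkg, base_temp_dir.clone() );

  // `publish_single` is now driven by `PublishSingleOptions` instead of positional flags.
  let args = package::PublishSingleOptions::former()
  .package( pkg )
  .force( false )
  .option_base_temp_dir( &base_temp_dir )
  .dry( true )
  .form();
  let _publish_report = package::publish_single( args );
}
```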
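The way `PackOptions::to_pack_args` and `PublishOptions::as_publish_args` assemble the cargo command line can also be shown in isolation. The sketch below is standalone and only restates the pattern from the diff (base subcommand plus an optional `--target-dir <path>` pair); the function name `args_for` is made up for the example.

```rust
use std::path::PathBuf;

// Builds the argument vector the same way the option structs do :
// the subcommand, then `--target-dir <path>` only when a temp path is set.
fn args_for( subcommand : &str, temp_path : Option< PathBuf > ) -> Vec< String >
{
  [ subcommand.to_string() ]
  .into_iter()
  .chain( temp_path.map( | p | vec![ "--target-dir".to_string(), p.to_string_lossy().into() ] ).into_iter().flatten() )
  .collect()
}

fn main()
{
  assert_eq!( args_for( "package", None ), vec![ "package".to_string() ] );
  assert_eq!
  (
    args_for( "publish", Some( PathBuf::from( "/tmp/willbe_publish" ) ) ),
    vec![ "publish".to_string(), "--target-dir".to_string(), "/tmp/willbe_publish".to_string() ]
  );
  println!( "ok" );
}
```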
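The new `module/template/layer/layer.rs` encodes the protected/orphan/exposed/prelude layering used across wTools. Below is a minimal, self-contained sketch of the same pattern; the module name `my_layer` and the item `hello` are invented for illustration, and `self::`-prefixed paths are used so the snippet compiles as a standalone binary. Once an item defined in `private` is listed in `protected`, it becomes reachable both at the layer root and through the namespace modules.

```rust
mod my_layer
{
  /// Internal namespace : the actual implementation lives here.
  pub( crate ) mod private
  {
    pub fn hello() -> &'static str { "hello from the layer" }
  }

  #[ doc( inline ) ]
  #[ allow( unused_imports ) ]
  pub use self::protected::*;

  /// Protected namespace of the module.
  pub mod protected
  {
    #[ doc( inline ) ]
    #[ allow( unused_imports ) ]
    pub use super::orphan::*;
    #[ doc( inline ) ]
    #[ allow( unused_imports ) ]
    pub use super::private::
    {
      hello, // list items of private which should be visible outside
    };
  }

  /// Orphan namespace of the module.
  pub mod orphan
  {
    #[ doc( inline ) ]
    #[ allow( unused_imports ) ]
    pub use super::exposed::*;
  }

  /// Exposed namespace of the module.
  pub mod exposed
  {
    #[ doc( inline ) ]
    #[ allow( unused_imports ) ]
    pub use super::prelude::*;
  }

  /// Prelude to use essentials.
  pub mod prelude
  {
  }
}

fn main()
{
  // reachable through the re-export at the layer root and through `protected` explicitly
  assert_eq!( my_layer::hello(), "hello from the layer" );
  assert_eq!( my_layer::protected::hello(), "hello from the layer" );
  println!( "{}", my_layer::hello() );
}
```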