From 64dad168f0ac535c8e979d06159baba53d89a549 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 19:33:35 +0100 Subject: [PATCH 01/15] feat: checkpoint --- Cargo.lock | 22 +++ libs/@local/hashql/macros/Cargo.toml | 16 ++ libs/@local/hashql/macros/src/grammar.rs | 49 +++++ libs/@local/hashql/macros/src/id.rs | 223 +++++++++++++++++++++++ libs/@local/hashql/macros/src/lib.rs | 33 ++++ 5 files changed, 343 insertions(+) create mode 100644 libs/@local/hashql/macros/Cargo.toml create mode 100644 libs/@local/hashql/macros/src/grammar.rs create mode 100644 libs/@local/hashql/macros/src/id.rs create mode 100644 libs/@local/hashql/macros/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 27c456d179e..2a2fdd387b6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3894,6 +3894,13 @@ dependencies = [ "tracing", ] +[[package]] +name = "hashql-macros" +version = "0.0.0" +dependencies = [ + "unsynn", +] + [[package]] name = "hashql-mir" version = "0.0.0" @@ -5512,6 +5519,12 @@ dependencies = [ "unsigned-varint 0.7.2", ] +[[package]] +name = "mutants" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc0287524726960e07b119cebd01678f852f147742ae0d925e6a520dca956126" + [[package]] name = "napi" version = "2.16.17" @@ -9890,6 +9903,15 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" +[[package]] +name = "unsynn" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "501a7adf1a4bd9951501e5c66621e972ef8874d787628b7f90e64f936ef7ec0a" +dependencies = [ + "mutants", +] + [[package]] name = "untrusted" version = "0.9.0" diff --git a/libs/@local/hashql/macros/Cargo.toml b/libs/@local/hashql/macros/Cargo.toml new file mode 100644 index 00000000000..a357f13fb85 --- /dev/null +++ b/libs/@local/hashql/macros/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "hashql-macros" 
+version.workspace = true +edition.workspace = true +license.workspace = true +publish.workspace = true +authors.workspace = true + +[lib] +proc-macro = true + +[lints] +workspace = true + +[dependencies] +unsynn = { version = "0.3.0", default-features = false } diff --git a/libs/@local/hashql/macros/src/grammar.rs b/libs/@local/hashql/macros/src/grammar.rs new file mode 100644 index 00000000000..32907743698 --- /dev/null +++ b/libs/@local/hashql/macros/src/grammar.rs @@ -0,0 +1,49 @@ +use unsynn::*; + +keyword! { + /// The "pub" keyword. + pub KPub = ["pub"]; + /// The "struct" keyword. + pub KStruct = ["struct"]; + /// The "enum" keyword. + pub KEnum = ["enum"]; + /// The "in" keyword. + pub KIn = ["in"]; + pub KId = ["id"]; + pub KDerive = ["derive"]; + pub KDisplay = ["display"]; + pub KStep = ["Step"]; + pub KIs = ["is"]; + pub KCrate = ["crate"]; +} + +pub type VerbatimUntil = Many, AngleTokenTree>>; +pub type ModPath = Cons, PathSepDelimited>; +pub type Visibility = Cons, ModPath>>>>; + +pub struct Bridge(pub T); +impl ToTokens for Bridge +where + T: proc_macro::ToTokens, +{ + fn to_tokens(&self, tokens: &mut TokenStream) { + proc_macro::ToTokens::to_tokens(&self.0, tokens); + } +} + +unsynn! { + /// Parses either a `TokenTree` or `<...>` grouping (which is not a [`Group`] as far as proc-macros + /// are concerned). + #[derive(Clone)] + pub struct AngleTokenTree( + pub Either, AngleTokenTree>>, Gt>, TokenTree>, + ); + + /// Represents an attribute annotation on a field, typically in the form `#[attr]`. + pub struct Attribute { + /// The pound sign preceding the attribute. + pub _pound: Pound, + /// The content of the attribute enclosed in square brackets. 
+ pub body: BracketGroupContaining, + } +} diff --git a/libs/@local/hashql/macros/src/id.rs b/libs/@local/hashql/macros/src/id.rs new file mode 100644 index 00000000000..7bfa4cb38e6 --- /dev/null +++ b/libs/@local/hashql/macros/src/id.rs @@ -0,0 +1,223 @@ +use core::fmt::Display; + +use proc_macro::{Diagnostic, Level, Span, TokenStream}; +use unsynn::{Parse, ToTokenIter, ToTokens, quote}; + +use crate::grammar::Bridge; + +mod grammar { + #![expect(clippy::result_large_err)] + use unsynn::*; + + use crate::grammar::{ + AngleTokenTree, Attribute, KCrate, KDerive, KDisplay, KEnum, KId, KIs, KStep, KStruct, + ModPath, VerbatimUntil, Visibility, + }; + + pub(super) type AttributeIdBody = CommaDelimitedVec; + + unsynn! { + pub(super) enum AttributeBody { + Id { + _id: KId, + + inner: ParenthesisGroupContaining + }, + Any(Vec) + } + + pub(super) enum IdDerive { + Step(KStep) + } + + pub(super) enum IdDisplay { + None(Bang), + Format(TokenTree) + } + + pub(super) enum IdAttribute { + Crate { + _crate: KCrate, + _eq: Assign, + path: ModPath + }, + Derive { + _derive: KDerive, + + traits: ParenthesisGroupContaining> + }, + Display { + _display: KDisplay, + _eq: Assign, + + format: IdDisplay + } + } + + pub(super) enum RangeOp { + Exclusive(DotDot), + Inclusive(DotDotEq) + } + + pub(super) struct StructBody { + pub r#type: ModPath, + pub _is: KIs, + + pub start: VerbatimUntil, + pub op: RangeOp, + pub end: Vec + } + + pub(super) struct ParsedStruct { + pub attributes: Vec>, + pub visibility: Visibility, + + pub _struct: KStruct, + + pub name: Ident, + + pub body: ParenthesisGroupContaining + } + + pub(super) struct ParsedEnum { + pub attributes: Vec>, + pub visibility: Visibility, + + pub _enum: KEnum, + + pub name: Ident, + + pub body: BraceGroupContaining> + } + + /// Represents a variant of an enum, including the optional discriminant value + pub(super) struct UnitEnumVariant { + /// Attributes applied to the variant. + pub attributes: Vec>>, + /// The name of the variant. 
+ pub name: Ident, + } + + pub(super) enum Parsed { + Struct(ParsedStruct), + Enum(ParsedEnum) + } + } +} + +pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { + let (attributes, parsed) = match parse(attr, item) { + Ok(parsed) => parsed, + Err(error) => { + if let Some(token) = error.failed_at() { + emit_error(token.span(), error); + + return TokenStream::new(); + } + + // Unable to report a useful error (at a position) + let value = Bridge(error.to_string()); + return quote!(compile_error!(#value)); + } + }; + + todo!() +} + +fn parse( + attr: TokenStream, + item: TokenStream, +) -> Result<(Vec, grammar::Parsed), unsynn::Error> { + let mut attr_tokens = attr.to_token_iter(); + let mut item_tokens = item.to_token_iter(); + + let additional = grammar::AttributeIdBody::parse_all(&mut attr_tokens)?; + let parsed = grammar::Parsed::parse_all(&mut item_tokens)?; + + Ok((additional.into(), parsed)) +} + +fn expand_struct( + additional_attributes: Vec, + grammar::ParsedStruct { + attributes, + visibility, + _struct: _, + name, + body, + }: grammar::ParsedStruct, +) -> TokenStream { + let mut id_attributes = additional_attributes; + let mut other_attributes = TokenStream::new(); + for attribute in attributes { + match attribute.body.content { + grammar::AttributeBody::Any(_) => { + other_attributes.extend(attribute.into_token_stream()); + } + grammar::AttributeBody::Id { _id: _, inner } => { + id_attributes.extend(inner.content.into_iter().map(|attr| attr.value)); + } + } + } + + let krate = id_attributes + .iter() + .find_map(|attr| match attr { + grammar::IdAttribute::Crate { _crate, _eq, path } => Some(quote!(#path)), + grammar::IdAttribute::Derive { .. } | grammar::IdAttribute::Display { .. 
} => None, + }) + .unwrap_or_else(|| quote!(::hashql_core)); + + let mut output = TokenStream::new(); + + let inner_type = body.content.r#type; + let min = body.content.start; + let op = body.content.op; + let max = body.content.end; + + let assert_message = Bridge(format!( + "ID value must be between the range of {}{}{}", + min.to_token_stream(), + op.to_token_stream(), + max.to_token_stream() + )); + + let assert_in_bounds = quote! { + assert!( + value >= (#min as #inner_type) && value <= (#max as #inner_type), + #assert_message + ); + }; + + output.extend(quote! { + #visibility struct #name { + _internal_do_not_use: #inner_type + } + + impl #name { + /// Creates a new ID with the given value + /// + /// # Panics + /// + /// If the value is outside the valid range + #[must_use] + #visibility const fn new(value: #inner_type) -> Self { + #assert_in_bounds + + Self { + _internal_do_not_use: value, + } + } + } + + impl #krate::id::Id for $name { + + } + }); + + todo!() +} + +fn emit_error(span: Span, message: impl Display) { + Diagnostic::spanned(span, Level::Error, message.to_string()).emit(); +} diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs new file mode 100644 index 00000000000..30f6ad087f1 --- /dev/null +++ b/libs/@local/hashql/macros/src/lib.rs @@ -0,0 +1,33 @@ +#![feature(proc_macro_diagnostic, proc_macro_totokens)] + +extern crate proc_macro; + +mod grammar; +mod id; + +use proc_macro::{Diagnostic, TokenStream}; + +/// Defines a type as an [`Id`]. 
+/// +/// Supports two shapes: +/// +/// **Struct** (newtype wrapper around an integer with a valid range): +/// ```ignore +/// #[hashql_core::id] +/// #[id(derive(Step))] +/// pub struct NodeId(u32 is 0..=0xFFFF_FF00); +/// ``` +/// +/// **Enum** (unit variants mapped to sequential discriminants): +/// ```ignore +/// #[hashql_core::id] +/// pub enum TargetId { +/// Interpreter, +/// Postgres, +/// Embedding, +/// } +/// ``` +#[proc_macro_attribute] +pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { + todo!() +} From a27257fe00806fbb5596a4b7b69987ec15bd1f5c Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 19:41:10 +0100 Subject: [PATCH 02/15] feat: checkpoint --- libs/@local/hashql/macros/src/grammar.rs | 6 +++++ libs/@local/hashql/macros/src/id.rs | 30 +++++++++++++++++------- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/libs/@local/hashql/macros/src/grammar.rs b/libs/@local/hashql/macros/src/grammar.rs index 32907743698..fb5b94be4b0 100644 --- a/libs/@local/hashql/macros/src/grammar.rs +++ b/libs/@local/hashql/macros/src/grammar.rs @@ -15,6 +15,12 @@ keyword!
{ pub KStep = ["Step"]; pub KIs = ["is"]; pub KCrate = ["crate"]; + pub KU8 = ["u8"]; + pub KU16 = ["u16"]; + pub KU32 = ["u32"]; + pub KU64 = ["u64"]; + pub KU128 = ["u128"]; + pub KUsize = ["usize"]; } pub type VerbatimUntil = Many, AngleTokenTree>>; diff --git a/libs/@local/hashql/macros/src/id.rs b/libs/@local/hashql/macros/src/id.rs index 7bfa4cb38e6..9a40c390d2c 100644 --- a/libs/@local/hashql/macros/src/id.rs +++ b/libs/@local/hashql/macros/src/id.rs @@ -10,8 +10,8 @@ mod grammar { use unsynn::*; use crate::grammar::{ - AngleTokenTree, Attribute, KCrate, KDerive, KDisplay, KEnum, KId, KIs, KStep, KStruct, - ModPath, VerbatimUntil, Visibility, + AngleTokenTree, Attribute, KCrate, KDerive, KDisplay, KEnum, KId, KIs, KStep, KStruct, KU8, + KU16, KU32, KU64, KU128, KUsize, ModPath, VerbatimUntil, Visibility, }; pub(super) type AttributeIdBody = CommaDelimitedVec; @@ -59,8 +59,17 @@ mod grammar { Inclusive(DotDotEq) } + pub(super) enum StructScalar { + U8(KU8), + U16(KU16), + U32(KU32), + U64(KU64), + U128(KU128), + Usize(KUsize), + } + pub(super) struct StructBody { - pub r#type: ModPath, + pub r#type: StructScalar, pub _is: KIs, pub start: VerbatimUntil, @@ -182,14 +191,14 @@ fn expand_struct( max.to_token_stream() )); - let assert_in_bounds = quote! { - assert!( - value >= (#min as #inner_type) && value <= (#max as #inner_type), - #assert_message - ); + let max_cmp = match op { + grammar::RangeOp::Exclusive(_) => quote!(<), + grammar::RangeOp::Inclusive(_) => quote!(<=), }; output.extend(quote! 
{ + #other_attributes + #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #visibility struct #name { _internal_do_not_use: #inner_type } @@ -202,7 +211,10 @@ fn expand_struct( /// If the value is outside the valid range #[must_use] #visibility const fn new(value: #inner_type) -> Self { - #assert_in_bounds + assert!( + value >= #min && value #max_cmp #max, + #assert_message + ); Self { _internal_do_not_use: value, From 13ea134f49920134e89ace1348a5df5d6cb0b845 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 19:57:55 +0100 Subject: [PATCH 03/15] feat: checkpoint --- libs/@local/hashql/macros/src/grammar.rs | 1 + libs/@local/hashql/macros/src/id.rs | 68 ++++++++++++++++++++++-- 2 files changed, 65 insertions(+), 4 deletions(-) diff --git a/libs/@local/hashql/macros/src/grammar.rs b/libs/@local/hashql/macros/src/grammar.rs index fb5b94be4b0..2b68f4bdea2 100644 --- a/libs/@local/hashql/macros/src/grammar.rs +++ b/libs/@local/hashql/macros/src/grammar.rs @@ -15,6 +15,7 @@ keyword! 
{ pub KStep = ["Step"]; pub KIs = ["is"]; pub KCrate = ["crate"]; + pub KConst = ["const"]; pub KU8 = ["u8"]; pub KU16 = ["u16"]; pub KU32 = ["u32"]; diff --git a/libs/@local/hashql/macros/src/id.rs b/libs/@local/hashql/macros/src/id.rs index 9a40c390d2c..dc4a778355b 100644 --- a/libs/@local/hashql/macros/src/id.rs +++ b/libs/@local/hashql/macros/src/id.rs @@ -10,8 +10,8 @@ mod grammar { use unsynn::*; use crate::grammar::{ - AngleTokenTree, Attribute, KCrate, KDerive, KDisplay, KEnum, KId, KIs, KStep, KStruct, KU8, - KU16, KU32, KU64, KU128, KUsize, ModPath, VerbatimUntil, Visibility, + AngleTokenTree, Attribute, KConst, KCrate, KDerive, KDisplay, KEnum, KId, KIs, KStep, + KStruct, KU8, KU16, KU32, KU64, KU128, KUsize, ModPath, VerbatimUntil, Visibility, }; pub(super) type AttributeIdBody = CommaDelimitedVec; @@ -41,6 +41,9 @@ mod grammar { _eq: Assign, path: ModPath }, + Const { + _const: KConst + }, Derive { _derive: KDerive, @@ -173,7 +176,9 @@ fn expand_struct( .iter() .find_map(|attr| match attr { grammar::IdAttribute::Crate { _crate, _eq, path } => Some(quote!(#path)), - grammar::IdAttribute::Derive { .. } | grammar::IdAttribute::Display { .. } => None, + grammar::IdAttribute::Derive { .. } + | grammar::IdAttribute::Display { .. } + | &grammar::IdAttribute::Const { .. } => None, }) .unwrap_or_else(|| quote!(::hashql_core)); @@ -196,13 +201,24 @@ fn expand_struct( grammar::RangeOp::Inclusive(_) => quote!(<=), }; + let konst = if id_attributes + .iter() + .any(|attr| matches!(attr, grammar::IdAttribute::Const { .. })) + { + quote!(const) + } else { + TokenStream::new() + }; + output.extend(quote! { #other_attributes #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #visibility struct #name { _internal_do_not_use: #inner_type } + }); + output.extend(quote! 
{ impl #name { /// Creates a new ID with the given value /// @@ -220,10 +236,54 @@ fn expand_struct( _internal_do_not_use: value, } } + + #[inline] + #visibility const unsafe fn new_unchecked(value: #inner_type) -> Self { + Self { + _internal_do_not_use: value, + } + } } - impl #krate::id::Id for $name { + impl #konst #krate::id::Id for $name { + const MIN: Self = Self::new(#min); + const MAX: Self = Self::new(#max); + + fn from_u32(value: u32) -> Self { + // TODO: we must check that the value is indeed + } + + fn from_u64(value: u64) -> Self { + } + + fn from_usize(value: usize) -> Self { + + } + + #[inline] + fn as_u32(self) -> u32 { + self._internal_do_not_use as u32 + } + + #[inline] + fn as_u64(self) -> u64 { + self._internal_do_not_use as u64 + } + + #[inline] + fn as_usize(self) -> usize { + self._internal_do_not_use as usize + } + + #[inline] + fn prev(self) -> ::core::option::Option { + if self._internal_do_not_use == #min { + None + } else { + Some(unsafe { Self::new_unchecked(self._internal_do_not_use - 1) }) + } + } } }); From b0d193449a065e345ef007f15af66c83e49b88f1 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 20:19:31 +0100 Subject: [PATCH 04/15] feat: checkpoint --- libs/@local/hashql/macros/src/id/attr.rs | 88 ++++++ libs/@local/hashql/macros/src/id/enum.rs | 7 + .../hashql/macros/src/{id.rs => id/mod.rs} | 160 ++--------- libs/@local/hashql/macros/src/id/struct.rs | 257 ++++++++++++++++++ libs/@local/hashql/macros/src/lib.rs | 3 +- 5 files changed, 376 insertions(+), 139 deletions(-) create mode 100644 libs/@local/hashql/macros/src/id/attr.rs create mode 100644 libs/@local/hashql/macros/src/id/enum.rs rename libs/@local/hashql/macros/src/{id.rs => id/mod.rs} (50%) create mode 100644 libs/@local/hashql/macros/src/id/struct.rs diff --git a/libs/@local/hashql/macros/src/id/attr.rs b/libs/@local/hashql/macros/src/id/attr.rs new file mode 100644 index 00000000000..117c2df5b67 --- /dev/null +++ 
b/libs/@local/hashql/macros/src/id/attr.rs @@ -0,0 +1,88 @@ +use std::collections::BTreeSet; + +use unsynn::{ToTokens as _, TokenStream, TokenTree, quote}; + +use super::grammar::{self, AttributeBody, IdAttribute}; +use crate::grammar::Attribute; + +pub enum DisplayAttribute { + Auto, + None, + Format(TokenTree), +} + +#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq)] +pub(crate) enum Trait { + Step, +} + +pub(crate) struct Attributes { + pub krate: TokenStream, + pub r#const: TokenStream, + pub display: DisplayAttribute, + pub traits: BTreeSet, + + pub extra: TokenStream, +} + +impl Attributes { + fn parse_attribute(&mut self, attribute: IdAttribute) { + match attribute { + IdAttribute::Crate { _crate, _eq, path } => self.krate = path.into_token_stream(), + IdAttribute::Const { _const } => self.r#const = _const.into_token_stream(), + IdAttribute::Derive { _derive, traits } => { + for r#trait in traits.content { + match r#trait.value { + grammar::IdDerive::Step(_) => { + self.traits.insert(Trait::Step); + } + } + } + } + IdAttribute::Display { + _display, + _eq, + format, + } => match format { + grammar::IdDisplay::None(_) => { + self.display = DisplayAttribute::None; + } + grammar::IdDisplay::Format(token_tree) => { + self.display = DisplayAttribute::Format(token_tree); + } + }, + } + } + + pub(crate) fn parse( + additional: Vec, + attributes: Vec>, + ) -> Self { + let mut this = Self { + krate: quote!(crate), + r#const: TokenStream::new(), + display: DisplayAttribute::Auto, + traits: BTreeSet::new(), + extra: TokenStream::new(), + }; + + for attribute in additional { + this.parse_attribute(attribute); + } + + for attribute in attributes { + match attribute.body.content { + grammar::AttributeBody::Any(_) => { + this.extra.extend(attribute.into_token_stream()); + } + grammar::AttributeBody::Id { _id: _, inner } => { + for attribute in inner.content { + this.parse_attribute(attribute.value); + } + } + } + } + + this + } +} diff --git 
a/libs/@local/hashql/macros/src/id/enum.rs b/libs/@local/hashql/macros/src/id/enum.rs new file mode 100644 index 00000000000..5d532ffafae --- /dev/null +++ b/libs/@local/hashql/macros/src/id/enum.rs @@ -0,0 +1,7 @@ +fn expand_enum( + _additional_attributes: Vec, + _parsed: grammar::ParsedEnum, +) -> TokenStream { + emit_error(Span::call_site(), "enum id types are not yet implemented"); + TokenStream::new() +} diff --git a/libs/@local/hashql/macros/src/id.rs b/libs/@local/hashql/macros/src/id/mod.rs similarity index 50% rename from libs/@local/hashql/macros/src/id.rs rename to libs/@local/hashql/macros/src/id/mod.rs index dc4a778355b..57aba6dbf9b 100644 --- a/libs/@local/hashql/macros/src/id.rs +++ b/libs/@local/hashql/macros/src/id/mod.rs @@ -1,3 +1,7 @@ +mod attr; +mod r#enum; +mod r#struct; + use core::fmt::Display; use proc_macro::{Diagnostic, Level, Span, TokenStream}; @@ -133,7 +137,10 @@ pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { } }; - todo!() + match parsed { + grammar::Parsed::Struct(parsed) => expand_struct(attributes, parsed), + grammar::Parsed::Enum(parsed) => expand_enum(attributes, parsed), + } } fn parse( @@ -149,145 +156,22 @@ fn parse( Ok((additional.into(), parsed)) } -fn expand_struct( - additional_attributes: Vec, - grammar::ParsedStruct { - attributes, - visibility, - _struct: _, - name, - body, - }: grammar::ParsedStruct, -) -> TokenStream { - let mut id_attributes = additional_attributes; - let mut other_attributes = TokenStream::new(); - for attribute in attributes { - match attribute.body.content { - grammar::AttributeBody::Any(_) => { - other_attributes.extend(attribute.into_token_stream()); - } - grammar::AttributeBody::Id { _id: _, inner } => { - id_attributes.extend(inner.content.into_iter().map(|attr| attr.value)); - } - } +fn scalar_rank(scalar: &grammar::StructScalar) -> u32 { + match scalar { + grammar::StructScalar::U8(_) => u8::BITS, + grammar::StructScalar::U16(_) => u16::BITS, + 
grammar::StructScalar::U32(_) => u32::BITS, + grammar::StructScalar::Usize(_) => usize::BITS, + grammar::StructScalar::U64(_) => u64::BITS, + grammar::StructScalar::U128(_) => u128::BITS, } +} - let krate = id_attributes - .iter() - .find_map(|attr| match attr { - grammar::IdAttribute::Crate { _crate, _eq, path } => Some(quote!(#path)), - grammar::IdAttribute::Derive { .. } - | grammar::IdAttribute::Display { .. } - | &grammar::IdAttribute::Const { .. } => None, - }) - .unwrap_or_else(|| quote!(::hashql_core)); - - let mut output = TokenStream::new(); - - let inner_type = body.content.r#type; - let min = body.content.start; - let op = body.content.op; - let max = body.content.end; - - let assert_message = Bridge(format!( - "ID value must be between the range of {}{}{}", - min.to_token_stream(), - op.to_token_stream(), - max.to_token_stream() - )); - - let max_cmp = match op { - grammar::RangeOp::Exclusive(_) => quote!(<), - grammar::RangeOp::Inclusive(_) => quote!(<=), - }; - - let konst = if id_attributes - .iter() - .any(|attr| matches!(attr, grammar::IdAttribute::Const { .. })) - { - quote!(const) - } else { - TokenStream::new() - }; - - output.extend(quote! { - #other_attributes - #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - #visibility struct #name { - _internal_do_not_use: #inner_type - } - }); - - output.extend(quote! 
{ - impl #name { - /// Creates a new ID with the given value - /// - /// # Panics - /// - /// If the value is outside the valid range - #[must_use] - #visibility const fn new(value: #inner_type) -> Self { - assert!( - value >= #min && value #max_cmp #max, - #assert_message - ); - - Self { - _internal_do_not_use: value, - } - } - - #[inline] - #visibility const unsafe fn new_unchecked(value: #inner_type) -> Self { - Self { - _internal_do_not_use: value, - } - } - } - - impl #konst #krate::id::Id for $name { - const MIN: Self = Self::new(#min); - const MAX: Self = Self::new(#max); - - fn from_u32(value: u32) -> Self { - // TODO: we must check that the value is indeed - } - - fn from_u64(value: u64) -> Self { - - } - - fn from_usize(value: usize) -> Self { - - } - - #[inline] - fn as_u32(self) -> u32 { - self._internal_do_not_use as u32 - } - - #[inline] - fn as_u64(self) -> u64 { - self._internal_do_not_use as u64 - } - - #[inline] - fn as_usize(self) -> usize { - self._internal_do_not_use as usize - } - - #[inline] - fn prev(self) -> ::core::option::Option { - if self._internal_do_not_use == #min { - None - } else { - Some(unsafe { Self::new_unchecked(self._internal_do_not_use - 1) }) - } - } - } - }); - - todo!() +fn param_scalar(name: &str) -> grammar::StructScalar { + use unsynn::ToTokenIter; + let ts: TokenStream = name.parse().unwrap(); + let mut iter = ts.to_token_iter(); + unsynn::Parse::parse(&mut iter).unwrap() } fn emit_error(span: Span, message: impl Display) { diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs new file mode 100644 index 00000000000..3a2cc6bdcb5 --- /dev/null +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -0,0 +1,257 @@ +use proc_macro::TokenStream; +use unsynn::{ToTokens as _, quote}; + +use super::grammar; +use crate::{grammar::Bridge, id::attr::Attributes}; + +fn expand_struct( + additional_attributes: Vec, + grammar::ParsedStruct { + attributes, + visibility, + _struct: _, + name, 
+ body, + }: grammar::ParsedStruct, +) -> TokenStream { + let attributes = Attributes::parse(additional_attributes, attributes); + + let mut output = TokenStream::new(); + + let inner_type = body.content.r#type; + let min = body.content.start; + let op = body.content.op; + let max = body.content.end; + + let assert_message = Bridge(format!( + "ID value must be between the range of {}{}{}", + min.to_token_stream(), + op.to_token_stream(), + max.to_token_stream() + )); + + let max_cmp = match op { + grammar::RangeOp::Exclusive(_) => quote!(<), + grammar::RangeOp::Inclusive(_) => quote!(<=), + }; + + output.extend(attributes.extra); + output.extend(quote! { + #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] + #visibility struct #name { + _internal_do_not_use: #inner_type + } + }); + + // Inherent impl + output.extend(quote! { + impl #name { + #[must_use] + #visibility const fn new(value: #inner_type) -> Self { + assert!(value >= #min && value #max_cmp #max, #assert_message); + Self { _internal_do_not_use: value } + } + + #[inline] + #visibility const unsafe fn new_unchecked(value: #inner_type) -> Self { + Self { _internal_do_not_use: value } + } + } + }); + + // Id trait + let (vc32, bc32, cc32) = scalar_casts(&inner_type, ¶m_scalar("u32")); + let (vc64, bc64, cc64) = scalar_casts(&inner_type, ¶m_scalar("u64")); + let (vcus, bcus, ccus) = scalar_casts(&inner_type, ¶m_scalar("usize")); + + output.extend(quote! 
{ + #[automatically_derived] + #[expect(clippy::allow_attributes, reason = "automatically generated")] + #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] + impl #konst #krate::id::Id for #name { + const MIN: Self = Self::new(#min); + const MAX: Self = Self::new(#max); + + fn from_u32(value: u32) -> Self { + assert!((value #vc32) >= (#min #bc32) && (value #vc32) #max_cmp (#max #bc32), #assert_message); + Self { _internal_do_not_use: value #cc32 } + } + fn from_u64(value: u64) -> Self { + assert!((value #vc64) >= (#min #bc64) && (value #vc64) #max_cmp (#max #bc64), #assert_message); + Self { _internal_do_not_use: value #cc64 } + } + fn from_usize(value: usize) -> Self { + assert!((value #vcus) >= (#min #bcus) && (value #vcus) #max_cmp (#max #bcus), #assert_message); + Self { _internal_do_not_use: value #ccus } + } + + #[inline] fn as_u32(self) -> u32 { self._internal_do_not_use as u32 } + #[inline] fn as_u64(self) -> u64 { self._internal_do_not_use as u64 } + #[inline] fn as_usize(self) -> usize { self._internal_do_not_use as usize } + + #[inline] + fn prev(self) -> ::core::option::Option { + if self._internal_do_not_use == #min { + ::core::option::Option::None + } else { + ::core::option::Option::Some(unsafe { Self::new_unchecked(self._internal_do_not_use - 1) }) + } + } + } + }); + + // TryFrom impls + for (param_name, value_cast, bounds_cast, construct_cast) in [ + ("u32", &vc32, &bc32, &cc32), + ("u64", &vc64, &bc64, &cc64), + ("usize", &vcus, &bcus, &ccus), + ] { + let param_ty: TokenStream = param_name.parse().unwrap(); + // For the error report, always widen to u64 + let err_value_cast = if param_name == "u64" { + TokenStream::new() + } else { + quote!(as u64) + }; + + output.extend(quote! 
{ + #[automatically_derived] + #[expect(clippy::allow_attributes, reason = "automatically generated")] + #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] + impl ::core::convert::TryFrom<#param_ty> for #name { + type Error = #krate::id::IdError; + fn try_from(value: #param_ty) -> ::core::result::Result { + if (value #value_cast) >= (#min #bounds_cast) && (value #value_cast) #max_cmp (#max #bounds_cast) { + ::core::result::Result::Ok(Self { _internal_do_not_use: value #construct_cast }) + } else { + ::core::result::Result::Err(#krate::id::IdError::OutOfRange { + value: value #err_value_cast, min: #min as u64, max: #max as u64, + }) + } + } + } + }); + } + + // HasId + output.extend(quote! { + impl #krate::id::HasId for #name { + type Id = #name; + fn id(&self) -> Self::Id { *self } + } + }); + + // Display + let display_attr = id_attributes.iter().find_map(|attr| match attr { + grammar::IdAttribute::Display { format, .. } => Some(format), + _ => None, + }); + + match display_attr { + Some(grammar::IdDisplay::None(_)) => {} + Some(grammar::IdDisplay::Format(format)) => { + output.extend(quote! { + impl ::core::fmt::Display for #name { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + fmt.write_fmt(format_args!(#format, self._internal_do_not_use)) + } + } + }); + } + None => { + output.extend(quote! { + impl ::core::fmt::Display for #name { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + ::core::fmt::Display::fmt(&self._internal_do_not_use, fmt) + } + } + }); + } + } + + // Debug + output.extend(quote! { + impl ::core::fmt::Debug for #name { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + fmt.debug_tuple(stringify!(#name)) + .field(&self._internal_do_not_use) + .finish() + } + } + }); + + // Step + let has_step = id_attributes.iter().any(|attr| { + matches!(attr, grammar::IdAttribute::Derive { traits, .. 
} + if traits.content.iter().any(|delimited| matches!(delimited.value, grammar::IdDerive::Step(_)))) + }); + + if has_step { + output.extend(quote! { + impl ::core::iter::Step for #name { + #[inline] + fn steps_between(start: &Self, end: &Self) -> (usize, ::core::option::Option) { + ::steps_between( + &#krate::id::Id::as_usize(*start), + &#krate::id::Id::as_usize(*end), + ) + } + + #[inline] + fn forward_checked(start: Self, count: usize) -> ::core::option::Option { + #krate::id::Id::as_usize(start) + .checked_add(count) + .map(#krate::id::Id::from_usize) + } + + #[inline] + fn backward_checked(start: Self, count: usize) -> ::core::option::Option { + #krate::id::Id::as_usize(start) + .checked_sub(count) + .map(#krate::id::Id::from_usize) + } + } + }); + } + + output +} + +/// Given the inner scalar type and a parameter type (u32/u64/usize), returns +/// `(value_cast, bounds_cast, construct_cast)`: +/// +/// - `value_cast`: applied to `value` in comparisons when it's narrower than inner +/// - `bounds_cast`: applied to `min`/`max` in comparisons when they're narrower than param +/// - `construct_cast`: applied to `value` when constructing the inner field +/// +/// When inner and param are the same width, all three are empty. 
+fn scalar_casts( + inner: &grammar::StructScalar, + param: &grammar::StructScalar, +) -> (TokenStream, TokenStream, TokenStream) { + let inner_rank = scalar_rank(inner); + let param_rank = scalar_rank(param); + + if inner_rank == param_rank { + // Same width, no casts needed + (TokenStream::new(), TokenStream::new(), TokenStream::new()) + } else if inner_rank > param_rank { + // Inner is wider; widen value to inner for comparison, cast value to inner for construct + let inner_ts = inner.to_token_stream(); + ( + quote!(as #inner_ts), + TokenStream::new(), + quote!(as #inner_ts), + ) + } else { + // Param is wider; widen min/max to param for comparison, narrow value to inner for + // construct + let param_ts = param.to_token_stream(); + let inner_ts = inner.to_token_stream(); + ( + TokenStream::new(), + quote!(as #param_ts), + quote!(as #inner_ts), + ) + } +} diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs index 30f6ad087f1..11d154c57dc 100644 --- a/libs/@local/hashql/macros/src/lib.rs +++ b/libs/@local/hashql/macros/src/lib.rs @@ -1,4 +1,5 @@ #![feature(proc_macro_diagnostic, proc_macro_totokens)] +#![recursion_limit = "512"] extern crate proc_macro; @@ -29,5 +30,5 @@ use proc_macro::{Diagnostic, TokenStream}; /// ``` #[proc_macro_attribute] pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { - todo!() + id::expand(attr, item) } From 448af7a04e4532273e09b665ae1cc5f657f2f29c Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 22:06:58 +0100 Subject: [PATCH 05/15] feat: checkpoint --- Cargo.lock | 3 + libs/@local/hashql/macros/Cargo.toml | 4 +- libs/@local/hashql/macros/src/grammar.rs | 10 - libs/@local/hashql/macros/src/id/enum.rs | 12 +- libs/@local/hashql/macros/src/id/mod.rs | 32 +- libs/@local/hashql/macros/src/id/struct.rs | 369 +++++++++++++-------- libs/@local/hashql/macros/src/lib.rs | 5 +- 7 files changed, 248 insertions(+), 187 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
2a2fdd387b6..90fe200f5df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3898,6 +3898,8 @@ dependencies = [ name = "hashql-macros" version = "0.0.0" dependencies = [ + "proc-macro2", + "quote", "unsynn", ] @@ -9910,6 +9912,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "501a7adf1a4bd9951501e5c66621e972ef8874d787628b7f90e64f936ef7ec0a" dependencies = [ "mutants", + "proc-macro2", ] [[package]] diff --git a/libs/@local/hashql/macros/Cargo.toml b/libs/@local/hashql/macros/Cargo.toml index a357f13fb85..19740e762f1 100644 --- a/libs/@local/hashql/macros/Cargo.toml +++ b/libs/@local/hashql/macros/Cargo.toml @@ -13,4 +13,6 @@ proc-macro = true workspace = true [dependencies] -unsynn = { version = "0.3.0", default-features = false } +proc-macro2.workspace = true +quote = { workspace = true, features = ["proc-macro"] } +unsynn = { version = "0.3.0", default-features = false, features = ["proc_macro2"] } diff --git a/libs/@local/hashql/macros/src/grammar.rs b/libs/@local/hashql/macros/src/grammar.rs index 2b68f4bdea2..c34bade7e98 100644 --- a/libs/@local/hashql/macros/src/grammar.rs +++ b/libs/@local/hashql/macros/src/grammar.rs @@ -28,16 +28,6 @@ pub type VerbatimUntil = Many, AngleTokenTree>>; pub type ModPath = Cons, PathSepDelimited>; pub type Visibility = Cons, ModPath>>>>; -pub struct Bridge(pub T); -impl ToTokens for Bridge -where - T: proc_macro::ToTokens, -{ - fn to_tokens(&self, tokens: &mut TokenStream) { - proc_macro::ToTokens::to_tokens(&self.0, tokens); - } -} - unsynn! { /// Parses either a `TokenTree` or `<...>` grouping (which is not a [`Group`] as far as proc-macros /// are concerned). 
diff --git a/libs/@local/hashql/macros/src/id/enum.rs b/libs/@local/hashql/macros/src/id/enum.rs index 5d532ffafae..652e884c616 100644 --- a/libs/@local/hashql/macros/src/id/enum.rs +++ b/libs/@local/hashql/macros/src/id/enum.rs @@ -1,7 +1,15 @@ -fn expand_enum( +use proc_macro2::{Span, TokenStream}; + +use super::{emit_error, grammar}; + +pub(super) fn expand_enum( _additional_attributes: Vec, _parsed: grammar::ParsedEnum, ) -> TokenStream { - emit_error(Span::call_site(), "enum id types are not yet implemented"); + emit_error( + Span::call_site().unwrap(), + "enum id types are not yet implemented", + ); + TokenStream::new() } diff --git a/libs/@local/hashql/macros/src/id/mod.rs b/libs/@local/hashql/macros/src/id/mod.rs index 57aba6dbf9b..678baf8096c 100644 --- a/libs/@local/hashql/macros/src/id/mod.rs +++ b/libs/@local/hashql/macros/src/id/mod.rs @@ -4,10 +4,11 @@ mod r#struct; use core::fmt::Display; -use proc_macro::{Diagnostic, Level, Span, TokenStream}; +use proc_macro::{Diagnostic, Level, Span}; +use proc_macro2::TokenStream; use unsynn::{Parse, ToTokenIter, ToTokens, quote}; -use crate::grammar::Bridge; +use self::{r#enum::expand_enum, r#struct::expand_struct}; mod grammar { #![expect(clippy::result_large_err)] @@ -15,7 +16,7 @@ mod grammar { use crate::grammar::{ AngleTokenTree, Attribute, KConst, KCrate, KDerive, KDisplay, KEnum, KId, KIs, KStep, - KStruct, KU8, KU16, KU32, KU64, KU128, KUsize, ModPath, VerbatimUntil, Visibility, + KStruct, KU8, KU16, KU32, KU64, KU128, ModPath, VerbatimUntil, Visibility, }; pub(super) type AttributeIdBody = CommaDelimitedVec; @@ -72,7 +73,6 @@ mod grammar { U32(KU32), U64(KU64), U128(KU128), - Usize(KUsize), } pub(super) struct StructBody { @@ -126,14 +126,14 @@ pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { Ok(parsed) => parsed, Err(error) => { if let Some(token) = error.failed_at() { - emit_error(token.span(), error); + emit_error(token.span().unwrap(), error); return TokenStream::new(); } 
// Unable to report a useful error (at a position) - let value = Bridge(error.to_string()); - return quote!(compile_error!(#value)); + let message = error.to_string(); + return quote!(compile_error!(#message)); } }; @@ -156,24 +156,6 @@ fn parse( Ok((additional.into(), parsed)) } -fn scalar_rank(scalar: &grammar::StructScalar) -> u32 { - match scalar { - grammar::StructScalar::U8(_) => u8::BITS, - grammar::StructScalar::U16(_) => u16::BITS, - grammar::StructScalar::U32(_) => u32::BITS, - grammar::StructScalar::Usize(_) => usize::BITS, - grammar::StructScalar::U64(_) => u64::BITS, - grammar::StructScalar::U128(_) => u128::BITS, - } -} - -fn param_scalar(name: &str) -> grammar::StructScalar { - use unsynn::ToTokenIter; - let ts: TokenStream = name.parse().unwrap(); - let mut iter = ts.to_token_iter(); - unsynn::Parse::parse(&mut iter).unwrap() -} - fn emit_error(span: Span, message: impl Display) { Diagnostic::spanned(span, Level::Error, message.to_string()).emit(); } diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index 3a2cc6bdcb5..cd66a02d353 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -1,10 +1,135 @@ -use proc_macro::TokenStream; -use unsynn::{ToTokens as _, quote}; +use core::cmp; -use super::grammar; -use crate::{grammar::Bridge, id::attr::Attributes}; +use proc_macro2::{Ident, Span, TokenStream}; +use quote::{ToTokens, format_ident, quote}; +use unsynn::{Ge, Gt, ToTokens as _}; -fn expand_struct( +use super::grammar::{self, StructBody, StructScalar}; +use crate::id::attr::{Attributes, DisplayAttribute, Trait}; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +enum IntegerScalar { + U8, + U16, + U32, + U64, + U128, +} + +impl From for IntegerScalar { + fn from(scalar: StructScalar) -> Self { + match scalar { + StructScalar::U8(_) => Self::U8, + StructScalar::U16(_) => Self::U16, + StructScalar::U32(_) => Self::U32, + 
StructScalar::U64(_) => Self::U64, + StructScalar::U128(_) => Self::U128, + } + } +} + +impl ToTokens for IntegerScalar { + fn to_tokens(&self, tokens: &mut TokenStream) { + let ident = match self { + Self::U8 => Ident::new("u8", Span::call_site()), + Self::U16 => Ident::new("u16", Span::call_site()), + Self::U32 => Ident::new("u32", Span::call_site()), + Self::U64 => Ident::new("u64", Span::call_site()), + Self::U128 => Ident::new("u128", Span::call_site()), + }; + + tokens.extend([ident]) + } +} + +enum RangeKind { + Inclusive, + Exclusive, +} + +impl From for RangeKind { + fn from(op: grammar::RangeOp) -> Self { + match op { + grammar::RangeOp::Inclusive(_) => Self::Inclusive, + grammar::RangeOp::Exclusive(_) => Self::Exclusive, + } + } +} + +impl ToTokens for RangeKind { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + RangeKind::Inclusive => Ge::new().to_tokens(tokens), + RangeKind::Exclusive => Gt::new().to_tokens(tokens), + } + } +} + +struct Constraint { + scalar: IntegerScalar, + + min: TokenStream, + max: TokenStream, + + kind: RangeKind, +} + +impl Constraint { + fn message(&self) -> String { + format!( + "id value must be between {}{}{}", + self.min, + self.kind.to_token_stream(), + self.max + ) + } + + fn comparison(&self, ident: &Ident, ident_scalar: IntegerScalar) -> TokenStream { + let Self { + scalar, + min, + max, + kind, + } = self; + + let width = cmp::max(*scalar, ident_scalar); + + quote! { + (#ident as #width) >= (#min as #width) && + (#ident as #width) #kind (#max as #width) + } + } + + fn assertion(&self, ident: Ident, ident_scalar: IntegerScalar) -> TokenStream { + let comparison = self.comparison(&ident, ident_scalar); + let message = self.message(); + + quote! 
{ + assert!(#comparison, #message) + } + } +} + +impl From for Constraint { + fn from( + StructBody { + r#type, + _is, + start, + op, + end, + }: StructBody, + ) -> Self { + Self { + scalar: r#type.into(), + min: start.into_token_stream(), + max: end.into_token_stream(), + kind: op.into(), + } + } +} + +pub(crate) fn expand_struct( additional_attributes: Vec, grammar::ParsedStruct { attributes, @@ -14,118 +139,138 @@ fn expand_struct( body, }: grammar::ParsedStruct, ) -> TokenStream { - let attributes = Attributes::parse(additional_attributes, attributes); - let mut output = TokenStream::new(); + let Attributes { + krate, + r#const: konst, + display, + traits, + extra, + } = Attributes::parse(additional_attributes, attributes); + let vis = visibility.into_token_stream(); - let inner_type = body.content.r#type; - let min = body.content.start; - let op = body.content.op; - let max = body.content.end; + let constraint = Constraint::from(body.content); + let scalar = constraint.scalar; - let assert_message = Bridge(format!( - "ID value must be between the range of {}{}{}", - min.to_token_stream(), - op.to_token_stream(), - max.to_token_stream() - )); + let new_assertion = constraint.assertion(format_ident!("value"), scalar); + let u32_assertion = constraint.assertion(format_ident!("value"), IntegerScalar::U32); + let u64_assertion = constraint.assertion(format_ident!("value"), IntegerScalar::U64); + let usize_assertion = constraint.assertion(format_ident!("value"), IntegerScalar::U64); // u64 to be safe, even on 32-bit systems - let max_cmp = match op { - grammar::RangeOp::Exclusive(_) => quote!(<), - grammar::RangeOp::Inclusive(_) => quote!(<=), - }; + let min = &constraint.min; + let max = &constraint.max; - output.extend(attributes.extra); output.extend(quote! 
{ + #extra #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - #visibility struct #name { - _internal_do_not_use: #inner_type + #vis struct #name { + #[doc(hidden)] + _internal_do_not_use: #scalar } - }); - // Inherent impl - output.extend(quote! { impl #name { #[must_use] - #visibility const fn new(value: #inner_type) -> Self { - assert!(value >= #min && value #max_cmp #max, #assert_message); + #[inline] + #vis const fn new(value: #scalar) -> Self { + #new_assertion + Self { _internal_do_not_use: value } } + #[must_use] #[inline] - #visibility const unsafe fn new_unchecked(value: #inner_type) -> Self { + #vis const unsafe fn new_unchecked(value: #scalar) -> Self { Self { _internal_do_not_use: value } } } - }); - - // Id trait - let (vc32, bc32, cc32) = scalar_casts(&inner_type, ¶m_scalar("u32")); - let (vc64, bc64, cc64) = scalar_casts(&inner_type, ¶m_scalar("u64")); - let (vcus, bcus, ccus) = scalar_casts(&inner_type, ¶m_scalar("usize")); - output.extend(quote! { #[automatically_derived] - #[expect(clippy::allow_attributes, reason = "automatically generated")] - #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] impl #konst #krate::id::Id for #name { const MIN: Self = Self::new(#min); const MAX: Self = Self::new(#max); - fn from_u32(value: u32) -> Self { - assert!((value #vc32) >= (#min #bc32) && (value #vc32) #max_cmp (#max #bc32), #assert_message); - Self { _internal_do_not_use: value #cc32 } + fn from_u32(value: u32) -> Option { + #u32_assertion + + Self { _internal_do_not_use: (value as #scalar) } + } + + fn from_u64(value: u64) -> Option { + #u64_assertion + + Self { _internal_do_not_use: (value as #scalar) } } - fn from_u64(value: u64) -> Self { - assert!((value #vc64) >= (#min #bc64) && (value #vc64) #max_cmp (#max #bc64), #assert_message); - Self { _internal_do_not_use: value #cc64 } + + fn from_usize(value: usize) -> Option { + #usize_assertion + + Self { _internal_do_not_use: (value as #scalar) } + 
} + + #[inline] + fn as_u32(&self) -> u32 { + (self._internal_do_not_use as u32) } - fn from_usize(value: usize) -> Self { - assert!((value #vcus) >= (#min #bcus) && (value #vcus) #max_cmp (#max #bcus), #assert_message); - Self { _internal_do_not_use: value #ccus } + + #[inline] + fn as_u64(&self) -> u64 { + (self._internal_do_not_use as u64) } - #[inline] fn as_u32(self) -> u32 { self._internal_do_not_use as u32 } - #[inline] fn as_u64(self) -> u64 { self._internal_do_not_use as u64 } - #[inline] fn as_usize(self) -> usize { self._internal_do_not_use as usize } + #[inline] + fn as_usize(&self) -> usize { + (self._internal_do_not_use as usize) + } #[inline] fn prev(self) -> ::core::option::Option { if self._internal_do_not_use == #min { ::core::option::Option::None } else { - ::core::option::Option::Some(unsafe { Self::new_unchecked(self._internal_do_not_use - 1) }) + ::core::option::Option::Some(Self { _internal_do_not_use: (self._internal_do_not_use - 1) }) } } } + + impl #krate::id::HasId for #name { + type Id = Self; + + fn id(&self) -> Self::Id { + *self + } + } + }); + + // Debug + output.extend(quote! 
{ + impl ::core::fmt::Debug for #name { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + fmt.debug_tuple(stringify!(#name)) + .field(&self._internal_do_not_use) + .finish() + } + } }); - // TryFrom impls - for (param_name, value_cast, bounds_cast, construct_cast) in [ - ("u32", &vc32, &bc32, &cc32), - ("u64", &vc64, &bc64, &cc64), - ("usize", &vcus, &bcus, &ccus), + // TryFrom + for (param, param_scalar) in [ + (quote!(u32), IntegerScalar::U32), + (quote!(u64), IntegerScalar::U64), + (quote!(usize), IntegerScalar::U64), // u64 to be safe on 32-bit ] { - let param_ty: TokenStream = param_name.parse().unwrap(); - // For the error report, always widen to u64 - let err_value_cast = if param_name == "u64" { - TokenStream::new() - } else { - quote!(as u64) - }; + let comparison = constraint.comparison(&format_ident!("value"), param_scalar); output.extend(quote! { #[automatically_derived] - #[expect(clippy::allow_attributes, reason = "automatically generated")] - #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] - impl ::core::convert::TryFrom<#param_ty> for #name { + impl ::core::convert::TryFrom<#param> for #name { type Error = #krate::id::IdError; - fn try_from(value: #param_ty) -> ::core::result::Result { - if (value #value_cast) >= (#min #bounds_cast) && (value #value_cast) #max_cmp (#max #bounds_cast) { - ::core::result::Result::Ok(Self { _internal_do_not_use: value #construct_cast }) + + fn try_from(value: #param) -> ::core::result::Result { + if #comparison { + ::core::result::Result::Ok(Self { _internal_do_not_use: value as #scalar }) } else { ::core::result::Result::Err(#krate::id::IdError::OutOfRange { - value: value #err_value_cast, min: #min as u64, max: #max as u64, + value: value as u64, min: #min as u64, max: #max as u64, }) } } @@ -133,23 +278,10 @@ fn expand_struct( }); } - // HasId - output.extend(quote! 
{ - impl #krate::id::HasId for #name { - type Id = #name; - fn id(&self) -> Self::Id { *self } - } - }); - // Display - let display_attr = id_attributes.iter().find_map(|attr| match attr { - grammar::IdAttribute::Display { format, .. } => Some(format), - _ => None, - }); - - match display_attr { - Some(grammar::IdDisplay::None(_)) => {} - Some(grammar::IdDisplay::Format(format)) => { + match display { + DisplayAttribute::None => {} + DisplayAttribute::Format(format) => { output.extend(quote! { impl ::core::fmt::Display for #name { fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { @@ -158,7 +290,7 @@ fn expand_struct( } }); } - None => { + DisplayAttribute::Auto => { output.extend(quote! { impl ::core::fmt::Display for #name { fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { @@ -169,24 +301,8 @@ fn expand_struct( } } - // Debug - output.extend(quote! { - impl ::core::fmt::Debug for #name { - fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { - fmt.debug_tuple(stringify!(#name)) - .field(&self._internal_do_not_use) - .finish() - } - } - }); - // Step - let has_step = id_attributes.iter().any(|attr| { - matches!(attr, grammar::IdAttribute::Derive { traits, .. } - if traits.content.iter().any(|delimited| matches!(delimited.value, grammar::IdDerive::Step(_)))) - }); - - if has_step { + if traits.contains(&Trait::Step) { output.extend(quote! 
{ impl ::core::iter::Step for #name { #[inline] @@ -201,14 +317,14 @@ fn expand_struct( fn forward_checked(start: Self, count: usize) -> ::core::option::Option { #krate::id::Id::as_usize(start) .checked_add(count) - .map(#krate::id::Id::from_usize) + .and_then(|value| Self::try_from(value).ok()) } #[inline] fn backward_checked(start: Self, count: usize) -> ::core::option::Option { #krate::id::Id::as_usize(start) .checked_sub(count) - .map(#krate::id::Id::from_usize) + .and_then(|value| Self::try_from(value).ok()) } } }); @@ -216,42 +332,3 @@ fn expand_struct( output } - -/// Given the inner scalar type and a parameter type (u32/u64/usize), returns -/// `(value_cast, bounds_cast, construct_cast)`: -/// -/// - `value_cast`: applied to `value` in comparisons when it's narrower than inner -/// - `bounds_cast`: applied to `min`/`max` in comparisons when they're narrower than param -/// - `construct_cast`: applied to `value` when constructing the inner field -/// -/// When inner and param are the same width, all three are empty. 
-fn scalar_casts( - inner: &grammar::StructScalar, - param: &grammar::StructScalar, -) -> (TokenStream, TokenStream, TokenStream) { - let inner_rank = scalar_rank(inner); - let param_rank = scalar_rank(param); - - if inner_rank == param_rank { - // Same width, no casts needed - (TokenStream::new(), TokenStream::new(), TokenStream::new()) - } else if inner_rank > param_rank { - // Inner is wider; widen value to inner for comparison, cast value to inner for construct - let inner_ts = inner.to_token_stream(); - ( - quote!(as #inner_ts), - TokenStream::new(), - quote!(as #inner_ts), - ) - } else { - // Param is wider; widen min/max to param for comparison, narrow value to inner for - // construct - let param_ts = param.to_token_stream(); - let inner_ts = inner.to_token_stream(); - ( - TokenStream::new(), - quote!(as #param_ts), - quote!(as #inner_ts), - ) - } -} diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs index 11d154c57dc..3b6ee24d52e 100644 --- a/libs/@local/hashql/macros/src/lib.rs +++ b/libs/@local/hashql/macros/src/lib.rs @@ -1,12 +1,11 @@ #![feature(proc_macro_diagnostic, proc_macro_totokens)] -#![recursion_limit = "512"] extern crate proc_macro; mod grammar; mod id; -use proc_macro::{Diagnostic, TokenStream}; +use proc_macro::TokenStream; /// Defines a type as an [`Id`]. 
/// @@ -30,5 +29,5 @@ use proc_macro::{Diagnostic, TokenStream}; /// ``` #[proc_macro_attribute] pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { - id::expand(attr, item) + id::expand(attr.into(), item.into()).into() } From 9afcd8bc3b536edef54f12c5b928ac83be285e0e Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 22:16:21 +0100 Subject: [PATCH 06/15] feat: checkpoint --- libs/@local/hashql/macros/src/grammar.rs | 13 ++++++++---- libs/@local/hashql/macros/src/id/attr.rs | 6 +++--- libs/@local/hashql/macros/src/id/mod.rs | 10 +++++++-- libs/@local/hashql/macros/src/id/struct.rs | 24 +++++++++++++++++++++- libs/@local/hashql/macros/src/lib.rs | 1 + 5 files changed, 44 insertions(+), 10 deletions(-) diff --git a/libs/@local/hashql/macros/src/grammar.rs b/libs/@local/hashql/macros/src/grammar.rs index c34bade7e98..5fed1c39f89 100644 --- a/libs/@local/hashql/macros/src/grammar.rs +++ b/libs/@local/hashql/macros/src/grammar.rs @@ -1,4 +1,8 @@ -use unsynn::*; +#![expect(clippy::result_large_err)] +use unsynn::{ + BracketGroupContaining, Cons, Either, Except, Gt, Ident, Lt, Many, ParenthesisGroupContaining, + PathSep, PathSepDelimited, Pound, TokenTree, keyword, unsynn, +}; keyword! { /// The "pub" keyword. @@ -24,9 +28,10 @@ keyword! { pub KUsize = ["usize"]; } -pub type VerbatimUntil = Many, AngleTokenTree>>; -pub type ModPath = Cons, PathSepDelimited>; -pub type Visibility = Cons, ModPath>>>>; +pub(crate) type VerbatimUntil = Many, AngleTokenTree>>; +pub(crate) type ModPath = Cons, PathSepDelimited>; +pub(crate) type Visibility = + Cons, ModPath>>>>; unsynn! 
{ /// Parses either a `TokenTree` or `<...>` grouping (which is not a [`Group`] as far as proc-macros diff --git a/libs/@local/hashql/macros/src/id/attr.rs b/libs/@local/hashql/macros/src/id/attr.rs index 117c2df5b67..6285718fa01 100644 --- a/libs/@local/hashql/macros/src/id/attr.rs +++ b/libs/@local/hashql/macros/src/id/attr.rs @@ -1,11 +1,11 @@ -use std::collections::BTreeSet; +use alloc::collections::BTreeSet; use unsynn::{ToTokens as _, TokenStream, TokenTree, quote}; use super::grammar::{self, AttributeBody, IdAttribute}; use crate::grammar::Attribute; -pub enum DisplayAttribute { +pub(crate) enum DisplayAttribute { Auto, None, Format(TokenTree), @@ -29,7 +29,7 @@ impl Attributes { fn parse_attribute(&mut self, attribute: IdAttribute) { match attribute { IdAttribute::Crate { _crate, _eq, path } => self.krate = path.into_token_stream(), - IdAttribute::Const { _const } => self.r#const = _const.into_token_stream(), + IdAttribute::Const { _const: r#const } => self.r#const = r#const.into_token_stream(), IdAttribute::Derive { _derive, traits } => { for r#trait in traits.content { match r#trait.value { diff --git a/libs/@local/hashql/macros/src/id/mod.rs b/libs/@local/hashql/macros/src/id/mod.rs index 678baf8096c..a122f1650c6 100644 --- a/libs/@local/hashql/macros/src/id/mod.rs +++ b/libs/@local/hashql/macros/src/id/mod.rs @@ -6,13 +6,16 @@ use core::fmt::Display; use proc_macro::{Diagnostic, Level, Span}; use proc_macro2::TokenStream; -use unsynn::{Parse, ToTokenIter, ToTokens, quote}; +use unsynn::{Parse as _, ToTokenIter as _, ToTokens as _, quote}; use self::{r#enum::expand_enum, r#struct::expand_struct}; mod grammar { #![expect(clippy::result_large_err)] - use unsynn::*; + use unsynn::{ + Assign, Bang, BraceGroupContaining, CommaDelimitedVec, DotDot, DotDotEq, Ident, + ParenthesisGroupContaining, TokenTree, unsynn, + }; use crate::grammar::{ AngleTokenTree, Attribute, KConst, KCrate, KDerive, KDisplay, KEnum, KId, KIs, KStep, @@ -124,6 +127,8 @@ mod grammar { 
pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { let (attributes, parsed) = match parse(attr, item) { Ok(parsed) => parsed, + + #[expect(clippy::collection_is_never_read, reason = "false positive")] Err(error) => { if let Some(token) = error.failed_at() { emit_error(token.span().unwrap(), error); @@ -143,6 +148,7 @@ pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { } } +#[expect(clippy::result_large_err)] fn parse( attr: TokenStream, item: TokenStream, diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index cd66a02d353..abd6916d119 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -129,6 +129,7 @@ impl From for Constraint { } } +#[expect(clippy::too_many_lines, reason = "macro")] pub(crate) fn expand_struct( additional_attributes: Vec, grammar::ParsedStruct { @@ -160,6 +161,14 @@ pub(crate) fn expand_struct( let min = &constraint.min; let max = &constraint.max; + let range_end = match constraint.kind { + RangeKind::Inclusive => format!("{max}]"), + RangeKind::Exclusive => format!("{max})"), + }; + let new_panic_doc = format!("Panics if `value` is not in `[{min}, {range_end}`."); + let unchecked_safety_doc = + format!("The caller must ensure that `value` is in `[{min}, {range_end}`."); + output.extend(quote! { #extra #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -169,6 +178,11 @@ pub(crate) fn expand_struct( } impl #name { + /// Creates a new id from a raw scalar value. + /// + /// # Panics + /// + #[doc = #new_panic_doc] #[must_use] #[inline] #vis const fn new(value: #scalar) -> Self { @@ -177,6 +191,11 @@ pub(crate) fn expand_struct( Self { _internal_do_not_use: value } } + /// Creates a new id from a raw scalar value without bounds checking. 
+ /// + /// # Safety + /// + #[doc = #unchecked_safety_doc] #[must_use] #[inline] #vis const unsafe fn new_unchecked(value: #scalar) -> Self { @@ -235,6 +254,7 @@ pub(crate) fn expand_struct( impl #krate::id::HasId for #name { type Id = Self; + #[inline] fn id(&self) -> Self::Id { *self } @@ -270,7 +290,9 @@ pub(crate) fn expand_struct( ::core::result::Result::Ok(Self { _internal_do_not_use: value as #scalar }) } else { ::core::result::Result::Err(#krate::id::IdError::OutOfRange { - value: value as u64, min: #min as u64, max: #max as u64, + value: value as u64, + min: #min as u64, + max: #max as u64, }) } } diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs index 3b6ee24d52e..af908afc773 100644 --- a/libs/@local/hashql/macros/src/lib.rs +++ b/libs/@local/hashql/macros/src/lib.rs @@ -1,5 +1,6 @@ #![feature(proc_macro_diagnostic, proc_macro_totokens)] +extern crate alloc; extern crate proc_macro; mod grammar; From 5e6c8cd22a18c85b4ffb2d39500cb42c20a5270d Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 22:17:26 +0100 Subject: [PATCH 07/15] feat: checkpoint --- libs/@local/hashql/macros/src/id/struct.rs | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index abd6916d119..54106e20381 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -38,7 +38,7 @@ impl ToTokens for IntegerScalar { Self::U128 => Ident::new("u128", Span::call_site()), }; - tokens.extend([ident]) + tokens.extend([ident]); } } @@ -59,8 +59,8 @@ impl From for RangeKind { impl ToTokens for RangeKind { fn to_tokens(&self, tokens: &mut TokenStream) { match self { - RangeKind::Inclusive => Ge::new().to_tokens(tokens), - RangeKind::Exclusive => Gt::new().to_tokens(tokens), + Self::Inclusive => Ge::new().to_tokens(tokens), + Self::Exclusive => Gt::new().to_tokens(tokens), } 
} } @@ -100,8 +100,8 @@ impl Constraint { } } - fn assertion(&self, ident: Ident, ident_scalar: IntegerScalar) -> TokenStream { - let comparison = self.comparison(&ident, ident_scalar); + fn assertion(&self, ident: &Ident, ident_scalar: IntegerScalar) -> TokenStream { + let comparison = self.comparison(ident, ident_scalar); let message = self.message(); quote! { @@ -153,10 +153,11 @@ pub(crate) fn expand_struct( let constraint = Constraint::from(body.content); let scalar = constraint.scalar; - let new_assertion = constraint.assertion(format_ident!("value"), scalar); - let u32_assertion = constraint.assertion(format_ident!("value"), IntegerScalar::U32); - let u64_assertion = constraint.assertion(format_ident!("value"), IntegerScalar::U64); - let usize_assertion = constraint.assertion(format_ident!("value"), IntegerScalar::U64); // u64 to be safe, even on 32-bit systems + let value_ident = format_ident!("value"); + let new_assertion = constraint.assertion(&value, scalar); + let u32_assertion = constraint.assertion(&value, IntegerScalar::U32); + let u64_assertion = constraint.assertion(&value, IntegerScalar::U64); + let usize_assertion = constraint.assertion(&value, IntegerScalar::U64); // u64 to be safe, even on 32-bit systems let min = &constraint.min; let max = &constraint.max; @@ -278,7 +279,7 @@ pub(crate) fn expand_struct( (quote!(u64), IntegerScalar::U64), (quote!(usize), IntegerScalar::U64), // u64 to be safe on 32-bit ] { - let comparison = constraint.comparison(&format_ident!("value"), param_scalar); + let comparison = constraint.comparison(&value_ident, param_scalar); output.extend(quote! 
{ #[automatically_derived] From a910b8136701527c2483dab5b031b44029d243b5 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 22:51:47 +0100 Subject: [PATCH 08/15] feat: finish id macro --- libs/@local/hashql/macros/src/id/common.rs | 36 +++ libs/@local/hashql/macros/src/id/enum.rs | 290 ++++++++++++++++++++- libs/@local/hashql/macros/src/id/mod.rs | 1 + libs/@local/hashql/macros/src/id/struct.rs | 68 ++--- 4 files changed, 344 insertions(+), 51 deletions(-) create mode 100644 libs/@local/hashql/macros/src/id/common.rs diff --git a/libs/@local/hashql/macros/src/id/common.rs b/libs/@local/hashql/macros/src/id/common.rs new file mode 100644 index 00000000000..6a3249c023f --- /dev/null +++ b/libs/@local/hashql/macros/src/id/common.rs @@ -0,0 +1,36 @@ +use proc_macro2::{Ident, Span, TokenStream}; +use quote::ToTokens; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub(crate) enum IntegerScalar { + U8, + U16, + U32, + U64, + U128, +} + +impl IntegerScalar { + pub(crate) const fn from_variant_count(count: usize) -> Self { + match count { + 0..=0xFF => Self::U8, + 0x100..=0xFFFF => Self::U16, + 0x1_0000..=0xFFFF_FFFF => Self::U32, + _ => Self::U64, + } + } +} + +impl ToTokens for IntegerScalar { + fn to_tokens(&self, tokens: &mut TokenStream) { + let ident = match self { + Self::U8 => Ident::new("u8", Span::call_site()), + Self::U16 => Ident::new("u16", Span::call_site()), + Self::U32 => Ident::new("u32", Span::call_site()), + Self::U64 => Ident::new("u64", Span::call_site()), + Self::U128 => Ident::new("u128", Span::call_site()), + }; + + tokens.extend([ident]); + } +} diff --git a/libs/@local/hashql/macros/src/id/enum.rs b/libs/@local/hashql/macros/src/id/enum.rs index 652e884c616..a4cd33123e4 100644 --- a/libs/@local/hashql/macros/src/id/enum.rs +++ b/libs/@local/hashql/macros/src/id/enum.rs @@ -1,15 +1,289 @@ -use proc_macro2::{Span, TokenStream}; +use proc_macro2::{Literal, TokenStream}; +use quote::{quote, quote_spanned}; +use 
unsynn::ToTokens as _; -use super::{emit_error, grammar}; +use super::grammar; +use crate::id::{ + attr::{Attributes, DisplayAttribute, Trait}, + common::IntegerScalar, +}; +#[expect(clippy::too_many_lines, reason = "macro")] pub(super) fn expand_enum( - _additional_attributes: Vec, - _parsed: grammar::ParsedEnum, + additional_attributes: Vec, + grammar::ParsedEnum { + attributes, + visibility, + _enum: _, + name, + body, + }: grammar::ParsedEnum, ) -> TokenStream { - emit_error( - Span::call_site().unwrap(), - "enum id types are not yet implemented", + let mut output = TokenStream::new(); + + let Attributes { + krate, + r#const: konst, + display, + traits, + extra, + } = Attributes::parse(additional_attributes, attributes); + let vis = visibility.into_token_stream(); + + let mut variants: Vec<_> = Vec::new(); + for variant in &*body.content { + variants.push(&variant.value.name); + } + + let variant_count = variants.len(); + let backing = IntegerScalar::from_variant_count(variant_count); + + let discriminant_arms = variants.iter().enumerate().map(|(index, variant)| { + let literal = Literal::usize_suffixed(index); + + quote_spanned!(variant.span() => #literal => ::core::option::Option::Some(Self::#variant)) + }); + let self_variants = variants + .iter() + .map(|variant| quote_spanned!(variant.span() => Self::#variant)); + + let body = body.to_token_stream(); + + // 1. Enum definition + output.extend(quote! { + #extra + #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] + #[repr(#backing)] + #vis enum #name #body + }); + + // 2. Inherent impl + output.extend(quote! { + impl #name { + /// The number of variants in this enum. + #vis const VARIANT_COUNT: usize = #variant_count; + + /// Converts a discriminant value to the corresponding variant, returning + /// [`None`] if the value does not match any variant. 
+ #[inline] + #vis const fn try_from_discriminant(value: #backing) -> ::core::option::Option { + match value { + #(#discriminant_arms,)* + _ => ::core::option::Option::None, + } + } + + /// Converts a discriminant value to the corresponding variant. + /// + /// # Panics + /// + /// Panics if the value does not match any variant. + #[inline] + #vis const fn from_discriminant(value: #backing) -> Self { + match Self::try_from_discriminant(value) { + ::core::option::Option::Some(variant) => variant, + ::core::option::Option::None => unreachable!(), + } + } + + /// Converts a discriminant value to the corresponding variant without + /// checking that the value is valid. + /// + /// # Safety + /// + /// The value must be a valid discriminant for this enum. + #[inline] + #[expect(unsafe_code)] + #vis unsafe const fn from_discriminant_unchecked(value: #backing) -> Self { + match Self::try_from_discriminant(value) { + ::core::option::Option::Some(variant) => variant, + // SAFETY: The caller guarantees that the value is a valid discriminant. + ::core::option::Option::None => unsafe { ::core::mem::unreachable_unchecked() }, + } + } + + /// Returns the discriminant value of this variant. + #[inline] + #vis const fn into_discriminant(self) -> #backing { + self as #backing + } + + /// Returns an array containing all variants in discriminant order. + #[inline] + #[must_use] + #vis const fn all() -> [Self; Self::VARIANT_COUNT] { + [#(#self_variants),*] + } + } + }); + + // 3. Compile-time roundtrip assertion + output.extend(quote! { + const _: () = { + let mut index = 0 as #backing; + + while index < #name::VARIANT_COUNT as #backing { + let variant = #name::from_discriminant(index); + let roundtrip = variant.into_discriminant(); + + assert!(roundtrip == index); + index += 1; + } + }; + }); + + // 4. 
Id trait impl + let max = Literal::usize_unsuffixed(variant_count.saturating_sub(1)); + let count = Literal::usize_unsuffixed(variant_count); + let message = format!( + "ID must be between 0 and {}", + variant_count.saturating_sub(1) ); - TokenStream::new() + output.extend(quote! { + #[automatically_derived] + #[expect(clippy::cast_possible_truncation, clippy::cast_lossless)] + impl #konst #krate::id::Id for #name { + const MIN: Self = Self::from_discriminant(0); + const MAX: Self = Self::from_discriminant(#max); + + fn from_u32(index: u32) -> Self { + assert!(index < #count, #message); + + Self::from_discriminant(index as #backing) + } + + fn from_u64(index: u64) -> Self { + assert!(index < #count, #message); + + Self::from_discriminant(index as #backing) + } + + fn from_usize(index: usize) -> Self { + assert!(index < #count, #message); + + Self::from_discriminant(index as #backing) + } + + #[inline] + fn as_u32(self) -> u32 { + self.into_discriminant() as u32 + } + + #[inline] + fn as_u64(self) -> u64 { + self.into_discriminant() as u64 + } + + #[inline] + fn as_usize(self) -> usize { + self.into_discriminant() as usize + } + + #[inline] + fn prev(self) -> ::core::option::Option { + let discriminant = self.into_discriminant(); + + let prev = discriminant.checked_sub(1)?; + Self::try_from_discriminant(prev) + } + } + }); + + // 5. TryFrom impls + for int in [quote!(u32), quote!(u64), quote!(usize)] { + output.extend(quote! { + #[automatically_derived] + impl ::core::convert::TryFrom<#int> for #name { + type Error = #krate::id::IdError; + + #[inline] + fn try_from(value: #int) -> ::core::result::Result { + if value >= #count { + return Err(#krate::id::IdError::OutOfRange { + value: value as u64, + min: 0, + max: #max, + }); + } + + Ok(Self::from_discriminant(value as #backing)) + } + } + }); + } + + // 6. HasId impl + output.extend(quote! 
{ + #[automatically_derived] + impl #krate::id::HasId for #name { + type Id = Self; + + #[inline] + fn id(&self) -> Self::Id { + *self + } + } + }); + + // 7. Display + match display { + DisplayAttribute::None => {} + DisplayAttribute::Format(format) => { + output.extend(quote! { + impl ::core::fmt::Display for #name { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + fmt.write_fmt(format_args!(#format, self.into_discriminant())) + } + } + }); + } + DisplayAttribute::Auto => { + let lowercase_names = variants.iter().map(|variant| { + let lowercase = variant.to_string().to_lowercase(); + quote_spanned!(variant.span() => Self::#variant => fmt.write_str(#lowercase)) + }); + + output.extend(quote! { + impl ::core::fmt::Display for #name { + fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { + match self { + #(#lowercase_names),* + } + } + } + }); + } + } + + // 8. Step (optional) + if traits.contains(&Trait::Step) { + output.extend(quote! 
{ + impl ::core::iter::Step for #name { + #[inline] + fn steps_between(start: &Self, end: &Self) -> (usize, ::core::option::Option) { + ::steps_between( + &#krate::id::Id::as_usize(*start), + &#krate::id::Id::as_usize(*end), + ) + } + + #[inline] + fn forward_checked(start: Self, count: usize) -> ::core::option::Option { + #krate::id::Id::as_usize(start) + .checked_add(count) + .and_then(|value| Self::try_from(value).ok()) + } + + #[inline] + fn backward_checked(start: Self, count: usize) -> ::core::option::Option { + #krate::id::Id::as_usize(start) + .checked_sub(count) + .and_then(|value| Self::try_from(value).ok()) + } + } + }); + } + + output } diff --git a/libs/@local/hashql/macros/src/id/mod.rs b/libs/@local/hashql/macros/src/id/mod.rs index a122f1650c6..583f57f4b2c 100644 --- a/libs/@local/hashql/macros/src/id/mod.rs +++ b/libs/@local/hashql/macros/src/id/mod.rs @@ -1,4 +1,5 @@ mod attr; +pub(crate) mod common; mod r#enum; mod r#struct; diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index 54106e20381..db2753e36cd 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -1,20 +1,14 @@ use core::cmp; -use proc_macro2::{Ident, Span, TokenStream}; +use proc_macro2::{Ident, TokenStream}; use quote::{ToTokens, format_ident, quote}; use unsynn::{Ge, Gt, ToTokens as _}; use super::grammar::{self, StructBody, StructScalar}; -use crate::id::attr::{Attributes, DisplayAttribute, Trait}; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -enum IntegerScalar { - U8, - U16, - U32, - U64, - U128, -} +use crate::id::{ + attr::{Attributes, DisplayAttribute, Trait}, + common::IntegerScalar, +}; impl From for IntegerScalar { fn from(scalar: StructScalar) -> Self { @@ -28,20 +22,6 @@ impl From for IntegerScalar { } } -impl ToTokens for IntegerScalar { - fn to_tokens(&self, tokens: &mut TokenStream) { - let ident = match self { - Self::U8 => Ident::new("u8", 
Span::call_site()), - Self::U16 => Ident::new("u16", Span::call_site()), - Self::U32 => Ident::new("u32", Span::call_site()), - Self::U64 => Ident::new("u64", Span::call_site()), - Self::U128 => Ident::new("u128", Span::call_site()), - }; - - tokens.extend([ident]); - } -} - enum RangeKind { Inclusive, Exclusive, @@ -154,10 +134,10 @@ pub(crate) fn expand_struct( let scalar = constraint.scalar; let value_ident = format_ident!("value"); - let new_assertion = constraint.assertion(&value, scalar); - let u32_assertion = constraint.assertion(&value, IntegerScalar::U32); - let u64_assertion = constraint.assertion(&value, IntegerScalar::U64); - let usize_assertion = constraint.assertion(&value, IntegerScalar::U64); // u64 to be safe, even on 32-bit systems + let new_assertion = constraint.assertion(&value_ident, scalar); + let u32_assertion = constraint.assertion(&value_ident, IntegerScalar::U32); + let u64_assertion = constraint.assertion(&value_ident, IntegerScalar::U64); + let usize_assertion = constraint.assertion(&value_ident, IntegerScalar::U64); // u64 to be safe, even on 32-bit systems let min = &constraint.min; let max = &constraint.max; @@ -199,47 +179,48 @@ pub(crate) fn expand_struct( #[doc = #unchecked_safety_doc] #[must_use] #[inline] - #vis const unsafe fn new_unchecked(value: #scalar) -> Self { + #vis unsafe const fn new_unchecked(value: #scalar) -> Self { Self { _internal_do_not_use: value } } } #[automatically_derived] + #[expect(clippy::cast_possible_truncation, clippy::cast_lossless)] impl #konst #krate::id::Id for #name { const MIN: Self = Self::new(#min); const MAX: Self = Self::new(#max); - fn from_u32(value: u32) -> Option { + fn from_u32(value: u32) -> Self { #u32_assertion - Self { _internal_do_not_use: (value as #scalar) } + Self { _internal_do_not_use: value as #scalar } } - fn from_u64(value: u64) -> Option { + fn from_u64(value: u64) -> Self { #u64_assertion - Self { _internal_do_not_use: (value as #scalar) } + Self { _internal_do_not_use: 
value as #scalar } } - fn from_usize(value: usize) -> Option { + fn from_usize(value: usize) -> Self { #usize_assertion - Self { _internal_do_not_use: (value as #scalar) } + Self { _internal_do_not_use: value as #scalar } } #[inline] - fn as_u32(&self) -> u32 { - (self._internal_do_not_use as u32) + fn as_u32(self) -> u32 { + self._internal_do_not_use as u32 } #[inline] - fn as_u64(&self) -> u64 { - (self._internal_do_not_use as u64) + fn as_u64(self) -> u64 { + self._internal_do_not_use as u64 } #[inline] - fn as_usize(&self) -> usize { - (self._internal_do_not_use as usize) + fn as_usize(self) -> usize { + self._internal_do_not_use as usize } #[inline] @@ -247,11 +228,12 @@ pub(crate) fn expand_struct( if self._internal_do_not_use == #min { ::core::option::Option::None } else { - ::core::option::Option::Some(Self { _internal_do_not_use: (self._internal_do_not_use - 1) }) + ::core::option::Option::Some(Self { _internal_do_not_use: self._internal_do_not_use - 1 }) } } } + #[automatically_derived] impl #krate::id::HasId for #name { type Id = Self; From c84153a5cc41099a8a723d55b0d4044b842bc4da Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Fri, 27 Feb 2026 23:57:51 +0100 Subject: [PATCH 09/15] feat: switch to new format --- Cargo.lock | 1 + Cargo.toml | 1 + libs/@local/hashql/core/Cargo.toml | 1 + libs/@local/hashql/core/benches/bit_matrix.rs | 10 +- .../src/graph/algorithms/dominators/mod.rs | 7 +- .../core/src/graph/algorithms/tarjan/mod.rs | 9 +- .../core/src/graph/algorithms/tarjan/tests.rs | 5 +- libs/@local/hashql/core/src/graph/linked.rs | 2 +- libs/@local/hashql/core/src/graph/mod.rs | 4 +- libs/@local/hashql/core/src/graph/tests.rs | 12 +- .../hashql/core/src/id/bit_vec/finite.rs | 13 +- .../core/src/id/bit_vec/matrix/tests.rs | 10 +- .../hashql/core/src/id/bit_vec/tests.rs | 13 +- libs/@local/hashql/core/src/id/mod.rs | 258 +----------------- libs/@local/hashql/core/src/id/slice.rs | 7 +- libs/@local/hashql/core/src/id/union_find.rs | 5 +- 
libs/@local/hashql/core/src/intern/map.rs | 15 +- libs/@local/hashql/core/src/lib.rs | 2 + libs/@local/hashql/core/src/module/mod.rs | 8 +- .../hashql/core/src/type/kind/generic/mod.rs | 4 +- .../@local/hashql/core/src/type/kind/infer.rs | 3 +- libs/@local/hashql/core/src/type/mod.rs | 3 +- libs/@local/hashql/macros/src/id/attr.rs | 2 +- libs/@local/hashql/macros/src/id/enum.rs | 6 +- libs/@local/hashql/macros/src/id/mod.rs | 12 +- libs/@local/hashql/macros/src/id/struct.rs | 64 ++--- libs/@local/hashql/macros/src/lib.rs | 98 ++++++- .../@local/hashql/mir/src/body/basic_block.rs | 6 +- libs/@local/hashql/mir/src/body/local.rs | 9 +- libs/@local/hashql/mir/src/body/place.rs | 8 +- libs/@local/hashql/mir/src/builder/place.rs | 7 +- libs/@local/hashql/mir/src/def.rs | 16 +- .../mir/src/pass/analysis/callgraph/mod.rs | 12 +- .../pass/analysis/data_dependency/graph.rs | 16 +- .../pass/analysis/data_dependency/resolve.rs | 2 +- .../src/pass/analysis/size_estimation/mod.rs | 6 +- .../execution/placement/solve/condensation.rs | 8 +- .../execution/statement_placement/common.rs | 2 +- .../statement_placement/embedding/mod.rs | 2 +- .../statement_placement/postgres/mod.rs | 4 +- .../hashql/mir/src/pass/execution/target.rs | 187 +------------ .../administrative_reduction/visitor.rs | 7 +- .../hashql/mir/src/pass/transform/dle/mod.rs | 4 +- .../mir/src/pass/transform/inline/mod.rs | 2 +- libs/@local/hashql/mir/src/pretty/text.rs | 2 +- libs/@local/hashql/mir/src/reify/mod.rs | 2 +- libs/@local/hashql/mir/src/visit/ref.rs | 2 + 47 files changed, 270 insertions(+), 609 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 90fe200f5df..d69d792197f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3830,6 +3830,7 @@ dependencies = [ "hash-codec", "hashbrown 0.16.1", "hashql-diagnostics", + "hashql-macros", "insta", "lexical", "memchr", diff --git a/Cargo.toml b/Cargo.toml index 6c9060ef490..8aaf6b4a212 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -97,6 +97,7 @@ hashql-core.path = 
"libs/@local/hashql/core" hashql-diagnostics.path = "libs/@local/hashql/diagnostics" hashql-eval.path = "libs/@local/hashql/eval" hashql-hir.path = "libs/@local/hashql/hir" +hashql-macros.path = "libs/@local/hashql/macros" hashql-mir.path = "libs/@local/hashql/mir" hashql-syntax-jexpr.path = "libs/@local/hashql/syntax-jexpr" type-system.path = "libs/@blockprotocol/type-system/rust" diff --git a/libs/@local/hashql/core/Cargo.toml b/libs/@local/hashql/core/Cargo.toml index 995af13ac17..c1c1f638fc5 100644 --- a/libs/@local/hashql/core/Cargo.toml +++ b/libs/@local/hashql/core/Cargo.toml @@ -10,6 +10,7 @@ authors.workspace = true # Public workspace dependencies hash-codec = { workspace = true, features = ["numeric"], public = true } hashql-diagnostics = { workspace = true, public = true } +hashql-macros = { workspace = true, public = true } # Public third-party dependencies anstyle = { workspace = true, public = true } diff --git a/libs/@local/hashql/core/benches/bit_matrix.rs b/libs/@local/hashql/core/benches/bit_matrix.rs index 0b22bd1da7f..651ab39ff93 100644 --- a/libs/@local/hashql/core/benches/bit_matrix.rs +++ b/libs/@local/hashql/core/benches/bit_matrix.rs @@ -4,15 +4,13 @@ use core::hint::black_box; use codspeed_criterion_compat::{ BatchSize, BenchmarkId, Criterion, criterion_group, criterion_main, }; -use hashql_core::{ - id::{ - Id as _, - bit_vec::{BitMatrix, SparseBitMatrix}, - }, +use hashql_core::id::{ + Id as _, + bit_vec::{BitMatrix, SparseBitMatrix}, newtype, }; -newtype!(struct BenchId(usize is 0..=usize::MAX)); +newtype!(struct BenchId(u32 is 0..=u32::MAX)); // ============================================================================= // Dense BitMatrix diff --git a/libs/@local/hashql/core/src/graph/algorithms/dominators/mod.rs b/libs/@local/hashql/core/src/graph/algorithms/dominators/mod.rs index c1e5ebf871e..fc78e863dd7 100644 --- a/libs/@local/hashql/core/src/graph/algorithms/dominators/mod.rs +++ 
b/libs/@local/hashql/core/src/graph/algorithms/dominators/mod.rs @@ -41,8 +41,7 @@ pub use self::{ }; use crate::{ graph::{DirectedGraph, Predecessors, Successors}, - id::{Id, IdSlice, IdVec}, - newtype, + id::{Id, IdSlice, IdVec, newtype}, }; mod frontier; @@ -56,7 +55,7 @@ struct PreOrderFrame { } newtype!( - #[steppable] + #[id(derive(Step), crate = crate)] struct PreorderIndex(u32 is 0..=u32::MAX) ); @@ -444,7 +443,7 @@ struct Time { finish: u32, } -newtype!(struct EdgeIndex(u32 is 0..=u32::MAX)); +newtype!(#[id(crate = crate)] struct EdgeIndex(u32 is 0..=u32::MAX)); fn compute_access_time( start_node: N, diff --git a/libs/@local/hashql/core/src/graph/algorithms/tarjan/mod.rs b/libs/@local/hashql/core/src/graph/algorithms/tarjan/mod.rs index 24eef14a5e4..edda7708ba7 100644 --- a/libs/@local/hashql/core/src/graph/algorithms/tarjan/mod.rs +++ b/libs/@local/hashql/core/src/graph/algorithms/tarjan/mod.rs @@ -18,13 +18,12 @@ use crate::{ collections::{FastHashSet, fast_hash_set_in}, graph::{DirectedGraph, EdgeId, Successors}, heap::BumpAllocator, - id::{HasId, Id, IdSlice, IdVec}, - newtype, + id::{HasId, Id, IdSlice, IdVec, newtype}, }; -newtype!(pub struct SccId(u32 is 0..=u32::MAX)); +newtype!(#[id(crate = crate)] pub struct SccId(u32 is 0..=u32::MAX)); -newtype!(struct DiscoveryTime(usize is 0..=usize::MAX)); +newtype!(#[id(crate = crate)] struct DiscoveryTime(u32 is 0..=u32::MAX)); /// Trait for attaching metadata to nodes and strongly connected components during traversal. 
/// @@ -408,7 +407,7 @@ where } fn iter_edges(&self) -> impl ExactSizeIterator> + DoubleEndedIterator { - (0..self.edge_count()).map(EdgeId::new) + (0..self.edge_count()).map(EdgeId::from_usize) } } diff --git a/libs/@local/hashql/core/src/graph/algorithms/tarjan/tests.rs b/libs/@local/hashql/core/src/graph/algorithms/tarjan/tests.rs index e48c68c8839..0c0bb613d04 100644 --- a/libs/@local/hashql/core/src/graph/algorithms/tarjan/tests.rs +++ b/libs/@local/hashql/core/src/graph/algorithms/tarjan/tests.rs @@ -17,11 +17,10 @@ use crate::{ DirectedGraph as _, NodeId, Successors as _, algorithms::tarjan::Tarjan, tests::TestGraph, }, heap::Scratch, - id::Id as _, - newtype, + id::{Id as _, newtype}, }; -newtype!(struct SccId(usize is 0..=usize::MAX)); +newtype!(#[id(crate = crate)] struct SccId(u32 is 0..=u32::MAX)); type Sccs = StronglyConnectedComponents; diff --git a/libs/@local/hashql/core/src/graph/linked.rs b/libs/@local/hashql/core/src/graph/linked.rs index b5469e3100e..9c779ff5786 100644 --- a/libs/@local/hashql/core/src/graph/linked.rs +++ b/libs/@local/hashql/core/src/graph/linked.rs @@ -62,7 +62,7 @@ use crate::id::{HasId, Id, IdSlice, IdVec}; /// /// Uses the maximum [`EdgeId`] value, which can never be a valid edge ID since /// edge insertion would overflow before reaching this value. -const TOMBSTONE: EdgeId = EdgeId(usize::MAX); +const TOMBSTONE: EdgeId = EdgeId::MAX; /// A node in a [`LinkedGraph`] with associated data. 
/// diff --git a/libs/@local/hashql/core/src/graph/mod.rs b/libs/@local/hashql/core/src/graph/mod.rs index 81f76cbc2b3..7bb9672b461 100644 --- a/libs/@local/hashql/core/src/graph/mod.rs +++ b/libs/@local/hashql/core/src/graph/mod.rs @@ -39,8 +39,8 @@ use self::algorithms::{ pub use self::linked::LinkedGraph; use crate::id::{HasId, Id, newtype}; -newtype!(pub struct NodeId(usize is 0..=usize::MAX)); -newtype!(pub struct EdgeId(usize is 0..=usize::MAX)); +newtype!(#[id(crate = crate)] pub struct NodeId(u32 is 0..=u32::MAX)); +newtype!(#[id(crate = crate)] pub struct EdgeId(u32 is 0..=u32::MAX)); /// Direction of edge traversal in a directed graph. /// diff --git a/libs/@local/hashql/core/src/graph/tests.rs b/libs/@local/hashql/core/src/graph/tests.rs index bdeb92a2400..5abad5dc5bc 100644 --- a/libs/@local/hashql/core/src/graph/tests.rs +++ b/libs/@local/hashql/core/src/graph/tests.rs @@ -19,15 +19,15 @@ impl TestGraph { }; for &(source, target) in edges { - let source = NodeId::new(source); - let target = NodeId::new(target); + let source = NodeId::from_usize(source); + let target = NodeId::from_usize(target); graph.node_count = graph .node_count .max(source.as_usize() + 1) .max(target.as_usize() + 1); - let edge_id = EdgeId::new(graph.edge_count); + let edge_id = EdgeId::from_usize(graph.edge_count); graph .successors @@ -45,7 +45,7 @@ impl TestGraph { } for node in 0..graph.node_count { - let node = NodeId::new(node); + let node = NodeId::from_usize(node); graph.successors.entry(node).or_default(); graph.predecessors.entry(node).or_default(); @@ -76,11 +76,11 @@ impl DirectedGraph for TestGraph { } fn iter_nodes(&self) -> impl ExactSizeIterator> + DoubleEndedIterator { - (0..self.node_count).map(NodeId::new) + (0..self.node_count).map(NodeId::from_usize) } fn iter_edges(&self) -> impl ExactSizeIterator> + DoubleEndedIterator { - (0..self.edge_count).map(EdgeId::new) + (0..self.edge_count).map(EdgeId::from_usize) } } diff --git 
a/libs/@local/hashql/core/src/id/bit_vec/finite.rs b/libs/@local/hashql/core/src/id/bit_vec/finite.rs index b5d696f5cd6..2490383981b 100644 --- a/libs/@local/hashql/core/src/id/bit_vec/finite.rs +++ b/libs/@local/hashql/core/src/id/bit_vec/finite.rs @@ -379,15 +379,16 @@ impl ExactSizeIterator for FiniteBitIter { #[cfg(test)] mod tests { #![expect(clippy::min_ident_chars)] - use crate::{ - id::{ - Id as _, - bit_vec::{BitRelations as _, FiniteBitSet}, - }, + use crate::id::{ + Id as _, + bit_vec::{BitRelations as _, FiniteBitSet}, newtype, }; - newtype!(struct TestId(u32 is 0..=127)); + newtype!( + #[id(crate = crate)] + struct TestId(u32 is 0..=127) + ); #[test] fn new_empty_creates_empty_set() { diff --git a/libs/@local/hashql/core/src/id/bit_vec/matrix/tests.rs b/libs/@local/hashql/core/src/id/bit_vec/matrix/tests.rs index 8ed7b38f871..3780c5bdab2 100644 --- a/libs/@local/hashql/core/src/id/bit_vec/matrix/tests.rs +++ b/libs/@local/hashql/core/src/id/bit_vec/matrix/tests.rs @@ -1,10 +1,10 @@ use super::{BitMatrix, RowRef, SparseBitMatrix}; -use crate::{ - id::{Id as _, bit_vec::DenseBitSet}, - newtype, -}; +use crate::id::{Id as _, bit_vec::DenseBitSet, newtype}; -newtype!(struct TestId(usize is 0..=usize::MAX)); +newtype!( + #[id(crate = crate)] + struct TestId(u32 is 0..=u32::MAX) +); fn id(index: usize) -> TestId { TestId::from_usize(index) diff --git a/libs/@local/hashql/core/src/id/bit_vec/tests.rs b/libs/@local/hashql/core/src/id/bit_vec/tests.rs index 35485d167d0..9175da08830 100644 --- a/libs/@local/hashql/core/src/id/bit_vec/tests.rs +++ b/libs/@local/hashql/core/src/id/bit_vec/tests.rs @@ -24,15 +24,16 @@ use alloc::rc::Rc; use core::{marker::PhantomData, ops::RangeBounds}; use super::GrowableBitSet; -use crate::{ - id::{ - Id as _, - bit_vec::{BitRelations as _, Chunk, ChunkedBitSet, DenseBitSet, WORD_BITS}, - }, +use crate::id::{ + Id as _, + bit_vec::{BitRelations as _, Chunk, ChunkedBitSet, DenseBitSet, WORD_BITS}, newtype, }; -newtype!(struct 
TestId(usize is 0..=usize::MAX)); +newtype!( + #[id(crate = crate)] + struct TestId(u32 is 0..=u32::MAX) +); #[test] fn new_filled() { diff --git a/libs/@local/hashql/core/src/id/mod.rs b/libs/@local/hashql/core/src/id/mod.rs index 02efc9a8871..f00a6487116 100644 --- a/libs/@local/hashql/core/src/id/mod.rs +++ b/libs/@local/hashql/core/src/id/mod.rs @@ -209,262 +209,6 @@ where } } -/// Creates a new ID type with a specified valid range. -/// -/// This uses the experimental pattern type syntax to define the minimum and maximum values. -/// -/// # Syntax -/// ``` -/// hashql_core::id::newtype!(pub struct NodeId(u32 is 0..=0xFFFF_FF00)); -/// ``` -/// -/// This creates a newtype wrapper around [`u32`] with the Id trait fully implemented. -/// -/// # Optional Attributes -/// -/// - `#[steppable]` - Implements `core::iter::Step` for the ID type, enabling range iteration -/// -/// ``` -/// # #![feature(step_trait)] -/// hashql_core::id::newtype!( -/// #[steppable] -/// pub struct NodeId(u32 is 0..=100) -/// ); -/// ``` -#[macro_export] -macro_rules! 
newtype { - (@internal in_bounds; $value:ident, $type:ty, $min:literal, $max:expr) => { - $value >= ($min as $type) && $value <= ($max as $type) - }; - - (@internal error; $value:ident, $min:literal, $max:expr) => { - concat!("ID value must be between ", stringify!($min), " and ", stringify!($max)) - }; - - ($(#[$($attr:tt)*])* $vis:vis struct $name:ident($type:ident is $min:literal..=$max:expr)) => { - $crate::id::newtype!(@parse_attrs [] [] [] ; $(#[$($attr)*])* ; $vis struct $name($type is $min..=$max)); - }; - - (@parse_attrs [$($other:tt)*] [$($step:tt)*] [$($display:tt)*]; #[steppable] $(#[$($rest:tt)*])* ; $($tail:tt)*) => { - $crate::id::newtype!(@parse_attrs [$($other)*] [$($step)* steppable] [$($display)*] ; $(#[$($rest)*])* ; $($tail)*); - }; - - (@parse_attrs [$($other:tt)*] [$($step:tt)*] [$($display:tt)*]; #[display = $display_expr:expr] $(#[$($rest:tt)*])* ; $($tail:tt)*) => { - $crate::id::newtype!(@parse_attrs [$($other)*] [$($step)*] [$($display)* display = $display_expr] ; $(#[$($rest)*])* ; $($tail)*); - }; - - (@parse_attrs [$($other:tt)*] [$($step:tt)*] [$($display:tt)*]; #[no_display] $(#[$($rest:tt)*])* ; $($tail:tt)*) => { - $crate::id::newtype!(@parse_attrs [$($other)*] [$($step)*] [$($display)* no_display] ; $(#[$($rest)*])* ; $($tail)*); - }; - - (@parse_attrs [$($other:tt)*] [$($step:tt)*] [$($display:tt)*]; #[$attr:meta] $(#[$($rest:tt)*])* ; $($tail:tt)*) => { - $crate::id::newtype!(@parse_attrs [$($other)* #[$attr]] [$($step)*] [$($display)*] ; $(#[$($rest)*])* ; $($tail)*); - }; - - (@parse_attrs [$($other:tt)*] [$($step:tt)*] [$($display:tt)*]; ; $($tail:tt)*) => { - $crate::id::newtype!(@impl [$($other)*] [$($step)*] [$($display)*] $($tail)*); - }; - - // Implementation - (@impl [$(#[$attr:meta])*] [$($step:tt)*] [$($display:tt)*] $vis:vis struct $name:ident($type:ident is $min:literal..=$max:expr)) => { - $(#[$attr])* - #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - $vis struct $name($type); - - 
#[expect(clippy::allow_attributes)] - #[allow(dead_code, clippy::checked_conversions)] - impl $name { - /// Creates a new ID with the given value. - /// - /// # Panics - /// - /// When value is outside the valid range of $min..$max. - #[must_use] - $vis const fn new(value: $type) -> Self { - assert!( - $crate::id::newtype!(@internal in_bounds; value, $type, $min, $max), - $crate::id::newtype!(@internal error; value, $min, $max) - ); - - Self(value) - } - } - - #[automatically_derived] - #[expect(clippy::allow_attributes, reason = "automatically generated")] - #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] - impl $crate::id::Id for $name { - const MIN: Self = Self($min); - const MAX: Self = Self($max); - - // fast path that does not go through the default implementation - fn from_u32(value: u32) -> Self { - assert!( - $crate::id::newtype!(@internal in_bounds; value, u32, $min, $max), - $crate::id::newtype!(@internal error; value, $min, $max) - ); - - Self(value as $type) - } - - fn from_u64(value: u64) -> Self { - assert!( - $crate::id::newtype!(@internal in_bounds; value, u64, $min, $max), - $crate::id::newtype!(@internal error; value, $min, $max) - ); - - Self(value as $type) - } - - fn from_usize(value: usize) -> Self { - assert!( - $crate::id::newtype!(@internal in_bounds; value, usize, $min, $max), - $crate::id::newtype!(@internal error; value, $min, $max) - ); - - Self(value as $type) - } - - #[inline] - fn as_u32(self) -> u32 { - self.0 as u32 - } - - #[inline] - fn as_u64(self) -> u64 { - self.0 as u64 - } - - #[inline] - fn as_usize(self) -> usize { - self.0 as usize - } - - #[inline] - fn prev(self) -> ::core::option::Option { - if self.0 == $min { - None - } else { - Some(Self(self.0 - 1)) - } - } - } - - #[expect(clippy::allow_attributes, reason = "automatically generated")] - #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] - impl ::core::convert::TryFrom for 
$name { - type Error = $crate::id::IdError; - - fn try_from(value: u32) -> ::core::result::Result { - if $crate::id::newtype!(@internal in_bounds; value, u32, $min, $max) { - Ok(Self(value as $type)) - } else { - Err($crate::id::IdError::OutOfRange { - value: u64::from(value), - min: $min as u64, - max: $max as u64, - }) - } - } - } - - #[expect(clippy::allow_attributes, reason = "automatically generated")] - #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] - impl ::core::convert::TryFrom for $name { - type Error = $crate::id::IdError; - - fn try_from(value: u64) -> ::core::result::Result { - if $crate::id::newtype!(@internal in_bounds; value, u64, $min, $max) { - Ok(Self(value as $type)) - } else { - Err($crate::id::IdError::OutOfRange { - value, - min: $min as u64, - max: $max as u64, - }) - } - } - } - - #[expect(clippy::allow_attributes, reason = "automatically generated")] - #[allow(clippy::cast_possible_truncation, clippy::cast_lossless, clippy::checked_conversions)] - impl ::core::convert::TryFrom for $name { - type Error = $crate::id::IdError; - - fn try_from(value: usize) -> ::core::result::Result { - if $crate::id::newtype!(@internal in_bounds; value, usize, $min, $max) { - Ok(Self(value as $type)) - } else { - Err($crate::id::IdError::OutOfRange { - value: value as u64, - min: $min as u64, - max: $max as u64, - }) - } - } - } - - impl $crate::id::HasId for $name { - type Id = $name; - - fn id(&self) -> Self::Id { - *self - } - } - - $crate::id::newtype!(@maybe_display $name ; $($display)*); - $crate::id::newtype!(@maybe_step $name ; $($step)*); - }; - - // Generate Step implementation if steppable was specified - (@maybe_step $name:ident ; steppable) => { - impl ::core::iter::Step for $name { - #[inline] - fn steps_between(start: &Self, end: &Self) -> (usize, Option) { - ::steps_between( - &$crate::id::Id::as_usize(*start), - &$crate::id::Id::as_usize(*end), - ) - } - - #[inline] - fn forward_checked(start: 
Self, count: usize) -> Option { - $crate::id::Id::as_usize(start) - .checked_add(count) - .map($crate::id::Id::from_usize) - } - - #[inline] - fn backward_checked(start: Self, count: usize) -> Option { - $crate::id::Id::as_usize(start) - .checked_sub(count) - .map($crate::id::Id::from_usize) - } - } - }; - - // No Step implementation if steppable was not specified - (@maybe_step $name:ident ; ) => {}; - - (@maybe_display $name:ident ; no_display) => {}; - - (@maybe_display $name:ident ; display = $display:expr) => { - impl ::core::fmt::Display for $name { - fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { - fmt.write_fmt(format_args!($display, self.0)) - } - } - }; - - (@maybe_display $name:ident ; ) => { - impl ::core::fmt::Display for $name { - fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { - ::core::fmt::Display::fmt(&self.0, fmt) - } - } - } -} - /// Thread-safe ID generator that produces unique IDs. /// /// Uses an atomic counter to generate sequential IDs, making it safe to use @@ -688,7 +432,7 @@ macro_rules! newtype_collections { }; } -pub use newtype; +pub use hashql_macros::define_id as newtype; pub use newtype_collections; pub use newtype_counter; pub use newtype_producer; diff --git a/libs/@local/hashql/core/src/id/slice.rs b/libs/@local/hashql/core/src/id/slice.rs index 2cbefb39a68..6003f9b0535 100644 --- a/libs/@local/hashql/core/src/id/slice.rs +++ b/libs/@local/hashql/core/src/id/slice.rs @@ -466,9 +466,12 @@ mod tests { use core::mem::MaybeUninit; use super::IdSlice; - use crate::{id::Id as _, newtype}; + use crate::id::Id as _; - newtype!(struct TestId(u32 is 0..=0xFFFF_FF00)); + hashql_macros::define_id! 
{ + #[id(crate = crate)] + struct TestId(u32 is 0..=0xFFFF_FF00) + } #[test] fn from_raw_indexing() { diff --git a/libs/@local/hashql/core/src/id/union_find.rs b/libs/@local/hashql/core/src/id/union_find.rs index d3a451d0c6a..5fd5d592a51 100644 --- a/libs/@local/hashql/core/src/id/union_find.rs +++ b/libs/@local/hashql/core/src/id/union_find.rs @@ -382,7 +382,10 @@ mod tests { use super::*; // Define a test ID type - crate::id::newtype!(struct TestId(u32 is 0..=1000)); + crate::id::newtype!( + #[id(crate = crate)] + struct TestId(u32 is 0..=1000) + ); #[test] fn path_compression() { diff --git a/libs/@local/hashql/core/src/intern/map.rs b/libs/@local/hashql/core/src/intern/map.rs index 463a6a8212c..41e2a0e8e39 100644 --- a/libs/@local/hashql/core/src/intern/map.rs +++ b/libs/@local/hashql/core/src/intern/map.rs @@ -771,12 +771,14 @@ mod tests { use crate::{ heap::Heap, - id::HasId, + id::{HasId, newtype}, intern::{Decompose, InternMap, Interned}, - newtype, }; - newtype!(struct TaggedId(u32 is 0..=0xFFFF_FF00)); + newtype!( + #[id(crate = crate)] + struct TaggedId(u32 is 0..=0xFFFF_FF00) + ); #[derive(Debug, PartialEq, Eq, Hash)] struct TaggedValue { @@ -803,7 +805,10 @@ mod tests { } } - newtype!(struct ListId(u32 is 0..=0xFFFF_FF00)); + newtype!( + #[id(crate = crate)] + struct ListId(u32 is 0..=0xFFFF_FF00) + ); // A recursive test type that can reference other TestNode instances by ID #[derive(Debug, PartialEq, Eq, Hash)] @@ -877,7 +882,7 @@ mod tests { assert_eq!(retrieved, value); // Look up a non-existent ID - let non_existent = map.get(TaggedId(999)); + let non_existent = map.get(TaggedId::new(999)); assert!(non_existent.is_none()); // Test the index method diff --git a/libs/@local/hashql/core/src/lib.rs b/libs/@local/hashql/core/src/lib.rs index 6527eac72a5..fff9663179e 100644 --- a/libs/@local/hashql/core/src/lib.rs +++ b/libs/@local/hashql/core/src/lib.rs @@ -55,3 +55,5 @@ pub mod symbol; pub mod sync; pub mod r#type; pub mod value; + +pub use 
hashql_macros::id; diff --git a/libs/@local/hashql/core/src/module/mod.rs b/libs/@local/hashql/core/src/module/mod.rs index 7517079d1d1..f97b1617874 100644 --- a/libs/@local/hashql/core/src/module/mod.rs +++ b/libs/@local/hashql/core/src/module/mod.rs @@ -25,14 +25,16 @@ pub use self::{resolver::Reference, universe::Universe}; use crate::{ collections::{FastHashMap, FastHashSet}, heap::Heap, - id::{HasId, Id as _}, + id::{HasId, Id as _, newtype}, intern::{Decompose, InternMap, InternSet, Interned, Provisioned}, - newtype, symbol::Symbol, r#type::environment::Environment, }; -newtype!(pub struct ModuleId(u32 is 0..=0xFFFF_FF00)); +newtype! { + #[id(crate = crate)] + pub struct ModuleId(u32 is 0..=0xFFFF_FF00) +} impl ModuleId { pub const ROOT: Self = Self::MAX; diff --git a/libs/@local/hashql/core/src/type/kind/generic/mod.rs b/libs/@local/hashql/core/src/type/kind/generic/mod.rs index bfa57660eba..d4a452ddbd3 100644 --- a/libs/@local/hashql/core/src/type/kind/generic/mod.rs +++ b/libs/@local/hashql/core/src/type/kind/generic/mod.rs @@ -14,8 +14,9 @@ pub use self::{ use super::TypeKind; use crate::{ collections::{SmallVec, TinyVec}, + id::newtype, intern::Interned, - newtype, newtype_collections, newtype_producer, + newtype_collections, newtype_producer, pretty::display::DisplayBuilder, span::SpanId, symbol::{Ident, Symbol}, @@ -32,6 +33,7 @@ use crate::{ }; newtype!( + #[id(crate = crate)] pub struct GenericArgumentId(u32 is 0..=0xFFFF_FF00) ); diff --git a/libs/@local/hashql/core/src/type/kind/infer.rs b/libs/@local/hashql/core/src/type/kind/infer.rs index 4f41bfe4c1d..c42834a3bd1 100644 --- a/libs/@local/hashql/core/src/type/kind/infer.rs +++ b/libs/@local/hashql/core/src/type/kind/infer.rs @@ -1,6 +1,7 @@ -use crate::{newtype, newtype_producer}; +use crate::{id::newtype, newtype_producer}; newtype!( + #[id(crate = crate)] pub struct HoleId(u32 is 0..=0xFFFF_FF00) ); diff --git a/libs/@local/hashql/core/src/type/mod.rs b/libs/@local/hashql/core/src/type/mod.rs 
index b408080e103..5a629e933d4 100644 --- a/libs/@local/hashql/core/src/type/mod.rs +++ b/libs/@local/hashql/core/src/type/mod.rs @@ -36,6 +36,7 @@ id::newtype!( /// The value space is restricted to `0..=0xFFFF_FF00`, reserving the last 256 for niches. /// As real pattern types are an experimental feature in Rust, these can currently only be /// used by directly modifying and accessing the `TypeId`'s internal value. + #[id(crate = crate)] pub struct TypeId(u32 is 0..=0xFFFF_FF00) ); @@ -47,7 +48,7 @@ impl TypeId { /// The uniqueness constraint is not enforced by the type system, but rather just a statistical /// improbability, considering that 4.294.967.040 types would need to be generated, for a /// collision to occur. - pub const PLACEHOLDER: Self = Self(0xFFFF_FF00); + pub const PLACEHOLDER: Self = Self::new(0xFFFF_FF00); } id::newtype_collections!(pub type TypeId* from TypeId); diff --git a/libs/@local/hashql/macros/src/id/attr.rs b/libs/@local/hashql/macros/src/id/attr.rs index 6285718fa01..2ef9b682ca2 100644 --- a/libs/@local/hashql/macros/src/id/attr.rs +++ b/libs/@local/hashql/macros/src/id/attr.rs @@ -59,7 +59,7 @@ impl Attributes { attributes: Vec>, ) -> Self { let mut this = Self { - krate: quote!(crate), + krate: quote!(::hashql_core), r#const: TokenStream::new(), display: DisplayAttribute::Auto, traits: BTreeSet::new(), diff --git a/libs/@local/hashql/macros/src/id/enum.rs b/libs/@local/hashql/macros/src/id/enum.rs index a4cd33123e4..d199159b4bd 100644 --- a/libs/@local/hashql/macros/src/id/enum.rs +++ b/libs/@local/hashql/macros/src/id/enum.rs @@ -39,7 +39,7 @@ pub(super) fn expand_enum( let backing = IntegerScalar::from_variant_count(variant_count); let discriminant_arms = variants.iter().enumerate().map(|(index, variant)| { - let literal = Literal::usize_suffixed(index); + let literal = Literal::usize_unsuffixed(index); quote_spanned!(variant.span() => #literal => ::core::option::Option::Some(Self::#variant)) }); @@ -94,11 +94,11 @@ pub(super) fn 
expand_enum( /// The value must be a valid discriminant for this enum. #[inline] #[expect(unsafe_code)] - #vis unsafe const fn from_discriminant_unchecked(value: #backing) -> Self { + #vis const unsafe fn from_discriminant_unchecked(value: #backing) -> Self { match Self::try_from_discriminant(value) { ::core::option::Option::Some(variant) => variant, // SAFETY: The caller guarantees that the value is a valid discriminant. - ::core::option::Option::None => unsafe { ::core::mem::unreachable_unchecked() }, + ::core::option::Option::None => unsafe { ::core::hint::unreachable_unchecked() }, } } diff --git a/libs/@local/hashql/macros/src/id/mod.rs b/libs/@local/hashql/macros/src/id/mod.rs index 583f57f4b2c..09d7dbaab5d 100644 --- a/libs/@local/hashql/macros/src/id/mod.rs +++ b/libs/@local/hashql/macros/src/id/mod.rs @@ -7,7 +7,8 @@ use core::fmt::Display; use proc_macro::{Diagnostic, Level, Span}; use proc_macro2::TokenStream; -use unsynn::{Parse as _, ToTokenIter as _, ToTokens as _, quote}; +use quote::quote; +use unsynn::{Parse as _, ToTokenIter as _}; use self::{r#enum::expand_enum, r#struct::expand_struct}; @@ -67,8 +68,8 @@ mod grammar { } pub(super) enum RangeOp { - Exclusive(DotDot), - Inclusive(DotDotEq) + Inclusive(DotDotEq), + Exclusive(DotDot) } pub(super) enum StructScalar { @@ -90,7 +91,7 @@ mod grammar { pub(super) struct ParsedStruct { pub attributes: Vec>, - pub visibility: Visibility, + pub visibility: Option, pub _struct: KStruct, @@ -101,7 +102,7 @@ mod grammar { pub(super) struct ParsedEnum { pub attributes: Vec>, - pub visibility: Visibility, + pub visibility: Option, pub _enum: KEnum, @@ -129,7 +130,6 @@ pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { let (attributes, parsed) = match parse(attr, item) { Ok(parsed) => parsed, - #[expect(clippy::collection_is_never_read, reason = "false positive")] Err(error) => { if let Some(token) = error.failed_at() { emit_error(token.span().unwrap(), error); diff --git 
a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index db2753e36cd..41e99f7c995 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -1,8 +1,8 @@ use core::cmp; use proc_macro2::{Ident, TokenStream}; -use quote::{ToTokens, format_ident, quote}; -use unsynn::{Ge, Gt, ToTokens as _}; +use quote::{format_ident, quote}; +use unsynn::ToTokens as _; use super::grammar::{self, StructBody, StructScalar}; use crate::id::{ @@ -36,15 +36,6 @@ impl From for RangeKind { } } -impl ToTokens for RangeKind { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - Self::Inclusive => Ge::new().to_tokens(tokens), - Self::Exclusive => Gt::new().to_tokens(tokens), - } - } -} - struct Constraint { scalar: IntegerScalar, @@ -56,27 +47,28 @@ struct Constraint { impl Constraint { fn message(&self) -> String { - format!( - "id value must be between {}{}{}", - self.min, - self.kind.to_token_stream(), - self.max - ) + let op = match self.kind { + RangeKind::Inclusive => "<=", + RangeKind::Exclusive => "<", + }; + + format!("id value must be between {}{op}{}", self.min, self.max) } fn comparison(&self, ident: &Ident, ident_scalar: IntegerScalar) -> TokenStream { - let Self { - scalar, - min, - max, - kind, - } = self; - - let width = cmp::max(*scalar, ident_scalar); - - quote! { - (#ident as #width) >= (#min as #width) && - (#ident as #width) #kind (#max as #width) + let width = cmp::max(self.scalar, ident_scalar); + let min = &self.min; + let max = &self.max; + + match self.kind { + RangeKind::Inclusive => quote! { + (#ident as #width) >= (#min as #width) && + (#ident as #width) <= (#max as #width) + }, + RangeKind::Exclusive => quote! { + (#ident as #width) >= (#min as #width) && + (#ident as #width) < (#max as #width) + }, } } @@ -85,7 +77,7 @@ impl Constraint { let message = self.message(); quote! 
{ - assert!(#comparison, #message) + assert!((#comparison), #message); } } } @@ -115,7 +107,7 @@ pub(crate) fn expand_struct( grammar::ParsedStruct { attributes, visibility, - _struct: _, + _struct: r#struct, name, body, }: grammar::ParsedStruct, @@ -130,6 +122,8 @@ pub(crate) fn expand_struct( } = Attributes::parse(additional_attributes, attributes); let vis = visibility.into_token_stream(); + let int = body.content.r#type.to_token_stream(); + let constraint = Constraint::from(body.content); let scalar = constraint.scalar; @@ -150,12 +144,14 @@ pub(crate) fn expand_struct( let unchecked_safety_doc = format!("The caller must ensure that `value` is in `[{min}, {range_end}`."); + let kw = r#struct.into_token_stream(); + output.extend(quote! { #extra #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - #vis struct #name { + #vis #kw #name { #[doc(hidden)] - _internal_do_not_use: #scalar + _internal_do_not_use: #int } impl #name { @@ -179,7 +175,7 @@ pub(crate) fn expand_struct( #[doc = #unchecked_safety_doc] #[must_use] #[inline] - #vis unsafe const fn new_unchecked(value: #scalar) -> Self { + #vis const unsafe fn new_unchecked(value: #scalar) -> Self { Self { _internal_do_not_use: value } } } diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs index af908afc773..89e4936fcf5 100644 --- a/libs/@local/hashql/macros/src/lib.rs +++ b/libs/@local/hashql/macros/src/lib.rs @@ -8,27 +8,105 @@ mod id; use proc_macro::TokenStream; -/// Defines a type as an [`Id`]. +/// Defines an enum as an [`Id`] type. /// -/// Supports two shapes: +/// This attribute macro works on enums with unit variants, generating sequential +/// discriminants, conversion methods, and trait implementations. 
/// -/// **Struct** (newtype wrapper around an integer with a valid range): -/// ```ignore -/// #[hashql_core::id] -/// #[id(steppable)] -/// pub struct NodeId(u32 is 0..=0xFFFF_FF00); -/// ``` +/// For struct-based Id types, use [`define_id!`] instead, since attribute macros +/// require syntactically valid Rust on the annotated item. +/// +/// # Example /// -/// **Enum** (unit variants mapped to sequential discriminants): /// ```ignore -/// #[hashql_core::id] +/// #[hashql_macros::id] /// pub enum TargetId { /// Interpreter, /// Postgres, /// Embedding, /// } /// ``` +/// +/// # Attributes +/// +/// Attributes can be passed either as arguments to `#[id(...)]` or as a +/// separate `#[id(...)]` attribute on the item: +/// +/// - `crate = path` — path to the `hashql_core` crate (default: `::hashql_core`) +/// - `const` — add `const` to trait impl blocks +/// - `derive(Step)` — implement [`core::iter::Step`] +/// - `display = "format"` — implement [`Display`] with a format string +/// - `display = "auto"` — implement [`Display`] using lowercased variant names +/// - `display = !` — suppress the [`Display`] implementation #[proc_macro_attribute] pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { id::expand(attr.into(), item.into()).into() } + +/// Defines a type as an [`Id`]. +/// +/// This is a function-like macro that supports both struct and enum shapes. +/// Struct-based Id types must use this macro because their syntax (`u32 is 0..=MAX`) +/// is not valid Rust, which precludes use of the `#[id]` attribute macro. +/// +/// # Struct +/// +/// Creates a newtype wrapper around an integer with a valid range: +/// +/// ```ignore +/// define_id! { +/// /// A unique node identifier. +/// #[id(derive(Step))] +/// pub struct NodeId(u32 is 0..=0xFFFF_FF00) +/// } +/// ``` +/// +/// Supported backing types: `u8`, `u16`, `u32`, `u64`, `u128`. +/// +/// The range bound determines valid values. 
Inclusive (`..=`) and exclusive (`..`) +/// ranges are both supported. +/// +/// # Enum +/// +/// Creates an enum with sequential discriminants: +/// +/// ```ignore +/// define_id! { +/// pub enum TargetId { +/// Interpreter, +/// Postgres, +/// Embedding, +/// } +/// } +/// ``` +/// +/// The backing integer type is inferred from the number of variants. +/// +/// # Attributes +/// +/// Placed inside an `#[id(...)]` annotation on the item: +/// +/// - `crate = path` — path to the `hashql_core` crate (default: `::hashql_core`) +/// - `const` — add `const` to trait impl blocks +/// - `derive(Step)` — implement [`core::iter::Step`] +/// - `display = "format"` — implement [`Display`] with a format string +/// - `display = "auto"` — implement [`Display`] using the inner value (struct) or lowercased +/// variant names (enum) +/// - `display = !` — suppress the [`Display`] implementation +/// +/// # Generated items +/// +/// For both shapes, the macro generates: +/// - [`Id`] trait implementation +/// - [`HasId`] trait implementation +/// - [`TryFrom`], [`TryFrom`], [`TryFrom`] implementations +/// - [`Debug`] and (by default) [`Display`] implementations +/// +/// Struct-specific: `new`, `new_unchecked` constructors. +/// +/// Enum-specific: `VARIANT_COUNT`, `all`, `try_from_discriminant`, +/// `from_discriminant`, `from_discriminant_unchecked`, `into_discriminant`. +#[proc_macro] +pub fn define_id(item: TokenStream) -> TokenStream { + id::expand(TokenStream::new().into(), item.into()).into() +} diff --git a/libs/@local/hashql/mir/src/body/basic_block.rs b/libs/@local/hashql/mir/src/body/basic_block.rs index cfac2d7fbb8..d074cc8f890 100644 --- a/libs/@local/hashql/mir/src/body/basic_block.rs +++ b/libs/@local/hashql/mir/src/body/basic_block.rs @@ -9,7 +9,6 @@ use hashql_core::{heap, id, intern::Interned}; use super::{local::Local, statement::Statement, terminator::Terminator}; id::newtype!( - #[display = "bb{}"] /// A unique identifier for a basic block in the HashQL MIR. 
/// /// Basic blocks are identified by unique IDs that allow efficient referencing @@ -20,12 +19,13 @@ id::newtype!( /// /// The value space is restricted to `0..=0xFFFF_FF00`, reserving the last 256 /// values for niche optimizations in `Option` and similar types. + #[id(display = "bb{}")] pub struct BasicBlockId(u32 is 0..=0xFFFF_FF00) ); impl BasicBlockId { - pub const PLACEHOLDER: Self = Self(0xFFFF_FF00); - pub const START: Self = Self(0); + pub const PLACEHOLDER: Self = Self::new(0xFFFF_FF00); + pub const START: Self = Self::new(0); } id::newtype_collections!(pub type BasicBlock* from BasicBlockId); diff --git a/libs/@local/hashql/mir/src/body/local.rs b/libs/@local/hashql/mir/src/body/local.rs index e648531fe4e..eb1b39b4412 100644 --- a/libs/@local/hashql/mir/src/body/local.rs +++ b/libs/@local/hashql/mir/src/body/local.rs @@ -7,8 +7,6 @@ use hashql_core::{id, span::SpanId, symbol::Symbol, r#type::TypeId}; id::newtype!( - #[steppable] - #[display = "%{}"] /// A unique identifier for a local variable in the HashQL MIR. /// /// Local variables represent storage locations within a function's execution context. @@ -28,12 +26,13 @@ id::newtype!( /// Each [`Local`] is valid within the scope of a single function body. The MIR /// uses explicit storage management through [`StorageLive`] and [`StorageDead`] /// statements to track when local variables are active. 
- pub struct Local(usize is 0..=usize::MAX) + #[id(derive(Step), display = "%{}")] + pub struct Local(u32 is 0..=u32::MAX) ); impl Local { - pub const ENV: Self = Self(0); - pub const VERTEX: Self = Self(1); + pub const ENV: Self = Self::new(0); + pub const VERTEX: Self = Self::new(1); } id::newtype_collections!(pub type Local* from Local); diff --git a/libs/@local/hashql/mir/src/body/place.rs b/libs/@local/hashql/mir/src/body/place.rs index a84a5edd362..32062a9098a 100644 --- a/libs/@local/hashql/mir/src/body/place.rs +++ b/libs/@local/hashql/mir/src/body/place.rs @@ -33,13 +33,13 @@ id::newtype!( /// - Tuple elements (e.g., `tuple.0`, `tuple.1`) /// - Closed struct fields with known layout /// - Any structured type where field positions are stable and complete - pub struct FieldIndex(usize is 0..=usize::MAX) + pub struct FieldIndex(u32 is 0..=u32::MAX) ); impl FieldIndex { - pub const ENV: Self = Self(1); - pub const FN_PTR: Self = Self(0); - pub const OPAQUE_VALUE: Self = Self(0); + pub const ENV: Self = Self::new(1); + pub const FN_PTR: Self = Self::new(0); + pub const OPAQUE_VALUE: Self = Self::new(0); } /// Context for reading from a [`Place`]. 
diff --git a/libs/@local/hashql/mir/src/builder/place.rs b/libs/@local/hashql/mir/src/builder/place.rs index b31687ab11b..8bdb077e5f2 100644 --- a/libs/@local/hashql/mir/src/builder/place.rs +++ b/libs/@local/hashql/mir/src/builder/place.rs @@ -1,6 +1,9 @@ use core::ops::Deref; -use hashql_core::r#type::{TypeId, builder::IntoSymbol}; +use hashql_core::{ + id::Id as _, + r#type::{TypeId, builder::IntoSymbol}, +}; use super::base::BaseBuilder; use crate::body::{ @@ -63,7 +66,7 @@ impl<'heap> PlaceBuilder<'_, 'heap, HasLocal> { pub fn field(mut self, index: usize, ty: TypeId) -> Self { self.projections.push(Projection { r#type: ty, - kind: ProjectionKind::Field(FieldIndex::new(index)), + kind: ProjectionKind::Field(FieldIndex::from_usize(index)), }); self diff --git a/libs/@local/hashql/mir/src/def.rs b/libs/@local/hashql/mir/src/def.rs index 08036e0cf80..401c6d3c96a 100644 --- a/libs/@local/hashql/mir/src/def.rs +++ b/libs/@local/hashql/mir/src/def.rs @@ -30,47 +30,47 @@ impl DefId { /// This operation inserts a key-value pair into a dictionary, /// returning a new dictionary with the added pair. The original /// dictionary remains unchanged. - pub const DICT_INSERT: Self = Self(0xFFFF_FE00); + pub const DICT_INSERT: Self = Self::new(0xFFFF_FE00); /// Built-in dictionary insert operation (mutable). /// /// This operation inserts a key-value pair into a dictionary in-place, /// modifying the original dictionary. Used for efficient dictionary /// construction and updates. - pub const DICT_INSERT_MUT: Self = Self(0xFFFF_FE01); + pub const DICT_INSERT_MUT: Self = Self::new(0xFFFF_FE01); /// Built-in dictionary remove operation (immutable). /// /// This operation removes a key-value pair from a dictionary, /// returning a new dictionary without the specified key. The /// original dictionary remains unchanged. - pub const DICT_REMOVE: Self = Self(0xFFFF_FE02); + pub const DICT_REMOVE: Self = Self::new(0xFFFF_FE02); /// Built-in dictionary remove operation (mutable). 
/// /// This operation removes a key-value pair from a dictionary in-place, /// modifying the original dictionary and returning the removed value /// if the key existed. - pub const DICT_REMOVE_MUT: Self = Self(0xFFFF_FE03); + pub const DICT_REMOVE_MUT: Self = Self::new(0xFFFF_FE03); /// Built-in list pop operation (immutable). /// /// This operation removes the last element from a list, returning /// both the element and a new list without the element. The original /// list remains unchanged. - pub const LIST_POP: Self = Self(0xFFFF_FE04); + pub const LIST_POP: Self = Self::new(0xFFFF_FE04); /// Built-in list pop operation (mutable). /// /// This operation removes the last element from a list in-place, /// returning the removed element while modifying the original list. - pub const LIST_POP_MUT: Self = Self(0xFFFF_FE05); + pub const LIST_POP_MUT: Self = Self::new(0xFFFF_FE05); /// Built-in list push operation (immutable). /// /// This operation appends an element to a list, returning a new list /// without modifying the original. Used for functional-style list /// manipulation where immutability is preferred. - pub const LIST_PUSH: Self = Self(0xFFFF_FE06); + pub const LIST_PUSH: Self = Self::new(0xFFFF_FE06); /// Built-in list push operation (mutable). /// /// This operation appends an element to a list in-place, modifying /// the original list. Used for imperative-style list manipulation /// where performance is critical. 
- pub const LIST_PUSH_MUT: Self = Self(0xFFFF_FE07); + pub const LIST_PUSH_MUT: Self = Self::new(0xFFFF_FE07); pub const PLACEHOLDER: Self = Self::MAX; } diff --git a/libs/@local/hashql/mir/src/pass/analysis/callgraph/mod.rs b/libs/@local/hashql/mir/src/pass/analysis/callgraph/mod.rs index 84a76ca1ab1..8c26ca917bf 100644 --- a/libs/@local/hashql/mir/src/pass/analysis/callgraph/mod.rs +++ b/libs/@local/hashql/mir/src/pass/analysis/callgraph/mod.rs @@ -157,7 +157,7 @@ impl<'heap, A: Allocator + Clone> CallGraph<'heap, A> { impl CallGraph<'_, A> { #[inline] pub fn callsites(&self, def: DefId) -> impl Iterator { - let node = NodeId::new(def.as_usize()); + let node = NodeId::from_usize(def.as_usize()); self.inner.outgoing_edges(node).map(move |edge| CallSite { caller: def, @@ -168,7 +168,7 @@ impl CallGraph<'_, A> { #[inline] pub fn apply_callsites(&self, def: DefId) -> impl Iterator> { - let node = NodeId::new(def.as_usize()); + let node = NodeId::from_usize(def.as_usize()); self.inner .outgoing_edges(node) @@ -184,7 +184,7 @@ impl CallGraph<'_, A> { #[inline] pub fn is_leaf(&self, def: DefId) -> bool { - let def = NodeId::new(def.as_usize()); + let def = NodeId::from_usize(def.as_usize()); self.inner .outgoing_edges(def) @@ -202,8 +202,8 @@ impl CallGraph<'_, A> { #[inline] pub fn is_single_caller(&self, caller: DefId, target: DefId) -> bool { - let caller = NodeId::new(caller.as_usize()); - let target = NodeId::new(target.as_usize()); + let caller = NodeId::from_usize(caller.as_usize()); + let target = NodeId::from_usize(target.as_usize()); self.inner .incoming_edges(target) @@ -214,7 +214,7 @@ impl CallGraph<'_, A> { #[inline] pub fn unique_caller(&self, callee: DefId) -> Option { // Same as is_single_caller, but makes sure that there is exactly one edge - let callee = NodeId::new(callee.as_usize()); + let callee = NodeId::from_usize(callee.as_usize()); let mut incoming = self .inner diff --git a/libs/@local/hashql/mir/src/pass/analysis/data_dependency/graph.rs 
b/libs/@local/hashql/mir/src/pass/analysis/data_dependency/graph.rs index abccbff598e..aef1480be5c 100644 --- a/libs/@local/hashql/mir/src/pass/analysis/data_dependency/graph.rs +++ b/libs/@local/hashql/mir/src/pass/analysis/data_dependency/graph.rs @@ -246,14 +246,14 @@ pub struct DataDependencyGraph<'heap, A: Allocator = Global> { impl<'heap, A: Allocator> DataDependencyGraph<'heap, A> { pub fn depends_on(&self, local: Local) -> impl Iterator { self.graph - .successors(NodeId::new(local.as_usize())) - .map(|node| Local::new(node.as_usize())) + .successors(NodeId::from_usize(local.as_usize())) + .map(|node| Local::from_usize(node.as_usize())) } pub fn dependent_on(&self, local: Local) -> impl Iterator { self.graph - .predecessors(NodeId::new(local.as_usize())) - .map(|node| Local::new(node.as_usize())) + .predecessors(NodeId::from_usize(local.as_usize())) + .map(|node| Local::from_usize(node.as_usize())) } /// Creates a transient graph with all edges resolved to their ultimate sources. @@ -277,7 +277,7 @@ impl<'heap, A: Allocator> DataDependencyGraph<'heap, A> { // Resolve each edge and add to the new graph or constant bindings. 
for edge in self.graph.edges() { let place = PlaceRef { - local: Local::new(edge.target().as_usize()), + local: Local::from_usize(edge.target().as_usize()), projections: &edge.data.projections, }; @@ -285,7 +285,7 @@ impl<'heap, A: Allocator> DataDependencyGraph<'heap, A> { Operand::Place(resolved) => { graph.add_edge( edge.source(), - NodeId::new(resolved.local.as_usize()), + NodeId::from_usize(resolved.local.as_usize()), EdgeData { kind: edge.data.kind, projections: resolved.projections, @@ -294,7 +294,7 @@ impl<'heap, A: Allocator> DataDependencyGraph<'heap, A> { } Operand::Constant(constant) => { constant_bindings.insert( - Local::new(edge.source().as_usize()), + Local::from_usize(edge.source().as_usize()), edge.data.kind, constant, ); @@ -369,7 +369,7 @@ impl<'heap, A: Allocator> DataDependencyGraph<'heap, A> { &'this self, local: Local, ) -> IncidentEdges<'this, Local, EdgeData<'heap>, A> { - let node_id = NodeId::new(local.as_usize()); + let node_id = NodeId::from_usize(local.as_usize()); self.graph.outgoing_edges(node_id) } diff --git a/libs/@local/hashql/mir/src/pass/analysis/data_dependency/resolve.rs b/libs/@local/hashql/mir/src/pass/analysis/data_dependency/resolve.rs index 3886a73c94f..254394474da 100644 --- a/libs/@local/hashql/mir/src/pass/analysis/data_dependency/resolve.rs +++ b/libs/@local/hashql/mir/src/pass/analysis/data_dependency/resolve.rs @@ -138,7 +138,7 @@ fn traverse<'heap, A: Allocator + Clone>( ) -> ControlFlow, Local> { // The edge's target may itself have projections that must be resolved first. 
let target = PlaceRef { - local: Local::new(edge.target().as_usize()), + local: Local::from_usize(edge.target().as_usize()), projections: edge.data.projections.0, }; diff --git a/libs/@local/hashql/mir/src/pass/analysis/size_estimation/mod.rs b/libs/@local/hashql/mir/src/pass/analysis/size_estimation/mod.rs index a8b68bdccb2..6aee163f6e7 100644 --- a/libs/@local/hashql/mir/src/pass/analysis/size_estimation/mod.rs +++ b/libs/@local/hashql/mir/src/pass/analysis/size_estimation/mod.rs @@ -50,7 +50,7 @@ use hashql_core::{ tarjan::{SccId, StronglyConnectedComponents}, }, heap::Heap, - id::{IdVec, bit_vec::DenseBitSet}, + id::{Id as _, IdVec, bit_vec::DenseBitSet}, }; pub use self::{ @@ -102,8 +102,8 @@ impl PendingDataflow { } /// Returns the synthetic local used to track whether the return type needs dynamic analysis. - const fn return_slot(&self) -> Local { - Local::new(self.inner.domain_size() - 1) + fn return_slot(&self) -> Local { + Local::from_usize(self.inner.domain_size() - 1) } fn insert(&mut self, local: Local) { diff --git a/libs/@local/hashql/mir/src/pass/execution/placement/solve/condensation.rs b/libs/@local/hashql/mir/src/pass/execution/placement/solve/condensation.rs index 095d0a0a27a..f1e12b5c3c0 100644 --- a/libs/@local/hashql/mir/src/pass/execution/placement/solve/condensation.rs +++ b/libs/@local/hashql/mir/src/pass/execution/placement/solve/condensation.rs @@ -188,8 +188,8 @@ impl<'alloc, S: BumpAllocator> Condensation<'alloc, S> { let target_scc = self.scc.scc(target); self.graph.add_edge( - NodeId::new(source_scc.as_usize()), - NodeId::new(target_scc.as_usize()), + NodeId::from_usize(source_scc.as_usize()), + NodeId::from_usize(target_scc.as_usize()), BoundaryEdge { source: PlacementLocation { region: source_scc, @@ -238,7 +238,7 @@ impl<'alloc, S: BumpAllocator> DirectedGraph for Condensation<'alloc, S> { impl IndexMut for Condensation<'_, S> { fn index_mut(&mut self, index: PlacementRegionId) -> &mut Self::Output { - &mut 
self.graph[NodeId::new(index.as_usize())].data + &mut self.graph[NodeId::from_usize(index.as_usize())].data } } @@ -246,6 +246,6 @@ impl<'alloc, S: BumpAllocator> Index for Condensation<'alloc, type Output = PlacementRegion<'alloc>; fn index(&self, index: PlacementRegionId) -> &Self::Output { - &self.graph[NodeId::new(index.as_usize())].data + &self.graph[NodeId::from_usize(index.as_usize())].data } } diff --git a/libs/@local/hashql/mir/src/pass/execution/statement_placement/common.rs b/libs/@local/hashql/mir/src/pass/execution/statement_placement/common.rs index b7c24593f53..a99d024999a 100644 --- a/libs/@local/hashql/mir/src/pass/execution/statement_placement/common.rs +++ b/libs/@local/hashql/mir/src/pass/execution/statement_placement/common.rs @@ -4,8 +4,8 @@ use hashql_core::{ id::{ Id as _, bit_vec::{BitRelations as _, DenseBitSet}, + newtype, }, - newtype, r#type::TypeId, }; diff --git a/libs/@local/hashql/mir/src/pass/execution/statement_placement/embedding/mod.rs b/libs/@local/hashql/mir/src/pass/execution/statement_placement/embedding/mod.rs index 12e0bfe0793..047cd59cfa5 100644 --- a/libs/@local/hashql/mir/src/pass/execution/statement_placement/embedding/mod.rs +++ b/libs/@local/hashql/mir/src/pass/execution/statement_placement/embedding/mod.rs @@ -146,7 +146,7 @@ impl<'heap, A: Allocator + Clone, S: Allocator> StatementPlacement<'heap, A> // Embedding backend cannot receive any arguments directly for arg in 0..body.args { - domain.remove(Local::new(arg)); + domain.remove(Local::from_usize(arg)); } }, ), diff --git a/libs/@local/hashql/mir/src/pass/execution/statement_placement/postgres/mod.rs b/libs/@local/hashql/mir/src/pass/execution/statement_placement/postgres/mod.rs index 8656ed7988b..ac99b437435 100644 --- a/libs/@local/hashql/mir/src/pass/execution/statement_placement/postgres/mod.rs +++ b/libs/@local/hashql/mir/src/pass/execution/statement_placement/postgres/mod.rs @@ -3,7 +3,7 @@ use core::{alloc::Allocator, ops::ControlFlow}; use 
hashql_core::{ debug_panic, - id::bit_vec::DenseBitSet, + id::{Id as _, bit_vec::DenseBitSet}, symbol::sym, sync::lock::LocalLock, r#type::{ @@ -701,7 +701,7 @@ impl<'heap, S: Allocator> PostgresStatementPlacement<'heap, S> { for (index, &field) in env.fields.iter().enumerate() { let is_supported = visitor.visit_id(field).is_continue(); - supported.set(FieldIndex::new(index), is_supported); + supported.set(FieldIndex::from_usize(index), is_supported); } supported diff --git a/libs/@local/hashql/mir/src/pass/execution/target.rs b/libs/@local/hashql/mir/src/pass/execution/target.rs index e282a6cbb2a..706fd8c2a9b 100644 --- a/libs/@local/hashql/mir/src/pass/execution/target.rs +++ b/libs/@local/hashql/mir/src/pass/execution/target.rs @@ -1,9 +1,4 @@ -use core::{ - fmt, - mem::{self, MaybeUninit}, -}; - -use hashql_core::id::{self, Id as _, IdArray, bit_vec::FiniteBitSet}; +use hashql_core::id::{IdArray, bit_vec::FiniteBitSet}; /// Execution backend that a basic block can be assigned to. /// @@ -14,7 +9,7 @@ use hashql_core::id::{self, Id as _, IdArray, bit_vec::FiniteBitSet}; /// The discriminant order determines iteration order in [`TargetId::all`] and affects cost /// estimation during placement. The interpreter is evaluated last so it can incorporate traversal /// costs computed by the other backends. -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[hashql_core::id] pub enum TargetId { /// In-process evaluator that supports all MIR operations. 
/// @@ -32,7 +27,6 @@ pub enum TargetId { } impl TargetId { - pub const VARIANT_COUNT: usize = mem::variant_count::(); pub const VARIANT_COUNT_U32: u32 = match u32::try_from(Self::VARIANT_COUNT) { Ok(count) => count, Err(_) => unreachable!(), @@ -42,48 +36,6 @@ impl TargetId { Err(_) => unreachable!(), }; - const fn try_from_discriminant(value: u8) -> Option { - match value { - 0 => Some(Self::Interpreter), - 1 => Some(Self::Postgres), - 2 => Some(Self::Embedding), - _ => None, - } - } - - #[inline] - const fn from_discriminant(value: u8) -> Self { - match Self::try_from_discriminant(value) { - Some(target) => target, - None => unreachable!(), - } - } - - #[inline] - const fn into_discriminant(self) -> u8 { - self as u8 - } - - #[inline] - #[must_use] - pub const fn all() -> [Self; Self::VARIANT_COUNT] { - #[expect(unsafe_code)] - const VARIANTS: [TargetId; TargetId::VARIANT_COUNT] = { - let mut array = [MaybeUninit::uninit(); TargetId::VARIANT_COUNT]; - - let mut index = 0_u8; - while index < TargetId::VARIANT_COUNT_U8 { - array[index as usize].write(TargetId::from_discriminant(index)); - index += 1; - } - - // SAFETY: All elements have been initialized. 
- unsafe { MaybeUninit::array_assume_init(array) } - }; - - VARIANTS - } - #[must_use] pub const fn abbreviation(self) -> &'static str { match self { @@ -94,140 +46,5 @@ impl TargetId { } } -const _: () = { - let mut index = 0_u8; - - while index < TargetId::VARIANT_COUNT_U8 { - let target = TargetId::from_discriminant(index); - let roundtrip = target.into_discriminant(); - - assert!(roundtrip == index); - index += 1; - } -}; - -#[expect(clippy::cast_possible_truncation, clippy::cast_lossless)] -impl id::Id for TargetId { - const MAX: Self = Self::from_discriminant((Self::VARIANT_COUNT - 1) as u8); - const MIN: Self = Self::from_discriminant(0); - - fn from_u32(index: u32) -> Self { - assert!( - index < (Self::VARIANT_COUNT as u32), - "ID must be between 0 and {}", - Self::VARIANT_COUNT - 1 - ); - - Self::from_discriminant(index as u8) - } - - fn from_u64(index: u64) -> Self { - assert!( - index < (Self::VARIANT_COUNT as u64), - "ID must be between 0 and {}", - Self::VARIANT_COUNT - 1 - ); - - Self::from_discriminant(index as u8) - } - - fn from_usize(index: usize) -> Self { - assert!( - index < Self::VARIANT_COUNT, - "ID must be between 0 and {}", - Self::VARIANT_COUNT - 1 - ); - - Self::from_discriminant(index as u8) - } - - #[inline] - fn as_u32(self) -> u32 { - self.into_discriminant() as u32 - } - - #[inline] - fn as_u64(self) -> u64 { - self.into_discriminant() as u64 - } - - #[inline] - fn as_usize(self) -> usize { - self.into_discriminant() as usize - } - - #[inline] - fn prev(self) -> Option { - let discriminant = self.into_discriminant(); - let prev = discriminant.checked_sub(1)?; - Self::try_from_discriminant(prev) - } -} - -impl TryFrom for TargetId { - type Error = id::IdError; - - #[inline] - fn try_from(value: u32) -> Result { - u8::try_from(value) - .ok() - .and_then(Self::try_from_discriminant) - .ok_or_else(|| id::IdError::OutOfRange { - value: u64::from(value), - min: 0, - max: Self::MAX.as_u64(), - }) - } -} - -impl TryFrom for TargetId { - type 
Error = id::IdError; - - #[inline] - fn try_from(value: u64) -> Result { - u8::try_from(value) - .ok() - .and_then(Self::try_from_discriminant) - .ok_or_else(|| id::IdError::OutOfRange { - value, - min: 0, - max: Self::MAX.as_u64(), - }) - } -} - -impl TryFrom for TargetId { - type Error = id::IdError; - - fn try_from(value: usize) -> Result { - u8::try_from(value) - .ok() - .and_then(Self::try_from_discriminant) - .ok_or_else(|| id::IdError::OutOfRange { - value: value as u64, - min: 0, - max: Self::MAX.as_u64(), - }) - } -} - -impl id::HasId for TargetId { - type Id = Self; - - fn id(&self) -> Self::Id { - *self - } -} - -impl fmt::Display for TargetId { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Interpreter => fmt.write_str("interpreter"), - Self::Embedding => fmt.write_str("embedding"), - Self::Postgres => fmt.write_str("postgres"), - } - } -} - pub(crate) type TargetBitSet = FiniteBitSet; pub(crate) type TargetArray = IdArray; diff --git a/libs/@local/hashql/mir/src/pass/transform/administrative_reduction/visitor.rs b/libs/@local/hashql/mir/src/pass/transform/administrative_reduction/visitor.rs index 267ca619eb6..01cf86ce954 100644 --- a/libs/@local/hashql/mir/src/pass/transform/administrative_reduction/visitor.rs +++ b/libs/@local/hashql/mir/src/pass/transform/administrative_reduction/visitor.rs @@ -6,7 +6,10 @@ use core::{alloc::Allocator, convert::Infallible, mem}; -use hashql_core::{heap::Heap, id::IdVec}; +use hashql_core::{ + heap::Heap, + id::{Id as _, IdVec}, +}; use super::{Reducable, disjoint::DisjointIdSlice}; use crate::{ @@ -237,7 +240,7 @@ impl<'heap, A: Allocator> AdministrativeReductionVisitor<'_, '_, 'heap, A> { .enumerate() .map(|(param, argument)| Statement { kind: StatementKind::Assign(Assign { - lhs: Place::local(Local::new(local_offset + param)), + lhs: Place::local(Local::from_usize(local_offset + param)), rhs: RValue::Load(argument), }), span, diff --git 
a/libs/@local/hashql/mir/src/pass/transform/dle/mod.rs b/libs/@local/hashql/mir/src/pass/transform/dle/mod.rs index 29f0be76ceb..5b19ded62f9 100644 --- a/libs/@local/hashql/mir/src/pass/transform/dle/mod.rs +++ b/libs/@local/hashql/mir/src/pass/transform/dle/mod.rs @@ -116,7 +116,7 @@ impl<'env, 'heap, A: BumpAllocator> TransformPass<'env, 'heap> for DeadLocalElim // The function args cannot be dead for index in 0..body.args { - dead.remove(Local::new(index)); + dead.remove(Local::from_usize(index)); } if dead.is_empty() { @@ -145,7 +145,7 @@ impl<'env, 'heap, A: BumpAllocator> TransformPass<'env, 'heap> for DeadLocalElim // For an explanation of how this compression algorithm works, see the DBE implementation. let mut write_index = Local::new(0); - let local_count = Local::new(body.local_decls.len() - dead.count()); + let local_count = Local::from_usize(body.local_decls.len() - dead.count()); for read_index in body.local_decls.ids() { if write_index == local_count { diff --git a/libs/@local/hashql/mir/src/pass/transform/inline/mod.rs b/libs/@local/hashql/mir/src/pass/transform/inline/mod.rs index 5867f489049..3dedbbbd247 100644 --- a/libs/@local/hashql/mir/src/pass/transform/inline/mod.rs +++ b/libs/@local/hashql/mir/src/pass/transform/inline/mod.rs @@ -404,7 +404,7 @@ impl<'heap, A: Allocator> InlineState<'_, '_, '_, 'heap, A> { block.statements.push(Statement { span: callsite.span, kind: StatementKind::Assign(Assign { - lhs: Place::local(Local::new(local_offset + index)), + lhs: Place::local(Local::from_usize(local_offset + index)), rhs: RValue::Load(arg), }), }); diff --git a/libs/@local/hashql/mir/src/pretty/text.rs b/libs/@local/hashql/mir/src/pretty/text.rs index d588455784c..8c41aab20df 100644 --- a/libs/@local/hashql/mir/src/pretty/text.rs +++ b/libs/@local/hashql/mir/src/pretty/text.rs @@ -526,7 +526,7 @@ where self.format_part(body.source)?; self.line_buffer.write_all(b"(")?; - self.csv((0..body.args).map(Local::new).map(|local| { + 
self.csv((0..body.args).map(Local::from_usize).map(|local| { let decl = body.local_decls[local]; KeyValuePair( diff --git a/libs/@local/hashql/mir/src/reify/mod.rs b/libs/@local/hashql/mir/src/reify/mod.rs index c37e2416eb5..e72e49d3116 100644 --- a/libs/@local/hashql/mir/src/reify/mod.rs +++ b/libs/@local/hashql/mir/src/reify/mod.rs @@ -255,7 +255,7 @@ impl<'ctx, 'mir, 'hir, 'env, 'heap> Reifier<'ctx, 'mir, 'hir, 'env, 'heap> { projections: self.context.mir.interner.projections.intern_slice(&[ Projection { r#type: env_type.fields[index], - kind: ProjectionKind::Field(FieldIndex::new(index)), + kind: ProjectionKind::Field(FieldIndex::from_usize(index)), }, ]), })), diff --git a/libs/@local/hashql/mir/src/visit/ref.rs b/libs/@local/hashql/mir/src/visit/ref.rs index 5c581182bd4..8b2e1a9fe71 100644 --- a/libs/@local/hashql/mir/src/visit/ref.rs +++ b/libs/@local/hashql/mir/src/visit/ref.rs @@ -320,6 +320,7 @@ pub trait Visitor<'heap> { walk_graph_read_head(self, location, head) } + #[expect(clippy::trivially_copy_pass_by_ref)] fn visit_graph_read_body( &mut self, location: GraphReadLocation, @@ -756,6 +757,7 @@ pub fn walk_graph_read_head<'heap, T: Visitor<'heap> + ?Sized>( } } +#[expect(clippy::trivially_copy_pass_by_ref)] pub fn walk_graph_read_body<'heap, T: Visitor<'heap> + ?Sized>( visitor: &mut T, location: GraphReadLocation, From 3aee431027927c0a8e78f2cbc70e8860f42f16d0 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sat, 28 Feb 2026 00:35:33 +0100 Subject: [PATCH 10/15] feat: symbol table proc macro --- libs/@local/hashql/core/src/symbol/sym.rs | 163 +---------------- libs/@local/hashql/macros/src/id/mod.rs | 8 +- libs/@local/hashql/macros/src/lib.rs | 35 +++- libs/@local/hashql/macros/src/sym.rs | 206 ++++++++++++++++++++++ 4 files changed, 243 insertions(+), 169 deletions(-) create mode 100644 libs/@local/hashql/macros/src/sym.rs diff --git a/libs/@local/hashql/core/src/symbol/sym.rs b/libs/@local/hashql/core/src/symbol/sym.rs index 
578e7327372..23a675bb88b 100644 --- a/libs/@local/hashql/core/src/symbol/sym.rs +++ b/libs/@local/hashql/core/src/symbol/sym.rs @@ -1,166 +1,7 @@ #![expect(non_upper_case_globals, non_snake_case, clippy::min_ident_chars)] -use super::{ConstantSymbol, Symbol}; +use super::Symbol; -/// Generates pre-interned symbols available at compile time. -/// -/// This macro produces three artifacts from a single symbol table definition: -/// -/// 1. **`SYMBOLS`** - A static slice of string values for interner pre-population -/// 2. **Symbol constants** - `Symbol<'static>` constants (e.g., `sym::foo`, `sym::symbol::plus`) -/// 3. **`LOOKUP`** - A static slice mapping string values to their [`Repr`] for fast lookup -/// -/// # Syntax -/// -/// ```text -/// symbols! {@table; -/// // Simple symbol: name becomes both the constant and string value -/// foo, -/// -/// // Explicit string: use when string differs from identifier -/// r#true: "true", -/// input_exists: "$exists", -/// -/// // Nested module: groups related symbols under a namespace -/// symbol: { -/// plus: "+", -/// minus: "-", -/// }, -/// } -/// ``` -/// -/// Each symbol `name` or `name: "value"` generates: -/// - A constant `name: Symbol<'static>` with auto-generated docs -/// - A submodule `name` containing `CONST: ConstantSymbol` for pattern matching -/// -/// Modules create nested namespaces, so `symbol::plus` becomes accessible as `sym::symbol::plus`. 
-/// -/// # Internal Rules -/// -/// The macro uses internal rules (prefixed with `@`) to process the token stream: -/// -/// - **`@strings`** - Collects all string values into the `SYMBOLS` slice -/// - **`@consts`** - Generates `Symbol` constants and companion modules with index tracking -/// - **`@consts @cont`** - Continuation after processing a nested module to resume counting -/// - **`@lookup`** - Builds the string-to-repr mapping table for runtime lookup -/// - **`@path`** - Helper to construct module paths (reverses accumulated path segments) -/// - **`@table`** - Entry point that dispatches to all three generators -/// -/// Index tracking uses the `${count($count)}` metavariable to assign sequential indices. -/// Each processed symbol appends `()` to the count accumulator, and `${count(...)}` returns -/// the number of elements. -/// -/// [`Repr`]: super::repr::Repr -macro_rules! symbols { - (@strings [$($acc:tt)*];) => { - pub(crate) static SYMBOLS: &[&str] = &[ - $($acc),* - ]; - }; - (@strings [$($acc:tt)*]; , $($rest:tt)*) => { - symbols!(@strings [$($acc)*]; $($rest)*); - }; - (@strings [$($acc:tt)*]; $module:ident : { $($inner:tt)* } $(, $($rest:tt)*)?) => { - symbols!(@strings [$($acc)*]; $($inner)* $(, $($rest)*)?); - }; - (@strings [$($acc:tt)*]; $name:ident : $value:literal $(, $($rest:tt)*)?) => { - symbols!(@strings [$($acc)* $value]; $($($rest)*)?); - }; - (@strings [$($acc:tt)*]; $name:ident $(, $($rest:tt)*)?) => { - symbols!(@strings [$($acc)* (stringify!($name))]; $($($rest)*)?); - }; - - (@consts @cont [$($count:tt)*] [$($next:tt)*];) => { - symbols!(@consts [$($count)*]; $($next)*); - }; - (@consts @cont [$($count:tt)*] [$($next:tt)*]; , $($rest:tt)*) => { - symbols!(@consts @cont [$($count)*] [$($next)*]; $($rest)*); - }; - (@consts @cont [$($count:tt)*] [$($next:tt)*]; $name:ident : $value:literal $(, $($rest:tt)*)?) 
=> { - symbols!(@consts @cont [$($count)* ()] [$($next)*]; $($($rest)*)?); - }; - (@consts @cont [$($count:tt)*] [$($next:tt)*]; $module:ident : { $($inner:tt)* } $(, $($rest:tt)*)?) => { - compile_error!("nested modules in modules are not supported"); - }; - (@consts @cont [$($count:tt)*] [$($next:tt)*]; $name:ident $(, $($rest:tt)*)?) => { - symbols!(@consts @cont [$($count)* ()] [$($next)*]; $($($rest)*)?); - }; - - (@consts [$($count:tt)*];) => {}; - (@consts [$($count:tt)*]; , $($rest:tt)*) => { - symbols!(@consts [$($count)*]; $($rest)*); - }; - (@consts [$($count:tt)*]; $name:ident : $value:literal $(, $($rest:tt)*)?) => { - const _: () = { assert!(SYMBOLS[${count($count)}] == $value) }; - #[doc = concat!("The symbol `", $value, "`")] - pub const $name: Symbol<'static> = Symbol::from_constant($name::CONST); - - pub mod $name { - use super::*; - - pub const CONST: ConstantSymbol = ConstantSymbol::new_unchecked(${count($count)}); - } - - symbols!(@consts [$($count)* ()]; $($($rest)*)?); - }; - (@consts [$($count:tt)*]; $module:ident : { $($inner:tt)* } $(, $($rest:tt)*)?) => { - pub mod $module { - use super::*; - - symbols!(@consts [$($count)*]; $($inner)*); - } - - symbols!(@consts @cont [$($count)*] [$($($rest)*)?]; $($inner)*); - }; - (@consts [$($count:tt)*]; $name:ident $(, $($rest:tt)*)?) 
=> { - const _: () = { assert!(SYMBOLS[${count($count)}] == stringify!($name)) }; - #[doc = concat!("The symbol `", stringify!($name), "`")] - pub const $name: Symbol<'static> = Symbol::from_constant($name::CONST); - - pub mod $name { - use super::*; - - pub const CONST: ConstantSymbol = ConstantSymbol::new_unchecked(${count($count)}); - } - - symbols!(@consts [$($count)* ()]; $($($rest)*)?); - }; - - (@path [] [$($path:ident)*];) => { - $($path)::* - }; - (@path [$next:tt $($rest:tt)*] [$($path:tt)*];) => { - symbols!(@path [$($rest)*] [$next $($path)*];) - }; - - (@lookup [$(, $arm:expr => $value:expr)*] [$($path:tt),*];) => { - pub(crate) static LOOKUP: &[(&'static str, super::repr::Repr)] = &[ - $(($arm, $value.into_repr())),* - ]; - }; - (@lookup [$($arms:tt)*] [$tail:tt $(, $path:tt)*]; | $($rest:tt)*) => { - symbols!(@lookup [$($arms)*] [$($path),*]; $($rest)*); - }; - (@lookup [$($arms:tt)*] [$($path:tt),*]; , $($rest:tt)*) => { - symbols!(@lookup [$($arms)*] [$($path),*]; $($rest)*); - }; - (@lookup [$($arms:tt)*] [$($path:tt),*]; $name:ident : $value:literal $(, $($rest:tt)*)?) => { - symbols!(@lookup [$($arms)*, $value => symbols!(@path [$name $($path)*] [];)] [$($path),*]; $($($rest)*)?); - }; - (@lookup [$($arms:tt)*] [$($path:tt),*]; $module:ident : { $($inner:tt)* } $(, $($rest:tt)*)?) => { - symbols!(@lookup [$($arms)*] [$module $(, $path)*]; $($inner)* ,| $($($rest)*)?); - }; - (@lookup [$($arms:tt)*] [$($path:tt),*]; $name:ident $(, $($rest:tt)*)?) => { - symbols!(@lookup [$($arms)*, stringify!($name) => symbols!(@path [$name $($path)*] [];)] [$($path),*]; $($($rest)*)?); - }; - - (@table; $($items:tt)*) => { - symbols!(@strings []; $($items)*); - symbols!(@consts []; $($items)*); - symbols!(@lookup [] [self]; $($items)*); - }; -} - -symbols! {@table; +hashql_macros::define_symbols! 
{ // [tidy] sort alphabetically start access, add, diff --git a/libs/@local/hashql/macros/src/id/mod.rs b/libs/@local/hashql/macros/src/id/mod.rs index 09d7dbaab5d..79f4c2246ad 100644 --- a/libs/@local/hashql/macros/src/id/mod.rs +++ b/libs/@local/hashql/macros/src/id/mod.rs @@ -3,14 +3,12 @@ pub(crate) mod common; mod r#enum; mod r#struct; -use core::fmt::Display; - -use proc_macro::{Diagnostic, Level, Span}; use proc_macro2::TokenStream; use quote::quote; use unsynn::{Parse as _, ToTokenIter as _}; use self::{r#enum::expand_enum, r#struct::expand_struct}; +use crate::emit_error; mod grammar { #![expect(clippy::result_large_err)] @@ -162,7 +160,3 @@ fn parse( Ok((additional.into(), parsed)) } - -fn emit_error(span: Span, message: impl Display) { - Diagnostic::spanned(span, Level::Error, message.to_string()).emit(); -} diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs index 89e4936fcf5..9ef507ef2eb 100644 --- a/libs/@local/hashql/macros/src/lib.rs +++ b/libs/@local/hashql/macros/src/lib.rs @@ -5,8 +5,11 @@ extern crate proc_macro; mod grammar; mod id; +mod sym; -use proc_macro::TokenStream; +use core::fmt::Display; + +use proc_macro::{Diagnostic, Level, Span, TokenStream}; /// Defines an enum as an [`Id`] type. /// @@ -110,3 +113,33 @@ pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { pub fn define_id(item: TokenStream) -> TokenStream { id::expand(TokenStream::new().into(), item.into()).into() } + +/// Generates a pre-interned symbol table. +/// +/// Produces three artifacts from a symbol definition list: +/// +/// 1. `SYMBOLS` — a static slice of string values for interner pre-population +/// 2. Symbol constants — `Symbol<'static>` constants with companion `ConstantSymbol` modules +/// 3. `LOOKUP` — a static slice mapping string values to their [`Repr`] for fast lookup +/// +/// # Syntax +/// +/// ```ignore +/// define_symbols! 
{ +/// foo, // simple: name = string value +/// r#true: "true", // explicit string value +/// symbol: { plus: "+", }, // module grouping +/// } +/// ``` +/// +/// The call site must have `Symbol` and `ConstantSymbol` in scope (e.g. via +/// `use super::{ConstantSymbol, Symbol}`). The generated `LOOKUP` references +/// `super::repr::Repr`. +#[proc_macro] +pub fn define_symbols(item: TokenStream) -> TokenStream { + sym::expand(item.into()).into() +} + +fn emit_error(span: Span, message: impl Display) { + Diagnostic::spanned(span, Level::Error, message.to_string()).emit(); +} diff --git a/libs/@local/hashql/macros/src/sym.rs b/libs/@local/hashql/macros/src/sym.rs new file mode 100644 index 00000000000..a4e9165e3e0 --- /dev/null +++ b/libs/@local/hashql/macros/src/sym.rs @@ -0,0 +1,206 @@ +use alloc::vec::Vec; + +use proc_macro2::{Ident, TokenStream}; +use quote::{quote, quote_spanned}; +use unsynn::{ToTokenIter as _, ToTokens as _}; + +use crate::emit_error; + +mod grammar { + #![expect(clippy::result_large_err)] + use unsynn::{BraceGroupContaining, Colon, CommaDelimitedVec, Ident, LiteralString, unsynn}; + + unsynn! 
{ + pub(super) struct SymbolModule(pub CommaDelimitedVec); + + pub(super) enum SymbolEntry { + Module { + name: Ident, + _colon: Colon, + entries: BraceGroupContaining + }, + Explicit { + name: Ident, + _colon: Colon, + value: LiteralString + }, + Implicit(Ident) + } + } +} + +use grammar::SymbolEntry; + +use self::grammar::SymbolModule; + +pub(crate) fn expand(item: TokenStream) -> TokenStream { + let entries = match parse(item) { + Ok(entries) => entries, + Err(error) => { + if let Some(token) = error.failed_at() { + emit_error(token.span().unwrap(), error); + + return TokenStream::new(); + } + + let message = error.to_string(); + return quote!(compile_error!(#message)); + } + }; + + generate(&entries) +} + +#[expect(clippy::result_large_err)] +fn parse(item: TokenStream) -> Result, unsynn::Error> { + let mut tokens = item.to_token_iter(); + + let parsed: SymbolModule = unsynn::Parse::parse_all(&mut tokens)?; + + Ok(parsed.0.into()) +} + +/// Returns the string content of an identifier, stripping any `r#` raw prefix. +#[expect(clippy::option_if_let_else)] +fn ident_string_value(ident: &Ident) -> String { + let raw = ident.to_string(); + + match raw.strip_prefix("r#") { + Some(stripped) => stripped.to_owned(), + None => raw, + } +} + +fn generate(entries: &[SymbolEntry]) -> TokenStream { + let mut output = TokenStream::new(); + + // 1. SYMBOLS array + let mut symbol_values = Vec::new(); + for entry in entries { + collect_symbol_values(entry, &mut symbol_values); + } + output.extend(quote! { + pub(crate) static SYMBOLS: &[&str] = &[ + #(#symbol_values),* + ]; + }); + + // 2. Constants and modules + let mut counter = 0; + for entry in entries { + generate_entry_constants(entry, &mut counter, &mut output); + } + + // 3. LOOKUP + let mut lookup_entries = Vec::new(); + let mut module_path = Vec::new(); + for entry in entries { + generate_lookup_entry(entry, &mut module_path, &mut lookup_entries); + } + output.extend(quote! 
{ + pub(crate) static LOOKUP: &[(&'static str, super::repr::Repr)] = &[ + #(#lookup_entries),* + ]; + }); + + output +} + +fn collect_symbol_values(entry: &SymbolEntry, values: &mut Vec) { + match entry { + SymbolEntry::Module { entries, .. } => { + for inner in &*entries.content.0 { + collect_symbol_values(&inner.value, values); + } + } + SymbolEntry::Explicit { value, .. } => { + values.push(value.to_token_stream()); + } + SymbolEntry::Implicit(name) => { + let string_value = ident_string_value(name); + values.push(quote!(#string_value)); + } + } +} + +fn generate_entry_constants(entry: &SymbolEntry, counter: &mut usize, output: &mut TokenStream) { + match entry { + SymbolEntry::Module { name, entries, .. } => { + let mut inner_output = TokenStream::new(); + + for inner in &*entries.content.0 { + generate_entry_constants(&inner.value, counter, &mut inner_output); + } + + output.extend(quote_spanned! { name.span() => + pub mod #name { + use crate::symbol::{Symbol, sym::SYMBOLS}; + + #inner_output + } + }); + } + SymbolEntry::Explicit { name, value, .. } => { + let doc = format!("The symbol `{}`", value.as_str()); + let value_tokens = value.to_token_stream(); + + output.extend(quote_spanned! {name.span() => + const _: () = { assert!(SYMBOLS[#counter] == #value_tokens) }; + #[doc = #doc] + pub const #name: Symbol<'static> = Symbol::from_constant(#name::CONST); + pub mod #name { + use crate::symbol::ConstantSymbol; + + pub const CONST: ConstantSymbol = ConstantSymbol::new_unchecked(#counter); + } + }); + *counter += 1; + } + SymbolEntry::Implicit(name) => { + let string_value = ident_string_value(name); + let doc = format!("The symbol `{string_value}`"); + + output.extend(quote_spanned! 
{name.span() => + const _: () = { assert!(SYMBOLS[#counter] == #string_value) }; + #[doc = #doc] + pub const #name: Symbol<'static> = Symbol::from_constant(#name::CONST); + pub mod #name { + use crate::symbol::ConstantSymbol; + + pub const CONST: ConstantSymbol = ConstantSymbol::new_unchecked(#counter); + } + }); + *counter += 1; + } + } +} + +fn generate_lookup_entry<'a>( + entry: &'a SymbolEntry, + module_path: &mut Vec<&'a Ident>, + lookup: &mut Vec, +) { + match entry { + SymbolEntry::Module { name, entries, .. } => { + module_path.push(name); + for inner in &*entries.content.0 { + generate_lookup_entry(&inner.value, module_path, lookup); + } + module_path.pop(); + } + SymbolEntry::Explicit { name, value, .. } => { + let path = build_path(module_path, name); + let value_tokens = value.to_token_stream(); + lookup.push(quote_spanned!(name.span() => (#value_tokens, #path.into_repr()))); + } + SymbolEntry::Implicit(name) => { + let path = build_path(module_path, name); + let string_value = ident_string_value(name); + lookup.push(quote_spanned!(name.span() => (#string_value, #path.into_repr()))); + } + } +} + +fn build_path(module_path: &[&Ident], name: &Ident) -> TokenStream { + quote_spanned!(name.span() => self #(:: #module_path)* :: #name) +} From d3165bdfeaa86296c4c7e47493561273b31a51b3 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sat, 28 Feb 2026 00:39:33 +0100 Subject: [PATCH 11/15] feat: add to turborepo --- apps/hash-graph/docs/dependency-diagram.mmd | 68 +++++++++--------- .../graph/api/docs/dependency-diagram.mmd | 72 ++++++++++--------- .../test-server/docs/dependency-diagram.mmd | 68 +++++++++--------- .../hashql/ast/docs/dependency-diagram.mmd | 56 ++++++++------- .../compiletest/docs/dependency-diagram.mmd | 56 ++++++++------- .../hashql/core/docs/dependency-diagram.mmd | 42 +++++------ libs/@local/hashql/core/package.json | 3 +- libs/@local/hashql/core/src/lib.rs | 1 - .../hashql/eval/docs/dependency-diagram.mmd | 56 ++++++++------- 
.../hashql/hir/docs/dependency-diagram.mmd | 56 ++++++++------- libs/@local/hashql/macros/package.json | 10 +++ .../hashql/mir/docs/dependency-diagram.mmd | 58 +++++++-------- .../syntax-jexpr/docs/dependency-diagram.mmd | 58 +++++++-------- yarn.lock | 7 ++ 14 files changed, 324 insertions(+), 287 deletions(-) create mode 100644 libs/@local/hashql/macros/package.json diff --git a/apps/hash-graph/docs/dependency-diagram.mmd b/apps/hash-graph/docs/dependency-diagram.mmd index 4eacaedd9fb..e1d32bd7d26 100644 --- a/apps/hash-graph/docs/dependency-diagram.mmd +++ b/apps/hash-graph/docs/dependency-diagram.mmd @@ -39,20 +39,21 @@ graph TD 27[hashql-diagnostics] 28[hashql-eval] 29[hashql-hir] - 30[hashql-mir] - 31[hashql-syntax-jexpr] - 32[hash-status] - 33[hash-telemetry] - 34[hash-temporal-client] - 35[darwin-kperf] - 36[darwin-kperf-criterion] - 37[darwin-kperf-events] - 38[darwin-kperf-sys] - 39[error-stack] - 40[hash-graph-test-data] + 30[hashql-macros] + 31[hashql-mir] + 32[hashql-syntax-jexpr] + 33[hash-status] + 34[hash-telemetry] + 35[hash-temporal-client] + 36[darwin-kperf] + 37[darwin-kperf-criterion] + 38[darwin-kperf-events] + 39[darwin-kperf-sys] + 40[error-stack] + 41[hash-graph-test-data] 0 --> 11 1 --> 10 - 1 -.-> 40 + 1 -.-> 41 2 -.-> 3 2 --> 23 4 --> 8 @@ -60,25 +61,25 @@ graph TD 4 --> 13 4 --> 19 4 --> 28 - 4 --> 31 + 4 --> 32 5 --> 1 6 --> 7 - 6 --> 33 + 6 --> 34 8 -.-> 6 8 --> 15 - 8 --> 32 + 8 --> 33 9 --> 5 9 --> 14 - 9 --> 34 + 9 --> 35 10 --> 2 11 --> 4 - 12 --> 32 + 12 --> 33 13 --> 9 - 14 -.-> 40 - 15 -.-> 40 + 14 -.-> 41 + 15 -.-> 41 16 --> 20 17 --> 22 - 17 --> 39 + 17 --> 40 18 --> 2 18 -.-> 17 18 --> 17 @@ -89,24 +90,25 @@ graph TD 21 --> 18 23 -.-> 22 23 --> 22 - 23 --> 39 + 23 --> 40 24 -.-> 25 25 --> 28 - 25 --> 30 25 --> 31 - 25 --> 39 + 25 --> 32 + 25 --> 40 26 --> 2 26 --> 27 - 26 -.-> 36 + 26 --> 30 + 26 -.-> 37 28 --> 9 28 --> 29 29 -.-> 25 - 30 --> 29 - 31 --> 24 - 31 --> 26 - 33 --> 39 - 34 --> 1 - 35 --> 37 - 35 --> 38 - 36 --> 
35 - 40 --> 9 + 31 --> 29 + 32 --> 24 + 32 --> 26 + 34 --> 40 + 35 --> 1 + 36 --> 38 + 36 --> 39 + 37 --> 36 + 41 --> 9 diff --git a/libs/@local/graph/api/docs/dependency-diagram.mmd b/libs/@local/graph/api/docs/dependency-diagram.mmd index f4ec1edf964..c81612f0bd1 100644 --- a/libs/@local/graph/api/docs/dependency-diagram.mmd +++ b/libs/@local/graph/api/docs/dependency-diagram.mmd @@ -39,21 +39,22 @@ graph TD 27[hashql-diagnostics] 28[hashql-eval] 29[hashql-hir] - 30[hashql-mir] - 31[hashql-syntax-jexpr] - 32[hash-status] - 33[hash-telemetry] - 34[hash-temporal-client] - 35[darwin-kperf] - 36[darwin-kperf-criterion] - 37[darwin-kperf-events] - 38[darwin-kperf-sys] - 39[error-stack] - 40[hash-graph-benches] - 41[hash-graph-test-data] + 30[hashql-macros] + 31[hashql-mir] + 32[hashql-syntax-jexpr] + 33[hash-status] + 34[hash-telemetry] + 35[hash-temporal-client] + 36[darwin-kperf] + 37[darwin-kperf-criterion] + 38[darwin-kperf-events] + 39[darwin-kperf-sys] + 40[error-stack] + 41[hash-graph-benches] + 42[hash-graph-test-data] 0 --> 11 1 --> 10 - 1 -.-> 41 + 1 -.-> 42 2 -.-> 3 2 --> 23 4 --> 8 @@ -61,25 +62,25 @@ graph TD 4 --> 13 4 --> 19 4 --> 28 - 4 --> 31 + 4 --> 32 5 --> 1 6 --> 7 - 6 --> 33 + 6 --> 34 8 -.-> 6 8 --> 15 - 8 --> 32 + 8 --> 33 9 --> 5 9 --> 14 - 9 --> 34 + 9 --> 35 10 --> 2 11 --> 4 - 12 --> 32 + 12 --> 33 13 --> 9 - 14 -.-> 41 - 15 -.-> 41 + 14 -.-> 42 + 15 -.-> 42 16 --> 20 17 --> 22 - 17 --> 39 + 17 --> 40 18 --> 2 18 -.-> 17 18 --> 17 @@ -90,25 +91,26 @@ graph TD 21 --> 18 23 -.-> 22 23 --> 22 - 23 --> 39 + 23 --> 40 24 -.-> 25 25 --> 28 - 25 --> 30 25 --> 31 - 25 --> 39 + 25 --> 32 + 25 --> 40 26 --> 2 26 --> 27 - 26 -.-> 36 + 26 --> 30 + 26 -.-> 37 28 --> 9 28 --> 29 29 -.-> 25 - 30 --> 29 - 31 --> 24 - 31 --> 26 - 33 --> 39 - 34 --> 1 - 35 --> 37 - 35 --> 38 - 36 --> 35 - 40 -.-> 4 - 41 --> 9 + 31 --> 29 + 32 --> 24 + 32 --> 26 + 34 --> 40 + 35 --> 1 + 36 --> 38 + 36 --> 39 + 37 --> 36 + 41 -.-> 4 + 42 --> 9 diff --git 
a/libs/@local/graph/test-server/docs/dependency-diagram.mmd b/libs/@local/graph/test-server/docs/dependency-diagram.mmd index 52e1f52912c..87719d33cc2 100644 --- a/libs/@local/graph/test-server/docs/dependency-diagram.mmd +++ b/libs/@local/graph/test-server/docs/dependency-diagram.mmd @@ -39,20 +39,21 @@ graph TD 27[hashql-diagnostics] 28[hashql-eval] 29[hashql-hir] - 30[hashql-mir] - 31[hashql-syntax-jexpr] - 32[hash-status] - 33[hash-telemetry] - 34[hash-temporal-client] - 35[darwin-kperf] - 36[darwin-kperf-criterion] - 37[darwin-kperf-events] - 38[darwin-kperf-sys] - 39[error-stack] - 40[hash-graph-test-data] + 30[hashql-macros] + 31[hashql-mir] + 32[hashql-syntax-jexpr] + 33[hash-status] + 34[hash-telemetry] + 35[hash-temporal-client] + 36[darwin-kperf] + 37[darwin-kperf-criterion] + 38[darwin-kperf-events] + 39[darwin-kperf-sys] + 40[error-stack] + 41[hash-graph-test-data] 0 --> 11 1 --> 10 - 1 -.-> 40 + 1 -.-> 41 2 -.-> 3 2 --> 23 4 --> 8 @@ -60,25 +61,25 @@ graph TD 4 --> 13 4 --> 19 4 --> 28 - 4 --> 31 + 4 --> 32 5 --> 1 6 --> 7 - 6 --> 33 + 6 --> 34 8 -.-> 6 8 --> 15 - 8 --> 32 + 8 --> 33 9 --> 5 9 --> 14 - 9 --> 34 + 9 --> 35 10 --> 2 11 --> 4 - 12 --> 32 + 12 --> 33 13 --> 9 - 14 -.-> 40 - 15 -.-> 40 + 14 -.-> 41 + 15 -.-> 41 16 --> 20 17 --> 22 - 17 --> 39 + 17 --> 40 18 --> 2 18 -.-> 17 18 --> 17 @@ -89,24 +90,25 @@ graph TD 21 --> 18 23 -.-> 22 23 --> 22 - 23 --> 39 + 23 --> 40 24 -.-> 25 25 --> 28 - 25 --> 30 25 --> 31 - 25 --> 39 + 25 --> 32 + 25 --> 40 26 --> 2 26 --> 27 - 26 -.-> 36 + 26 --> 30 + 26 -.-> 37 28 --> 9 28 --> 29 29 -.-> 25 - 30 --> 29 - 31 --> 24 - 31 --> 26 - 33 --> 39 - 34 --> 1 - 35 --> 37 - 35 --> 38 - 36 --> 35 - 40 --> 9 + 31 --> 29 + 32 --> 24 + 32 --> 26 + 34 --> 40 + 35 --> 1 + 36 --> 38 + 36 --> 39 + 37 --> 36 + 41 --> 9 diff --git a/libs/@local/hashql/ast/docs/dependency-diagram.mmd b/libs/@local/hashql/ast/docs/dependency-diagram.mmd index 2d73a21e90c..c468334f01e 100644 --- 
a/libs/@local/hashql/ast/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/ast/docs/dependency-diagram.mmd @@ -27,50 +27,52 @@ graph TD 15[hashql-diagnostics] 16[hashql-eval] 17[hashql-hir] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 18[hashql-macros] + 19[hashql-mir] + 20[hashql-syntax-jexpr] + 21[hash-temporal-client] + 22[darwin-kperf] + 23[darwin-kperf-criterion] + 24[darwin-kperf-events] + 25[darwin-kperf-sys] + 26[error-stack] + 27[hash-graph-benches] + 28[hash-graph-test-data] 0 --> 8 1 --> 7 - 1 -.-> 27 + 1 -.-> 28 2 -.-> 3 2 --> 11 4 --> 16 - 4 --> 19 + 4 --> 20 5 --> 1 6 --> 5 6 --> 9 - 6 --> 20 + 6 --> 21 7 --> 2 8 --> 4 - 9 -.-> 27 + 9 -.-> 28 11 -.-> 10 11 --> 10 - 11 --> 25 + 11 --> 26 12 -.-> 13 13 --> 16 - 13 --> 18 13 --> 19 - 13 --> 25 + 13 --> 20 + 13 --> 26 14 --> 2 14 --> 15 - 14 -.-> 22 + 14 --> 18 + 14 -.-> 23 16 --> 6 16 --> 17 17 -.-> 13 - 18 --> 17 - 19 --> 12 - 19 --> 14 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 19 --> 17 + 20 --> 12 + 20 --> 14 + 21 --> 1 + 22 --> 24 + 22 --> 25 + 23 --> 22 + 27 -.-> 4 + 28 --> 6 diff --git a/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd b/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd index 9c1a9805c01..433bed9565b 100644 --- a/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/compiletest/docs/dependency-diagram.mmd @@ -27,50 +27,52 @@ graph TD 15[hashql-diagnostics] 16[hashql-eval] 17[hashql-hir] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 18[hashql-macros] + 19[hashql-mir] + 20[hashql-syntax-jexpr] + 21[hash-temporal-client] + 
22[darwin-kperf] + 23[darwin-kperf-criterion] + 24[darwin-kperf-events] + 25[darwin-kperf-sys] + 26[error-stack] + 27[hash-graph-benches] + 28[hash-graph-test-data] 0 --> 8 1 --> 7 - 1 -.-> 27 + 1 -.-> 28 2 -.-> 3 2 --> 11 4 --> 16 - 4 --> 19 + 4 --> 20 5 --> 1 6 --> 5 6 --> 9 - 6 --> 20 + 6 --> 21 7 --> 2 8 --> 4 - 9 -.-> 27 + 9 -.-> 28 11 -.-> 10 11 --> 10 - 11 --> 25 + 11 --> 26 12 -.-> 13 13 --> 16 - 13 --> 18 13 --> 19 - 13 --> 25 + 13 --> 20 + 13 --> 26 14 --> 2 14 --> 15 - 14 -.-> 22 + 14 --> 18 + 14 -.-> 23 16 --> 6 16 --> 17 17 -.-> 13 - 18 --> 17 - 19 --> 12 - 19 --> 14 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 19 --> 17 + 20 --> 12 + 20 --> 14 + 21 --> 1 + 22 --> 24 + 22 --> 25 + 23 --> 22 + 27 -.-> 4 + 28 --> 6 diff --git a/libs/@local/hashql/core/docs/dependency-diagram.mmd b/libs/@local/hashql/core/docs/dependency-diagram.mmd index 0c5f479e9a8..6615a2d475b 100644 --- a/libs/@local/hashql/core/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/core/docs/dependency-diagram.mmd @@ -22,37 +22,39 @@ graph TD 10[hashql-diagnostics] 11[hashql-eval] 12[hashql-hir] - 13[hashql-mir] - 14[hashql-syntax-jexpr] - 15[darwin-kperf] - 16[darwin-kperf-criterion] - 17[darwin-kperf-events] - 18[darwin-kperf-sys] - 19[error-stack] - 20[hash-graph-benches] + 13[hashql-macros] + 14[hashql-mir] + 15[hashql-syntax-jexpr] + 16[darwin-kperf] + 17[darwin-kperf-criterion] + 18[darwin-kperf-events] + 19[darwin-kperf-sys] + 20[error-stack] + 21[hash-graph-benches] 0 --> 4 1 -.-> 2 1 --> 6 3 --> 11 - 3 --> 14 + 3 --> 15 4 --> 3 6 -.-> 5 6 --> 5 - 6 --> 19 + 6 --> 20 7 -.-> 8 8 --> 11 - 8 --> 13 8 --> 14 - 8 --> 19 + 8 --> 15 + 8 --> 20 9 --> 1 9 --> 10 - 9 -.-> 16 + 9 --> 13 + 9 -.-> 17 11 --> 12 12 -.-> 8 - 13 --> 12 - 14 --> 7 - 14 --> 9 - 15 --> 17 - 15 --> 18 - 16 --> 15 - 20 -.-> 3 + 14 --> 12 + 15 --> 7 + 15 --> 9 + 16 --> 18 + 16 --> 19 + 17 --> 16 + 21 -.-> 3 diff --git a/libs/@local/hashql/core/package.json 
b/libs/@local/hashql/core/package.json index 43668aa22c1..e8b0af5a925 100644 --- a/libs/@local/hashql/core/package.json +++ b/libs/@local/hashql/core/package.json @@ -14,7 +14,8 @@ }, "dependencies": { "@rust/hash-codec": "workspace:*", - "@rust/hashql-diagnostics": "workspace:*" + "@rust/hashql-diagnostics": "workspace:*", + "@rust/hashql-macros": "workspace:*" }, "devDependencies": { "@rust/darwin-kperf-criterion": "workspace:*" diff --git a/libs/@local/hashql/core/src/lib.rs b/libs/@local/hashql/core/src/lib.rs index fff9663179e..ce3619b06cd 100644 --- a/libs/@local/hashql/core/src/lib.rs +++ b/libs/@local/hashql/core/src/lib.rs @@ -3,7 +3,6 @@ //! ## Workspace dependencies #![cfg_attr(doc, doc = simple_mermaid::mermaid!("../docs/dependency-diagram.mmd"))] #![expect(clippy::indexing_slicing)] -#![recursion_limit = "256"] #![feature( // Language Features arbitrary_self_types, diff --git a/libs/@local/hashql/eval/docs/dependency-diagram.mmd b/libs/@local/hashql/eval/docs/dependency-diagram.mmd index 81a250cb31f..0b1028cfe78 100644 --- a/libs/@local/hashql/eval/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/eval/docs/dependency-diagram.mmd @@ -27,50 +27,52 @@ graph TD 16[hashql-eval] class 16 root 17[hashql-hir] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 18[hashql-macros] + 19[hashql-mir] + 20[hashql-syntax-jexpr] + 21[hash-temporal-client] + 22[darwin-kperf] + 23[darwin-kperf-criterion] + 24[darwin-kperf-events] + 25[darwin-kperf-sys] + 26[error-stack] + 27[hash-graph-benches] + 28[hash-graph-test-data] 0 --> 8 1 --> 7 - 1 -.-> 27 + 1 -.-> 28 2 -.-> 3 2 --> 11 4 --> 16 - 4 --> 19 + 4 --> 20 5 --> 1 6 --> 5 6 --> 9 - 6 --> 20 + 6 --> 21 7 --> 2 8 --> 4 - 9 -.-> 27 + 9 -.-> 28 11 -.-> 10 11 --> 10 - 11 --> 25 + 11 --> 26 12 -.-> 13 13 --> 16 - 13 --> 18 13 --> 19 - 13 
--> 25 + 13 --> 20 + 13 --> 26 14 --> 2 14 --> 15 - 14 -.-> 22 + 14 --> 18 + 14 -.-> 23 16 --> 6 16 --> 17 17 -.-> 13 - 18 --> 17 - 19 --> 12 - 19 --> 14 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 19 --> 17 + 20 --> 12 + 20 --> 14 + 21 --> 1 + 22 --> 24 + 22 --> 25 + 23 --> 22 + 27 -.-> 4 + 28 --> 6 diff --git a/libs/@local/hashql/hir/docs/dependency-diagram.mmd b/libs/@local/hashql/hir/docs/dependency-diagram.mmd index 3bb3fb061a4..01d987d5bcb 100644 --- a/libs/@local/hashql/hir/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/hir/docs/dependency-diagram.mmd @@ -27,50 +27,52 @@ graph TD 16[hashql-eval] 17[hashql-hir] class 17 root - 18[hashql-mir] - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 18[hashql-macros] + 19[hashql-mir] + 20[hashql-syntax-jexpr] + 21[hash-temporal-client] + 22[darwin-kperf] + 23[darwin-kperf-criterion] + 24[darwin-kperf-events] + 25[darwin-kperf-sys] + 26[error-stack] + 27[hash-graph-benches] + 28[hash-graph-test-data] 0 --> 8 1 --> 7 - 1 -.-> 27 + 1 -.-> 28 2 -.-> 3 2 --> 11 4 --> 16 - 4 --> 19 + 4 --> 20 5 --> 1 6 --> 5 6 --> 9 - 6 --> 20 + 6 --> 21 7 --> 2 8 --> 4 - 9 -.-> 27 + 9 -.-> 28 11 -.-> 10 11 --> 10 - 11 --> 25 + 11 --> 26 12 -.-> 13 13 --> 16 - 13 --> 18 13 --> 19 - 13 --> 25 + 13 --> 20 + 13 --> 26 14 --> 2 14 --> 15 - 14 -.-> 22 + 14 --> 18 + 14 -.-> 23 16 --> 6 16 --> 17 17 -.-> 13 - 18 --> 17 - 19 --> 12 - 19 --> 14 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 19 --> 17 + 20 --> 12 + 20 --> 14 + 21 --> 1 + 22 --> 24 + 22 --> 25 + 23 --> 22 + 27 -.-> 4 + 28 --> 6 diff --git a/libs/@local/hashql/macros/package.json b/libs/@local/hashql/macros/package.json new file mode 100644 index 00000000000..fc812410670 --- /dev/null +++ b/libs/@local/hashql/macros/package.json @@ -0,0 +1,10 @@ +{ + "name": 
"@rust/hashql-macros", + "version": "0.0.0-private", + "private": true, + "license": "AGPL-3", + "scripts": { + "fix:clippy": "just clippy --fix", + "lint:clippy": "just clippy" + } +} diff --git a/libs/@local/hashql/mir/docs/dependency-diagram.mmd b/libs/@local/hashql/mir/docs/dependency-diagram.mmd index 1e4eaf1083c..604cd1e3edc 100644 --- a/libs/@local/hashql/mir/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/mir/docs/dependency-diagram.mmd @@ -26,51 +26,53 @@ graph TD 15[hashql-diagnostics] 16[hashql-eval] 17[hashql-hir] - 18[hashql-mir] - class 18 root - 19[hashql-syntax-jexpr] - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 18[hashql-macros] + 19[hashql-mir] + class 19 root + 20[hashql-syntax-jexpr] + 21[hash-temporal-client] + 22[darwin-kperf] + 23[darwin-kperf-criterion] + 24[darwin-kperf-events] + 25[darwin-kperf-sys] + 26[error-stack] + 27[hash-graph-benches] + 28[hash-graph-test-data] 0 --> 8 1 --> 7 - 1 -.-> 27 + 1 -.-> 28 2 -.-> 3 2 --> 11 4 --> 16 - 4 --> 19 + 4 --> 20 5 --> 1 6 --> 5 6 --> 9 - 6 --> 20 + 6 --> 21 7 --> 2 8 --> 4 - 9 -.-> 27 + 9 -.-> 28 11 -.-> 10 11 --> 10 - 11 --> 25 + 11 --> 26 12 -.-> 13 13 --> 16 - 13 --> 18 13 --> 19 - 13 --> 25 + 13 --> 20 + 13 --> 26 14 --> 2 14 --> 15 - 14 -.-> 22 + 14 --> 18 + 14 -.-> 23 16 --> 6 16 --> 17 17 -.-> 13 - 18 --> 17 - 19 --> 12 - 19 --> 14 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 19 --> 17 + 20 --> 12 + 20 --> 14 + 21 --> 1 + 22 --> 24 + 22 --> 25 + 23 --> 22 + 27 -.-> 4 + 28 --> 6 diff --git a/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd b/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd index 79502ac116d..ffdf9333745 100644 --- a/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd +++ b/libs/@local/hashql/syntax-jexpr/docs/dependency-diagram.mmd @@ -26,51 +26,53 @@ graph TD 
15[hashql-diagnostics] 16[hashql-eval] 17[hashql-hir] - 18[hashql-mir] - 19[hashql-syntax-jexpr] - class 19 root - 20[hash-temporal-client] - 21[darwin-kperf] - 22[darwin-kperf-criterion] - 23[darwin-kperf-events] - 24[darwin-kperf-sys] - 25[error-stack] - 26[hash-graph-benches] - 27[hash-graph-test-data] + 18[hashql-macros] + 19[hashql-mir] + 20[hashql-syntax-jexpr] + class 20 root + 21[hash-temporal-client] + 22[darwin-kperf] + 23[darwin-kperf-criterion] + 24[darwin-kperf-events] + 25[darwin-kperf-sys] + 26[error-stack] + 27[hash-graph-benches] + 28[hash-graph-test-data] 0 --> 8 1 --> 7 - 1 -.-> 27 + 1 -.-> 28 2 -.-> 3 2 --> 11 4 --> 16 - 4 --> 19 + 4 --> 20 5 --> 1 6 --> 5 6 --> 9 - 6 --> 20 + 6 --> 21 7 --> 2 8 --> 4 - 9 -.-> 27 + 9 -.-> 28 11 -.-> 10 11 --> 10 - 11 --> 25 + 11 --> 26 12 -.-> 13 13 --> 16 - 13 --> 18 13 --> 19 - 13 --> 25 + 13 --> 20 + 13 --> 26 14 --> 2 14 --> 15 - 14 -.-> 22 + 14 --> 18 + 14 -.-> 23 16 --> 6 16 --> 17 17 -.-> 13 - 18 --> 17 - 19 --> 12 - 19 --> 14 - 20 --> 1 - 21 --> 23 - 21 --> 24 - 22 --> 21 - 26 -.-> 4 - 27 --> 6 + 19 --> 17 + 20 --> 12 + 20 --> 14 + 21 --> 1 + 22 --> 24 + 22 --> 25 + 23 --> 22 + 27 -.-> 4 + 28 --> 6 diff --git a/yarn.lock b/yarn.lock index 9c1cc94cca7..2940b60674d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -14947,6 +14947,7 @@ __metadata: "@rust/darwin-kperf-criterion": "workspace:*" "@rust/hash-codec": "workspace:*" "@rust/hashql-diagnostics": "workspace:*" + "@rust/hashql-macros": "workspace:*" languageName: unknown linkType: soft @@ -14978,6 +14979,12 @@ __metadata: languageName: unknown linkType: soft +"@rust/hashql-macros@workspace:*, @rust/hashql-macros@workspace:libs/@local/hashql/macros": + version: 0.0.0-use.local + resolution: "@rust/hashql-macros@workspace:libs/@local/hashql/macros" + languageName: unknown + linkType: soft + "@rust/hashql-mir@workspace:*, @rust/hashql-mir@workspace:libs/@local/hashql/mir": version: 0.0.0-use.local resolution: "@rust/hashql-mir@workspace:libs/@local/hashql/mir" 
From 8cc43c5555f94caef4de9fbfc92c6a956efd59ef Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sat, 28 Feb 2026 00:43:31 +0100 Subject: [PATCH 12/15] fix: dependency nits --- Cargo.toml | 1 + libs/@local/hashql/macros/Cargo.toml | 13 ++++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 8aaf6b4a212..296526c7173 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -286,6 +286,7 @@ unicode-ident = { version = "1.0.22", default-features = fa unicode-normalization = { version = "0.1.25", default-features = false } unicode-properties = { version = "0.1.4", default-features = false } unicode-segmentation = { version = "1.12.0", default-features = false } +unsynn = { version = "0.3.0", default-features = false, features = ["proc_macro2"] } url = { version = "2.5.7", default-features = false } utoipa = { version = "4.2.3", default-features = false } uuid = { version = "1.18.1", default-features = false } diff --git a/libs/@local/hashql/macros/Cargo.toml b/libs/@local/hashql/macros/Cargo.toml index 19740e762f1..a601e45b9e3 100644 --- a/libs/@local/hashql/macros/Cargo.toml +++ b/libs/@local/hashql/macros/Cargo.toml @@ -13,6 +13,13 @@ proc-macro = true workspace = true [dependencies] -proc-macro2.workspace = true -quote = { workspace = true, features = ["proc-macro"] } -unsynn = { version = "0.3.0", default-features = false, features = ["proc_macro2"] } +# Public workspace dependencies + +# Public third-party dependencies + +# Private workspace dependencies + +# Private third-party dependencies +proc-macro2 = { workspace = true } +quote = { workspace = true, features = ["proc-macro"] } +unsynn = { workspace = true } From 84ae6e211c5187bdb9f4ee207d56b782fc761e9d Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sat, 28 Feb 2026 07:28:19 +0100 Subject: [PATCH 13/15] feat: move to derive macro --- libs/@local/hashql/core/src/id/mod.rs | 2 +- libs/@local/hashql/core/src/lib.rs | 2 - libs/@local/hashql/macros/src/grammar.rs | 41 
++++------ libs/@local/hashql/macros/src/id/attr.rs | 9 +-- libs/@local/hashql/macros/src/id/common.rs | 14 ++++ libs/@local/hashql/macros/src/id/enum.rs | 18 ++--- libs/@local/hashql/macros/src/id/mod.rs | 80 ++++++++++++++----- libs/@local/hashql/macros/src/id/struct.rs | 5 +- libs/@local/hashql/macros/src/lib.rs | 67 ++++++---------- .../hashql/mir/src/pass/execution/target.rs | 4 +- 10 files changed, 131 insertions(+), 111 deletions(-) diff --git a/libs/@local/hashql/core/src/id/mod.rs b/libs/@local/hashql/core/src/id/mod.rs index f00a6487116..9cfa413505c 100644 --- a/libs/@local/hashql/core/src/id/mod.rs +++ b/libs/@local/hashql/core/src/id/mod.rs @@ -13,6 +13,7 @@ use core::{ }; use ::core::sync::atomic; +pub use hashql_macros::{Id, define_id as newtype}; pub use self::{ array::IdArray, index::IntoSliceIndex, slice::IdSlice, union_find::IdUnionFind, vec::IdVec, @@ -432,7 +433,6 @@ macro_rules! newtype_collections { }; } -pub use hashql_macros::define_id as newtype; pub use newtype_collections; pub use newtype_counter; pub use newtype_producer; diff --git a/libs/@local/hashql/core/src/lib.rs b/libs/@local/hashql/core/src/lib.rs index ce3619b06cd..3599b4b8442 100644 --- a/libs/@local/hashql/core/src/lib.rs +++ b/libs/@local/hashql/core/src/lib.rs @@ -54,5 +54,3 @@ pub mod symbol; pub mod sync; pub mod r#type; pub mod value; - -pub use hashql_macros::id; diff --git a/libs/@local/hashql/macros/src/grammar.rs b/libs/@local/hashql/macros/src/grammar.rs index 5fed1c39f89..358f34c5b3f 100644 --- a/libs/@local/hashql/macros/src/grammar.rs +++ b/libs/@local/hashql/macros/src/grammar.rs @@ -5,27 +5,22 @@ use unsynn::{ }; keyword! { - /// The "pub" keyword. - pub KPub = ["pub"]; - /// The "struct" keyword. - pub KStruct = ["struct"]; - /// The "enum" keyword. - pub KEnum = ["enum"]; - /// The "in" keyword. 
- pub KIn = ["in"]; - pub KId = ["id"]; - pub KDerive = ["derive"]; - pub KDisplay = ["display"]; - pub KStep = ["Step"]; - pub KIs = ["is"]; - pub KCrate = ["crate"]; - pub KConst = ["const"]; - pub KU8 = ["u8"]; - pub KU16 = ["u16"]; - pub KU32 = ["u32"]; - pub KU64 = ["u64"]; - pub KU128 = ["u128"]; - pub KUsize = ["usize"]; + pub KPub = "pub"; + pub KStruct = "struct"; + pub KEnum = "enum"; + pub KIn = "in"; + pub KId = "id"; + pub KDerive = "derive"; + pub KDisplay = "display"; + pub KStep = "Step"; + pub KIs = "is"; + pub KCrate = "crate"; + pub KConst = "const"; + pub KU8 = "u8"; + pub KU16 = "u16"; + pub KU32 = "u32"; + pub KU64 = "u64"; + pub KU128 = "u128"; } pub(crate) type VerbatimUntil = Many, AngleTokenTree>>; @@ -41,11 +36,9 @@ unsynn! { pub Either, AngleTokenTree>>, Gt>, TokenTree>, ); - /// Represents an attribute annotation on a field, typically in the form `#[attr]`. + /// An attribute in the form `#[...]`. pub struct Attribute { - /// The pound sign preceding the attribute. pub _pound: Pound, - /// The content of the attribute enclosed in square brackets. 
pub body: BracketGroupContaining, } } diff --git a/libs/@local/hashql/macros/src/id/attr.rs b/libs/@local/hashql/macros/src/id/attr.rs index 2ef9b682ca2..bac41370edd 100644 --- a/libs/@local/hashql/macros/src/id/attr.rs +++ b/libs/@local/hashql/macros/src/id/attr.rs @@ -54,10 +54,7 @@ impl Attributes { } } - pub(crate) fn parse( - additional: Vec, - attributes: Vec>, - ) -> Self { + pub(crate) fn parse(attributes: Vec>) -> Self { let mut this = Self { krate: quote!(::hashql_core), r#const: TokenStream::new(), @@ -66,10 +63,6 @@ impl Attributes { extra: TokenStream::new(), }; - for attribute in additional { - this.parse_attribute(attribute); - } - for attribute in attributes { match attribute.body.content { grammar::AttributeBody::Any(_) => { diff --git a/libs/@local/hashql/macros/src/id/common.rs b/libs/@local/hashql/macros/src/id/common.rs index 6a3249c023f..66d9418476e 100644 --- a/libs/@local/hashql/macros/src/id/common.rs +++ b/libs/@local/hashql/macros/src/id/common.rs @@ -1,3 +1,5 @@ +use core::fmt; + use proc_macro2::{Ident, Span, TokenStream}; use quote::ToTokens; @@ -21,6 +23,18 @@ impl IntegerScalar { } } +impl fmt::Display for IntegerScalar { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.write_str(match self { + Self::U8 => "u8", + Self::U16 => "u16", + Self::U32 => "u32", + Self::U64 => "u64", + Self::U128 => "u128", + }) + } +} + impl ToTokens for IntegerScalar { fn to_tokens(&self, tokens: &mut TokenStream) { let ident = match self { diff --git a/libs/@local/hashql/macros/src/id/enum.rs b/libs/@local/hashql/macros/src/id/enum.rs index d199159b4bd..730efea41c5 100644 --- a/libs/@local/hashql/macros/src/id/enum.rs +++ b/libs/@local/hashql/macros/src/id/enum.rs @@ -10,7 +10,6 @@ use crate::id::{ #[expect(clippy::too_many_lines, reason = "macro")] pub(super) fn expand_enum( - additional_attributes: Vec, grammar::ParsedEnum { attributes, visibility, @@ -26,8 +25,8 @@ pub(super) fn expand_enum( r#const: konst, display, traits, - 
extra, - } = Attributes::parse(additional_attributes, attributes); + extra: _, + } = Attributes::parse(attributes); let vis = visibility.into_token_stream(); let mut variants: Vec<_> = Vec::new(); @@ -47,14 +46,13 @@ pub(super) fn expand_enum( .iter() .map(|variant| quote_spanned!(variant.span() => Self::#variant)); - let body = body.to_token_stream(); - - // 1. Enum definition + // 1. Size assertion: ensures the enum has the expected repr + let size_message = format!("expected `{name}` to be `{backing}`-sized"); output.extend(quote! { - #extra - #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - #[repr(#backing)] - #vis enum #name #body + const _: () = assert!( + ::core::mem::size_of::<#name>() == ::core::mem::size_of::<#backing>(), + #size_message + ); }); // 2. Inherent impl diff --git a/libs/@local/hashql/macros/src/id/mod.rs b/libs/@local/hashql/macros/src/id/mod.rs index 79f4c2246ad..0c4b767dbb5 100644 --- a/libs/@local/hashql/macros/src/id/mod.rs +++ b/libs/@local/hashql/macros/src/id/mod.rs @@ -25,6 +25,7 @@ mod grammar { pub(super) type AttributeIdBody = CommaDelimitedVec; unsynn! { + /// Content of an `#[...]` attribute: either `#[id(...)]` or any other attribute. pub(super) enum AttributeBody { Id { _id: KId, @@ -34,29 +35,36 @@ mod grammar { Any(Vec) } + /// Traits that can appear inside `#[id(derive(...))]`. pub(super) enum IdDerive { Step(KStep) } + /// The value after `display =`: either `!` (suppress) or a format string. pub(super) enum IdDisplay { None(Bang), Format(TokenTree) } + /// A single key-value entry inside `#[id(...)]`. pub(super) enum IdAttribute { + /// `crate = path`: overrides the path to `hashql_core` in generated code. Crate { _crate: KCrate, _eq: Assign, path: ModPath }, + /// `const`: makes generated trait impl blocks const. Const { _const: KConst }, + /// `derive(Step, ...)`: generates additional trait implementations. 
Derive { _derive: KDerive, traits: ParenthesisGroupContaining> }, + /// `display = "format"` or `display = !`: controls `Display` generation. Display { _display: KDisplay, _eq: Assign, @@ -65,11 +73,13 @@ mod grammar { } } + /// The range operator in `start..end` or `start..=end`. pub(super) enum RangeOp { Inclusive(DotDotEq), Exclusive(DotDot) } + /// The backing integer type in a struct body (`u8`, `u16`, ...). pub(super) enum StructScalar { U8(KU8), U16(KU16), @@ -78,6 +88,7 @@ mod grammar { U128(KU128), } + /// The parenthesized body of a struct: `(u32 is 0..=MAX)`. pub(super) struct StructBody { pub r#type: StructScalar, pub _is: KIs, @@ -87,6 +98,7 @@ mod grammar { pub end: Vec } + /// A complete struct definition for `define_id!`. pub(super) struct ParsedStruct { pub attributes: Vec>, pub visibility: Option, @@ -98,6 +110,7 @@ mod grammar { pub body: ParenthesisGroupContaining } + /// A complete enum definition for `#[derive(Id)]`. pub(super) struct ParsedEnum { pub attributes: Vec>, pub visibility: Option, @@ -109,14 +122,14 @@ mod grammar { pub body: BraceGroupContaining> } - /// Represents a variant of an enum, including the optional discriminant value + /// A single unit variant with optional attributes. pub(super) struct UnitEnumVariant { - /// Attributes applied to the variant. pub attributes: Vec>>, - /// The name of the variant. pub name: Ident, } + /// Dispatches between struct and enum so each entry point can reject + /// the wrong shape with a helpful error message. pub(super) enum Parsed { Struct(ParsedStruct), Enum(ParsedEnum) @@ -124,8 +137,9 @@ mod grammar { } } -pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { - let (attributes, parsed) = match parse(attr, item) { +/// Entry point for the `#[derive(Id)]` derive macro (enum only). 
+pub(crate) fn expand_derive(item: TokenStream) -> TokenStream { + let parsed = match parse(item) { Ok(parsed) => parsed, Err(error) => { @@ -135,28 +149,58 @@ pub(crate) fn expand(attr: TokenStream, item: TokenStream) -> TokenStream { return TokenStream::new(); } - // Unable to report a useful error (at a position) let message = error.to_string(); return quote!(compile_error!(#message)); } }; match parsed { - grammar::Parsed::Struct(parsed) => expand_struct(attributes, parsed), - grammar::Parsed::Enum(parsed) => expand_enum(attributes, parsed), + grammar::Parsed::Enum(parsed) => expand_enum(parsed), + grammar::Parsed::Struct(parsed) => { + emit_error( + AsRef::::as_ref(&parsed._struct) + .span() + .unwrap(), + "use `define_id!` for struct Id types; `#[derive(Id)]` only supports enums", + ); + TokenStream::new() + } } } -#[expect(clippy::result_large_err)] -fn parse( - attr: TokenStream, - item: TokenStream, -) -> Result<(Vec, grammar::Parsed), unsynn::Error> { - let mut attr_tokens = attr.to_token_iter(); - let mut item_tokens = item.to_token_iter(); +/// Entry point for the `define_id!` function-like macro (struct only). 
+pub(crate) fn expand_define(item: TokenStream) -> TokenStream { + let parsed = match parse(item) { + Ok(parsed) => parsed, + + Err(error) => { + if let Some(token) = error.failed_at() { + emit_error(token.span().unwrap(), error); + + return TokenStream::new(); + } - let additional = grammar::AttributeIdBody::parse_all(&mut attr_tokens)?; - let parsed = grammar::Parsed::parse_all(&mut item_tokens)?; + let message = error.to_string(); + return quote!(compile_error!(#message)); + } + }; + + match parsed { + grammar::Parsed::Struct(parsed) => expand_struct(parsed), + grammar::Parsed::Enum(parsed) => { + emit_error( + AsRef::::as_ref(&parsed._enum) + .span() + .unwrap(), + "use `#[derive(Id)]` for enum Id types; `define_id!` only supports structs", + ); + TokenStream::new() + } + } +} - Ok((additional.into(), parsed)) +#[expect(clippy::result_large_err)] +fn parse(item: TokenStream) -> Result { + let mut tokens = item.to_token_iter(); + grammar::Parsed::parse_all(&mut tokens) } diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index 41e99f7c995..e70599c5b94 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -102,8 +102,7 @@ impl From for Constraint { } #[expect(clippy::too_many_lines, reason = "macro")] -pub(crate) fn expand_struct( - additional_attributes: Vec, +pub(super) fn expand_struct( grammar::ParsedStruct { attributes, visibility, @@ -119,7 +118,7 @@ pub(crate) fn expand_struct( display, traits, extra, - } = Attributes::parse(additional_attributes, attributes); + } = Attributes::parse(attributes); let vis = visibility.into_token_stream(); let int = body.content.r#type.to_token_stream(); diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs index 9ef507ef2eb..2c3cc5159c4 100644 --- a/libs/@local/hashql/macros/src/lib.rs +++ b/libs/@local/hashql/macros/src/lib.rs @@ -11,18 +11,19 @@ use core::fmt::Display; use 
proc_macro::{Diagnostic, Level, Span, TokenStream}; -/// Defines an enum as an [`Id`] type. +/// Derives [`Id`] trait implementations for an enum with unit variants. /// -/// This attribute macro works on enums with unit variants, generating sequential -/// discriminants, conversion methods, and trait implementations. +/// Generates sequential discriminants, conversion methods, and trait +/// implementations. For struct-based Id types, use [`define_id!`] instead. /// -/// For struct-based Id types, use [`define_id!`] instead, since attribute macros -/// require syntactically valid Rust on the annotated item. +/// The enum must have `#[repr(u8)]` (or the appropriate integer type for the +/// variant count) and derive the standard traits required by [`Id`]. /// /// # Example /// /// ```ignore -/// #[hashql_macros::id] +/// #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, hashql_macros::Id)] +/// #[repr(u8)] /// pub enum TargetId { /// Interpreter, /// Postgres, @@ -30,31 +31,32 @@ use proc_macro::{Diagnostic, Level, Span, TokenStream}; /// } /// ``` /// +/// By default, a [`Display`] implementation is generated using lowercased +/// variant names. 
+/// /// # Attributes /// -/// Attributes can be passed either as arguments to `#[id(...)]` or as a -/// separate `#[id(...)]` attribute on the item: +/// Configuration is passed via `#[id(...)]` helper attributes: /// /// - `crate = path` — path to the `hashql_core` crate (default: `::hashql_core`) /// - `const` — add `const` to trait impl blocks /// - `derive(Step)` — implement [`core::iter::Step`] /// - `display = "format"` — implement [`Display`] with a format string -/// - `display = "auto"` — implement [`Display`] using lowercased variant names /// - `display = !` — suppress the [`Display`] implementation -#[proc_macro_attribute] -pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { - id::expand(attr.into(), item.into()).into() +#[proc_macro_derive(Id, attributes(id))] +pub fn derive_id(item: TokenStream) -> TokenStream { + id::expand_derive(item.into()).into() } -/// Defines a type as an [`Id`]. +/// Defines a struct as an [`Id`] type. /// -/// This is a function-like macro that supports both struct and enum shapes. -/// Struct-based Id types must use this macro because their syntax (`u32 is 0..=MAX`) -/// is not valid Rust, which precludes use of the `#[id]` attribute macro. +/// Creates a newtype wrapper around an integer with a valid range. This is a +/// function-like macro because the struct body syntax (`u32 is 0..=MAX`) is not +/// valid Rust. /// -/// # Struct +/// For enum Id types, use `#[derive(Id)]` instead. /// -/// Creates a newtype wrapper around an integer with a valid range: +/// # Example /// /// ```ignore /// define_id! { @@ -69,22 +71,6 @@ pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { /// The range bound determines valid values. Inclusive (`..=`) and exclusive (`..`) /// ranges are both supported. /// -/// # Enum -/// -/// Creates an enum with sequential discriminants: -/// -/// ```ignore -/// define_id! 
{ -/// pub enum TargetId { -/// Interpreter, -/// Postgres, -/// Embedding, -/// } -/// } -/// ``` -/// -/// The backing integer type is inferred from the number of variants. -/// /// # Attributes /// /// Placed inside an `#[id(...)]` annotation on the item: @@ -93,25 +79,20 @@ pub fn id(attr: TokenStream, item: TokenStream) -> TokenStream { /// - `const` — add `const` to trait impl blocks /// - `derive(Step)` — implement [`core::iter::Step`] /// - `display = "format"` — implement [`Display`] with a format string -/// - `display = "auto"` — implement [`Display`] using the inner value (struct) or lowercased -/// variant names (enum) /// - `display = !` — suppress the [`Display`] implementation /// +/// By default, a [`Display`] implementation is generated using the inner value. +/// /// # Generated items /// -/// For both shapes, the macro generates: /// - [`Id`] trait implementation /// - [`HasId`] trait implementation /// - [`TryFrom`], [`TryFrom`], [`TryFrom`] implementations /// - [`Debug`] and (by default) [`Display`] implementations -/// -/// Struct-specific: `new`, `new_unchecked` constructors. -/// -/// Enum-specific: `VARIANT_COUNT`, `all`, `try_from_discriminant`, -/// `from_discriminant`, `from_discriminant_unchecked`, `into_discriminant`. +/// - `new`, `new_unchecked` constructors #[proc_macro] pub fn define_id(item: TokenStream) -> TokenStream { - id::expand(TokenStream::new().into(), item.into()).into() + id::expand_define(item.into()).into() } /// Generates a pre-interned symbol table. diff --git a/libs/@local/hashql/mir/src/pass/execution/target.rs b/libs/@local/hashql/mir/src/pass/execution/target.rs index 706fd8c2a9b..c7c9a50ab06 100644 --- a/libs/@local/hashql/mir/src/pass/execution/target.rs +++ b/libs/@local/hashql/mir/src/pass/execution/target.rs @@ -1,4 +1,4 @@ -use hashql_core::id::{IdArray, bit_vec::FiniteBitSet}; +use hashql_core::id::{Id, IdArray, bit_vec::FiniteBitSet}; /// Execution backend that a basic block can be assigned to. 
/// @@ -9,7 +9,7 @@ use hashql_core::id::{IdArray, bit_vec::FiniteBitSet}; /// The discriminant order determines iteration order in [`TargetId::all`] and affects cost /// estimation during placement. The interpreter is evaluated last so it can incorporate traversal /// costs computed by the other backends. -#[hashql_core::id] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Id)] pub enum TargetId { /// In-process evaluator that supports all MIR operations. /// From d0e655c8e849a4b0120c2adc6c2ec5dcb46e7235 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sat, 28 Feb 2026 07:53:12 +0100 Subject: [PATCH 14/15] chore: address feedback --- libs/@local/hashql/core/benches/bit_matrix.rs | 2 +- libs/@local/hashql/core/src/graph/linked.rs | 2 +- libs/@local/hashql/core/src/id/array.rs | 24 +++++++++---------- libs/@local/hashql/core/src/id/mod.rs | 12 +++++----- libs/@local/hashql/core/src/id/slice.rs | 10 ++++---- libs/@local/hashql/core/src/id/vec.rs | 18 +++++++------- libs/@local/hashql/core/src/intern/map.rs | 20 ++++++++-------- libs/@local/hashql/core/src/symbol/lookup.rs | 14 +++++------ libs/@local/hashql/macros/src/id/struct.rs | 20 ++++++++++++++-- libs/@local/hashql/macros/src/lib.rs | 10 +++++--- 10 files changed, 76 insertions(+), 56 deletions(-) diff --git a/libs/@local/hashql/core/benches/bit_matrix.rs b/libs/@local/hashql/core/benches/bit_matrix.rs index 651ab39ff93..4e95d82dc8d 100644 --- a/libs/@local/hashql/core/benches/bit_matrix.rs +++ b/libs/@local/hashql/core/benches/bit_matrix.rs @@ -10,7 +10,7 @@ use hashql_core::id::{ newtype, }; -newtype!(struct BenchId(u32 is 0..=u32::MAX)); +newtype!(struct BenchId(u64 is 0..=u64::MAX)); // ============================================================================= // Dense BitMatrix diff --git a/libs/@local/hashql/core/src/graph/linked.rs b/libs/@local/hashql/core/src/graph/linked.rs index 9c779ff5786..569139dbb00 100644 --- a/libs/@local/hashql/core/src/graph/linked.rs +++ 
b/libs/@local/hashql/core/src/graph/linked.rs @@ -319,7 +319,7 @@ impl LinkedGraph { /// # use hashql_core::graph::LinkedGraph; /// # use hashql_core::id::{Id, IdVec}; /// # - /// # hashql_core::id::newtype!(struct MyId(usize is 0..=usize::MAX)); + /// # hashql_core::id::newtype!(struct MyId(u32 is 0..=u32::MAX)); /// # /// let mut items: IdVec = IdVec::new(); /// items.push("first"); diff --git a/libs/@local/hashql/core/src/id/array.rs b/libs/@local/hashql/core/src/id/array.rs index b075621fadb..5a15325251c 100644 --- a/libs/@local/hashql/core/src/id/array.rs +++ b/libs/@local/hashql/core/src/id/array.rs @@ -19,7 +19,7 @@ use super::{Id, IdSlice}; /// # Examples /// /// ``` -/// # use hashql_core::{id::{IdArray, Id as _}, newtype}; +/// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct TargetId(u32 is 0..=3)); /// // Create an array where each element is initialized based on its index /// let costs = IdArray::::from_fn(|id| id.as_u32() * 10); @@ -41,7 +41,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let array = IdArray::::from_raw(["a", "b", "c"]); /// assert_eq!(array[SlotId::new(1)], "b"); @@ -59,7 +59,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let array = IdArray::::from_raw([1, 2, 3]); /// let raw: [i32; 3] = array.into_raw(); @@ -82,7 +82,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct NodeId(u32 is 0..=100)); /// // Create an array where each element equals its index squared /// let squares = IdArray::::from_fn(|id| id.as_u32() * id.as_u32()); @@ -101,7 +101,7 @@ 
impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=4)); /// let array = IdArray::::from_elem(42); /// @@ -121,7 +121,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let array = /// IdArray::::from_raw(["a".to_string(), "b".to_string(), "c".to_string()]); @@ -138,7 +138,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let mut array = IdArray::::from_raw([1, 2, 3]); /// for elem in array.each_mut().into_raw() { @@ -157,7 +157,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let array = IdArray::::from_raw([1, 2, 3]); /// let doubled: IdArray = array.map(|x| x * 2); @@ -175,7 +175,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=100)); /// let array = IdArray::::from_raw([10, 20, 30]); /// let indexed: IdArray = @@ -202,7 +202,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let array = IdArray::::from_raw([1, 2, 3]); /// let slice = array.as_slice(); @@ -220,7 +220,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, 
newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let mut array = IdArray::::from_raw([1, 2, 3]); /// array.as_mut_slice()[SlotId::new(1)] = 42; @@ -239,7 +239,7 @@ impl IdArray { /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdArray, Id as _}, newtype}; + /// # use hashql_core::id::{IdArray, Id as _, newtype}; /// # newtype!(struct SlotId(u32 is 0..=2)); /// let array = IdArray::::from_raw(["a", "b", "c"]); /// let pairs: Vec<_> = array.into_iter_enumerated().collect(); diff --git a/libs/@local/hashql/core/src/id/mod.rs b/libs/@local/hashql/core/src/id/mod.rs index 9cfa413505c..3a500e8429a 100644 --- a/libs/@local/hashql/core/src/id/mod.rs +++ b/libs/@local/hashql/core/src/id/mod.rs @@ -166,7 +166,7 @@ pub trait Id: /// # Examples /// /// ``` -/// # use hashql_core::{id::{HasId, Id}, newtype}; +/// # use hashql_core::id::{HasId, Id, newtype}; /// # newtype!(struct UserId(u32 is 0..=100)); /// struct User { /// id: UserId, @@ -219,7 +219,7 @@ where /// # Examples /// /// ``` -/// # use hashql_core::{id::IdProducer, newtype}; +/// # use hashql_core::id::{IdProducer, newtype}; /// # newtype!(struct NodeId(u32 is 0..=1000)); /// let producer = IdProducer::::new(); /// let id1 = producer.next(); @@ -280,7 +280,7 @@ impl Default for IdProducer { /// # Examples /// /// ``` -/// # use hashql_core::{id::IdCounter, newtype}; +/// # use hashql_core::id::{IdCounter, newtype}; /// # newtype!(struct NodeId(u32 is 0..=1000)); /// let mut counter = IdCounter::::new(); /// let id1 = counter.next(); @@ -364,7 +364,7 @@ where /// # Examples /// /// ``` -/// # use hashql_core::{newtype, newtype_producer}; +/// # use hashql_core::id::{newtype, newtype_producer}; /// # newtype!(struct NodeId(u32 is 0..=1000)); /// newtype_producer!(pub struct NodeIdProducer(NodeId)); /// ``` @@ -380,7 +380,7 @@ macro_rules! 
newtype_producer { /// # Examples /// /// ``` -/// # use hashql_core::{newtype, newtype_counter}; +/// # use hashql_core::id::{newtype, newtype_counter}; /// # newtype!(struct NodeId(u32 is 0..=1000)); /// newtype_counter!(pub struct NodeIdCounter(NodeId)); /// ``` @@ -413,7 +413,7 @@ macro_rules! newtype_counter { /// ``` /// # #![feature(allocator_api, macro_metavar_expr_concat)] /// # extern crate alloc; -/// # use hashql_core::{newtype, newtype_collections}; +/// # use hashql_core::id::{newtype, newtype_collections}; /// # newtype!(struct NodeId(u32 is 0..=1000)); /// newtype_collections!(pub type Node* from NodeId); /// // Creates: NodeSlice, NodeVec, NodeUnionFind, NodeSet, NodeMap, etc. diff --git a/libs/@local/hashql/core/src/id/slice.rs b/libs/@local/hashql/core/src/id/slice.rs index 6003f9b0535..2f560b7cf5f 100644 --- a/libs/@local/hashql/core/src/id/slice.rs +++ b/libs/@local/hashql/core/src/id/slice.rs @@ -20,7 +20,7 @@ use super::{Id, index::IntoSliceIndex, vec::IdVec}; /// # Examples /// /// ``` -/// # use hashql_core::{id::{IdSlice, Id as _}, newtype}; +/// # use hashql_core::id::{IdSlice, Id as _, newtype}; /// # newtype!(struct UserId(u32 is 0..=0xFFFF_FF00)); /// let data = [10, 20, 30]; /// let slice = IdSlice::::from_raw(&data); @@ -303,7 +303,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let mut vec = IdVec::>::new(); /// vec.insert(MyId::from_usize(0), "hello".to_string()); @@ -320,7 +320,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let mut vec = IdVec::>::new(); /// vec.insert(MyId::from_usize(0), "hello".to_string()); @@ -338,7 +338,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, 
newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let mut vec = IdVec::>::new(); /// vec.insert(MyId::from_usize(0), "hello".to_string()); @@ -358,7 +358,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let mut vec = IdVec::>::new(); /// vec.insert(MyId::from_usize(0), "hello".to_string()); diff --git a/libs/@local/hashql/core/src/id/vec.rs b/libs/@local/hashql/core/src/id/vec.rs index 221d06fe25c..5bed2f54ad0 100644 --- a/libs/@local/hashql/core/src/id/vec.rs +++ b/libs/@local/hashql/core/src/id/vec.rs @@ -23,7 +23,7 @@ use crate::heap::{FromIteratorIn, TryCloneIn}; /// # Examples /// /// ``` -/// # use hashql_core::{id::{IdVec, Id as _}, newtype}; +/// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct UserId(u32 is 0..=0xFFFF_FF00)); /// let mut users = IdVec::::new(); /// let user_id = users.push("Alice".to_string()); @@ -75,7 +75,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=100)); /// let vec = IdVec::::from_elem(42, 5); /// assert_eq!(vec.len(), 5); @@ -165,7 +165,7 @@ where /// /// ``` /// # #![feature(allocator_api)] - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=100)); /// use std::alloc::Global; /// let vec = IdVec::::from_elem_in(42, 5, Global); @@ -188,7 +188,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=100)); /// let domain = IdVec::::from_elem("x".to_string(), 3); /// let vec = IdVec::::from_domain(0, &domain); @@ 
-288,7 +288,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let mut vec = IdVec::::new(); /// let id = vec.push("hello".to_string()); @@ -308,7 +308,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=100)); /// let mut vec = IdVec::::new(); /// let id = vec.push_with(|id| format!("item_{}", id.as_u32())); @@ -353,7 +353,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=100)); /// let mut vec = IdVec::::new(); /// let value = vec.fill_until(MyId::new(5), || 0); @@ -442,7 +442,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let mut vec = IdVec::>::new(); /// vec.insert(MyId::from_usize(5), "hello".to_string()); @@ -463,7 +463,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{id::{IdVec, Id as _}, newtype}; + /// # use hashql_core::id::{IdVec, Id as _, newtype}; /// # newtype!(struct MyId(u32 is 0..=100)); /// let mut vec = IdVec::>::new(); /// let value = vec.get_or_insert_with(MyId::new(2), || "hello".to_string()); diff --git a/libs/@local/hashql/core/src/intern/map.rs b/libs/@local/hashql/core/src/intern/map.rs index 41e2a0e8e39..919557a0c15 100644 --- a/libs/@local/hashql/core/src/intern/map.rs +++ b/libs/@local/hashql/core/src/intern/map.rs @@ -149,7 +149,7 @@ where /// # Examples /// /// ``` -/// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; +/// # use hashql_core::{heap::Heap, intern::{InternMap, 
Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct ValueId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct Value { @@ -239,7 +239,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct TypeId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct TypeInfo { @@ -270,7 +270,7 @@ where /// Use case for compiler type systems: /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct TypeId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] /// # enum PrimitiveType { I32, I64, F32, F64, Bool, String } @@ -392,7 +392,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct ValueId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct Value { @@ -434,7 +434,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned, Provisioned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned, Provisioned}, id::{HasId, Id, newtype}}; /// # newtype!(struct ValueId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct Value { @@ -523,7 +523,7 @@ where /// Creating a self-referential linked list node: /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned, 
Provisioned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned, Provisioned}, id::{HasId, Id, newtype}}; /// # newtype!(struct NodeId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] /// # struct PartialNode { value: i32, next: Option } @@ -583,7 +583,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct ValueId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct Value { @@ -628,7 +628,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct ValueId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct Value { @@ -672,7 +672,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct ValueId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct Value { @@ -726,7 +726,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id}, newtype}; + /// # use hashql_core::{heap::Heap, intern::{InternMap, Decompose, Interned}, id::{HasId, Id, newtype}}; /// # newtype!(struct ValueId(u32 is 0..=0xFFFF_FF00)); /// # #[derive(Debug, PartialEq, Eq, Hash)] /// # struct Value { diff --git a/libs/@local/hashql/core/src/symbol/lookup.rs b/libs/@local/hashql/core/src/symbol/lookup.rs index 
61c6360b0b4..5b9abc41e28 100644 --- a/libs/@local/hashql/core/src/symbol/lookup.rs +++ b/libs/@local/hashql/core/src/symbol/lookup.rs @@ -45,7 +45,7 @@ enum SymbolLookupInner<'heap, I> { /// # Examples /// /// ``` -/// # use hashql_core::{heap::Heap, symbol::SymbolLookup, newtype, id::Id as _}; +/// # use hashql_core::{heap::Heap, symbol::SymbolLookup, id::{Id as _, newtype}}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// # let mut heap = Heap::new(); /// # let symbol = heap.intern_symbol("example"); @@ -86,7 +86,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{symbol::SymbolLookup, newtype}; + /// # use hashql_core::{symbol::SymbolLookup, id::newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let table = SymbolLookup::::dense(); /// // Insertions must be sequential: 0, 1, 2, ... @@ -107,7 +107,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{symbol::SymbolLookup, newtype}; + /// # use hashql_core::{symbol::SymbolLookup, id::newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let table = SymbolLookup::::gapped(); /// // Insertions can have gaps: 0, 5, 3, 10, ... @@ -127,7 +127,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{symbol::SymbolLookup, newtype}; + /// # use hashql_core::{symbol::SymbolLookup, id::newtype}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// let table = SymbolLookup::::sparse(); /// // Insertions can be in any order: 100, 5, 1000, ... 
@@ -155,7 +155,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, symbol::SymbolLookup, newtype, id::Id as _}; + /// # use hashql_core::{heap::Heap, symbol::SymbolLookup, id::{Id as _, newtype}}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// # let mut heap = Heap::new(); /// # let symbol = heap.intern_symbol("example"); @@ -167,7 +167,7 @@ where /// Non-sequential insertions will panic in dense tables: /// /// ```should_panic - /// # use hashql_core::{heap::Heap, symbol::SymbolLookup, newtype, id::Id as _}; + /// # use hashql_core::{heap::Heap, symbol::SymbolLookup, id::{Id as _, newtype}}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// # let mut heap = Heap::new(); /// # let symbol = heap.intern_symbol("example"); @@ -203,7 +203,7 @@ where /// # Examples /// /// ``` - /// # use hashql_core::{heap::Heap, symbol::SymbolLookup, newtype, id::Id as _}; + /// # use hashql_core::{heap::Heap, symbol::SymbolLookup, id::{Id as _, newtype}}; /// # newtype!(struct MyId(u32 is 0..=0xFFFF_FF00)); /// # let mut heap = Heap::new(); /// # let symbol = heap.intern_symbol("example"); diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index e70599c5b94..717012c8d89 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -135,6 +135,20 @@ pub(super) fn expand_struct( let min = &constraint.min; let max = &constraint.max; + let max_value = match constraint.kind { + RangeKind::Inclusive => quote! { #max }, + RangeKind::Exclusive => quote! { #max - 1 }, + }; + + let range_assertion = match constraint.kind { + RangeKind::Inclusive => quote! { + const _: () = assert!((#min as #scalar) <= (#max as #scalar), "inclusive range requires min <= max"); + }, + RangeKind::Exclusive => quote! 
{ + const _: () = assert!((#min as #scalar) < (#max as #scalar), "exclusive range requires min < max"); + }, + }; + let range_end = match constraint.kind { RangeKind::Inclusive => format!("{max}]"), RangeKind::Exclusive => format!("{max})"), @@ -179,11 +193,13 @@ pub(super) fn expand_struct( } } + #range_assertion + #[automatically_derived] #[expect(clippy::cast_possible_truncation, clippy::cast_lossless)] impl #konst #krate::id::Id for #name { const MIN: Self = Self::new(#min); - const MAX: Self = Self::new(#max); + const MAX: Self = Self::new(#max_value); fn from_u32(value: u32) -> Self { #u32_assertion @@ -270,7 +286,7 @@ pub(super) fn expand_struct( ::core::result::Result::Err(#krate::id::IdError::OutOfRange { value: value as u64, min: #min as u64, - max: #max as u64, + max: (#max_value) as u64, }) } } diff --git a/libs/@local/hashql/macros/src/lib.rs b/libs/@local/hashql/macros/src/lib.rs index 2c3cc5159c4..f01d83b20bf 100644 --- a/libs/@local/hashql/macros/src/lib.rs +++ b/libs/@local/hashql/macros/src/lib.rs @@ -66,7 +66,11 @@ pub fn derive_id(item: TokenStream) -> TokenStream { /// } /// ``` /// -/// Supported backing types: `u8`, `u16`, `u32`, `u64`, `u128`. +/// Supported backing types: `u8`, `u16`, `u32`, `u64`, `u128`. `usize` is +/// intentionally excluded: proc macros are compiled for and run on the host, +/// so they cannot determine the target's pointer width. This makes it +/// impossible to select the correct widening cast for range assertions during +/// cross-compilation. /// /// The range bound determines valid values. Inclusive (`..=`) and exclusive (`..`) /// ranges are both supported. 
@@ -86,7 +90,7 @@ pub fn derive_id(item: TokenStream) -> TokenStream { /// # Generated items /// /// - [`Id`] trait implementation -/// - [`HasId`] trait implementation +/// - `HasId` trait implementation /// - [`TryFrom`], [`TryFrom`], [`TryFrom`] implementations /// - [`Debug`] and (by default) [`Display`] implementations /// - `new`, `new_unchecked` constructors @@ -101,7 +105,7 @@ pub fn define_id(item: TokenStream) -> TokenStream { /// /// 1. `SYMBOLS` — a static slice of string values for interner pre-population /// 2. Symbol constants — `Symbol<'static>` constants with companion `ConstantSymbol` modules -/// 3. `LOOKUP` — a static slice mapping string values to their [`Repr`] for fast lookup +/// 3. `LOOKUP` — a static slice mapping string values to their `Repr` for fast lookup /// /// # Syntax /// From b816f15dbf2c701a5f61e386ac1b376d24cd4a04 Mon Sep 17 00:00:00 2001 From: Bilal Mahmoud Date: Sat, 28 Feb 2026 15:01:01 +0100 Subject: [PATCH 15/15] fix: suggestions from code review --- libs/@local/hashql/macros/LICENSE.md | 606 +++++++++++++++++++++ libs/@local/hashql/macros/src/id/common.rs | 4 +- libs/@local/hashql/macros/src/id/enum.rs | 6 +- libs/@local/hashql/macros/src/id/struct.rs | 6 +- 4 files changed, 619 insertions(+), 3 deletions(-) create mode 100644 libs/@local/hashql/macros/LICENSE.md diff --git a/libs/@local/hashql/macros/LICENSE.md b/libs/@local/hashql/macros/LICENSE.md new file mode 100644 index 00000000000..8ebfe728d7d --- /dev/null +++ b/libs/@local/hashql/macros/LICENSE.md @@ -0,0 +1,606 @@ +# GNU Affero General Public License + +_Version 3, 19 November 2007_ +_Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>_ + +Everyone is permitted to copy and distribute verbatim copies +of this license document, but changing it is not allowed. 
+ +## Preamble + +The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + +The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + +When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + +Developers that use our General Public Licenses protect your rights +with two steps: **(1)** assert copyright on the software, and **(2)** offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + +A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + +The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. 
It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + +An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + +The precise terms and conditions for copying, distribution and +modification follow. + +## TERMS AND CONDITIONS + +### 0. Definitions + +“This License” refers to version 3 of the GNU Affero General Public License. + +“Copyright” also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + +“The Program” refers to any copyrightable work licensed under this +License. Each licensee is addressed as “you”. “Licensees” and +“recipients” may be individuals or organizations. + +To “modify” a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a “modified version” of the +earlier work or a work “based on” the earlier work. + +A “covered work” means either the unmodified Program or a work based +on the Program. + +To “propagate” a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + +To “convey” a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + +An interactive user interface displays “Appropriate Legal Notices” +to the extent that it includes a convenient and prominently visible +feature that **(1)** displays an appropriate copyright notice, and **(2)** +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + +### 1. Source Code + +The “source code” for a work means the preferred form of the work +for making modifications to it. “Object code” means any non-source +form of a work. + +A “Standard Interface” means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + +The “System Libraries” of an executable work include anything, other +than the work as a whole, that **(a)** is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and **(b)** serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +“Major Component”, in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ +The “Corresponding Source” for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + +The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + +The Corresponding Source for a work in source code form is that +same work. + +### 2. Basic Permissions + +All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + +You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + +Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + +### 3. Protecting Users' Legal Rights From Anti-Circumvention Law + +No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + +When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + +### 4. Conveying Verbatim Copies + +You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + +You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + +### 5. 
Conveying Modified Source Versions + +You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + +- **a)** The work must carry prominent notices stating that you modified + it, and giving a relevant date. +- **b)** The work must carry prominent notices stating that it is + released under this License and any conditions added under section 7. + This requirement modifies the requirement in section 4 to + “keep intact all notices”. +- **c)** You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. +- **d)** If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + +A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +“aggregate” if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + +### 6. 
Conveying Non-Source Forms + +You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + +- **a)** Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. +- **b)** Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either **(1)** a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or **(2)** access to copy the + Corresponding Source from a network server at no charge. +- **c)** Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. +- **d)** Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. +- **e)** Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + +A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + +A “User Product” is either **(1)** a “consumer product”, which means any +tangible personal property which is normally used for personal, family, +or household purposes, or **(2)** anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, “normally used” refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ +“Installation Information” for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + +If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + +The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + +Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + +### 7. 
Additional Terms + +“Additional permissions” are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + +When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + +Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + +- **a)** Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or +- **b)** Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or +- **c)** Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or +- **d)** Limiting the use for publicity purposes of names of licensors or + authors of the material; or +- **e)** Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or +- **f)** Requiring indemnification of licensors and authors of that + 
material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + +All other non-permissive additional terms are considered “further +restrictions” within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + +If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + +Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + +### 8. Termination + +You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + +However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated **(a)** +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and **(b)** permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ +Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + +Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + +### 9. Acceptance Not Required for Having Copies + +You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + +### 10. Automatic Licensing of Downstream Recipients + +Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + +An “entity transaction” is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + +You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + +### 11. Patents + +A “contributor” is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's “contributor version”. + +A contributor's “essential patent claims” are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, “control” includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + +Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ +In the following three paragraphs, a “patent license” is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To “grant” such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + +If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either **(1)** cause the Corresponding Source to be so +available, or **(2)** arrange to deprive yourself of the benefit of the +patent license for this particular work, or **(3)** arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. “Knowingly relying” means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + +If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + +A patent license is “discriminatory” if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license **(a)** in connection with copies of the covered work +conveyed by you (or copies made from those copies), or **(b)** primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + +Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + +### 12. No Surrender of Others' Freedom + +If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + +### 13. 
Remote Network Interaction; Use with the GNU General Public License + +Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + +Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + +### 14. Revised Versions of this License + +The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License “or any later version” applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. 
If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + +If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + +Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + +### 15. Disclaimer of Warranty + +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +### 16. Limitation of Liability + +IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + +### 17. 
Interpretation of Sections 15 and 16 + +If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. diff --git a/libs/@local/hashql/macros/src/id/common.rs b/libs/@local/hashql/macros/src/id/common.rs index 66d9418476e..7bf933965af 100644 --- a/libs/@local/hashql/macros/src/id/common.rs +++ b/libs/@local/hashql/macros/src/id/common.rs @@ -14,7 +14,9 @@ pub(crate) enum IntegerScalar { impl IntegerScalar { pub(crate) const fn from_variant_count(count: usize) -> Self { - match count { + // Match on the maximum discriminant (count - 1) to find the smallest + // integer type that can represent all variants 0..count. + match count.saturating_sub(1) { 0..=0xFF => Self::U8, 0x100..=0xFFFF => Self::U16, 0x1_0000..=0xFFFF_FFFF => Self::U32, diff --git a/libs/@local/hashql/macros/src/id/enum.rs b/libs/@local/hashql/macros/src/id/enum.rs index 730efea41c5..e3d868d6f0f 100644 --- a/libs/@local/hashql/macros/src/id/enum.rs +++ b/libs/@local/hashql/macros/src/id/enum.rs @@ -8,7 +8,11 @@ use crate::id::{ common::IntegerScalar, }; -#[expect(clippy::too_many_lines, reason = "macro")] +#[expect( + clippy::too_many_lines, + reason = "mostly mechanical quote! 
blocks with minimal logic; splitting would require \ + threading many local variables for no clarity gain" +)] pub(super) fn expand_enum( grammar::ParsedEnum { attributes, diff --git a/libs/@local/hashql/macros/src/id/struct.rs b/libs/@local/hashql/macros/src/id/struct.rs index 717012c8d89..b3d6b27841f 100644 --- a/libs/@local/hashql/macros/src/id/struct.rs +++ b/libs/@local/hashql/macros/src/id/struct.rs @@ -101,7 +101,11 @@ impl From for Constraint { } } -#[expect(clippy::too_many_lines, reason = "macro")] +#[expect( + clippy::too_many_lines, + reason = "mostly mechanical quote! blocks with minimal logic; splitting would require \ + threading many local variables for no clarity gain" +)] pub(super) fn expand_struct( grammar::ParsedStruct { attributes,