Repo: packages in PhantomData

Signed-off-by: Luca Fulchir <luca.fulchir@runesauth.com>
Luca Fulchir 2024-11-30 11:18:17 +01:00
parent a5722bff5c
commit f08ae227a2
Signed by: luca.fulchir
GPG Key ID: 8F6440603D13A78E
12 changed files with 435 additions and 178 deletions

Cargo.lock (generated)
View File

@@ -73,7 +73,6 @@ dependencies = [
 "bitflags",
 "bok-macro",
 "macro_magic",
-"paste",
 "proc-macro2",
 "quote",
 "semver",
@@ -85,6 +84,7 @@ dependencies = [
 name = "bok-macro"
 version = "0.1.0"
 dependencies = [
+ "convert_case",
 "macro_magic",
 "proc-macro2",
 "quote",
@@ -180,6 +180,15 @@ dependencies = [
 "tiny-keccak",
 ]
 
+[[package]]
+name = "convert_case"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
+dependencies = [
+ "unicode-segmentation",
+]
+
 [[package]]
 name = "cpufeatures"
 version = "0.2.14"
@@ -328,12 +337,6 @@ version = "1.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
 
-[[package]]
-name = "paste"
-version = "1.0.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
-
 [[package]]
 name = "prettyplease"
 version = "0.2.25"
@@ -442,6 +445,12 @@ version = "1.0.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
 
+[[package]]
+name = "unicode-segmentation"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
+
 [[package]]
 name = "unicode-width"
 version = "0.2.0"

View File

@@ -13,7 +13,8 @@ categories = ["config"]
 proc-macro = true
 
 [dependencies]
-syn = { version = "2.0", features = ["full", "extra-traits"] }
-quote = { version = "1.0" }
-proc-macro2 = "1.0"
+convert_case = { version = "^0.6" }
 macro_magic = { version = "0.5", features = ["proc_support"] }
+proc-macro2 = "1.0"
+quote = { version = "1.0" }
+syn = { version = "2.0", features = ["full", "extra-traits"] }
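The new `convert_case` dependency takes over the snake_case conversion that the removed `paste`-based macros used to do at expansion time (see `repo_packages` and `path_to_snake_case` further down). A tiny sketch of the call pattern used there, with a hypothetical helper name for illustration:

```rust
use convert_case::{Case, Casing};

/// Hypothetical helper mirroring how repo_packages builds `_p_*` field names:
/// the package type ident is lowered to snake_case and prefixed with `_p_`.
fn pkg_field_name(type_ident: &str) -> String {
    format!("_p_{}", type_ident.to_case(Case::Snake))
}

fn main() {
    // "MyPkg" -> "_p_my_pkg"
    assert_eq!(pkg_field_name("MyPkg"), "_p_my_pkg");
}
```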

View File

@@ -56,16 +56,44 @@ pub fn derive_repository(input: TokenStream) -> TokenStream {
 /// Add multiple packages to a repo
 /// Usage:
 /// ```
-/// #[::bok::impl_repo(
-///     Mypkg1,
-///     Mypkg2,
-/// )]
+/// #[::bok::repo_packages(Mypkg1, Mypkg2)]
+/// struct MyRepo{}
+/// ```
+#[proc_macro_attribute]
+pub fn repo_packages(attrs: TokenStream, input: TokenStream) -> TokenStream {
+    crate::repos::repo_packages(attrs, input)
+}
+
+/// Create the methods that will return the builders and packages
+///
+/// will actually merely rewrite the macro to get the actual Repo name
+/// in `#[::bok_macro::repo_impl_methods(MyRepo)]`
+///
+/// Usage:
+/// ```
+/// #[::bok::repo_impl]
+/// impl MyRepo{}
+/// ```
+//#[::macro_magic::import_tokens_attr]
+#[proc_macro_attribute]
+pub fn repo_impl(attrs: TokenStream, input: TokenStream) -> TokenStream {
+    crate::repos::repo_impl(attrs, input)
+}
+
+/// Internal. **Do not use unless you know what you are doing**
+/// Create the methods that will return the builders and packages
+/// Usage:
+/// ```
+/// #[::bok_macro::repo_impl(MyRepo)]
 /// impl MyRepo{}
 /// ```
 #[::macro_magic::import_tokens_attr]
 #[proc_macro_attribute]
-pub fn impl_repository(attrs: TokenStream, input: TokenStream) -> TokenStream {
-    crate::repos::impl_repo(attrs, input)
+pub fn repo_impl_methods(
+    attrs: TokenStream,
+    input: TokenStream,
+) -> TokenStream {
+    crate::repos::repo_impl_methods(attrs, input, __source_path)
 }
 //
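Read together, the doc comments above describe the intended call chain: `repo_packages` turns a list of package paths into `_p_*` PhantomData fields, `repo_impl` re-tags an `impl` block with `repo_impl_methods(<RepoName>)`, and `repo_impl_methods` (fed the struct's tokens by `macro_magic`) fills that block with accessor methods. A hedged usage sketch, with `MyRepo` and `Mypkg1` as placeholder names taken from the doc comments rather than real types:

```rust
// Placeholder types/names (MyRepo, Mypkg1) from the doc comments above;
// this mirrors the example module further down in this commit.
#[::bok::repository(::bok::RepositoryEmpty)]
#[::bok::repo_packages(Mypkg1)]
#[derive(::std::default::Default)]
pub struct MyRepo {
    some_setting: i32,
}

// Expands to `#[::bok_macro::repo_impl_methods(MyRepo)] impl MyRepo {}`,
// which in turn generates one accessor per `_p_*` field.
#[::bok::repo_impl]
impl MyRepo {}
```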

View File

@@ -17,74 +17,123 @@
 use ::proc_macro::TokenStream;
 use ::quote::quote;
-use ::syn::{
-    parse_macro_input, DeriveInput, Fields, Item, ItemImpl, ItemStruct,
-};
+use ::syn::{parse_macro_input, DeriveInput, Fields, ItemImpl, ItemStruct};
 
-pub(crate) fn impl_repo(attrs: TokenStream, input: TokenStream) -> TokenStream {
-    let base = parse_macro_input!(attrs as ItemImpl);
+pub(crate) fn repo_impl(
+    _attrs: TokenStream,
+    input: TokenStream,
+) -> TokenStream {
     let local = parse_macro_input!(input as ItemImpl);
-    quote! {}.into()
-}
-/*
-// Either a series of paths, or a single one that we have to expand
-pub(crate) enum BaseSet {
-    Paths(::std::vec::Vec<::syn::Path>),
-    Single((::syn::Path, ItemStruct)),
-}
-impl ::syn::parse::Parse for BaseSet {
-    fn parse(input: ::syn::parse::ParseStream) -> ::syn::parse::Result<Self> {
-        if let Ok(path) = input.parse() {
-            if let Ok(item_struct) = ItemStruct::parse(input) {
-                return Ok(BaseSet::Single((path, item_struct)));
-            }
-        }
-        use ::syn::punctuated::Punctuated;
-        let vars =
-            Punctuated::<::syn::Path, ::syn::Token![,]>::parse_separated_nonempty(
-                input,
-            )?;
-        if vars.len() >= 0 {
-            return Err(::syn::parse::Error::new(input.span(), "GOT TWO"));
-        }
-        Ok(BaseSet::Paths(vars.into_iter().collect()))
-    }
-}
-impl quote::ToTokens for BaseSet {
-    fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
-        match self {
-            BaseSet::Paths(bases) => {
-                for (i, base) in bases.iter().enumerate() {
-                    if i > 0 {
-                        tokens.extend(
-                            ::proc_macro2::Punct::new(
-                                ',',
-                                ::proc_macro2::Spacing::Joint,
-                            )
-                            .into_token_stream(),
-                        );
-                        tokens.extend(base.to_token_stream());
-                    }
-                    base.to_tokens(tokens);
-                }
-            }
-            BaseSet::Single((_, item_struct)) => item_struct.to_tokens(tokens),
-        }
-    }
-}
-impl ::macro_magic::mm_core::ForeignPath for BaseSet {
-    fn foreign_path(&self) -> &syn::Path {
-        match self {
-            BaseSet::Paths(bases) => &bases[0],
-            BaseSet::Single((path, _)) => &path,
-        }
-    }
-}
-*/
+    let reponame = &local.self_ty;
+
+    quote! {
+        #[::bok_macro::repo_impl_methods(#reponame)]
+        #local
+    }
+    .into()
+}
+
+pub(crate) fn repo_impl_methods(
+    attrs: TokenStream,
+    input: TokenStream,
+    __source_name: TokenStream,
+) -> TokenStream {
+    let base = parse_macro_input!(attrs as ItemStruct);
+    let local = parse_macro_input!(input as ItemImpl);
+    let source_name = parse_macro_input!(__source_name as ::syn::Path);
+
+    if let ::syn::Type::Path(self_type_path) = local.self_ty.as_ref() {
+        if self_type_path.path != source_name {
+            return ::syn::Error::new(
+                proc_macro2::Span::call_site(),
+                "#[::bok_macro::repo_impl_methods(..)]: argument and impl \
+                 type differ",
+            )
+            .to_compile_error()
+            .into();
+        }
+    } else {
+        return ::syn::Error::new(
+            proc_macro2::Span::call_site(),
+            "#[::bok_macro::repo_impl_methods(..)]: argument and impl type \
+             differ",
+        )
+        .to_compile_error()
+        .into();
+    }
+
+    let local_attrs = local.attrs.iter();
+    let generics = local.generics;
+    let ident = &local.self_ty;
+    let items = local.items.iter();
+
+    let ::syn::Fields::Named(base_fields) = base.fields else {
+        return ::syn::Error::new(
+            proc_macro2::Span::call_site(),
+            "#[::bok_macro::repo_impl_methods(..)]: type has unsupported \
+             unnamed fields",
+        )
+        .to_compile_error()
+        .into();
+    };
+    let mut fn_to_add = Vec::new();
+    for f in base_fields.named.iter() {
+        if !f
+            .ident
+            .as_ref()
+            .is_some_and(|id| id.to_string().starts_with("_p_"))
+        {
+            continue;
+        };
+        let ::syn::Type::Path(f_type) = &f.ty else {
+            continue;
+        };
+        let t_phantom = &f_type.path;
+        let t_id = {
+            let args = &t_phantom
+                .segments
+                .last()
+                .expect("t_phantom no last?")
+                .arguments;
+            let ::syn::PathArguments::AngleBracketed(bracketed) = &args else {
+                panic!("phantom without anglebracket?");
+            };
+            let ::syn::GenericArgument::Type(::syn::Type::Path(t)) =
+                bracketed.args.first().expect("phantom bracketed, no args")
+            else {
+                panic!("phantom bracketed, generic not a type path");
+            };
+            t.path.clone()
+        };
+        let pkg_name = {
+            let segment = t_id.segments.iter().last().unwrap();
+            &segment.ident
+        };
+        let pkg_fn: ::syn::ImplItemFn = ::syn::parse_quote! {
+            pub fn #pkg_name(&self) -> #t_id {
+                #t_id::default()
+            }
+        };
+        fn_to_add.push(pkg_fn);
+    }
+    let new_fn = fn_to_add.iter();
+    quote! {
+        #(#local_attrs)
+        *
+        impl #ident<#generics> {
+            #(#items)
+            *
+            #(#new_fn)
+            *
+        }
+    }
+    .into()
+}
 
 pub(crate) fn repository(
     attrs: TokenStream,
@@ -101,6 +150,7 @@ pub(crate) fn repository(
         .into();
     };
 
+    // do not duplicate token export or derive macro
     let local_attrs = local.attrs.iter().filter(|&x| {
         match &x.meta {
             ::syn::Meta::Path(p) => {
@@ -145,57 +195,71 @@ pub(crate) fn repository(
     let vis = local.vis;
     let base = parse_macro_input!(attrs as ItemStruct);
-    let mut base_fields_extra = Vec::new();
     let Fields::Named(base_fields) = &base.fields else {
         use ::syn::spanned::Spanned;
         return ::syn::Error::new(
             base.fields.span(),
-            "unnamed fields are not supported",
+            "`#[::bok::repository(..)]`: base has unsupported unnamed fields",
         )
         .to_compile_error()
         .into();
     };
-    // only add the base repo once
-    let mut base_empty_found = local_fields
+    // make sure base is a repo
+    if base_fields
         .named
         .iter()
         .find(|&x| {
-            let Some(id) = &x.ident else {
-                return false;
-            };
-            id.to_string() == "bok_repo_empty_marker"
+            x.ident
+                .as_ref()
+                .is_some_and(|id| id.to_string() == "_bok_repo")
         })
-        .is_some();
-    for f in base_fields.named.iter() {
-        let Some(id) = &f.ident else { continue };
-        if id.to_string() == "bok_repo_empty_marker" {
-            if base_empty_found {
-                continue;
-            } else {
-                base_empty_found = true;
-                base_fields_extra.push(f);
-                continue;
-            }
-        }
+        .is_none()
+    {
+        use ::syn::spanned::Spanned;
+        return ::syn::Error::new(
+            base.fields.span(),
+            "`#[::bok::repository(..)]` base is not a repo",
+        )
+        .to_compile_error()
+        .into();
+    }
+    let mut all_fields = Vec::<::syn::Field>::with_capacity(
+        local_fields.named.len() + base_fields.named.len(),
+    );
+    all_fields.extend(local_fields.named.iter().cloned());
+    // make sure there is always `_bok_repo` marker
     if local_fields
         .named
         .iter()
-        .find(|&x| {
-            let Some(id_local) = &x.ident else {
-                return false;
-            };
-            id_local.to_string() == id.to_string()
+        .find(|&f| {
+            f.ident
+                .as_ref()
+                .is_some_and(|id| id.to_string() == "_bok_repo")
         })
-        .is_some()
+        .is_none()
     {
-            continue;
-        }
-        base_fields_extra.push(f);
-    }
-    let mut all_fields = Vec::new();
-    all_fields.extend(base_fields_extra.iter());
-    all_fields.extend(local_fields.named.iter());
+        all_fields.push(::syn::parse_quote! {
+            _bok_repo: ::std::marker::PhantomData<::bok::RepositoryEmpty>
+        });
+    }
+    for b_f in base_fields.named.iter() {
+        let Some(b_f_id) = &b_f.ident else { continue };
+        let b_f_id_str = b_f_id.to_string();
+        if all_fields
+            .iter()
+            .find(|&f| {
+                f.ident
+                    .as_ref()
+                    .is_some_and(|id| id.to_string() == b_f_id_str)
+            })
+            .is_none()
+        {
+            all_fields.push(b_f.clone());
+        }
+    }
     quote! {
         #(#local_attrs)
         *
@@ -227,3 +291,182 @@ pub(crate) fn derive_repository(input: TokenStream) -> TokenStream {
     TokenStream::from(expanded)
 }
 
+struct PathList(Vec<::syn::Path>);
+
+impl ::syn::parse::Parse for PathList {
+    fn parse(input: ::syn::parse::ParseStream) -> syn::Result<Self> {
+        use ::syn::punctuated::Punctuated;
+        let raw =
+            Punctuated::<::syn::Path, ::syn::Token![,]>::parse_terminated(
+                input,
+            )?;
+        let mut result = Vec::with_capacity(raw.len());
+        for r in raw.into_iter() {
+            result.push(r)
+        }
+        Ok(PathList(result))
+    }
+}
+
+pub(crate) fn repo_packages(
+    attrs: TokenStream,
+    input: TokenStream,
+) -> TokenStream {
+    let local = parse_macro_input!(input as ItemStruct);
+    let local_attrs = local.attrs.iter();
+    let generics = local.generics;
+    let ident = local.ident;
+    let vis = local.vis;
+    use ::syn::spanned::Spanned;
+    let Fields::Named(ref local_fields) = local.fields else {
+        use ::syn::spanned::Spanned;
+        return ::syn::Error::new(
+            local.fields.span(),
+            "#[repo_packages(..)]: unnamed fields are not supported",
+        )
+        .to_compile_error()
+        .into();
+    };
+    // find the marker. we need it to separate things added manually
+    // from things we get by extending the other repositories
+    let Some(marker) = local_fields.named.iter().find(|&f| {
+        f.ident
+            .as_ref()
+            .is_some_and(|id| id.to_string() == "_bok_repo")
+    }) else {
+        use ::syn::spanned::Spanned;
+        return ::syn::Error::new(
+            local_fields.span(),
+            "#[repo_packages(..)]: struct is not a repository. Forgot \
+             '#[::bok::repository(..)]` first?",
+        )
+        .to_compile_error()
+        .into();
+    };
+    let fields_up_to_marker = local_fields
+        .named
+        .iter()
+        .take_while(|&f| {
+            f.ident
+                .as_ref()
+                .is_some_and(|id| id.to_string() != "_bok_repo")
+        })
+        .collect::<Vec<&::syn::Field>>();
+    let mut fields_after_marker = local_fields
+        .named
+        .iter()
+        .skip_while(|&f| {
+            f.ident
+                .as_ref()
+                .is_some_and(|id| id.to_string() != "_bok_repo")
+        })
+        .skip_while(|&f| {
+            f.ident
+                .as_ref()
+                .is_some_and(|id| id.to_string() == "_bok_repo")
+        })
+        .collect::<Vec<&::syn::Field>>();
+    let packages = parse_macro_input!(attrs as PathList);
+    // the packages added manually must not be repeated manually.
+    // but they will override any other package added by
+    // extending the repository
+    let mut fields_added = Vec::<::syn::Field>::with_capacity(packages.0.len());
+    for p in packages.0 {
+        let path_ident = &p.segments.last().unwrap().ident;
+        use ::convert_case::{Case, Casing};
+        let pkg_id = "_p_".to_owned()
+            + path_ident.to_string().to_case(Case::Snake).as_str();
+        if fields_up_to_marker
+            .iter()
+            .find(|&f| {
+                if let Some(id) = &f.ident {
+                    id.to_string() == pkg_id
+                } else {
+                    false
+                }
+            })
+            .is_some()
+        {
+            use ::syn::spanned::Spanned;
+            return ::syn::Error::new(
+                local_fields.span(),
+                "#[repo_packages(..)]: package already present: ".to_owned()
+                    + pkg_id.as_str(),
+            )
+            .to_compile_error()
+            .into();
+        }
+        if fields_added
+            .iter()
+            .find(|&f| {
+                if let Some(id) = &f.ident {
+                    id.to_string() == pkg_id
+                } else {
+                    false
+                }
+            })
+            .is_some()
+        {
+            use ::syn::spanned::Spanned;
+            return ::syn::Error::new(
+                local_fields.span(),
+                "#[repo_packages(..)]: package added twice: ".to_owned()
+                    + pkg_id.as_str(),
+            )
+            .to_compile_error()
+            .into();
+        }
+        let pkg_ident =
+            ::quote::format_ident!("{}", pkg_id, span = local_fields.span());
+        let new_pkg: ::syn::Field = ::syn::parse_quote! {
+            #pkg_ident : ::std::marker::PhantomData<#p>
+        };
+        fields_added.push(new_pkg);
+        fields_after_marker.retain(|&f| {
+            f.ident.as_ref().is_some_and(|id| id.to_string() != pkg_id)
+        });
+    }
+    let mut all_fields =
+        Vec::with_capacity(local_fields.named.len() + fields_added.len());
+    all_fields.extend(fields_up_to_marker.iter());
+    all_fields.extend(fields_added.iter());
+    all_fields.push(marker);
+    all_fields.extend(fields_after_marker.iter());
+    quote! {
+        #(#local_attrs)
+        *
+        #vis struct #ident<#generics> {
+            #(#all_fields),
+            *
+        }
+    }
+    .into()
+}
+
+fn path_to_snake_case(path: &::syn::Path) -> String {
+    let mut s = String::new();
+    let mut is_first = true;
+    for segment in path.segments.iter() {
+        if !is_first {
+            s += "_";
+        } else {
+            is_first = false
+        }
+        s += segment.ident.to_string().as_str();
+    }
+    use ::convert_case::{Case, Casing};
+    s.to_case(Case::Snake)
+}

View File

@@ -16,7 +16,7 @@
 */
 mod conf;
-mod pkgs;
+mod repos;
 
 /*
 trait TP {
@@ -37,10 +37,10 @@ impl TP for P {
 */
 
 fn main() {
-    let one = pkgs::one::One::default();
-    let pkgs1 = pkgs::Pkgs1::default();
-    let pkgs2 = pkgs::Pkgs2::default();
+    let one = repos::pkgs::one::One::default();
+    let pkgs1 = repos::Pkgs1::default();
+    let pkgs2 = repos::Pkgs2::default();
     use ::bok::Repository;
     println!("pkgs1: {}", pkgs1.name());
     println!("pkgs2: {}", pkgs2.name());

View File

@@ -19,43 +19,37 @@
 //! Example of two package repositories, where one
 //! extends and changes another
 
+pub mod pkgs;
+
+// FIXME: why?
+use ::bok_macro::repo_impl_methods;
+
 // Export multiple packages in this module
 use ::bok::repository;
-::bok::moduse! {
-    one,
-    two,
-    three,
-}
 
 ///
 /// Base repository with some packages
 #[::bok::repository(::bok::RepositoryEmpty)]
+#[::bok::repo_packages(pkgs::one::One)]
 #[derive(::std::default::Default)]
 pub struct Pkgs1 {
     r1: i32,
 }
-// Add some packages to the repository
-// all packages will have `::default()` values
-bok::repo_packages! {
-    Pkgs1 {
-        One,
-    }
-}
+#[::bok::repo_impl]
+impl Pkgs1 {}
 
 ///
 /// This repository extends and changes Pkgs1
 #[::bok::repository(Pkgs1)]
+#[::bok::repo_packages(pkgs::two::Two)]
 #[derive(::std::default::Default)]
 pub struct Pkgs2 {
     r2: i32,
 }
-// add a third package with `::default()` values
-bok::repo_packages! {
-    Pkgs2 {
-        Three,
-    }
-}
+#[::bok::repo_impl]
+impl Pkgs2 {}
 
 /*
 impl Pkgs2 {
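Because `Pkgs2` is declared with `#[::bok::repository(Pkgs1)]`, it inherits `Pkgs1`'s `_p_*` fields before adding `pkgs::two::Two`; after the `repo_impl` expansion both repositories expose accessors for their packages. A hypothetical call site (accessor names inferred from the macro code above, not shown in this commit):

```rust
fn demo() {
    let pkgs2 = Pkgs2::default();
    // Inherited from Pkgs1 through #[::bok::repository(Pkgs1)]
    let _one: pkgs::one::One = pkgs2.One();
    // Added locally through #[::bok::repo_packages(pkgs::two::Two)]
    let _two: pkgs::two::Two = pkgs2.Two();
}
```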

View File

@@ -0,0 +1,23 @@
/*
* Copyright 2024 Luca Fulchir <luca.fulchir@runesauth.com>
*
* Licensed under the Apache License, Version 2.0 with LLVM exception (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License and of the exception at
*
* http://www.apache.org/licenses/LICENSE-2.0
* https://spdx.org/licenses/LLVM-exception.html
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Export multiple packages in this module
::bok::moduse! {
one,
two,
three,
}

View File

@@ -18,7 +18,7 @@
 use ::bok::package;
 
 /// Example package
-#[::bok::package(crate::pkgs::One)]
+#[::bok::package(super::one::One)]
 pub struct Two {
     pub my_attr2: u32,
 }

View File

@@ -17,7 +17,6 @@ publish = false
 bitflags = "2.4"
 bok-macro = { path="../bok-macro" }
 macro_magic = { version = "0.5" }
-paste = "1.0"
 proc-macro2 = "1.0"
 quote = "1.0"
 semver = { version = "1.0" }

View File

@@ -27,7 +27,8 @@ pub mod deps {
 }
 
 pub use ::bok_macro::{
-    package, package_impl, pkg_fn_to_code, repository, Package, Repository,
+    package, package_impl, pkg_fn_to_code, repo_impl, repo_packages,
+    repository, Package, Repository,
 };
 
 pub use ::semver::{BuildMetadata, Prerelease, Version};
@@ -44,46 +45,6 @@ macro_rules! moduse {
     )*
     };
 }
-// re-export `paste` crate for next macros
-pub use ::paste;
-
-/// Add multipla packages to a repo
-/// e.g.:
-/// bok::repo_packages! {
-///     MyRepo {
-///         Mypkg1,
-///         Mypkg2,
-///     }
-/// }
-#[macro_export]
-macro_rules! repo_packages {
-    ( $repo:ident { $($name:ident,)*} ) => {
-        impl $repo {
-            $crate::packages!{$($name ,)*}
-        }
-    };
-}
-
-/// Add multipla packages to a repo Impl
-/// e.g.:
-/// impl MyRepo {
-///     bok::packages! {
-///         Mypkg1,
-///         Mypkg2,
-///     }
-/// }
-#[macro_export]
-macro_rules! packages {
-    ($($name:ident,)*) => {
-        $crate::paste::paste! {
-            $(
-            pub fn [<$name:snake>] (&self) -> [<$name Builder>] {
-                $name::builder()
-            }
-            )*
-        }
-    };
-}
 
 /// Marks your struct as a repository
 pub trait Repository: ::core::fmt::Debug {
@@ -95,8 +56,7 @@ pub trait Repository: ::core::fmt::Debug {
 #[::macro_magic::export_tokens]
 #[derive(::std::default::Default, Debug)]
 pub struct RepositoryEmpty {
-    // export_tokens needs something to export
-    bok_repo_empty_marker: ::std::marker::PhantomData<Self>,
+    _bok_repo: ::std::marker::PhantomData<Self>,
 }
 
 impl Repository for RepositoryEmpty {
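The renamed `_bok_repo` marker is the single field every macro in this commit keys on: `repository` refuses a base without it, and `repo_packages` uses it as the boundary between locally added `_p_*` fields and those inherited from the base. A hand-written approximation of the resulting field layout (assumed shape, not actual macro output):

```rust
use std::marker::PhantomData;

// Approximate shape of Pkgs1 after #[::bok::repository(..)] and
// #[::bok::repo_packages(pkgs::one::One)] have both expanded.
#[derive(Default, Debug)]
pub struct Pkgs1Expanded {
    r1: i32,                                      // declared by the user
    _p_one: PhantomData<pkgs::one::One>,          // package marker, added by repo_packages
    _bok_repo: PhantomData<bok::RepositoryEmpty>, // repository marker, added by repository
}
```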