1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
|
use std::collections::HashSet;
use std::path::PathBuf;
use itertools::Itertools;
use proc_macro2::{Delimiter, Group, Ident};
use quote::{format_ident, quote, ToTokens};
/// Highest tuple arity for which impls are generated (exclusive bound below).
const CNT: usize = 4;

/// Build-script entry point: renders every `InputFilter` impl combination
/// and writes the result to `$OUT_DIR/system_input_impls.rs`.
fn main()
{
    // NOTE(review): `0..CNT` yields arities 0 through CNT - 1 — the upper
    // bound is exclusive, so no impl is generated for CNT-tuples. Confirm
    // that skipping CNT itself is intentional.
    let out_dir: PathBuf = std::env::var("OUT_DIR").unwrap().into();
    let impls = (0..CNT)
        .flat_map(create_input_filter_impls)
        .join("\n");
    let dest = out_dir.join("system_input_impls.rs");
    std::fs::write(dest, impls).unwrap();
}
/// Builds one `InputFilter` impl for every way of assigning
/// `ElementKind::Element` / `ElementKind::Excluded` to `cnt` tuple
/// positions — 2^cnt combinations in total (a single empty combination
/// when `cnt == 0`).
///
/// Combinations are enumerated directly from a bitmask. The previous
/// approach took `permutations(cnt)` over a pool of `2 * cnt` kinds and
/// deduplicated with a `HashSet`, doing (2·cnt)!/cnt! factorial work to
/// discover the same 2^cnt distinct combinations.
fn create_input_filter_impls(cnt: usize) -> Vec<proc_macro2::TokenStream>
{
    (0..1usize << cnt)
        .map(|mask| {
            (0..cnt)
                .map(|index| {
                    // Bit `index` of `mask` selects the kind for this slot:
                    // clear bit => real element, set bit => excluded.
                    let kind = if mask & (1 << index) == 0 {
                        ElementKind::Element
                    } else {
                        ElementKind::Excluded
                    };
                    match kind {
                        ElementKind::Element => {
                            IdentOrTuple::Ident(format_ident!("Elem{index}"))
                        }
                        ElementKind::Excluded => IdentOrTuple::Tuple,
                    }
                })
                .collect::<Vec<_>>()
        })
        .map(create_single_input_filter_impl)
        .collect()
}
/// Renders a single `InputFilter` impl: every `Ident` slot becomes a
/// generic parameter bounded by `Input`, while `Tuple` slots appear only
/// in the implementing tuple type and are dropped from `Filtered`.
fn create_single_input_filter_impl(
    elements: Vec<IdentOrTuple>,
) -> proc_macro2::TokenStream
{
    // Gather only the named generics; excluded `()` slots contribute none.
    let mut generics = Vec::new();
    for element in &elements {
        if let IdentOrTuple::Ident(_) = element {
            generics.push(element);
        }
    }
    quote! {
        impl<#(#generics: Input,)*> InputFilter for (#(#elements,)*) {
            type Filtered = (#(#generics,)*);
        }
    }
}
/// Marker for whether a tuple position carries a real input element or is
/// excluded from the filtered tuple.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum ElementKind
{
    /// Slot holds an input element (rendered as a generic `ElemN`).
    Element,
    /// Slot is excluded (rendered as the unit tuple `()`).
    Excluded,
}
/// Token fragment for one tuple slot: either a generic identifier
/// (e.g. `Elem0`) or the unit tuple `()`.
#[derive(Debug)]
enum IdentOrTuple
{
    /// A named generic parameter identifier.
    Ident(Ident),
    /// Rendered as an empty parenthesized group, i.e. `()`.
    Tuple,
}
impl ToTokens for IdentOrTuple
{
    /// Appends either the identifier itself, or — for the excluded
    /// marker — an empty parenthesized group, which prints as `()`.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream)
    {
        if let Self::Ident(ident) = self {
            ident.to_tokens(tokens);
        } else {
            let unit =
                Group::new(Delimiter::Parenthesis, proc_macro2::TokenStream::new());
            unit.to_tokens(tokens);
        }
    }
}
|