// src/stdarch/crates/stdarch-verify/src/lib.rs
// (snapshot from the rustc.git mirror, commit 62ad41c48f1921beb3cd7e31750f93de767fddb6)
1 extern crate proc_macro;
2 extern crate proc_macro2;
3 #[macro_use]
4 extern crate quote;
5 #[macro_use]
6 extern crate syn;
7
8 use proc_macro::TokenStream;
9 use std::{fs::File, io::Read, path::Path};
10 use syn::ext::IdentExt;
11
12 #[proc_macro]
13 pub fn x86_functions(input: TokenStream) -> TokenStream {
14 functions(input, &["core_arch/src/x86", "core_arch/src/x86_64"])
15 }
16
17 #[proc_macro]
18 pub fn arm_functions(input: TokenStream) -> TokenStream {
19 functions(input, &["core_arch/src/arm", "core_arch/src/aarch64"])
20 }
21
22 #[proc_macro]
23 pub fn mips_functions(input: TokenStream) -> TokenStream {
24 functions(input, &["core_arch/src/mips"])
25 }
26
27 fn functions(input: TokenStream, dirs: &[&str]) -> TokenStream {
28 let dir = Path::new(env!("CARGO_MANIFEST_DIR"));
29 let root = dir.parent().expect("root-dir not found");
30
31 let mut files = Vec::new();
32 for dir in dirs {
33 walk(&root.join(dir), &mut files);
34 }
35 assert!(!files.is_empty());
36
37 let mut functions = Vec::new();
38 for &mut (ref mut file, ref path) in &mut files {
39 for mut item in file.items.drain(..) {
40 match item {
41 syn::Item::Fn(f) => functions.push((f, path)),
42 syn::Item::Mod(ref mut m) => {
43 if let Some(ref mut m) = m.content {
44 for i in m.1.drain(..) {
45 if let syn::Item::Fn(f) = i {
46 functions.push((f, path))
47 }
48 }
49 }
50 }
51 _ => (),
52 }
53 }
54 }
55 assert!(!functions.is_empty());
56
57 let mut tests = std::collections::HashSet::<String>::new();
58 for f in &functions {
59 let id = format!("{}", f.0.sig.ident);
60 if id.starts_with("test_") {
61 tests.insert(id);
62 }
63 }
64 assert!(!tests.is_empty());
65
66 functions.retain(|&(ref f, _)| {
67 if let syn::Visibility::Public(_) = f.vis {
68 if f.sig.unsafety.is_some() {
69 return true;
70 }
71 }
72 false
73 });
74 assert!(!functions.is_empty());
75
76 let input = proc_macro2::TokenStream::from(input);
77
78 let functions = functions
79 .iter()
80 .map(|&(ref f, path)| {
81 let name = &f.sig.ident;
82 // println!("{}", name);
83 let mut arguments = Vec::new();
84 for input in f.sig.inputs.iter() {
85 let ty = match *input {
86 syn::FnArg::Typed(ref c) => &c.ty,
87 _ => panic!("invalid argument on {}", name),
88 };
89 arguments.push(to_type(ty));
90 }
91 let ret = match f.sig.output {
92 syn::ReturnType::Default => quote! { None },
93 syn::ReturnType::Type(_, ref t) => {
94 let ty = to_type(t);
95 quote! { Some(#ty) }
96 }
97 };
98 let instrs = find_instrs(&f.attrs);
99 let target_feature = if let Some(i) = find_target_feature(&f.attrs) {
100 quote! { Some(#i) }
101 } else {
102 quote! { None }
103 };
104 let required_const = find_required_const(&f.attrs);
105
106 // strip leading underscore from fn name when building a test
107 // _mm_foo -> mm_foo such that the test name is test_mm_foo.
108 let test_name_string = format!("{}", name);
109 let mut test_name_id = test_name_string.as_str();
110 while test_name_id.starts_with('_') {
111 test_name_id = &test_name_id[1..];
112 }
113 let has_test = tests.contains(&format!("test_{}", test_name_id));
114
115 quote! {
116 Function {
117 name: stringify!(#name),
118 arguments: &[#(#arguments),*],
119 ret: #ret,
120 target_feature: #target_feature,
121 instrs: &[#(#instrs),*],
122 file: stringify!(#path),
123 required_const: &[#(#required_const),*],
124 has_test: #has_test,
125 }
126 }
127 })
128 .collect::<Vec<_>>();
129
130 let ret = quote! { #input: &[Function] = &[#(#functions),*]; };
131 // println!("{}", ret);
132 ret.into()
133 }
134
/// Maps a type appearing in an intrinsic's signature to a token stream
/// naming the corresponding `&Type` static used in the generated tables.
///
/// Panics on any type not listed here, so new types must be registered
/// explicitly before the verifier will accept them.
fn to_type(t: &syn::Type) -> proc_macro2::TokenStream {
    match *t {
        // A plain path type (e.g. `i32`, `__m128i`): look it up by its
        // single-segment identifier.
        syn::Type::Path(ref p) => match extract_path_ident(&p.path).to_string().as_ref() {
            // x86 ...
            "__m128" => quote! { &M128 },
            "__m128d" => quote! { &M128D },
            "__m128i" => quote! { &M128I },
            "__m256" => quote! { &M256 },
            "__m256d" => quote! { &M256D },
            "__m256i" => quote! { &M256I },
            "__m512" => quote! { &M512 },
            "__m512d" => quote! { &M512D },
            "__m512i" => quote! { &M512I },
            "__mmask8" => quote! { &MMASK8 },
            "__mmask16" => quote! { &MMASK16 },
            "__m64" => quote! { &M64 },
            "bool" => quote! { &BOOL },
            "f32" => quote! { &F32 },
            "f64" => quote! { &F64 },
            "i16" => quote! { &I16 },
            "i32" => quote! { &I32 },
            "i64" => quote! { &I64 },
            "i8" => quote! { &I8 },
            "u16" => quote! { &U16 },
            "u32" => quote! { &U32 },
            "u64" => quote! { &U64 },
            "u128" => quote! { &U128 },
            "u8" => quote! { &U8 },
            "Ordering" => quote! { &ORDERING },
            "CpuidResult" => quote! { &CPUID },

            // arm ...
            "int8x4_t" => quote! { &I8X4 },
            "int8x8_t" => quote! { &I8X8 },
            "int8x8x2_t" => quote! { &I8X8X2 },
            "int8x8x3_t" => quote! { &I8X8X3 },
            "int8x8x4_t" => quote! { &I8X8X4 },
            "int8x16x2_t" => quote! { &I8X16X2 },
            "int8x16x3_t" => quote! { &I8X16X3 },
            "int8x16x4_t" => quote! { &I8X16X4 },
            "int8x16_t" => quote! { &I8X16 },
            "int16x2_t" => quote! { &I16X2 },
            "int16x4_t" => quote! { &I16X4 },
            "int16x8_t" => quote! { &I16X8 },
            "int32x2_t" => quote! { &I32X2 },
            "int32x4_t" => quote! { &I32X4 },
            "int64x1_t" => quote! { &I64X1 },
            "int64x2_t" => quote! { &I64X2 },
            "uint8x8_t" => quote! { &U8X8 },
            "uint8x8x2_t" => quote! { &U8X8X2 },
            "uint8x16x2_t" => quote! { &U8X16X2 },
            "uint8x16x3_t" => quote! { &U8X16X3 },
            "uint8x16x4_t" => quote! { &U8X16X4 },
            "uint8x8x3_t" => quote! { &U8X8X3 },
            "uint8x8x4_t" => quote! { &U8X8X4 },
            "uint8x16_t" => quote! { &U8X16 },
            "uint16x4_t" => quote! { &U16X4 },
            "uint16x8_t" => quote! { &U16X8 },
            "uint32x2_t" => quote! { &U32X2 },
            "uint32x4_t" => quote! { &U32X4 },
            "uint64x1_t" => quote! { &U64X1 },
            "uint64x2_t" => quote! { &U64X2 },
            "float32x2_t" => quote! { &F32X2 },
            "float32x4_t" => quote! { &F32X4 },
            "float64x1_t" => quote! { &F64X1 },
            "float64x2_t" => quote! { &F64X2 },
            "poly8x8_t" => quote! { &POLY8X8 },
            "poly8x8x2_t" => quote! { &POLY8X8X2 },
            "poly8x8x3_t" => quote! { &POLY8X8X3 },
            "poly8x8x4_t" => quote! { &POLY8X8X4 },
            "poly8x16x2_t" => quote! { &POLY8X16X2 },
            "poly8x16x3_t" => quote! { &POLY8X16X3 },
            "poly8x16x4_t" => quote! { &POLY8X16X4 },
            "poly64_t" => quote! { &P64 },
            "poly64x1_t" => quote! { &POLY64X1 },
            "poly64x2_t" => quote! { &POLY64X2 },
            "poly8x16_t" => quote! { &POLY8X16 },
            "poly16x4_t" => quote! { &POLY16X4 },
            "poly16x8_t" => quote! { &POLY16X8 },
            "poly128_t" => quote! { &P128 },

            // mips (MSA) vector types — the referenced statics are
            // deliberately lower-case, mirroring the C type names.
            "v16i8" => quote! { &v16i8 },
            "v8i16" => quote! { &v8i16 },
            "v4i32" => quote! { &v4i32 },
            "v2i64" => quote! { &v2i64 },
            "v16u8" => quote! { &v16u8 },
            "v8u16" => quote! { &v8u16 },
            "v4u32" => quote! { &v4u32 },
            "v2u64" => quote! { &v2u64 },
            "v8f16" => quote! { &v8f16 },
            "v4f32" => quote! { &v4f32 },
            "v2f64" => quote! { &v2f64 },

            s => panic!("unsupported type: \"{}\"", s),
        },
        syn::Type::Ptr(syn::TypePtr {
            ref elem,
            ref mutability,
            ..
        })
        | syn::Type::Reference(syn::TypeReference {
            ref elem,
            ref mutability,
            ..
        }) => {
            // Both pointers and references can have a mut token (*mut and &mut)
            if mutability.is_some() {
                let tokens = to_type(&elem);
                quote! { &Type::MutPtr(#tokens) }
            } else {
                // If they don't (*const or &) then they are "const"
                let tokens = to_type(&elem);
                quote! { &Type::ConstPtr(#tokens) }
            }
        }

        syn::Type::Slice(_) => panic!("unsupported slice"),
        syn::Type::Array(_) => panic!("unsupported array"),
        syn::Type::Tuple(_) => quote! { &TUPLE },
        syn::Type::Never(_) => quote! { &NEVER },
        _ => panic!("unsupported type"),
    }
}
258
259 fn extract_path_ident(path: &syn::Path) -> syn::Ident {
260 if path.leading_colon.is_some() {
261 panic!("unsupported leading colon in path")
262 }
263 if path.segments.len() != 1 {
264 panic!("unsupported path that needs name resolution")
265 }
266 match path.segments.first().expect("segment not found").arguments {
267 syn::PathArguments::None => {}
268 _ => panic!("unsupported path that has path arguments"),
269 }
270 path.segments
271 .first()
272 .expect("segment not found")
273 .ident
274 .clone()
275 }
276
277 fn walk(root: &Path, files: &mut Vec<(syn::File, String)>) {
278 for file in root.read_dir().unwrap() {
279 let file = file.unwrap();
280 if file.file_type().unwrap().is_dir() {
281 walk(&file.path(), files);
282 continue;
283 }
284 let path = file.path();
285 if path.extension().and_then(std::ffi::OsStr::to_str) != Some("rs") {
286 continue;
287 }
288
289 if path.file_name().and_then(std::ffi::OsStr::to_str) == Some("test.rs") {
290 continue;
291 }
292
293 let mut contents = String::new();
294 File::open(&path)
295 .unwrap_or_else(|_| panic!("can't open file at path: {}", path.display()))
296 .read_to_string(&mut contents)
297 .expect("failed to read file to string");
298
299 files.push((
300 syn::parse_str::<syn::File>(&contents).expect("failed to parse"),
301 path.display().to_string(),
302 ));
303 }
304 }
305
306 fn find_instrs(attrs: &[syn::Attribute]) -> Vec<String> {
307 struct AssertInstr {
308 instr: String,
309 }
310
311 // A small custom parser to parse out the instruction in `assert_instr`.
312 //
313 // TODO: should probably just reuse `Invoc` from the `assert-instr-macro`
314 // crate.
315 impl syn::parse::Parse for AssertInstr {
316 fn parse(content: syn::parse::ParseStream) -> syn::Result<Self> {
317 let input;
318 parenthesized!(input in content);
319 let _ = input.parse::<syn::Meta>()?;
320 let _ = input.parse::<Token![,]>()?;
321 let ident = input.parse::<syn::Ident>()?;
322 if ident != "assert_instr" {
323 return Err(input.error("expected `assert_instr`"));
324 }
325 let instrs;
326 parenthesized!(instrs in input);
327
328 let mut instr = String::new();
329 while !instrs.is_empty() {
330 if let Ok(lit) = instrs.parse::<syn::LitStr>() {
331 instr.push_str(&lit.value());
332 } else if let Ok(ident) = instrs.call(syn::Ident::parse_any) {
333 instr.push_str(&ident.to_string());
334 } else if instrs.parse::<Token![.]>().is_ok() {
335 instr.push_str(".");
336 } else if instrs.parse::<Token![,]>().is_ok() {
337 // consume everything remaining
338 drop(instrs.parse::<proc_macro2::TokenStream>());
339 break;
340 } else {
341 return Err(input.error("failed to parse instruction"));
342 }
343 }
344 Ok(Self { instr })
345 }
346 }
347
348 attrs
349 .iter()
350 .filter(|a| a.path.is_ident("cfg_attr"))
351 .filter_map(|a| {
352 syn::parse2::<AssertInstr>(a.tokens.clone())
353 .ok()
354 .map(|a| a.instr)
355 })
356 .collect()
357 }
358
359 fn find_target_feature(attrs: &[syn::Attribute]) -> Option<syn::Lit> {
360 attrs
361 .iter()
362 .flat_map(|a| {
363 if let Ok(a) = a.parse_meta() {
364 if let syn::Meta::List(i) = a {
365 if i.path.is_ident("target_feature") {
366 return i.nested;
367 }
368 }
369 }
370 syn::punctuated::Punctuated::new()
371 })
372 .filter_map(|nested| match nested {
373 syn::NestedMeta::Meta(m) => Some(m),
374 syn::NestedMeta::Lit(_) => None,
375 })
376 .find_map(|m| match m {
377 syn::Meta::NameValue(ref i) if i.path.is_ident("enable") => Some(i.clone().lit),
378 _ => None,
379 })
380 }
381
382 fn find_required_const(attrs: &[syn::Attribute]) -> Vec<usize> {
383 attrs
384 .iter()
385 .flat_map(|a| {
386 if a.path.segments[0].ident == "rustc_args_required_const" {
387 syn::parse::<RustcArgsRequiredConst>(a.tokens.clone().into())
388 .unwrap()
389 .args
390 } else {
391 Vec::new()
392 }
393 })
394 .collect()
395 }
396
397 struct RustcArgsRequiredConst {
398 args: Vec<usize>,
399 }
400
401 impl syn::parse::Parse for RustcArgsRequiredConst {
402 fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
403 let content;
404 parenthesized!(content in input);
405 let list =
406 syn::punctuated::Punctuated::<syn::LitInt, Token![,]>::parse_terminated(&content)?;
407 Ok(Self {
408 args: list
409 .into_iter()
410 .map(|a| a.base10_parse::<usize>())
411 .collect::<syn::Result<_>>()?,
412 })
413 }
414 }