]> git.proxmox.com Git - rustc.git/blob - src/stdarch/crates/stdarch-verify/src/lib.rs
New upstream version 1.44.1+dfsg1
[rustc.git] / src / stdarch / crates / stdarch-verify / src / lib.rs
1 extern crate proc_macro;
2 extern crate proc_macro2;
3 #[macro_use]
4 extern crate quote;
5 #[macro_use]
6 extern crate syn;
7
8 use proc_macro::TokenStream;
9 use std::{fs::File, io::Read, path::Path};
10 use syn::ext::IdentExt;
11
12 #[proc_macro]
13 pub fn x86_functions(input: TokenStream) -> TokenStream {
14 functions(input, &["core_arch/src/x86", "core_arch/src/x86_64"])
15 }
16
17 #[proc_macro]
18 pub fn arm_functions(input: TokenStream) -> TokenStream {
19 functions(input, &["core_arch/src/arm", "core_arch/src/aarch64"])
20 }
21
22 #[proc_macro]
23 pub fn mips_functions(input: TokenStream) -> TokenStream {
24 functions(input, &["core_arch/src/mips"])
25 }
26
27 fn functions(input: TokenStream, dirs: &[&str]) -> TokenStream {
28 let dir = Path::new(env!("CARGO_MANIFEST_DIR"));
29 let root = dir.parent().expect("root-dir not found");
30
31 let mut files = Vec::new();
32 for dir in dirs {
33 walk(&root.join(dir), &mut files);
34 }
35 assert!(!files.is_empty());
36
37 let mut functions = Vec::new();
38 for &mut (ref mut file, ref path) in &mut files {
39 for mut item in file.items.drain(..) {
40 match item {
41 syn::Item::Fn(f) => functions.push((f, path)),
42 syn::Item::Mod(ref mut m) => {
43 if let Some(ref mut m) = m.content {
44 for i in m.1.drain(..) {
45 if let syn::Item::Fn(f) = i {
46 functions.push((f, path))
47 }
48 }
49 }
50 }
51 _ => (),
52 }
53 }
54 }
55 assert!(!functions.is_empty());
56
57 let mut tests = std::collections::HashSet::<String>::new();
58 for f in &functions {
59 let id = format!("{}", f.0.sig.ident);
60 if id.starts_with("test_") {
61 tests.insert(id);
62 }
63 }
64 assert!(!tests.is_empty());
65
66 functions.retain(|&(ref f, _)| {
67 if let syn::Visibility::Public(_) = f.vis {
68 if f.sig.unsafety.is_some() {
69 return true;
70 }
71 }
72 false
73 });
74 assert!(!functions.is_empty());
75
76 let input = proc_macro2::TokenStream::from(input);
77
78 let functions = functions
79 .iter()
80 .map(|&(ref f, path)| {
81 let name = &f.sig.ident;
82 // println!("{}", name);
83 let mut arguments = Vec::new();
84 for input in f.sig.inputs.iter() {
85 let ty = match *input {
86 syn::FnArg::Typed(ref c) => &c.ty,
87 _ => panic!("invalid argument on {}", name),
88 };
89 arguments.push(to_type(ty));
90 }
91 let ret = match f.sig.output {
92 syn::ReturnType::Default => quote! { None },
93 syn::ReturnType::Type(_, ref t) => {
94 let ty = to_type(t);
95 quote! { Some(#ty) }
96 }
97 };
98 let instrs = find_instrs(&f.attrs);
99 let target_feature = if let Some(i) = find_target_feature(&f.attrs) {
100 quote! { Some(#i) }
101 } else {
102 quote! { None }
103 };
104 let required_const = find_required_const(&f.attrs);
105
106 // strip leading underscore from fn name when building a test
107 // _mm_foo -> mm_foo such that the test name is test_mm_foo.
108 let test_name_string = format!("{}", name);
109 let mut test_name_id = test_name_string.as_str();
110 while test_name_id.starts_with('_') {
111 test_name_id = &test_name_id[1..];
112 }
113 let has_test = tests.contains(&format!("test_{}", test_name_id));
114
115 quote! {
116 Function {
117 name: stringify!(#name),
118 arguments: &[#(#arguments),*],
119 ret: #ret,
120 target_feature: #target_feature,
121 instrs: &[#(#instrs),*],
122 file: stringify!(#path),
123 required_const: &[#(#required_const),*],
124 has_test: #has_test,
125 }
126 }
127 })
128 .collect::<Vec<_>>();
129
130 let ret = quote! { #input: &[Function] = &[#(#functions),*]; };
131 // println!("{}", ret);
132 ret.into()
133 }
134
/// Maps a `syn::Type` from an intrinsic signature to a token stream naming
/// the corresponding `&Type` static in the verifier's type tables
/// (e.g. `__m128i` -> `&M128I`).
///
/// Panics on any type outside the known x86/arm/mips vocabulary so that a
/// new intrinsic with an unrecognized signature fails loudly at
/// macro-expansion time rather than being silently skipped.
fn to_type(t: &syn::Type) -> proc_macro2::TokenStream {
    match *t {
        // A plain single-segment path: look the name up in the table below.
        syn::Type::Path(ref p) => match extract_path_ident(&p.path).to_string().as_ref() {
            // x86 ...
            "__m128" => quote! { &M128 },
            "__m128d" => quote! { &M128D },
            "__m128i" => quote! { &M128I },
            "__m256" => quote! { &M256 },
            "__m256d" => quote! { &M256D },
            "__m256i" => quote! { &M256I },
            "__m512" => quote! { &M512 },
            "__m512d" => quote! { &M512D },
            "__m512i" => quote! { &M512I },
            "__mmask16" => quote! { &MMASK16 },
            "__m64" => quote! { &M64 },
            "bool" => quote! { &BOOL },
            "f32" => quote! { &F32 },
            "f64" => quote! { &F64 },
            "i16" => quote! { &I16 },
            "i32" => quote! { &I32 },
            "i64" => quote! { &I64 },
            "i8" => quote! { &I8 },
            "u16" => quote! { &U16 },
            "u32" => quote! { &U32 },
            "u64" => quote! { &U64 },
            "u128" => quote! { &U128 },
            "u8" => quote! { &U8 },
            "Ordering" => quote! { &ORDERING },
            "CpuidResult" => quote! { &CPUID },

            // arm ...
            "int8x4_t" => quote! { &I8X4 },
            "int8x8_t" => quote! { &I8X8 },
            "int8x8x2_t" => quote! { &I8X8X2 },
            "int8x8x3_t" => quote! { &I8X8X3 },
            "int8x8x4_t" => quote! { &I8X8X4 },
            "int8x16x2_t" => quote! { &I8X16X2 },
            "int8x16x3_t" => quote! { &I8X16X3 },
            "int8x16x4_t" => quote! { &I8X16X4 },
            "int8x16_t" => quote! { &I8X16 },
            "int16x2_t" => quote! { &I16X2 },
            "int16x4_t" => quote! { &I16X4 },
            "int16x8_t" => quote! { &I16X8 },
            "int32x2_t" => quote! { &I32X2 },
            "int32x4_t" => quote! { &I32X4 },
            "int64x1_t" => quote! { &I64X1 },
            "int64x2_t" => quote! { &I64X2 },
            "uint8x8_t" => quote! { &U8X8 },
            "uint8x8x2_t" => quote! { &U8X8X2 },
            "uint8x16x2_t" => quote! { &U8X16X2 },
            "uint8x16x3_t" => quote! { &U8X16X3 },
            "uint8x16x4_t" => quote! { &U8X16X4 },
            "uint8x8x3_t" => quote! { &U8X8X3 },
            "uint8x8x4_t" => quote! { &U8X8X4 },
            "uint8x16_t" => quote! { &U8X16 },
            "uint16x4_t" => quote! { &U16X4 },
            "uint16x8_t" => quote! { &U16X8 },
            "uint32x2_t" => quote! { &U32X2 },
            "uint32x4_t" => quote! { &U32X4 },
            "uint64x1_t" => quote! { &U64X1 },
            "uint64x2_t" => quote! { &U64X2 },
            "float32x2_t" => quote! { &F32X2 },
            "float32x4_t" => quote! { &F32X4 },
            "float64x1_t" => quote! { &F64X1 },
            "float64x2_t" => quote! { &F64X2 },
            "poly8x8_t" => quote! { &POLY8X8 },
            "poly8x8x2_t" => quote! { &POLY8X8X2 },
            "poly8x8x3_t" => quote! { &POLY8X8X3 },
            "poly8x8x4_t" => quote! { &POLY8X8X4 },
            "poly8x16x2_t" => quote! { &POLY8X16X2 },
            "poly8x16x3_t" => quote! { &POLY8X16X3 },
            "poly8x16x4_t" => quote! { &POLY8X16X4 },
            "poly64_t" => quote! { &P64 },
            "poly64x1_t" => quote! { &POLY64X1 },
            "poly64x2_t" => quote! { &POLY64X2 },
            "poly8x16_t" => quote! { &POLY8X16 },
            "poly16x4_t" => quote! { &POLY16X4 },
            "poly16x8_t" => quote! { &POLY16X8 },
            "poly128_t" => quote! { &P128 },

            // mips (MSA vector types) ...
            "v16i8" => quote! { &v16i8 },
            "v8i16" => quote! { &v8i16 },
            "v4i32" => quote! { &v4i32 },
            "v2i64" => quote! { &v2i64 },
            "v16u8" => quote! { &v16u8 },
            "v8u16" => quote! { &v8u16 },
            "v4u32" => quote! { &v4u32 },
            "v2u64" => quote! { &v2u64 },
            "v8f16" => quote! { &v8f16 },
            "v4f32" => quote! { &v4f32 },
            "v2f64" => quote! { &v2f64 },

            s => panic!("unsupported type: \"{}\"", s),
        },
        // Pointers and references are represented identically: only
        // mutability and the pointee type matter to the verifier.
        syn::Type::Ptr(syn::TypePtr {
            ref elem,
            ref mutability,
            ..
        })
        | syn::Type::Reference(syn::TypeReference {
            ref elem,
            ref mutability,
            ..
        }) => {
            // Both pointers and references can have a mut token (*mut and &mut)
            if mutability.is_some() {
                let tokens = to_type(&elem);
                quote! { &Type::MutPtr(#tokens) }
            } else {
                // If they don't (*const or &) then they are "const"
                let tokens = to_type(&elem);
                quote! { &Type::ConstPtr(#tokens) }
            }
        }

        syn::Type::Slice(_) => panic!("unsupported slice"),
        syn::Type::Array(_) => panic!("unsupported array"),
        syn::Type::Tuple(_) => quote! { &TUPLE },
        syn::Type::Never(_) => quote! { &NEVER },
        _ => panic!("unsupported type"),
    }
}
257
258 fn extract_path_ident(path: &syn::Path) -> syn::Ident {
259 if path.leading_colon.is_some() {
260 panic!("unsupported leading colon in path")
261 }
262 if path.segments.len() != 1 {
263 panic!("unsupported path that needs name resolution")
264 }
265 match path.segments.first().expect("segment not found").arguments {
266 syn::PathArguments::None => {}
267 _ => panic!("unsupported path that has path arguments"),
268 }
269 path.segments
270 .first()
271 .expect("segment not found")
272 .ident
273 .clone()
274 }
275
276 fn walk(root: &Path, files: &mut Vec<(syn::File, String)>) {
277 for file in root.read_dir().unwrap() {
278 let file = file.unwrap();
279 if file.file_type().unwrap().is_dir() {
280 walk(&file.path(), files);
281 continue;
282 }
283 let path = file.path();
284 if path.extension().and_then(std::ffi::OsStr::to_str) != Some("rs") {
285 continue;
286 }
287
288 if path.file_name().and_then(std::ffi::OsStr::to_str) == Some("test.rs") {
289 continue;
290 }
291
292 let mut contents = String::new();
293 File::open(&path)
294 .unwrap_or_else(|_| panic!("can't open file at path: {}", path.display()))
295 .read_to_string(&mut contents)
296 .expect("failed to read file to string");
297
298 files.push((
299 syn::parse_str::<syn::File>(&contents).expect("failed to parse"),
300 path.display().to_string(),
301 ));
302 }
303 }
304
/// Extracts every instruction name asserted via
/// `#[cfg_attr(..., assert_instr(...))]` attributes on an intrinsic.
///
/// Attributes that are `cfg_attr` but not `assert_instr` simply fail the
/// custom parse below and are filtered out.
fn find_instrs(attrs: &[syn::Attribute]) -> Vec<String> {
    struct AssertInstr {
        instr: String,
    }

    // A small custom parser to parse out the instruction in `assert_instr`.
    //
    // TODO: should probably just reuse `Invoc` from the `assert-instr-macro`
    // crate.
    impl syn::parse::Parse for AssertInstr {
        fn parse(content: syn::parse::ParseStream) -> syn::Result<Self> {
            let input;
            parenthesized!(input in content);
            // Skip the cfg condition (e.g. `test`) and the separating comma.
            let _ = input.parse::<syn::Meta>()?;
            let _ = input.parse::<Token![,]>()?;
            let ident = input.parse::<syn::Ident>()?;
            if ident != "assert_instr" {
                return Err(input.error("expected `assert_instr`"));
            }
            let instrs;
            parenthesized!(instrs in input);

            // Concatenate string literals, identifiers, and `.` tokens into
            // the instruction name; the first comma ends the name and all
            // remaining tokens (assert_instr options) are discarded.
            let mut instr = String::new();
            while !instrs.is_empty() {
                if let Ok(lit) = instrs.parse::<syn::LitStr>() {
                    instr.push_str(&lit.value());
                } else if let Ok(ident) = instrs.call(syn::Ident::parse_any) {
                    instr.push_str(&ident.to_string());
                } else if instrs.parse::<Token![.]>().is_ok() {
                    instr.push_str(".");
                } else if instrs.parse::<Token![,]>().is_ok() {
                    // consume everything remaining
                    drop(instrs.parse::<proc_macro2::TokenStream>());
                    break;
                } else {
                    return Err(input.error("failed to parse instruction"));
                }
            }
            Ok(Self { instr })
        }
    }

    attrs
        .iter()
        .filter(|a| a.path.is_ident("cfg_attr"))
        .filter_map(|a| {
            syn::parse2::<AssertInstr>(a.tokens.clone())
                .ok()
                .map(|a| a.instr)
        })
        .collect()
}
357
358 fn find_target_feature(attrs: &[syn::Attribute]) -> Option<syn::Lit> {
359 attrs
360 .iter()
361 .flat_map(|a| {
362 if let Ok(a) = a.parse_meta() {
363 if let syn::Meta::List(i) = a {
364 if i.path.is_ident("target_feature") {
365 return i.nested;
366 }
367 }
368 }
369 syn::punctuated::Punctuated::new()
370 })
371 .filter_map(|nested| match nested {
372 syn::NestedMeta::Meta(m) => Some(m),
373 syn::NestedMeta::Lit(_) => None,
374 })
375 .find_map(|m| match m {
376 syn::Meta::NameValue(ref i) if i.path.is_ident("enable") => Some(i.clone().lit),
377 _ => None,
378 })
379 }
380
381 fn find_required_const(attrs: &[syn::Attribute]) -> Vec<usize> {
382 attrs
383 .iter()
384 .flat_map(|a| {
385 if a.path.segments[0].ident == "rustc_args_required_const" {
386 syn::parse::<RustcArgsRequiredConst>(a.tokens.clone().into())
387 .unwrap()
388 .args
389 } else {
390 Vec::new()
391 }
392 })
393 .collect()
394 }
395
/// Parsed form of a `#[rustc_args_required_const(...)]` attribute: the
/// zero-based indices of the arguments that must be compile-time constants.
struct RustcArgsRequiredConst {
    args: Vec<usize>,
}
399
400 impl syn::parse::Parse for RustcArgsRequiredConst {
401 fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
402 let content;
403 parenthesized!(content in input);
404 let list =
405 syn::punctuated::Punctuated::<syn::LitInt, Token![,]>::parse_terminated(&content)?;
406 Ok(Self {
407 args: list
408 .into_iter()
409 .map(|a| a.base10_parse::<usize>())
410 .collect::<syn::Result<_>>()?,
411 })
412 }
413 }