Skip to content

Commit 802b7d4

Browse files
nbdd0121 authored and BennoLossin committed
rust: macros: use quote! from vendored crate
With `quote` crate now vendored in the kernel, we can remove our custom `quote!` macro implementation and just rely on that crate instead. The `quote` crate uses types from the `proc-macro2` library so we also update to use that, and perform conversion in the top-level lib.rs. Clippy complains about unnecessary `.to_string()` as `proc-macro2` provides additional `PartialEq` impl, so they are removed. Reviewed-by: Tamir Duberstein <tamird@gmail.com> Reviewed-by: Benno Lossin <lossin@kernel.org> Signed-off-by: Gary Guo <gary@garyguo.net>
1 parent 9d5653e commit 802b7d4

10 files changed

Lines changed: 32 additions & 205 deletions

File tree

rust/macros/concat_idents.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
// SPDX-License-Identifier: GPL-2.0
22

3-
use proc_macro::{token_stream, Ident, TokenStream, TokenTree};
3+
use proc_macro2::{token_stream, Ident, TokenStream, TokenTree};
44

55
use crate::helpers::expect_punct;
66

rust/macros/export.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
// SPDX-License-Identifier: GPL-2.0
22

3+
use proc_macro2::TokenStream;
4+
use quote::quote;
5+
36
use crate::helpers::function_name;
4-
use proc_macro::TokenStream;
57

68
/// Please see [`crate::export`] for documentation.
79
pub(crate) fn export(_attr: TokenStream, ts: TokenStream) -> TokenStream {

rust/macros/fmt.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
// SPDX-License-Identifier: GPL-2.0
22

3-
use proc_macro::{Ident, TokenStream, TokenTree};
43
use std::collections::BTreeSet;
54

5+
use proc_macro2::{Ident, TokenStream, TokenTree};
6+
use quote::quote_spanned;
7+
68
/// Please see [`crate::fmt`] for documentation.
79
pub(crate) fn fmt(input: TokenStream) -> TokenStream {
810
let mut input = input.into_iter();

rust/macros/helpers.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
// SPDX-License-Identifier: GPL-2.0
22

3-
use proc_macro::{token_stream, Group, Ident, TokenStream, TokenTree};
3+
use proc_macro2::{token_stream, Group, Ident, TokenStream, TokenTree};
44

55
pub(crate) fn try_ident(it: &mut token_stream::IntoIter) -> Option<String> {
66
if let Some(TokenTree::Ident(ident)) = it.next() {
@@ -86,7 +86,7 @@ pub(crate) fn function_name(input: TokenStream) -> Option<Ident> {
8686
let mut input = input.into_iter();
8787
while let Some(token) = input.next() {
8888
match token {
89-
TokenTree::Ident(i) if i.to_string() == "fn" => {
89+
TokenTree::Ident(i) if i == "fn" => {
9090
if let Some(TokenTree::Ident(i)) = input.next() {
9191
return Some(i);
9292
}

rust/macros/kunit.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,11 @@
44
//!
55
//! Copyright (c) 2023 José Expósito <jose.exposito89@gmail.com>
66
7-
use proc_macro::{Delimiter, Group, TokenStream, TokenTree};
87
use std::collections::HashMap;
98
use std::fmt::Write;
109

10+
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
11+
1112
pub(crate) fn kunit_tests(attr: TokenStream, ts: TokenStream) -> TokenStream {
1213
let attr = attr.to_string();
1314

@@ -59,7 +60,7 @@ pub(crate) fn kunit_tests(attr: TokenStream, ts: TokenStream) -> TokenStream {
5960
}
6061
_ => (),
6162
},
62-
TokenTree::Ident(i) if i.to_string() == "fn" && attributes.contains_key("test") => {
63+
TokenTree::Ident(i) if i == "fn" && attributes.contains_key("test") => {
6364
if let Some(TokenTree::Ident(test_name)) = body_it.next() {
6465
tests.push((test_name, attributes.remove("cfg").unwrap_or_default()))
6566
}

rust/macros/lib.rs

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,6 @@
1111
// to avoid depending on the full `proc_macro_span` on Rust >= 1.88.0.
1212
#![cfg_attr(not(CONFIG_RUSTC_HAS_SPAN_FILE), feature(proc_macro_span))]
1313

14-
#[macro_use]
15-
mod quote;
1614
mod concat_idents;
1715
mod export;
1816
mod fmt;
@@ -132,7 +130,7 @@ use proc_macro::TokenStream;
132130
/// the kernel module.
133131
#[proc_macro]
134132
pub fn module(ts: TokenStream) -> TokenStream {
135-
module::module(ts)
133+
module::module(ts.into()).into()
136134
}
137135

138136
/// Declares or implements a vtable trait.
@@ -207,7 +205,7 @@ pub fn module(ts: TokenStream) -> TokenStream {
207205
/// [`kernel::error::VTABLE_DEFAULT_ERROR`]: ../kernel/error/constant.VTABLE_DEFAULT_ERROR.html
208206
#[proc_macro_attribute]
209207
pub fn vtable(attr: TokenStream, ts: TokenStream) -> TokenStream {
210-
vtable::vtable(attr, ts)
208+
vtable::vtable(attr.into(), ts.into()).into()
211209
}
212210

213211
/// Export a function so that C code can call it via a header file.
@@ -230,7 +228,7 @@ pub fn vtable(attr: TokenStream, ts: TokenStream) -> TokenStream {
230228
/// automatically exported with `EXPORT_SYMBOL_GPL`.
231229
#[proc_macro_attribute]
232230
pub fn export(attr: TokenStream, ts: TokenStream) -> TokenStream {
233-
export::export(attr, ts)
231+
export::export(attr.into(), ts.into()).into()
234232
}
235233

236234
/// Like [`core::format_args!`], but automatically wraps arguments in [`kernel::fmt::Adapter`].
@@ -248,7 +246,7 @@ pub fn export(attr: TokenStream, ts: TokenStream) -> TokenStream {
248246
/// [`pr_info!`]: ../kernel/macro.pr_info.html
249247
#[proc_macro]
250248
pub fn fmt(input: TokenStream) -> TokenStream {
251-
fmt::fmt(input)
249+
fmt::fmt(input.into()).into()
252250
}
253251

254252
/// Concatenate two identifiers.
@@ -306,7 +304,7 @@ pub fn fmt(input: TokenStream) -> TokenStream {
306304
/// ```
307305
#[proc_macro]
308306
pub fn concat_idents(ts: TokenStream) -> TokenStream {
309-
concat_idents::concat_idents(ts)
307+
concat_idents::concat_idents(ts.into()).into()
310308
}
311309

312310
/// Paste identifiers together.
@@ -444,9 +442,12 @@ pub fn concat_idents(ts: TokenStream) -> TokenStream {
444442
/// [`paste`]: https://docs.rs/paste/
445443
#[proc_macro]
446444
pub fn paste(input: TokenStream) -> TokenStream {
447-
let mut tokens = input.into_iter().collect();
445+
let mut tokens = proc_macro2::TokenStream::from(input).into_iter().collect();
448446
paste::expand(&mut tokens);
449-
tokens.into_iter().collect()
447+
tokens
448+
.into_iter()
449+
.collect::<proc_macro2::TokenStream>()
450+
.into()
450451
}
451452

452453
/// Registers a KUnit test suite and its test cases using a user-space like syntax.
@@ -473,5 +474,5 @@ pub fn paste(input: TokenStream) -> TokenStream {
473474
/// ```
474475
#[proc_macro_attribute]
475476
pub fn kunit_tests(attr: TokenStream, ts: TokenStream) -> TokenStream {
476-
kunit::kunit_tests(attr, ts)
477+
kunit::kunit_tests(attr.into(), ts.into()).into()
477478
}

rust/macros/module.rs

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
11
// SPDX-License-Identifier: GPL-2.0
22

3-
use crate::helpers::*;
4-
use proc_macro::{token_stream, Delimiter, Literal, TokenStream, TokenTree};
53
use std::fmt::Write;
64

5+
use proc_macro2::{token_stream, Delimiter, Literal, TokenStream, TokenTree};
6+
7+
use crate::helpers::*;
8+
79
fn expect_string_array(it: &mut token_stream::IntoIter) -> Vec<String> {
810
let group = expect_group(it);
911
assert_eq!(group.delimiter(), Delimiter::Bracket);

rust/macros/paste.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
// SPDX-License-Identifier: GPL-2.0
22

3-
use proc_macro::{Delimiter, Group, Ident, Spacing, Span, TokenTree};
3+
use proc_macro2::{Delimiter, Group, Ident, Spacing, Span, TokenTree};
44

55
fn concat_helper(tokens: &[TokenTree]) -> Vec<(String, Span)> {
66
let mut tokens = tokens.iter();

rust/macros/quote.rs

Lines changed: 0 additions & 182 deletions
This file was deleted.

rust/macros/vtable.rs

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
// SPDX-License-Identifier: GPL-2.0
22

3-
use proc_macro::{Delimiter, Group, TokenStream, TokenTree};
43
use std::collections::HashSet;
54
use std::fmt::Write;
65

6+
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
7+
78
pub(crate) fn vtable(_attr: TokenStream, ts: TokenStream) -> TokenStream {
89
let mut tokens: Vec<_> = ts.into_iter().collect();
910

@@ -31,15 +32,15 @@ pub(crate) fn vtable(_attr: TokenStream, ts: TokenStream) -> TokenStream {
3132
let mut consts = HashSet::new();
3233
while let Some(token) = body_it.next() {
3334
match token {
34-
TokenTree::Ident(ident) if ident.to_string() == "fn" => {
35+
TokenTree::Ident(ident) if ident == "fn" => {
3536
let fn_name = match body_it.next() {
3637
Some(TokenTree::Ident(ident)) => ident.to_string(),
3738
// Possibly we've encountered a fn pointer type instead.
3839
_ => continue,
3940
};
4041
functions.push(fn_name);
4142
}
42-
TokenTree::Ident(ident) if ident.to_string() == "const" => {
43+
TokenTree::Ident(ident) if ident == "const" => {
4344
let const_name = match body_it.next() {
4445
Some(TokenTree::Ident(ident)) => ident.to_string(),
4546
// Possibly we've encountered an inline const block instead.

0 commit comments

Comments (0)