diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 95c6d6fdad..3b418fe6e8 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -33,6 +33,7 @@ jobs:
         with:
           name: Cargo.lock
           path: Cargo.lock
+        continue-on-error: true
 
   build:
     name: ${{matrix.name || format('Rust {0}', matrix.rust)}}
@@ -42,7 +43,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        rust: [stable, beta, 1.63.0, 1.61.0]
+        rust: [stable, beta, 1.65.0, 1.61.0]
         include:
           - rust: nightly
             components: rustc-dev
@@ -50,8 +51,11 @@ jobs:
             name: WebAssembly
             target: wasm32-unknown-unknown
           - rust: nightly
-            name: WASI
-            target: wasm32-wasi
+            name: WASI preview1
+            target: wasm32-wasip1
+          - rust: nightly
+            name: WASI preview2
+            target: wasm32-wasip2
           - rust: nightly
             name: Windows
             os: windows
diff --git a/Cargo.toml b/Cargo.toml
index 68ffa971b0..879ace9650 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "syn"
-version = "2.0.85"
+version = "2.0.96"
 authors = ["David Tolnay <dtolnay@gmail.com>"]
 categories = ["development-tools::procedural-macro-helpers", "parser-implementations"]
 description = "Parser for Rust source code"
@@ -35,7 +35,7 @@ proc-macro = ["proc-macro2/proc-macro", "quote?/proc-macro"]
 test = ["syn-test-suite/all-features"]
 
 [dependencies]
-proc-macro2 = { version = "1.0.83", default-features = false }
+proc-macro2 = { version = "1.0.91", default-features = false }
 quote = { version = "1.0.35", optional = true, default-features = false }
 unicode-ident = "1"
diff --git a/benches/rust.rs b/benches/rust.rs
index 75a9f91010..96770ddfd3 100644
--- a/benches/rust.rs
+++ b/benches/rust.rs
@@ -60,7 +60,10 @@ mod librustc_parse {
     use crate::repo;
     use rustc_data_structures::sync::Lrc;
     use rustc_error_messages::FluentBundle;
-    use rustc_errors::{emitter::Emitter, translation::Translate, DiagCtxt, DiagInner};
+    use rustc_errors::emitter::Emitter;
+    use rustc_errors::registry::Registry;
+    use rustc_errors::translation::Translate;
+    use rustc_errors::{DiagCtxt, DiagInner};
     use rustc_session::parse::ParseSess;
     use rustc_span::source_map::{FilePathMapping, SourceMap};
     use rustc_span::FileName;
@@ -70,7 +73,7 @@ mod librustc_parse {
     struct SilentEmitter;
 
     impl Emitter for SilentEmitter {
-        fn emit_diagnostic(&mut self, _diag: DiagInner) {}
+        fn emit_diagnostic(&mut self, _diag: DiagInner, _registry: &Registry) {}
         fn source_map(&self) -> Option<&SourceMap> {
             None
         }
diff --git a/codegen/Cargo.toml b/codegen/Cargo.toml
index 1e25da5129..76fe30674a 100644
--- a/codegen/Cargo.toml
+++ b/codegen/Cargo.toml
@@ -24,7 +24,7 @@ serde = "1.0.88"
 serde_json = "1.0.38"
 syn = { version = "2", features = ["derive", "full", "parsing", "printing"], default-features = false }
 syn-codegen = { path = "../json", default-features = false }
-toml = "0.8"
+toml = { version = "0.8", default-features = false, features = ["parse"] }
 
 [workspace]
 
 [patch.crates-io]
diff --git a/codegen/src/snapshot.rs b/codegen/src/snapshot.rs
index 17588dbcc0..c138830dd7 100644
--- a/codegen/src/snapshot.rs
+++ b/codegen/src/snapshot.rs
@@ -275,7 +275,7 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
     for node in &defs.types {
         if node.ident == *inner {
             if let Data::Enum(variants) = &node.data {
-                if variants.get("None").map_or(false, Vec::is_empty) {
+                if variants.get("None").is_some_and(Vec::is_empty) {
                     let ty = rust_type(ty);
                     call = quote!
{ match #val.#ident { diff --git a/examples/dump-syntax/Cargo.toml b/examples/dump-syntax/Cargo.toml index 6239141bb7..032a374291 100644 --- a/examples/dump-syntax/Cargo.toml +++ b/examples/dump-syntax/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -colored = "2" +colored = "3" proc-macro2 = { version = "1", features = ["span-locations"] } [dependencies.syn] diff --git a/src/buffer.rs b/src/buffer.rs index c28440a29b..b4d1980725 100644 --- a/src/buffer.rs +++ b/src/buffer.rs @@ -183,52 +183,6 @@ impl<'a> Cursor<'a> { self.ptr == self.scope } - /// If the cursor is pointing at a `Group` with the given delimiter, returns - /// a cursor into that group and one pointing to the next `TokenTree`. - pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> { - // If we're not trying to enter a none-delimited group, we want to - // ignore them. We have to make sure to _not_ ignore them when we want - // to enter them, of course. For obvious reasons. - if delim != Delimiter::None { - self.ignore_none(); - } - - if let Entry::Group(group, end_offset) = self.entry() { - if group.delimiter() == delim { - let span = group.delim_span(); - let end_of_group = unsafe { self.ptr.add(*end_offset) }; - let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; - let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; - return Some((inside_of_group, span, after_group)); - } - } - - None - } - - pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> { - if let Entry::Group(group, end_offset) = self.entry() { - let delimiter = group.delimiter(); - let span = group.delim_span(); - let end_of_group = unsafe { self.ptr.add(*end_offset) }; - let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; - let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; - return Some((inside_of_group, delimiter, span, after_group)); - } - - None - } - - pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> { - if let Entry::Group(group, end_offset) = self.entry() { - let end_of_group = unsafe { self.ptr.add(*end_offset) }; - let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; - return Some((group.clone(), after_group)); - } - - None - } - /// If the cursor is pointing at a `Ident`, returns it along with a cursor /// pointing at the next `TokenTree`. pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> { @@ -279,6 +233,54 @@ impl<'a> Cursor<'a> { } } + /// If the cursor is pointing at a `Group` with the given delimiter, returns + /// a cursor into that group and one pointing to the next `TokenTree`. + pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> { + // If we're not trying to enter a none-delimited group, we want to + // ignore them. We have to make sure to _not_ ignore them when we want + // to enter them, of course. For obvious reasons. 
+ if delim != Delimiter::None { + self.ignore_none(); + } + + if let Entry::Group(group, end_offset) = self.entry() { + if group.delimiter() == delim { + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, span, after_group)); + } + } + + None + } + + /// If the cursor is pointing at a `Group`, returns a cursor into the group + /// and one pointing to the next `TokenTree`. + pub fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let delimiter = group.delimiter(); + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, delimiter, span, after_group)); + } + + None + } + + pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((group.clone(), after_group)); + } + + None + } + /// Copies all remaining tokens visible from this cursor into a /// `TokenStream`. pub fn token_stream(self) -> TokenStream { diff --git a/src/classify.rs b/src/classify.rs index b3a6ae960e..8eab19dbc3 100644 --- a/src/classify.rs +++ b/src/classify.rs @@ -63,96 +63,10 @@ pub(crate) fn requires_comma_to_be_match_arm(expr: &Expr) -> bool { | Expr::Tuple(_) | Expr::Unary(_) | Expr::Yield(_) - | Expr::Verbatim(_) => true + | Expr::Verbatim(_) => true, } } -#[cfg(all(feature = "printing", feature = "full"))] -pub(crate) fn confusable_with_adjacent_block(mut expr: &Expr) -> bool { - let mut stack = Vec::new(); - - while let Some(next) = match expr { - Expr::Assign(e) => { - stack.push(&e.right); - Some(&e.left) - } - Expr::Await(e) => Some(&e.base), - Expr::Binary(e) => { - stack.push(&e.right); - Some(&e.left) - } - Expr::Break(e) => { - if let Some(Expr::Block(_)) = e.expr.as_deref() { - return true; - } - stack.pop() - } - Expr::Call(e) => Some(&e.func), - Expr::Cast(e) => Some(&e.expr), - Expr::Closure(e) => Some(&e.body), - Expr::Field(e) => Some(&e.base), - Expr::Index(e) => Some(&e.expr), - Expr::MethodCall(e) => Some(&e.receiver), - Expr::Range(e) => { - if let Some(Expr::Block(_)) = e.end.as_deref() { - return true; - } - match (&e.start, &e.end) { - (Some(start), end) => { - stack.extend(end); - Some(start) - } - (None, Some(end)) => Some(end), - (None, None) => stack.pop(), - } - } - Expr::RawAddr(e) => Some(&e.expr), - Expr::Reference(e) => Some(&e.expr), - Expr::Return(e) => { - if e.expr.is_none() && stack.is_empty() { - return true; - } - stack.pop() - } - Expr::Struct(_) => return true, - Expr::Try(e) => Some(&e.expr), - Expr::Unary(e) => Some(&e.expr), - Expr::Yield(e) => { - if e.expr.is_none() && stack.is_empty() { - return true; - } - stack.pop() - } - - Expr::Array(_) - | Expr::Async(_) - | Expr::Block(_) - | Expr::Const(_) - | Expr::Continue(_) - | Expr::ForLoop(_) - | Expr::Group(_) - | Expr::If(_) - | Expr::Infer(_) - | Expr::Let(_) - | Expr::Lit(_) - | Expr::Loop(_) - | Expr::Macro(_) - | Expr::Match(_) - | Expr::Paren(_) - | Expr::Path(_) - | Expr::Repeat(_) - 
        | Expr::TryBlock(_)
-        | Expr::Tuple(_)
-        | Expr::Unsafe(_)
-        | Expr::Verbatim(_)
-        | Expr::While(_) => stack.pop(),
-    } {
-        expr = next;
-    }
-
-    false
-}
-
 #[cfg(feature = "printing")]
 pub(crate) fn trailing_unparameterized_path(mut ty: &Type) -> bool {
     loop {
diff --git a/src/data.rs b/src/data.rs
index 9e73f02d3c..96db2a0b7c 100644
--- a/src/data.rs
+++ b/src/data.rs
@@ -248,6 +248,8 @@ pub(crate) mod parsing {
     use crate::parse::discouraged::Speculative as _;
     use crate::parse::{Parse, ParseStream};
     use crate::restriction::{FieldMutability, Visibility};
+    #[cfg(not(feature = "full"))]
+    use crate::scan_expr::scan_expr;
     use crate::token;
     use crate::ty::Type;
     use crate::verbatim;
@@ -276,7 +278,7 @@ pub(crate) mod parsing {
                 let mut discriminant: Result<Expr> = ahead.parse();
                 if discriminant.is_ok() {
                     input.advance_to(&ahead);
-                } else if scan_lenient_discriminant(input).is_ok() {
+                } else if scan_expr(input).is_ok() {
                     discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
                 }
                 discriminant?
@@ -294,85 +296,6 @@ pub(crate) mod parsing {
         }
     }
 
-    #[cfg(not(feature = "full"))]
-    pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> {
-        use crate::expr::Member;
-        use crate::lifetime::Lifetime;
-        use crate::lit::Lit;
-        use crate::lit::LitFloat;
-        use crate::op::{BinOp, UnOp};
-        use crate::path::{self, AngleBracketedGenericArguments};
-        use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis};
-
-        let consume = |delimiter: Delimiter| {
-            Result::unwrap(input.step(|cursor| match cursor.group(delimiter) {
-                Some((_inside, _span, rest)) => Ok((true, rest)),
-                None => Ok((false, *cursor)),
-            }))
-        };
-
-        macro_rules! consume {
-            [$token:tt] => {
-                input.parse::<Option<Token![$token]>>().unwrap().is_some()
-            };
-        }
-
-        let mut initial = true;
-        let mut depth = 0usize;
-        loop {
-            if initial {
-                if consume![&] {
-                    input.parse::<Option<Token![mut]>>()?;
-                } else if consume![if] || consume![match] || consume![while] {
-                    depth += 1;
-                } else if input.parse::<Option<Lit>>()?.is_some()
-                    || (consume(Brace) || consume(Bracket) || consume(Parenthesis))
-                    || (consume![async] || consume![const] || consume![loop] || consume![unsafe])
-                        && (consume(Brace) || break)
-                {
-                    initial = false;
-                } else if consume![let] {
-                    while !consume![=] {
-                        if !((consume![|] || consume![ref] || consume![mut] || consume![@])
-                            || (consume![!] || input.parse::<Option<Lit>>()?.is_some())
-                            || (consume![..=] || consume![..] || consume![&] || consume![_])
-                            || (consume(Brace) || consume(Bracket) || consume(Parenthesis)))
-                        {
-                            path::parsing::qpath(input, true)?;
-                        }
-                    }
-                } else if input.parse::<Option<Lifetime>>()?.is_some() && !consume![:] {
-                    break;
-                } else if input.parse::<UnOp>().is_err() {
-                    path::parsing::qpath(input, true)?;
-                    initial = consume![!] || depth == 0 && input.peek(token::Brace);
-                }
-            } else if input.is_empty() || input.peek(Token![,]) {
-                return Ok(());
-            } else if depth > 0 && consume(Brace) {
-                if consume![else] && !consume(Brace) {
-                    initial = consume![if] || break;
-                } else {
-                    depth -= 1;
-                }
-            } else if input.parse::<BinOp>().is_ok() || (consume![..] | consume![=]) {
-                initial = true;
-            } else if consume![.] {
-                if input.parse::<Option<LitFloat>>()?.is_none()
-                    && (input.parse::<Member>()?.is_named() && consume![::])
-                {
-                    AngleBracketedGenericArguments::do_parse(None, input)?;
-                }
-            } else if consume![as] {
-                input.parse::<Type>()?;
-            } else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) {
-                break;
-            }
-        }
-
-        Err(input.error("unsupported expression"))
-    }
-
     #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
     impl Parse for FieldsNamed {
         fn parse(input: ParseStream) -> Result<Self> {
diff --git a/src/expr.rs b/src/expr.rs
index dc18cd1aa3..07024e4ff5 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -1,15 +1,17 @@
 use crate::attr::Attribute;
 #[cfg(all(feature = "parsing", feature = "full"))]
 use crate::error::Result;
+#[cfg(feature = "parsing")]
+use crate::ext::IdentExt as _;
 #[cfg(feature = "full")]
 use crate::generics::BoundLifetimes;
 use crate::ident::Ident;
-#[cfg(feature = "full")]
+#[cfg(any(feature = "parsing", feature = "full"))]
 use crate::lifetime::Lifetime;
 use crate::lit::Lit;
 use crate::mac::Macro;
 use crate::op::{BinOp, UnOp};
-#[cfg(all(feature = "parsing", feature = "full"))]
+#[cfg(feature = "parsing")]
 use crate::parse::ParseStream;
 #[cfg(feature = "full")]
 use crate::pat::Pat;
@@ -889,6 +891,36 @@ impl Expr {
         parsing::parse_with_earlier_boundary_rule(input)
     }
 
+    /// Returns whether the next token in the parse stream is one that might
+    /// possibly form the beginning of an expr.
+    ///
+    /// This classification is a load-bearing part of the grammar of some Rust
+    /// expressions, notably `return` and `break`. For example `return < …` will
+    /// never parse `<` as a binary operator regardless of what comes after,
+    /// because `<` is a legal starting token for an expression and so it's
+    /// required to be continued as a return value, such as `return <Struct
+    /// as Trait>::CONST`. Meanwhile `return > …` treats the `>` as a binary
+    /// operator because it cannot be a starting token for any Rust expression.
+    #[cfg(feature = "parsing")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+    pub fn peek(input: ParseStream) -> bool {
+        input.peek(Ident::peek_any) && !input.peek(Token![as]) // value name or keyword
+            || input.peek(token::Paren) // tuple
+            || input.peek(token::Bracket) // array
+            || input.peek(token::Brace) // block
+            || input.peek(Lit) // literal
+            || input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
+            || input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
+            || input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
+            || input.peek(Token![|]) && !input.peek(Token![|=]) // closure
+            || input.peek(Token![&]) && !input.peek(Token![&=]) // reference
+            || input.peek(Token![..]) // range
+            || input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
+            || input.peek(Token![::]) // absolute path
+            || input.peek(Lifetime) // labeled loop
+            || input.peek(Token![#]) // expression attributes
+    }
+
     #[cfg(all(feature = "parsing", feature = "full"))]
     pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
         match self {
@@ -1004,6 +1036,16 @@ impl IdentFragment for Member {
     }
 }
 
+#[cfg(any(feature = "parsing", feature = "printing"))]
+impl Member {
+    pub(crate) fn is_named(&self) -> bool {
+        match self {
+            Member::Named(_) => true,
+            Member::Unnamed(_) => false,
+        }
+    }
+}
+
 ast_struct! {
     /// The index of an unnamed tuple struct field.
#[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))] @@ -1147,8 +1189,6 @@ pub(crate) mod parsing { FieldValue, Index, Member, }; #[cfg(feature = "full")] - use crate::ext::IdentExt as _; - #[cfg(feature = "full")] use crate::generics::BoundLifetimes; use crate::ident::Ident; #[cfg(feature = "full")] @@ -1266,25 +1306,6 @@ pub(crate) mod parsing { } } - #[cfg(feature = "full")] - fn can_begin_expr(input: ParseStream) -> bool { - input.peek(Ident::peek_any) // value name or keyword - || input.peek(token::Paren) // tuple - || input.peek(token::Bracket) // array - || input.peek(token::Brace) // block - || input.peek(Lit) // literal - || input.peek(Token![!]) && !input.peek(Token![!=]) // operator not - || input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus - || input.peek(Token![*]) && !input.peek(Token![*=]) // dereference - || input.peek(Token![|]) && !input.peek(Token![|=]) // closure - || input.peek(Token![&]) && !input.peek(Token![&=]) // reference - || input.peek(Token![..]) // range notation - || input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path - || input.peek(Token![::]) // global path - || input.peek(Lifetime) // labeled loop - || input.peek(Token![#]) // expression attributes - } - #[cfg(feature = "full")] fn parse_expr( input: ParseStream, @@ -1294,15 +1315,19 @@ pub(crate) mod parsing { ) -> Result { loop { let ahead = input.fork(); - if let Expr::Range(ExprRange { end: Some(_), .. }) = lhs { - // A range with an upper bound cannot be the left-hand side of - // another binary operator. + if let Expr::Range(_) = lhs { + // A range cannot be the left-hand side of another binary operator. break; } else if let Ok(op) = ahead.parse::() { let precedence = Precedence::of_binop(&op); if precedence < base { break; } + if precedence == Precedence::Assign { + if let Expr::Range(_) = lhs { + break; + } + } if precedence == Precedence::Compare { if let Expr::Binary(lhs) = &lhs { if Precedence::of_binop(&lhs.op) == Precedence::Compare { @@ -1318,7 +1343,13 @@ pub(crate) mod parsing { op, right, }); - } else if Precedence::Assign >= base && input.peek(Token![=]) && !input.peek(Token![=>]) + } else if Precedence::Assign >= base + && input.peek(Token![=]) + && !input.peek(Token![=>]) + && match lhs { + Expr::Range(_) => false, + _ => true, + } { let eq_token: Token![=] = input.parse()?; let right = parse_binop_rhs(input, allow_struct, Precedence::Assign)?; @@ -1652,7 +1683,12 @@ pub(crate) mod parsing { bracket_token: bracketed!(content in input), index: content.parse()?, }); - } else if input.peek(Token![?]) { + } else if input.peek(Token![?]) + && match e { + Expr::Range(_) => false, + _ => true, + } + { e = Expr::Try(ExprTry { attrs: Vec::new(), expr: Box::new(e), @@ -2207,7 +2243,6 @@ pub(crate) mod parsing { if lookahead.peek(Token![if]) { expr.else_branch = Some((else_token, Box::new(Expr::PLACEHOLDER))); clauses.push(expr); - continue; } else if lookahead.peek(token::Brace) { expr.else_branch = Some(( else_token, @@ -2307,10 +2342,7 @@ pub(crate) mod parsing { let brace_token = braced!(content in input); attr::parsing::parse_inner(&content, &mut attrs)?; - let mut arms = Vec::new(); - while !content.is_empty() { - arms.push(content.call(Arm::parse)?); - } + let arms = Arm::parse_multiple(&content)?; Ok(ExprMatch { attrs, @@ -2439,7 +2471,7 @@ pub(crate) mod parsing { attrs: Vec::new(), return_token: input.parse()?, expr: { - if can_begin_expr(input) { + if Expr::peek(input) { 
Some(input.parse()?) } else { None @@ -2477,7 +2509,7 @@ pub(crate) mod parsing { attrs: Vec::new(), yield_token: input.parse()?, expr: { - if can_begin_expr(input) { + if Expr::peek(input) { Some(input.parse()?) } else { None @@ -2690,7 +2722,7 @@ pub(crate) mod parsing { } input.advance_to(&ahead); - let expr = if can_begin_expr(input) && (allow_struct.0 || !input.peek(token::Brace)) { + let expr = if Expr::peek(input) && (allow_struct.0 || !input.peek(token::Brace)) { Some(input.parse()?) } else { None @@ -2849,7 +2881,23 @@ pub(crate) mod parsing { || input.peek(Token![,]) || input.peek(Token![;]) || input.peek(Token![.]) && !input.peek(Token![..]) - || !allow_struct.0 && input.peek(token::Brace)) + || input.peek(Token![?]) + || input.peek(Token![=>]) + || !allow_struct.0 && input.peek(token::Brace) + || input.peek(Token![=]) + || input.peek(Token![+]) + || input.peek(Token![/]) + || input.peek(Token![%]) + || input.peek(Token![^]) + || input.peek(Token![>]) + || input.peek(Token![<=]) + || input.peek(Token![!=]) + || input.peek(Token![-=]) + || input.peek(Token![*=]) + || input.peek(Token![&=]) + || input.peek(Token![|=]) + || input.peek(Token![<<=]) + || input.peek(Token![as])) { Ok(None) } else { @@ -2924,6 +2972,17 @@ pub(crate) mod parsing { } } + #[cfg(feature = "full")] + impl Arm { + pub(crate) fn parse_multiple(input: ParseStream) -> Result> { + let mut arms = Vec::new(); + while !input.is_empty() { + arms.push(input.call(Arm::parse)?); + } + Ok(arms) + } + } + #[cfg(feature = "full")] #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Arm { @@ -3010,15 +3069,6 @@ pub(crate) mod parsing { Ok(!trailing_dot) } - impl Member { - pub(crate) fn is_named(&self) -> bool { - match self { - Member::Named(_) => true, - Member::Unnamed(_) => false, - } - } - } - #[cfg(feature = "full")] #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for PointerMutability { @@ -3100,17 +3150,7 @@ pub(crate) mod printing { #[cfg(not(feature = "full"))] pub(crate) fn outer_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {} - #[cfg(feature = "full")] - fn print_condition(expr: &Expr, tokens: &mut TokenStream) { - print_subexpression( - expr, - classify::confusable_with_adjacent_block(expr), - tokens, - FixupContext::new_condition(), - ); - } - - fn print_subexpression( + pub(crate) fn print_subexpression( expr: &Expr, needs_group: bool, tokens: &mut TokenStream, @@ -3142,7 +3182,7 @@ pub(crate) mod printing { pub(crate) fn print_expr(expr: &Expr, tokens: &mut TokenStream, mut fixup: FixupContext) { #[cfg(feature = "full")] - let needs_group = fixup.would_cause_statement_boundary(expr); + let needs_group = fixup.parenthesize(expr); #[cfg(not(feature = "full"))] let needs_group = false; @@ -3167,7 +3207,7 @@ pub(crate) mod printing { Expr::Call(e) => print_expr_call(e, tokens, fixup), Expr::Cast(e) => print_expr_cast(e, tokens, fixup), #[cfg(feature = "full")] - Expr::Closure(e) => e.to_tokens(tokens), + Expr::Closure(e) => print_expr_closure(e, tokens, fixup), #[cfg(feature = "full")] Expr::Const(e) => e.to_tokens(tokens), #[cfg(feature = "full")] @@ -3251,18 +3291,14 @@ pub(crate) mod printing { #[cfg(feature = "full")] fn print_expr_assign(e: &ExprAssign, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); - print_subexpression( - &e.left, - Precedence::of(&e.left) <= Precedence::Range, - tokens, - fixup.leftmost_subexpression(), - ); + let (left_prec, left_fixup) = + fixup.leftmost_subexpression_with_operator(&e.left, 
false, false, Precedence::Assign); + print_subexpression(&e.left, left_prec <= Precedence::Range, tokens, left_fixup); e.eq_token.to_tokens(tokens); - print_subexpression( + print_expr( &e.right, - fixup.trailing_precedence(&e.right) < Precedence::Assign, tokens, - fixup.subsequent_subexpression(), + fixup.rightmost_subexpression_fixup(false, false, Precedence::Assign), ); } @@ -3288,11 +3324,12 @@ pub(crate) mod printing { #[cfg(feature = "full")] fn print_expr_await(e: &ExprAwait, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.base); print_subexpression( &e.base, - Precedence::of(&e.base) < Precedence::Unambiguous, + left_prec < Precedence::Unambiguous, tokens, - fixup.leftmost_subexpression_with_dot(), + left_fixup, ); e.dot_token.to_tokens(tokens); e.await_token.to_tokens(tokens); @@ -3308,7 +3345,9 @@ pub(crate) mod printing { fn print_expr_binary(e: &ExprBinary, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); - let left_fixup = fixup.leftmost_subexpression_with_begin_operator( + let binop_prec = Precedence::of_binop(&e.op); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.left, #[cfg(feature = "full")] match &e.op { BinOp::Sub(_) @@ -3325,25 +3364,29 @@ pub(crate) mod printing { BinOp::Shl(_) | BinOp::Lt(_) => true, _ => false, }, + #[cfg(feature = "full")] + binop_prec, ); - - let binop_prec = Precedence::of_binop(&e.op); - let left_prec = left_fixup.leading_precedence(&e.left); - let right_prec = fixup.trailing_precedence(&e.right); - let (left_needs_group, right_needs_group) = match binop_prec { - Precedence::Assign => (left_prec <= Precedence::Range, right_prec < binop_prec), - Precedence::Compare => (left_prec <= binop_prec, right_prec <= binop_prec), - _ => (left_prec < binop_prec, right_prec <= binop_prec), + let left_needs_group = match binop_prec { + Precedence::Assign => left_prec <= Precedence::Range, + Precedence::Compare => left_prec <= binop_prec, + _ => left_prec < binop_prec, }; + let right_fixup = fixup.rightmost_subexpression_fixup( + #[cfg(feature = "full")] + false, + #[cfg(feature = "full")] + false, + #[cfg(feature = "full")] + binop_prec, + ); + let right_needs_group = binop_prec != Precedence::Assign + && right_fixup.rightmost_subexpression_precedence(&e.right) <= binop_prec; + print_subexpression(&e.left, left_needs_group, tokens, left_fixup); e.op.to_tokens(tokens); - print_subexpression( - &e.right, - right_needs_group, - tokens, - fixup.subsequent_subexpression(), - ); + print_subexpression(&e.right, right_needs_group, tokens, right_fixup); } #[cfg(feature = "full")] @@ -3379,7 +3422,7 @@ pub(crate) mod printing { // ^---------------------------------^ e.label.is_none() && classify::expr_leading_label(value), tokens, - fixup.subsequent_subexpression(), + fixup.rightmost_subexpression_fixup(true, true, Precedence::Jump), ); } } @@ -3394,22 +3437,20 @@ pub(crate) mod printing { fn print_expr_call(e: &ExprCall, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); - let call_precedence = if let Expr::Field(_) = &*e.func { - Precedence::MIN - } else { - Precedence::Unambiguous - }; - let func_fixup = fixup.leftmost_subexpression_with_begin_operator( + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.func, #[cfg(feature = "full")] true, false, + #[cfg(feature = "full")] + Precedence::Unambiguous, ); - 
print_subexpression( - &e.func, - func_fixup.leading_precedence(&e.func) < call_precedence, - tokens, - func_fixup, - ); + let needs_group = if let Expr::Field(func) = &*e.func { + func.member.is_named() + } else { + left_prec < Precedence::Unambiguous + }; + print_subexpression(&e.func, needs_group, tokens, left_fixup); e.paren_token.surround(tokens, |tokens| { e.args.to_tokens(tokens); @@ -3425,12 +3466,15 @@ pub(crate) mod printing { fn print_expr_cast(e: &ExprCast, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); - print_subexpression( + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( &e.expr, - Precedence::of(&e.expr) < Precedence::Cast, - tokens, - fixup.leftmost_subexpression(), + #[cfg(feature = "full")] + false, + false, + #[cfg(feature = "full")] + Precedence::Cast, ); + print_subexpression(&e.expr, left_prec < Precedence::Cast, tokens, left_fixup); e.as_token.to_tokens(tokens); e.ty.to_tokens(tokens); } @@ -3439,23 +3483,34 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprClosure { fn to_tokens(&self, tokens: &mut TokenStream) { - outer_attrs_to_tokens(&self.attrs, tokens); - self.lifetimes.to_tokens(tokens); - self.constness.to_tokens(tokens); - self.movability.to_tokens(tokens); - self.asyncness.to_tokens(tokens); - self.capture.to_tokens(tokens); - self.or1_token.to_tokens(tokens); - self.inputs.to_tokens(tokens); - self.or2_token.to_tokens(tokens); - self.output.to_tokens(tokens); - if matches!(self.output, ReturnType::Default) || matches!(*self.body, Expr::Block(_)) { - self.body.to_tokens(tokens); - } else { - token::Brace::default().surround(tokens, |tokens| { - print_expr(&self.body, tokens, FixupContext::new_stmt()); - }); - } + print_expr_closure(self, tokens, FixupContext::NONE); + } + } + + #[cfg(feature = "full")] + fn print_expr_closure(e: &ExprClosure, tokens: &mut TokenStream, fixup: FixupContext) { + outer_attrs_to_tokens(&e.attrs, tokens); + e.lifetimes.to_tokens(tokens); + e.constness.to_tokens(tokens); + e.movability.to_tokens(tokens); + e.asyncness.to_tokens(tokens); + e.capture.to_tokens(tokens); + e.or1_token.to_tokens(tokens); + e.inputs.to_tokens(tokens); + e.or2_token.to_tokens(tokens); + e.output.to_tokens(tokens); + if matches!(e.output, ReturnType::Default) + || matches!(&*e.body, Expr::Block(body) if body.attrs.is_empty() && body.label.is_none()) + { + print_expr( + &e.body, + tokens, + fixup.rightmost_subexpression_fixup(false, false, Precedence::Jump), + ); + } else { + token::Brace::default().surround(tokens, |tokens| { + print_expr(&e.body, tokens, FixupContext::new_stmt()); + }); } } @@ -3491,11 +3546,12 @@ pub(crate) mod printing { fn print_expr_field(e: &ExprField, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.base); print_subexpression( &e.base, - Precedence::of(&e.base) < Precedence::Unambiguous, + left_prec < Precedence::Unambiguous, tokens, - fixup.leftmost_subexpression_with_dot(), + left_fixup, ); e.dot_token.to_tokens(tokens); e.member.to_tokens(tokens); @@ -3510,7 +3566,7 @@ pub(crate) mod printing { self.for_token.to_tokens(tokens); self.pat.to_tokens(tokens); self.in_token.to_tokens(tokens); - print_condition(&self.expr, tokens); + print_expr(&self.expr, tokens, FixupContext::new_condition()); self.body.brace_token.surround(tokens, |tokens| { inner_attrs_to_tokens(&self.attrs, tokens); 
tokens.append_all(&self.body.stmts); @@ -3537,7 +3593,7 @@ pub(crate) mod printing { let mut expr = self; loop { expr.if_token.to_tokens(tokens); - print_condition(&expr.cond, tokens); + print_expr(&expr.cond, tokens, FixupContext::new_condition()); expr.then_branch.to_tokens(tokens); let (else_token, else_) = match &expr.else_branch { @@ -3576,16 +3632,19 @@ pub(crate) mod printing { fn print_expr_index(e: &ExprIndex, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); - let obj_fixup = fixup.leftmost_subexpression_with_begin_operator( + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_operator( + &e.expr, #[cfg(feature = "full")] true, false, + #[cfg(feature = "full")] + Precedence::Unambiguous, ); print_subexpression( &e.expr, - obj_fixup.leading_precedence(&e.expr) < Precedence::Unambiguous, + left_prec < Precedence::Unambiguous, tokens, - obj_fixup, + left_fixup, ); e.bracket_token.surround(tokens, |tokens| { e.index.to_tokens(tokens); @@ -3615,12 +3674,8 @@ pub(crate) mod printing { e.let_token.to_tokens(tokens); e.pat.to_tokens(tokens); e.eq_token.to_tokens(tokens); - print_subexpression( - &e.expr, - fixup.needs_group_as_let_scrutinee(&e.expr), - tokens, - FixupContext::NONE, - ); + let (right_prec, right_fixup) = fixup.rightmost_subexpression(&e.expr, Precedence::Let); + print_subexpression(&e.expr, right_prec < Precedence::Let, tokens, right_fixup); } #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] @@ -3659,7 +3714,7 @@ pub(crate) mod printing { fn to_tokens(&self, tokens: &mut TokenStream) { outer_attrs_to_tokens(&self.attrs, tokens); self.match_token.to_tokens(tokens); - print_condition(&self.expr, tokens); + print_expr(&self.expr, tokens, FixupContext::new_condition()); self.brace_token.surround(tokens, |tokens| { inner_attrs_to_tokens(&self.attrs, tokens); for (i, arm) in self.arms.iter().enumerate() { @@ -3687,11 +3742,12 @@ pub(crate) mod printing { fn print_expr_method_call(e: &ExprMethodCall, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.receiver); print_subexpression( &e.receiver, - Precedence::of(&e.receiver) < Precedence::Unambiguous, + left_prec < Precedence::Unambiguous, tokens, - fixup.leftmost_subexpression_with_dot(), + left_fixup, ); e.dot_token.to_tokens(tokens); e.method.to_tokens(tokens); @@ -3737,21 +3793,15 @@ pub(crate) mod printing { fn print_expr_range(e: &ExprRange, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); if let Some(start) = &e.start { - print_subexpression( - start, - Precedence::of(start) <= Precedence::Range, - tokens, - fixup.leftmost_subexpression(), - ); + let (left_prec, left_fixup) = + fixup.leftmost_subexpression_with_operator(start, true, false, Precedence::Range); + print_subexpression(start, left_prec <= Precedence::Range, tokens, left_fixup); } e.limits.to_tokens(tokens); if let Some(end) = &e.end { - print_subexpression( - end, - fixup.trailing_precedence(end) <= Precedence::Range, - tokens, - fixup.subsequent_subexpression(), - ); + let right_fixup = fixup.rightmost_subexpression_fixup(false, true, Precedence::Range); + let right_prec = right_fixup.rightmost_subexpression_precedence(end); + print_subexpression(end, right_prec <= Precedence::Range, tokens, right_fixup); } } @@ -3769,11 +3819,12 @@ pub(crate) mod printing { e.and_token.to_tokens(tokens); e.raw.to_tokens(tokens); e.mutability.to_tokens(tokens); + let 
(right_prec, right_fixup) = fixup.rightmost_subexpression(&e.expr, Precedence::Prefix); print_subexpression( &e.expr, - fixup.trailing_precedence(&e.expr) < Precedence::Prefix, + right_prec < Precedence::Prefix, tokens, - fixup.subsequent_subexpression(), + right_fixup, ); } @@ -3788,11 +3839,16 @@ pub(crate) mod printing { outer_attrs_to_tokens(&e.attrs, tokens); e.and_token.to_tokens(tokens); e.mutability.to_tokens(tokens); + let (right_prec, right_fixup) = fixup.rightmost_subexpression( + &e.expr, + #[cfg(feature = "full")] + Precedence::Prefix, + ); print_subexpression( &e.expr, - fixup.trailing_precedence(&e.expr) < Precedence::Prefix, + right_prec < Precedence::Prefix, tokens, - fixup.subsequent_subexpression(), + right_fixup, ); } @@ -3822,7 +3878,11 @@ pub(crate) mod printing { outer_attrs_to_tokens(&e.attrs, tokens); e.return_token.to_tokens(tokens); if let Some(expr) = &e.expr { - print_expr(expr, tokens, fixup.subsequent_subexpression()); + print_expr( + expr, + tokens, + fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump), + ); } } @@ -3854,11 +3914,12 @@ pub(crate) mod printing { #[cfg(feature = "full")] fn print_expr_try(e: &ExprTry, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); + let (left_prec, left_fixup) = fixup.leftmost_subexpression_with_dot(&e.expr); print_subexpression( &e.expr, - Precedence::of(&e.expr) < Precedence::Unambiguous, + left_prec < Precedence::Unambiguous, tokens, - fixup.leftmost_subexpression_with_dot(), + left_fixup, ); e.question_token.to_tokens(tokens); } @@ -3898,11 +3959,16 @@ pub(crate) mod printing { fn print_expr_unary(e: &ExprUnary, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); e.op.to_tokens(tokens); + let (right_prec, right_fixup) = fixup.rightmost_subexpression( + &e.expr, + #[cfg(feature = "full")] + Precedence::Prefix, + ); print_subexpression( &e.expr, - fixup.trailing_precedence(&e.expr) < Precedence::Prefix, + right_prec < Precedence::Prefix, tokens, - fixup.subsequent_subexpression(), + right_fixup, ); } @@ -3926,7 +3992,7 @@ pub(crate) mod printing { outer_attrs_to_tokens(&self.attrs, tokens); self.label.to_tokens(tokens); self.while_token.to_tokens(tokens); - print_condition(&self.cond, tokens); + print_expr(&self.cond, tokens, FixupContext::new_condition()); self.body.brace_token.surround(tokens, |tokens| { inner_attrs_to_tokens(&self.attrs, tokens); tokens.append_all(&self.body.stmts); @@ -3947,7 +4013,11 @@ pub(crate) mod printing { outer_attrs_to_tokens(&e.attrs, tokens); e.yield_token.to_tokens(tokens); if let Some(expr) = &e.expr { - print_expr(expr, tokens, fixup.subsequent_subexpression()); + print_expr( + expr, + tokens, + fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump), + ); } } diff --git a/src/fixup.rs b/src/fixup.rs index 58ed9e73d4..b0973df258 100644 --- a/src/fixup.rs +++ b/src/fixup.rs @@ -1,8 +1,19 @@ use crate::classify; use crate::expr::Expr; +#[cfg(feature = "full")] +use crate::expr::{ + ExprBreak, ExprRange, ExprRawAddr, ExprReference, ExprReturn, ExprUnary, ExprYield, +}; use crate::precedence::Precedence; +#[cfg(feature = "full")] +use crate::ty::ReturnType; pub(crate) struct FixupContext { + #[cfg(feature = "full")] + previous_operator: Precedence, + #[cfg(feature = "full")] + next_operator: Precedence, + // Print expression such that it can be parsed back as a statement // consisting of the original expression. 
// @@ -89,16 +100,25 @@ pub(crate) struct FixupContext { // } // #[cfg(feature = "full")] - parenthesize_exterior_struct_lit: bool, + condition: bool, // This is the difference between: // - // let _ = 1 + return 1; // no parens if rightmost subexpression + // if break Struct {} == (break) {} // needs parens // - // let _ = 1 + (return 1) + 1; // needs parens + // if break break == Struct {} {} // no parens // #[cfg(feature = "full")] - parenthesize_exterior_jump: bool, + rightmost_subexpression_in_condition: bool, + + // This is the difference between: + // + // if break ({ x }).field + 1 {} needs parens + // + // if break 1 + { x }.field {} // no parens + // + #[cfg(feature = "full")] + leftmost_subexpression_in_optional_operand: bool, // This is the difference between: // @@ -109,6 +129,15 @@ pub(crate) struct FixupContext { #[cfg(feature = "full")] next_operator_can_begin_expr: bool, + // This is the difference between: + // + // let _ = 1 + return 1; // no parens if rightmost subexpression + // + // let _ = 1 + (return 1) + 1; // needs parens + // + #[cfg(feature = "full")] + next_operator_can_continue_expr: bool, + // This is the difference between: // // let _ = x as u8 + T; @@ -123,6 +152,10 @@ impl FixupContext { /// The default amount of fixing is minimal fixing. Fixups should be turned /// on in a targeted fashion where needed. pub const NONE: Self = FixupContext { + #[cfg(feature = "full")] + previous_operator: Precedence::MIN, + #[cfg(feature = "full")] + next_operator: Precedence::MIN, #[cfg(feature = "full")] stmt: false, #[cfg(feature = "full")] @@ -132,11 +165,15 @@ impl FixupContext { #[cfg(feature = "full")] leftmost_subexpression_in_match_arm: false, #[cfg(feature = "full")] - parenthesize_exterior_struct_lit: false, + condition: false, + #[cfg(feature = "full")] + rightmost_subexpression_in_condition: false, #[cfg(feature = "full")] - parenthesize_exterior_jump: false, + leftmost_subexpression_in_optional_operand: false, #[cfg(feature = "full")] next_operator_can_begin_expr: false, + #[cfg(feature = "full")] + next_operator_can_continue_expr: false, next_operator_can_begin_generics: false, }; @@ -167,7 +204,8 @@ impl FixupContext { #[cfg(feature = "full")] pub fn new_condition() -> Self { FixupContext { - parenthesize_exterior_struct_lit: true, + condition: true, + rightmost_subexpression_in_condition: true, ..FixupContext::NONE } } @@ -183,8 +221,16 @@ impl FixupContext { /// /// Not every expression has a leftmost subexpression. For example neither /// `-$a` nor `[$a]` have one. 
- pub fn leftmost_subexpression(self) -> Self { - FixupContext { + pub fn leftmost_subexpression_with_operator( + self, + expr: &Expr, + #[cfg(feature = "full")] next_operator_can_begin_expr: bool, + next_operator_can_begin_generics: bool, + #[cfg(feature = "full")] precedence: Precedence, + ) -> (Precedence, Self) { + let fixup = FixupContext { + #[cfg(feature = "full")] + next_operator: precedence, #[cfg(feature = "full")] stmt: false, #[cfg(feature = "full")] @@ -195,17 +241,26 @@ impl FixupContext { leftmost_subexpression_in_match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm, #[cfg(feature = "full")] - parenthesize_exterior_jump: true, + rightmost_subexpression_in_condition: false, + #[cfg(feature = "full")] + next_operator_can_begin_expr, + #[cfg(feature = "full")] + next_operator_can_continue_expr: true, + next_operator_can_begin_generics, ..self - } + }; + + (fixup.leftmost_subexpression_precedence(expr), fixup) } /// Transform this fixup into the one that should apply when printing a /// leftmost subexpression followed by a `.` or `?` token, which confer /// different statement boundary rules compared to other leftmost /// subexpressions. - pub fn leftmost_subexpression_with_dot(self) -> Self { - FixupContext { + pub fn leftmost_subexpression_with_dot(self, expr: &Expr) -> (Precedence, Self) { + let fixup = FixupContext { + #[cfg(feature = "full")] + next_operator: Precedence::Unambiguous, #[cfg(feature = "full")] stmt: self.stmt || self.leftmost_subexpression_in_stmt, #[cfg(feature = "full")] @@ -215,37 +270,67 @@ impl FixupContext { #[cfg(feature = "full")] leftmost_subexpression_in_match_arm: false, #[cfg(feature = "full")] - parenthesize_exterior_jump: true, + rightmost_subexpression_in_condition: false, + #[cfg(feature = "full")] + next_operator_can_begin_expr: false, + #[cfg(feature = "full")] + next_operator_can_continue_expr: true, + next_operator_can_begin_generics: false, ..self + }; + + (fixup.leftmost_subexpression_precedence(expr), fixup) + } + + fn leftmost_subexpression_precedence(self, expr: &Expr) -> Precedence { + #[cfg(feature = "full")] + if !self.next_operator_can_begin_expr || self.next_operator == Precedence::Range { + if let Scan::Bailout = scan_right(expr, self, Precedence::MIN, 0, 0) { + if scan_left(expr, self) { + return Precedence::Unambiguous; + } + } } + + self.precedence(expr) } - /// Transform this fixup into the one that should apply when printing a - /// leftmost subexpression followed by punctuation that is legal as the - /// first token of an expression. - pub fn leftmost_subexpression_with_begin_operator( + /// Transform this fixup into the one that should apply when printing the + /// rightmost subexpression of the current expression. + /// + /// The rightmost subexpression is any subexpression that has a different + /// first token than the current expression, but has the same last token. + /// + /// For example in `$a + $b` and `-$b`, the subexpression `$b` is a + /// rightmost subexpression. + /// + /// Not every expression has a rightmost subexpression. For example neither + /// `[$b]` nor `$a.f($b)` have one. 
+ pub fn rightmost_subexpression( self, - #[cfg(feature = "full")] next_operator_can_begin_expr: bool, - next_operator_can_begin_generics: bool, - ) -> Self { - FixupContext { + expr: &Expr, + #[cfg(feature = "full")] precedence: Precedence, + ) -> (Precedence, Self) { + let fixup = self.rightmost_subexpression_fixup( #[cfg(feature = "full")] - next_operator_can_begin_expr, - next_operator_can_begin_generics, - ..self.leftmost_subexpression() - } + false, + #[cfg(feature = "full")] + false, + #[cfg(feature = "full")] + precedence, + ); + (fixup.rightmost_subexpression_precedence(expr), fixup) } - /// Transform this fixup into the one that should apply when printing any - /// subexpression that is neither a leftmost subexpression nor surrounded in - /// delimiters. - /// - /// This is for any subexpression that has a different first token than the - /// current expression, and is not surrounded by a paren/bracket/brace. For - /// example the `$b` in `$a + $b` and `-$b`, but not the one in `[$b]` or - /// `$a.f($b)`. - pub fn subsequent_subexpression(self) -> Self { + pub fn rightmost_subexpression_fixup( + self, + #[cfg(feature = "full")] reset_allow_struct: bool, + #[cfg(feature = "full")] optional_operand: bool, + #[cfg(feature = "full")] precedence: Precedence, + ) -> Self { FixupContext { + #[cfg(feature = "full")] + previous_operator: precedence, #[cfg(feature = "full")] stmt: false, #[cfg(feature = "full")] @@ -254,61 +339,83 @@ impl FixupContext { match_arm: false, #[cfg(feature = "full")] leftmost_subexpression_in_match_arm: false, + #[cfg(feature = "full")] + condition: self.condition && !reset_allow_struct, + #[cfg(feature = "full")] + leftmost_subexpression_in_optional_operand: self.condition && optional_operand, ..self } } + pub fn rightmost_subexpression_precedence(self, expr: &Expr) -> Precedence { + let default_prec = self.precedence(expr); + + #[cfg(feature = "full")] + if match self.previous_operator { + Precedence::Assign | Precedence::Let | Precedence::Prefix => { + default_prec < self.previous_operator + } + _ => default_prec <= self.previous_operator, + } && match self.next_operator { + Precedence::Range | Precedence::Or | Precedence::And => true, + _ => !self.next_operator_can_begin_expr, + } { + if let Scan::Bailout | Scan::Fail = scan_right(expr, self, self.previous_operator, 1, 0) + { + if scan_left(expr, self) { + return Precedence::Prefix; + } + } + } + + default_prec + } + /// Determine whether parentheses are needed around the given expression to - /// head off an unintended statement boundary. - /// - /// The documentation on `FixupContext::leftmost_subexpression_in_stmt` has - /// examples. + /// head off the early termination of a statement or condition. #[cfg(feature = "full")] - pub fn would_cause_statement_boundary(self, expr: &Expr) -> bool { + pub fn parenthesize(self, expr: &Expr) -> bool { (self.leftmost_subexpression_in_stmt && !classify::requires_semi_to_be_stmt(expr)) || ((self.stmt || self.leftmost_subexpression_in_stmt) && matches!(expr, Expr::Let(_))) || (self.leftmost_subexpression_in_match_arm && !classify::requires_comma_to_be_match_arm(expr)) + || (self.condition && matches!(expr, Expr::Struct(_))) + || (self.rightmost_subexpression_in_condition + && matches!( + expr, + Expr::Return(ExprReturn { expr: None, .. }) + | Expr::Yield(ExprYield { expr: None, .. }) + )) + || (self.rightmost_subexpression_in_condition + && !self.condition + && matches!( + expr, + Expr::Break(ExprBreak { expr: None, .. 
}) + | Expr::Path(_) + | Expr::Range(ExprRange { end: None, .. }) + )) + || (self.leftmost_subexpression_in_optional_operand + && matches!(expr, Expr::Block(expr) if expr.attrs.is_empty() && expr.label.is_none())) } - /// Determine whether parentheses are needed around the given `let` - /// scrutinee. - /// - /// In `if let _ = $e {}`, some examples of `$e` that would need parentheses - /// are: - /// - /// - `Struct {}.f()`, because otherwise the `{` would be misinterpreted - /// as the opening of the if's then-block. - /// - /// - `true && false`, because otherwise this would be misinterpreted as a - /// "let chain". - #[cfg(feature = "full")] - pub fn needs_group_as_let_scrutinee(self, expr: &Expr) -> bool { - self.parenthesize_exterior_struct_lit && classify::confusable_with_adjacent_block(expr) - || self.trailing_precedence(expr) < Precedence::Let - } - - /// Determines the effective precedence of a left subexpression. Some - /// expressions have lower precedence when adjacent to particular operators. - pub fn leading_precedence(self, expr: &Expr) -> Precedence { + /// Determines the effective precedence of a subexpression. Some expressions + /// have higher or lower precedence when adjacent to particular operators. + fn precedence(self, expr: &Expr) -> Precedence { #[cfg(feature = "full")] if self.next_operator_can_begin_expr { // Decrease precedence of value-less jumps when followed by an // operator that would otherwise get interpreted as beginning a // value for the jump. - if let Expr::Break(_) | Expr::Return(_) | Expr::Yield(_) = expr { + if let Expr::Break(ExprBreak { expr: None, .. }) + | Expr::Return(ExprReturn { expr: None, .. }) + | Expr::Yield(ExprYield { expr: None, .. }) = expr + { return Precedence::Jump; } } - self.precedence(expr) - } - /// Determines the effective precedence of a right subexpression. Some - /// expressions have higher precedence on the right side of a binary - /// operator than on the left. - pub fn trailing_precedence(self, expr: &Expr) -> Precedence { #[cfg(feature = "full")] - if !self.parenthesize_exterior_jump { + if !self.next_operator_can_continue_expr { match expr { // Increase precedence of expressions that extend to the end of // current statement or group. 
@@ -323,10 +430,7 @@ impl FixupContext { _ => {} } } - self.precedence(expr) - } - fn precedence(self, expr: &Expr) -> Precedence { if self.next_operator_can_begin_generics { if let Expr::Cast(cast) = expr { if classify::trailing_unparameterized_path(&cast.ty) { @@ -334,6 +438,7 @@ impl FixupContext { } } } + Precedence::of(expr) } } @@ -345,3 +450,321 @@ impl Clone for FixupContext { *self } } + +#[cfg(feature = "full")] +enum Scan { + Fail, + Bailout, + Consume, +} + +#[cfg(feature = "full")] +impl Copy for Scan {} + +#[cfg(feature = "full")] +impl Clone for Scan { + fn clone(&self) -> Self { + *self + } +} + +#[cfg(feature = "full")] +impl PartialEq for Scan { + fn eq(&self, other: &Self) -> bool { + *self as u8 == *other as u8 + } +} + +#[cfg(feature = "full")] +fn scan_left(expr: &Expr, fixup: FixupContext) -> bool { + match expr { + Expr::Assign(_) => fixup.previous_operator <= Precedence::Assign, + Expr::Binary(e) => match Precedence::of_binop(&e.op) { + Precedence::Assign => fixup.previous_operator <= Precedence::Assign, + binop_prec => fixup.previous_operator < binop_prec, + }, + Expr::Cast(_) => fixup.previous_operator < Precedence::Cast, + Expr::Range(e) => e.start.is_none() || fixup.previous_operator < Precedence::Assign, + _ => true, + } +} + +#[cfg(feature = "full")] +fn scan_right( + expr: &Expr, + fixup: FixupContext, + precedence: Precedence, + fail_offset: u8, + bailout_offset: u8, +) -> Scan { + let consume_by_precedence = if match precedence { + Precedence::Assign | Precedence::Compare => precedence <= fixup.next_operator, + _ => precedence < fixup.next_operator, + } || fixup.next_operator == Precedence::MIN + { + Scan::Consume + } else { + Scan::Bailout + }; + if fixup.parenthesize(expr) { + return consume_by_precedence; + } + match expr { + Expr::Assign(e) => { + if match fixup.next_operator { + Precedence::Unambiguous => fail_offset >= 2, + _ => bailout_offset >= 1, + } { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Assign); + let scan = scan_right( + &e.right, + right_fixup, + Precedence::Assign, + match fixup.next_operator { + Precedence::Unambiguous => fail_offset, + _ => 1, + }, + 1, + ); + if let Scan::Bailout | Scan::Consume = scan { + Scan::Consume + } else if let Precedence::Unambiguous = fixup.next_operator { + Scan::Fail + } else { + Scan::Bailout + } + } + Expr::Binary(e) => { + if match fixup.next_operator { + Precedence::Unambiguous => { + fail_offset >= 2 + && (consume_by_precedence == Scan::Consume || bailout_offset >= 1) + } + _ => bailout_offset >= 1, + } { + return Scan::Consume; + } + let binop_prec = Precedence::of_binop(&e.op); + if binop_prec == Precedence::Compare && fixup.next_operator == Precedence::Compare { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, binop_prec); + let scan = scan_right( + &e.right, + right_fixup, + binop_prec, + match fixup.next_operator { + Precedence::Unambiguous => fail_offset, + _ => 1, + }, + consume_by_precedence as u8 - Scan::Bailout as u8, + ); + match scan { + Scan::Fail => {} + Scan::Bailout => return consume_by_precedence, + Scan::Consume => return Scan::Consume, + } + let right_needs_group = binop_prec != Precedence::Assign + && right_fixup.rightmost_subexpression_precedence(&e.right) <= binop_prec; + if right_needs_group { + consume_by_precedence + } else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) { + Scan::Fail + } else { + Scan::Bailout + } + } + 
Expr::RawAddr(ExprRawAddr { expr, .. }) + | Expr::Reference(ExprReference { expr, .. }) + | Expr::Unary(ExprUnary { expr, .. }) => { + if match fixup.next_operator { + Precedence::Unambiguous => { + fail_offset >= 2 + && (consume_by_precedence == Scan::Consume || bailout_offset >= 1) + } + _ => bailout_offset >= 1, + } { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Prefix); + let scan = scan_right( + expr, + right_fixup, + precedence, + match fixup.next_operator { + Precedence::Unambiguous => fail_offset, + _ => 1, + }, + consume_by_precedence as u8 - Scan::Bailout as u8, + ); + match scan { + Scan::Fail => {} + Scan::Bailout => return consume_by_precedence, + Scan::Consume => return Scan::Consume, + } + if right_fixup.rightmost_subexpression_precedence(expr) < Precedence::Prefix { + consume_by_precedence + } else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) { + Scan::Fail + } else { + Scan::Bailout + } + } + Expr::Range(e) => match &e.end { + Some(end) => { + if fail_offset >= 2 { + return Scan::Consume; + } + let right_fixup = + fixup.rightmost_subexpression_fixup(false, true, Precedence::Range); + let scan = scan_right( + end, + right_fixup, + Precedence::Range, + fail_offset, + match fixup.next_operator { + Precedence::Assign | Precedence::Range => 0, + _ => 1, + }, + ); + if match (scan, fixup.next_operator) { + (Scan::Fail, _) => false, + (Scan::Bailout, Precedence::Assign | Precedence::Range) => false, + (Scan::Bailout | Scan::Consume, _) => true, + } { + return Scan::Consume; + } + if right_fixup.rightmost_subexpression_precedence(end) <= Precedence::Range { + Scan::Consume + } else { + Scan::Fail + } + } + None => { + if fixup.next_operator_can_begin_expr { + Scan::Consume + } else { + Scan::Fail + } + } + }, + Expr::Break(e) => match &e.expr { + Some(value) => { + if bailout_offset >= 1 || e.label.is_none() && classify::expr_leading_label(value) { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(true, true, Precedence::Jump); + match scan_right(value, right_fixup, Precedence::Jump, 1, 1) { + Scan::Fail => Scan::Bailout, + Scan::Bailout | Scan::Consume => Scan::Consume, + } + } + None => match fixup.next_operator { + Precedence::Assign if precedence > Precedence::Assign => Scan::Fail, + _ => Scan::Consume, + }, + }, + Expr::Return(ExprReturn { expr, .. }) | Expr::Yield(ExprYield { expr, .. 
}) => match expr { + Some(e) => { + if bailout_offset >= 1 { + return Scan::Consume; + } + let right_fixup = + fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump); + match scan_right(e, right_fixup, Precedence::Jump, 1, 1) { + Scan::Fail => Scan::Bailout, + Scan::Bailout | Scan::Consume => Scan::Consume, + } + } + None => match fixup.next_operator { + Precedence::Assign if precedence > Precedence::Assign => Scan::Fail, + _ => Scan::Consume, + }, + }, + Expr::Closure(e) => { + if matches!(e.output, ReturnType::Default) + || matches!(&*e.body, Expr::Block(body) if body.attrs.is_empty() && body.label.is_none()) + { + if bailout_offset >= 1 { + return Scan::Consume; + } + let right_fixup = + fixup.rightmost_subexpression_fixup(false, false, Precedence::Jump); + match scan_right(&e.body, right_fixup, Precedence::Jump, 1, 1) { + Scan::Fail => Scan::Bailout, + Scan::Bailout | Scan::Consume => Scan::Consume, + } + } else { + Scan::Consume + } + } + Expr::Let(e) => { + if bailout_offset >= 1 { + return Scan::Consume; + } + let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Let); + let scan = scan_right( + &e.expr, + right_fixup, + Precedence::Let, + 1, + if fixup.next_operator < Precedence::Let { + 0 + } else { + 1 + }, + ); + match scan { + Scan::Fail | Scan::Bailout if fixup.next_operator < Precedence::Let => { + return Scan::Bailout; + } + Scan::Consume => return Scan::Consume, + _ => {} + } + if right_fixup.rightmost_subexpression_precedence(&e.expr) < Precedence::Let { + Scan::Consume + } else if let Scan::Fail = scan { + Scan::Bailout + } else { + Scan::Consume + } + } + Expr::Array(_) + | Expr::Async(_) + | Expr::Await(_) + | Expr::Block(_) + | Expr::Call(_) + | Expr::Cast(_) + | Expr::Const(_) + | Expr::Continue(_) + | Expr::Field(_) + | Expr::ForLoop(_) + | Expr::Group(_) + | Expr::If(_) + | Expr::Index(_) + | Expr::Infer(_) + | Expr::Lit(_) + | Expr::Loop(_) + | Expr::Macro(_) + | Expr::Match(_) + | Expr::MethodCall(_) + | Expr::Paren(_) + | Expr::Path(_) + | Expr::Repeat(_) + | Expr::Struct(_) + | Expr::Try(_) + | Expr::TryBlock(_) + | Expr::Tuple(_) + | Expr::Unsafe(_) + | Expr::Verbatim(_) + | Expr::While(_) => match fixup.next_operator { + Precedence::Assign | Precedence::Range if precedence == Precedence::Range => Scan::Fail, + _ if precedence == Precedence::Let && fixup.next_operator < Precedence::Let => { + Scan::Fail + } + _ => consume_by_precedence, + }, + } +} diff --git a/src/generics.rs b/src/generics.rs index 1e8fccf039..b0723691e4 100644 --- a/src/generics.rs +++ b/src/generics.rs @@ -188,11 +188,7 @@ impl<'a> Iterator for Lifetimes<'a> { type Item = &'a LifetimeParam; fn next(&mut self) -> Option { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Lifetime(lifetime) = next { + if let GenericParam::Lifetime(lifetime) = self.0.next()? { Some(lifetime) } else { self.next() @@ -206,11 +202,7 @@ impl<'a> Iterator for LifetimesMut<'a> { type Item = &'a mut LifetimeParam; fn next(&mut self) -> Option { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Lifetime(lifetime) = next { + if let GenericParam::Lifetime(lifetime) = self.0.next()? 
{ Some(lifetime) } else { self.next() @@ -224,11 +216,7 @@ impl<'a> Iterator for TypeParams<'a> { type Item = &'a TypeParam; fn next(&mut self) -> Option { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Type(type_param) = next { + if let GenericParam::Type(type_param) = self.0.next()? { Some(type_param) } else { self.next() @@ -242,11 +230,7 @@ impl<'a> Iterator for TypeParamsMut<'a> { type Item = &'a mut TypeParam; fn next(&mut self) -> Option { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Type(type_param) = next { + if let GenericParam::Type(type_param) = self.0.next()? { Some(type_param) } else { self.next() @@ -260,11 +244,7 @@ impl<'a> Iterator for ConstParams<'a> { type Item = &'a ConstParam; fn next(&mut self) -> Option { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Const(const_param) = next { + if let GenericParam::Const(const_param) = self.0.next()? { Some(const_param) } else { self.next() @@ -278,11 +258,7 @@ impl<'a> Iterator for ConstParamsMut<'a> { type Item = &'a mut ConstParam; fn next(&mut self) -> Option { - let next = match self.0.next() { - Some(item) => item, - None => return None, - }; - if let GenericParam::Const(const_param) = next { + if let GenericParam::Const(const_param) = self.0.next()? { Some(const_param) } else { self.next() diff --git a/src/lib.rs b/src/lib.rs index 8d11455b9d..96132e785d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -249,7 +249,7 @@ //! dynamic library libproc_macro from rustc toolchain. // Syn types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/syn/2.0.85")] +#![doc(html_root_url = "https://docs.rs/syn/2.0.96")] #![cfg_attr(docsrs, feature(doc_cfg))] #![deny(unsafe_op_in_unsafe_fn)] #![allow(non_camel_case_types)] @@ -264,8 +264,10 @@ clippy::derivable_impls, clippy::diverging_sub_expression, clippy::doc_markdown, + clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_auto_deref, + clippy::fn_params_excessive_bools, clippy::if_not_else, clippy::inherent_to_string, clippy::into_iter_without_iter, @@ -307,6 +309,8 @@ clippy::wildcard_imports, )] +extern crate self as syn; + #[cfg(feature = "proc-macro")] extern crate proc_macro; @@ -509,6 +513,9 @@ pub use crate::restriction::{FieldMutability, VisRestricted, Visibility}; mod sealed; +#[cfg(all(feature = "parsing", feature = "derive", not(feature = "full")))] +mod scan_expr; + mod span; #[cfg(all(feature = "parsing", feature = "printing"))] diff --git a/src/lookahead.rs b/src/lookahead.rs index 75e3a658a3..da13ffc47a 100644 --- a/src/lookahead.rs +++ b/src/lookahead.rs @@ -2,8 +2,8 @@ use crate::buffer::Cursor; use crate::error::{self, Error}; use crate::sealed::lookahead::Sealed; use crate::span::IntoSpans; -use crate::token::Token; -use proc_macro2::Span; +use crate::token::{CustomToken, Token}; +use proc_macro2::{Delimiter, Span}; use std::cell::RefCell; /// Support for checking the next token in a stream to decide how to parse. @@ -110,7 +110,18 @@ impl<'a> Lookahead1<'a> { /// The error message will identify all of the expected token types that /// have been peeked against this lookahead instance. 
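To ground the change to `Lookahead1::error` that follows (substituting the actual closing delimiter of the current scope into the message), here is a minimal sketch of the usual lookahead flow that produces such an error; the `Unit` type and its variants are hypothetical, not part of syn:

use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, LitInt, Token};

// Hypothetical syntax: an identifier, an integer literal, or a `!` token.
enum Unit {
    Name(Ident),
    Number(LitInt),
    Bang(Token![!]),
}

impl Parse for Unit {
    fn parse(input: ParseStream) -> Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(Ident) {
            input.parse().map(Unit::Name)
        } else if lookahead.peek(LitInt) {
            input.parse().map(Unit::Number)
        } else if lookahead.peek(Token![!]) {
            input.parse().map(Unit::Bang)
        } else {
            // Every token type peeked above is listed in the message,
            // e.g. "expected identifier, integer literal, or `!`".
            Err(lookahead.error())
        }
    }
}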
pub fn error(self) -> Error { - let comparisons = self.comparisons.into_inner(); + let mut comparisons = self.comparisons.into_inner(); + comparisons.retain_mut(|display| { + if *display == "`)`" { + *display = match self.cursor.scope_delimiter() { + Delimiter::Parenthesis => "`)`", + Delimiter::Brace => "`}`", + Delimiter::Bracket => "`]`", + Delimiter::None => return false, + } + } + true + }); match comparisons.len() { 0 => { if self.cursor.eof() { @@ -150,6 +161,160 @@ pub trait Peek: Sealed { type Token: Token; } +/// Pseudo-token used for peeking the end of a parse stream. +/// +/// This type is only useful as an argument to one of the following functions: +/// +/// - [`ParseStream::peek`][crate::parse::ParseBuffer::peek] +/// - [`ParseStream::peek2`][crate::parse::ParseBuffer::peek2] +/// - [`ParseStream::peek3`][crate::parse::ParseBuffer::peek3] +/// - [`Lookahead1::peek`] +/// +/// The peek will return `true` if there are no remaining tokens after that +/// point in the parse stream. +/// +/// # Example +/// +/// Suppose we are parsing attributes containing core::fmt inspired formatting +/// arguments: +/// +/// - `#[fmt("simple example")]` +/// - `#[fmt("interpolation e{}ample", self.x)]` +/// - `#[fmt("interpolation e{x}ample")]` +/// +/// and we want to recognize the cases where no interpolation occurs so that +/// more efficient code can be generated. +/// +/// The following implementation uses `input.peek(Token![,]) && +/// input.peek2(End)` to recognize the case of a trailing comma without +/// consuming the comma from the parse stream, because if it isn't a trailing +/// comma, that same comma needs to be parsed as part of `args`. +/// +/// ``` +/// use proc_macro2::TokenStream; +/// use quote::quote; +/// use syn::parse::{End, Parse, ParseStream, Result}; +/// use syn::{parse_quote, Attribute, LitStr, Token}; +/// +/// struct FormatArgs { +/// template: LitStr, // "...{}..." +/// args: TokenStream, // , self.x +/// } +/// +/// impl Parse for FormatArgs { +/// fn parse(input: ParseStream) -> Result { +/// let template: LitStr = input.parse()?; +/// +/// let args = if input.is_empty() +/// || input.peek(Token![,]) && input.peek2(End) +/// { +/// input.parse::>()?; +/// TokenStream::new() +/// } else { +/// input.parse()? +/// }; +/// +/// Ok(FormatArgs { +/// template, +/// args, +/// }) +/// } +/// } +/// +/// fn main() -> Result<()> { +/// let attrs: Vec = parse_quote! { +/// #[fmt("simple example")] +/// #[fmt("interpolation e{}ample", self.x)] +/// #[fmt("interpolation e{x}ample")] +/// }; +/// +/// for attr in &attrs { +/// let FormatArgs { template, args } = attr.parse_args()?; +/// let requires_fmt_machinery = +/// !args.is_empty() || template.value().contains(['{', '}']); +/// let out = if requires_fmt_machinery { +/// quote! { +/// ::core::write!(__formatter, #template #args) +/// } +/// } else { +/// quote! { +/// __formatter.write_str(#template) +/// } +/// }; +/// println!("{}", out); +/// } +/// Ok(()) +/// } +/// ``` +/// +/// Implementing this parsing logic without `peek2(End)` is more clumsy because +/// we'd need a parse stream actually advanced past the comma before being able +/// to find out whether there is anything after it. 
It would look something +/// like: +/// +/// ``` +/// # use proc_macro2::TokenStream; +/// # use syn::parse::{ParseStream, Result}; +/// # use syn::Token; +/// # +/// # fn parse(input: ParseStream) -> Result<()> { +/// use syn::parse::discouraged::Speculative as _; +/// +/// let ahead = input.fork(); +/// ahead.parse::>()?; +/// let args = if ahead.is_empty() { +/// input.advance_to(&ahead); +/// TokenStream::new() +/// } else { +/// input.parse()? +/// }; +/// # Ok(()) +/// # } +/// ``` +/// +/// or: +/// +/// ``` +/// # use proc_macro2::TokenStream; +/// # use syn::parse::{ParseStream, Result}; +/// # use syn::Token; +/// # +/// # fn parse(input: ParseStream) -> Result<()> { +/// use quote::ToTokens as _; +/// +/// let comma: Option = input.parse()?; +/// let mut args = TokenStream::new(); +/// if !input.is_empty() { +/// comma.to_tokens(&mut args); +/// input.parse::()?.to_tokens(&mut args); +/// } +/// # Ok(()) +/// # } +/// ``` +pub struct End; + +impl Copy for End {} + +impl Clone for End { + fn clone(&self) -> Self { + *self + } +} + +impl Peek for End { + type Token = Self; +} + +impl CustomToken for End { + fn peek(cursor: Cursor) -> bool { + cursor.eof() + } + + fn display() -> &'static str { + "`)`" // Lookahead1 error message will fill in the expected close delimiter + } +} + impl T, T: Token> Peek for F { type Token = T; } @@ -163,3 +328,5 @@ impl IntoSpans for TokenMarker { } impl T, T: Token> Sealed for F {} + +impl Sealed for End {} diff --git a/src/parse.rs b/src/parse.rs index f000711635..b7f8831d67 100644 --- a/src/parse.rs +++ b/src/parse.rs @@ -202,7 +202,7 @@ use std::rc::Rc; use std::str::FromStr; pub use crate::error::{Error, Result}; -pub use crate::lookahead::{Lookahead1, Peek}; +pub use crate::lookahead::{End, Lookahead1, Peek}; /// Parsing interface implemented by all types that can be parsed in a default /// way from a token stream. @@ -751,6 +751,11 @@ impl<'a> ParseBuffer<'a> { /// set of delimiters, as well as at the end of the tokens provided to the /// outermost parsing entry point. /// + /// This is equivalent to + /// .peek(syn::parse::End). + /// Use `.peek2(End)` or `.peek3(End)` to look for the end of a parse stream + /// further ahead than the current position. 
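As a concrete illustration of the `End` pseudo-token introduced in the lookahead.rs hunk above, here is a minimal sketch of peeking for the end of a nested parse buffer; `parse_pair` is a hypothetical helper, not part of syn:

use syn::parse::{End, ParseStream, Result};
use syn::{parenthesized, Ident, Token};

// Hypothetical helper: parses `(a)` or `(a, b)`.
fn parse_pair(input: ParseStream) -> Result<(Ident, Option<Ident>)> {
    let content;
    parenthesized!(content in input);
    let first: Ident = content.parse()?;
    // Inside the group, `.peek(End)` is the same test as `content.is_empty()`.
    let second = if content.peek(End) {
        None
    } else {
        content.parse::<Token![,]>()?;
        Some(content.parse()?)
    };
    Ok((first, second))
}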
+ /// /// # Example /// /// ``` diff --git a/src/parse_quote.rs b/src/parse_quote.rs index 22cd98effb..2db20597c4 100644 --- a/src/parse_quote.rs +++ b/src/parse_quote.rs @@ -53,11 +53,22 @@ /// /// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]` /// or inner like `#![...]` +/// - [`Vec<Attribute>`] — parses multiple attributes, including mixed kinds in +/// any order /// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation /// `P` with optional trailing punctuation +/// - [`Vec<Arm>`] — parses arms separated by optional commas according to the +/// same grammar as the inside of a `match` expression /// - [`Vec<Stmt>`] — parses the same as `Block::parse_within` +/// - [`Pat`], [`Box<Pat>`] — parses the same as +/// `Pat::parse_multi_with_leading_vert` +/// - [`Field`] — parses a named or unnamed struct field /// +/// [`Vec<Attribute>`]: Attribute +/// [`Vec<Arm>`]: Arm /// [`Vec<Stmt>`]: Block::parse_within +/// [`Pat`]: Pat::parse_multi_with_leading_vert +/// [`Box<Pat>`]: Pat::parse_multi_with_leading_vert /// /// # Panics /// @@ -140,7 +151,7 @@ use crate::punctuated::Punctuated; #[cfg(any(feature = "full", feature = "derive"))] use crate::{attr, Attribute, Field, FieldMutability, Ident, Type, Visibility}; #[cfg(feature = "full")] -use crate::{Block, Pat, Stmt}; +use crate::{Arm, Block, Pat, Stmt}; #[cfg(any(feature = "full", feature = "derive"))] impl ParseQuote for Attribute { @@ -153,6 +164,17 @@ } } +#[cfg(any(feature = "full", feature = "derive"))] +impl ParseQuote for Vec<Attribute> { + fn parse(input: ParseStream) -> Result<Self> { + let mut attrs = Vec::new(); + while !input.is_empty() { + attrs.push(ParseQuote::parse(input)?); + } + Ok(attrs) + } +} + #[cfg(any(feature = "full", feature = "derive"))] impl ParseQuote for Field { fn parse(input: ParseStream) -> Result<Self> { @@ -209,3 +231,10 @@ impl ParseQuote for Vec<Stmt> { Block::parse_within(input) } } + +#[cfg(feature = "full")] +impl ParseQuote for Vec<Arm> { + fn parse(input: ParseStream) -> Result<Self> { + Arm::parse_multiple(input) + } +} diff --git a/src/path.rs b/src/path.rs index ce38eff313..7744d28338 100644 --- a/src/path.rs +++ b/src/path.rs @@ -528,7 +528,10 @@ pub(crate) mod parsing { input.parse()?
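The two `ParseQuote` impls added in the parse_quote.rs hunk above can be exercised from `parse_quote!` directly; a minimal sketch, assuming syn's "full" feature is enabled (`demo` is an illustrative name, not part of syn):

use syn::{parse_quote, Arm, Attribute};

fn demo() {
    // Mixed inner and outer attributes in a single invocation.
    let attrs: Vec<Attribute> = parse_quote! {
        #![allow(dead_code)]
        #[derive(Clone)]
    };
    assert_eq!(attrs.len(), 2);

    // Match arms with optional trailing commas, as inside a `match` body.
    let arms: Vec<Arm> = parse_quote! {
        Some(x) => x,
        None => 0,
    };
    assert_eq!(arms.len(), 2);
}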
}; - if !expr_style && input.peek(Token![<]) && !input.peek(Token![<=]) + if !expr_style + && input.peek(Token![<]) + && !input.peek(Token![<=]) + && !input.peek(Token![<<=]) || input.peek(Token![::]) && input.peek3(Token![<]) { Ok(PathSegment { diff --git a/src/precedence.rs b/src/precedence.rs index 936a0c51d2..1891bfc202 100644 --- a/src/precedence.rs +++ b/src/precedence.rs @@ -1,7 +1,14 @@ +#[cfg(all(feature = "printing", feature = "full"))] +use crate::attr::{AttrStyle, Attribute}; #[cfg(feature = "printing")] use crate::expr::Expr; #[cfg(all(feature = "printing", feature = "full"))] -use crate::expr::{ExprBreak, ExprReturn, ExprYield}; +use crate::expr::{ + ExprArray, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprCall, ExprConst, ExprContinue, + ExprField, ExprForLoop, ExprGroup, ExprIf, ExprIndex, ExprInfer, ExprLit, ExprLoop, ExprMacro, + ExprMatch, ExprMethodCall, ExprParen, ExprPath, ExprRepeat, ExprReturn, ExprStruct, ExprTry, + ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile, ExprYield, +}; use crate::op::BinOp; #[cfg(all(feature = "printing", feature = "full"))] use crate::ty::ReturnType; @@ -82,11 +89,21 @@ impl Precedence { #[cfg(feature = "printing")] pub(crate) fn of(e: &Expr) -> Self { + #[cfg(feature = "full")] + fn prefix_attrs(attrs: &[Attribute]) -> Precedence { + for attr in attrs { + if let AttrStyle::Outer = attr.style { + return Precedence::Prefix; + } + } + Precedence::Unambiguous + } + match e { #[cfg(feature = "full")] Expr::Closure(e) => match e.output { ReturnType::Default => Precedence::Jump, - ReturnType::Type(..) => Precedence::Unambiguous, + ReturnType::Type(..) => prefix_attrs(&e.attrs), }, #[cfg(feature = "full")] @@ -104,6 +121,36 @@ impl Precedence { Expr::Cast(_) => Precedence::Cast, Expr::RawAddr(_) | Expr::Reference(_) | Expr::Unary(_) => Precedence::Prefix, + #[cfg(feature = "full")] + Expr::Array(ExprArray { attrs, .. }) + | Expr::Async(ExprAsync { attrs, .. }) + | Expr::Await(ExprAwait { attrs, .. }) + | Expr::Block(ExprBlock { attrs, .. }) + | Expr::Call(ExprCall { attrs, .. }) + | Expr::Const(ExprConst { attrs, .. }) + | Expr::Continue(ExprContinue { attrs, .. }) + | Expr::Field(ExprField { attrs, .. }) + | Expr::ForLoop(ExprForLoop { attrs, .. }) + | Expr::Group(ExprGroup { attrs, .. }) + | Expr::If(ExprIf { attrs, .. }) + | Expr::Index(ExprIndex { attrs, .. }) + | Expr::Infer(ExprInfer { attrs, .. }) + | Expr::Lit(ExprLit { attrs, .. }) + | Expr::Loop(ExprLoop { attrs, .. }) + | Expr::Macro(ExprMacro { attrs, .. }) + | Expr::Match(ExprMatch { attrs, .. }) + | Expr::MethodCall(ExprMethodCall { attrs, .. }) + | Expr::Paren(ExprParen { attrs, .. }) + | Expr::Path(ExprPath { attrs, .. }) + | Expr::Repeat(ExprRepeat { attrs, .. }) + | Expr::Struct(ExprStruct { attrs, .. }) + | Expr::Try(ExprTry { attrs, .. }) + | Expr::TryBlock(ExprTryBlock { attrs, .. }) + | Expr::Tuple(ExprTuple { attrs, .. }) + | Expr::Unsafe(ExprUnsafe { attrs, .. }) + | Expr::While(ExprWhile { attrs, .. 
}) => prefix_attrs(attrs), + + #[cfg(not(feature = "full"))] Expr::Array(_) | Expr::Async(_) | Expr::Await(_) @@ -130,9 +177,10 @@ impl Precedence { | Expr::TryBlock(_) | Expr::Tuple(_) | Expr::Unsafe(_) - | Expr::Verbatim(_) | Expr::While(_) => Precedence::Unambiguous, + Expr::Verbatim(_) => Precedence::Unambiguous, + #[cfg(not(feature = "full"))] Expr::Break(_) | Expr::Closure(_) | Expr::Return(_) | Expr::Yield(_) => unreachable!(), } diff --git a/src/scan_expr.rs b/src/scan_expr.rs new file mode 100644 index 0000000000..155b5b63bf --- /dev/null +++ b/src/scan_expr.rs @@ -0,0 +1,264 @@ +use self::{Action::*, Input::*}; +use proc_macro2::{Delimiter, Ident, Spacing, TokenTree}; +use syn::parse::{ParseStream, Result}; +use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Token, Type}; + +enum Input { + Keyword(&'static str), + Punct(&'static str), + ConsumeAny, + ConsumeBinOp, + ConsumeBrace, + ConsumeDelimiter, + ConsumeIdent, + ConsumeLifetime, + ConsumeLiteral, + ConsumeNestedBrace, + ExpectPath, + ExpectTurbofish, + ExpectType, + CanBeginExpr, + Otherwise, + Empty, +} + +enum Action { + SetState(&'static [(Input, Action)]), + IncDepth, + DecDepth, + Finish, +} + +static INIT: [(Input, Action); 28] = [ + (ConsumeDelimiter, SetState(&POSTFIX)), + (Keyword("async"), SetState(&ASYNC)), + (Keyword("break"), SetState(&BREAK_LABEL)), + (Keyword("const"), SetState(&CONST)), + (Keyword("continue"), SetState(&CONTINUE)), + (Keyword("for"), SetState(&FOR)), + (Keyword("if"), IncDepth), + (Keyword("let"), SetState(&PATTERN)), + (Keyword("loop"), SetState(&BLOCK)), + (Keyword("match"), IncDepth), + (Keyword("move"), SetState(&CLOSURE)), + (Keyword("return"), SetState(&RETURN)), + (Keyword("static"), SetState(&CLOSURE)), + (Keyword("unsafe"), SetState(&BLOCK)), + (Keyword("while"), IncDepth), + (Keyword("yield"), SetState(&RETURN)), + (Keyword("_"), SetState(&POSTFIX)), + (Punct("!"), SetState(&INIT)), + (Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])), + (Punct("&"), SetState(&REFERENCE)), + (Punct("*"), SetState(&INIT)), + (Punct("-"), SetState(&INIT)), + (Punct("..="), SetState(&INIT)), + (Punct(".."), SetState(&RANGE)), + (Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])), + (ConsumeLiteral, SetState(&POSTFIX)), + (ExpectPath, SetState(&PATH)), +]; + +static POSTFIX: [(Input, Action); 10] = [ + (Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])), + (Punct("..="), SetState(&INIT)), + (Punct(".."), SetState(&RANGE)), + (Punct("."), SetState(&DOT)), + (Punct("?"), SetState(&POSTFIX)), + (ConsumeBinOp, SetState(&INIT)), + (Punct("="), SetState(&INIT)), + (ConsumeNestedBrace, SetState(&IF_THEN)), + (ConsumeDelimiter, SetState(&POSTFIX)), + (Empty, Finish), +]; + +static ASYNC: [(Input, Action); 3] = [ + (Keyword("move"), SetState(&ASYNC)), + (Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeBrace, SetState(&POSTFIX)), +]; + +static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))]; + +static BREAK_LABEL: [(Input, Action); 2] = [ + (ConsumeLifetime, SetState(&BREAK_VALUE)), + (Otherwise, SetState(&BREAK_VALUE)), +]; + +static BREAK_VALUE: [(Input, Action); 3] = [ + (ConsumeNestedBrace, SetState(&IF_THEN)), + (CanBeginExpr, SetState(&INIT)), + (Otherwise, SetState(&POSTFIX)), +]; + +static CLOSURE: [(Input, Action); 6] = [ + (Keyword("async"), SetState(&CLOSURE)), + (Keyword("move"), SetState(&CLOSURE)), + (Punct(","), SetState(&CLOSURE)), + (Punct(">"), SetState(&CLOSURE)), + 
(Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeLifetime, SetState(&CLOSURE)), +]; + +static CLOSURE_ARGS: [(Input, Action); 2] = [ + (Punct("|"), SetState(&CLOSURE_RET)), + (ConsumeAny, SetState(&CLOSURE_ARGS)), +]; + +static CLOSURE_RET: [(Input, Action); 2] = [ + (Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])), + (Otherwise, SetState(&INIT)), +]; + +static CONST: [(Input, Action); 2] = [ + (Punct("|"), SetState(&CLOSURE_ARGS)), + (ConsumeBrace, SetState(&POSTFIX)), +]; + +static CONTINUE: [(Input, Action); 2] = [ + (ConsumeLifetime, SetState(&POSTFIX)), + (Otherwise, SetState(&POSTFIX)), +]; + +static DOT: [(Input, Action); 3] = [ + (Keyword("await"), SetState(&POSTFIX)), + (ConsumeIdent, SetState(&METHOD)), + (ConsumeLiteral, SetState(&POSTFIX)), +]; + +static FOR: [(Input, Action); 2] = [ + (Punct("<"), SetState(&CLOSURE)), + (Otherwise, SetState(&PATTERN)), +]; + +static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)]; +static IF_THEN: [(Input, Action); 2] = + [(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)]; + +static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))]; + +static PATH: [(Input, Action); 4] = [ + (Punct("!="), SetState(&INIT)), + (Punct("!"), SetState(&INIT)), + (ConsumeNestedBrace, SetState(&IF_THEN)), + (Otherwise, SetState(&POSTFIX)), +]; + +static PATTERN: [(Input, Action); 15] = [ + (ConsumeDelimiter, SetState(&PATTERN)), + (Keyword("box"), SetState(&PATTERN)), + (Keyword("in"), IncDepth), + (Keyword("mut"), SetState(&PATTERN)), + (Keyword("ref"), SetState(&PATTERN)), + (Keyword("_"), SetState(&PATTERN)), + (Punct("!"), SetState(&PATTERN)), + (Punct("&"), SetState(&PATTERN)), + (Punct("..="), SetState(&PATTERN)), + (Punct(".."), SetState(&PATTERN)), + (Punct("="), SetState(&INIT)), + (Punct("@"), SetState(&PATTERN)), + (Punct("|"), SetState(&PATTERN)), + (ConsumeLiteral, SetState(&PATTERN)), + (ExpectPath, SetState(&PATTERN)), +]; + +static RANGE: [(Input, Action); 6] = [ + (Punct("..="), SetState(&INIT)), + (Punct(".."), SetState(&RANGE)), + (Punct("."), SetState(&DOT)), + (ConsumeNestedBrace, SetState(&IF_THEN)), + (Empty, Finish), + (Otherwise, SetState(&INIT)), +]; + +static RAW: [(Input, Action); 3] = [ + (Keyword("const"), SetState(&INIT)), + (Keyword("mut"), SetState(&INIT)), + (Otherwise, SetState(&POSTFIX)), +]; + +static REFERENCE: [(Input, Action); 3] = [ + (Keyword("mut"), SetState(&INIT)), + (Keyword("raw"), SetState(&RAW)), + (Otherwise, SetState(&INIT)), +]; + +static RETURN: [(Input, Action); 2] = [ + (CanBeginExpr, SetState(&INIT)), + (Otherwise, SetState(&POSTFIX)), +]; + +pub(crate) fn scan_expr(input: ParseStream) -> Result<()> { + let mut state = INIT.as_slice(); + let mut depth = 0usize; + 'table: loop { + for rule in state { + if match rule.0 { + Input::Keyword(expected) => input.step(|cursor| match cursor.ident() { + Some((ident, rest)) if ident == expected => Ok((true, rest)), + _ => Ok((false, *cursor)), + })?, + Input::Punct(expected) => input.step(|cursor| { + let begin = *cursor; + let mut cursor = begin; + for (i, ch) in expected.chars().enumerate() { + match cursor.punct() { + Some((punct, _)) if punct.as_char() != ch => break, + Some((_, rest)) if i == expected.len() - 1 => { + return Ok((true, rest)); + } + Some((punct, rest)) if punct.spacing() == Spacing::Joint => { + cursor = rest; + } + _ => break, + } + } + Ok((false, begin)) + })?, + Input::ConsumeAny => input.parse::>()?.is_some(), + Input::ConsumeBinOp => input.parse::().is_ok(), + 
Input::ConsumeBrace | Input::ConsumeNestedBrace => { + (matches!(rule.0, Input::ConsumeBrace) || depth > 0) + && input.step(|cursor| match cursor.group(Delimiter::Brace) { + Some((_inside, _span, rest)) => Ok((true, rest)), + None => Ok((false, *cursor)), + })? + } + Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() { + Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)), + None => Ok((false, *cursor)), + })?, + Input::ConsumeIdent => input.parse::>()?.is_some(), + Input::ConsumeLifetime => input.parse::>()?.is_some(), + Input::ConsumeLiteral => input.parse::>()?.is_some(), + Input::ExpectPath => { + input.parse::()?; + true + } + Input::ExpectTurbofish => { + if input.peek(Token![::]) { + input.parse::()?; + } + true + } + Input::ExpectType => { + Type::without_plus(input)?; + true + } + Input::CanBeginExpr => Expr::peek(input), + Input::Otherwise => true, + Input::Empty => input.is_empty() || input.peek(Token![,]), + } { + state = match rule.1 { + Action::SetState(next) => next, + Action::IncDepth => (depth += 1, &INIT).1, + Action::DecDepth => (depth -= 1, &POSTFIX).1, + Action::Finish => return if depth == 0 { Ok(()) } else { break }, + }; + continue 'table; + } + } + return Err(input.error("unsupported expression")); + } +} diff --git a/src/stmt.rs b/src/stmt.rs index ac8238a98a..6261c7b7a9 100644 --- a/src/stmt.rs +++ b/src/stmt.rs @@ -208,7 +208,8 @@ pub(crate) mod parsing { if ahead.peek2(Ident) || ahead.peek2(Token![try]) { is_item_macro = true; } else if ahead.peek2(token::Brace) - && !(ahead.peek3(Token![.]) || ahead.peek3(Token![?])) + && !(ahead.peek3(Token![.]) && !ahead.peek3(Token![..]) + || ahead.peek3(Token![?])) { input.advance_to(&ahead); return stmt_mac(input, attrs, path).map(Stmt::Macro); @@ -452,11 +453,12 @@ pub(crate) mod printing { self.pat.to_tokens(tokens); if let Some(init) = &self.init { init.eq_token.to_tokens(tokens); - if init.diverge.is_some() && classify::expr_trailing_brace(&init.expr) { - token::Paren::default().surround(tokens, |tokens| init.expr.to_tokens(tokens)); - } else { - init.expr.to_tokens(tokens); - } + expr::printing::print_subexpression( + &init.expr, + init.diverge.is_some() && classify::expr_trailing_brace(&init.expr), + tokens, + FixupContext::NONE, + ); if let Some((else_token, diverge)) = &init.diverge { else_token.to_tokens(tokens); match &**diverge { diff --git a/syn.json b/syn.json index ed106885e0..1be1b648fc 100644 --- a/syn.json +++ b/syn.json @@ -1,5 +1,5 @@ { - "version": "2.0.85", + "version": "2.0.96", "types": [ { "ident": "Abi", diff --git a/tests/common/eq.rs b/tests/common/eq.rs index bf09494b4d..bae9b30d0a 100644 --- a/tests/common/eq.rs +++ b/tests/common/eq.rs @@ -15,7 +15,6 @@ use rustc_ast::ast::AssocItemConstraint; use rustc_ast::ast::AssocItemConstraintKind; use rustc_ast::ast::AssocItemKind; use rustc_ast::ast::AttrArgs; -use rustc_ast::ast::AttrArgsEq; use rustc_ast::ast::AttrId; use rustc_ast::ast::AttrItem; use rustc_ast::ast::AttrKind; @@ -150,6 +149,8 @@ use rustc_ast::ast::TyAliasWhereClauses; use rustc_ast::ast::TyKind; use rustc_ast::ast::UintTy; use rustc_ast::ast::UnOp; +use rustc_ast::ast::UnsafeBinderCastKind; +use rustc_ast::ast::UnsafeBinderTy; use rustc_ast::ast::UnsafeSource; use rustc_ast::ast::UseTree; use rustc_ast::ast::UseTreeKind; @@ -161,6 +162,7 @@ use rustc_ast::ast::WhereBoundPredicate; use rustc_ast::ast::WhereClause; use rustc_ast::ast::WhereEqPredicate; use rustc_ast::ast::WherePredicate; +use rustc_ast::ast::WherePredicateKind; use 
rustc_ast::ast::WhereRegionPredicate; use rustc_ast::ptr::P; use rustc_ast::token::{ @@ -308,6 +310,7 @@ macro_rules! spanless_eq_partial_eq { }; } +spanless_eq_partial_eq!(()); spanless_eq_partial_eq!(bool); spanless_eq_partial_eq!(u8); spanless_eq_partial_eq!(u16); @@ -493,14 +496,14 @@ spanless_eq_struct!(DelimSpacing; open close); spanless_eq_struct!(EnumDef; variants); spanless_eq_struct!(Expr; id kind span attrs !tokens); spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder); -spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder); +spanless_eq_struct!(FieldDef; attrs id span vis safety ident ty default is_placeholder); spanless_eq_struct!(Fn; defaultness generics sig body); spanless_eq_struct!(FnDecl; inputs output); spanless_eq_struct!(FnHeader; constness coroutine_kind safety ext); spanless_eq_struct!(FnSig; header decl span); -spanless_eq_struct!(ForeignMod; safety abi items); +spanless_eq_struct!(ForeignMod; extern_span safety abi items); spanless_eq_struct!(FormatArgPosition; index kind span); -spanless_eq_struct!(FormatArgs; span template arguments); +spanless_eq_struct!(FormatArgs; span template arguments uncooked_fmt_str); spanless_eq_struct!(FormatArgument; kind expr); spanless_eq_struct!(FormatOptions; width precision alignment fill sign alternate zero_pad debug_hex); spanless_eq_struct!(FormatPlaceholder; argument span format_trait format_options); @@ -542,19 +545,20 @@ spanless_eq_struct!(Ty; id kind span tokens); spanless_eq_struct!(TyAlias; defaultness generics where_clauses bounds ty); spanless_eq_struct!(TyAliasWhereClause; !has_where_token span); spanless_eq_struct!(TyAliasWhereClauses; before after !split); +spanless_eq_struct!(UnsafeBinderTy; generic_params inner_ty); spanless_eq_struct!(UseTree; prefix kind span); spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder); spanless_eq_struct!(Visibility; kind span tokens); -spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds); +spanless_eq_struct!(WhereBoundPredicate; bound_generic_params bounded_ty bounds); spanless_eq_struct!(WhereClause; has_where_token predicates span); -spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty); -spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds); +spanless_eq_struct!(WhereEqPredicate; lhs_ty rhs_ty); +spanless_eq_struct!(WherePredicate; kind id span); +spanless_eq_struct!(WhereRegionPredicate; lifetime bounds); spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0)); spanless_eq_enum!(AsmMacro; Asm GlobalAsm NakedAsm); spanless_eq_enum!(AssocItemConstraintKind; Equality(term) Bound(bounds)); spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0) Delegation(0) DelegationMac(0)); -spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1)); -spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0)); +spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(eq_span expr)); spanless_eq_enum!(AttrStyle; Outer Inner); spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) AttrsTarget(0)); spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt); @@ -599,10 +603,10 @@ spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces); spanless_eq_enum!(MatchKind; Prefix Postfix); spanless_eq_enum!(MetaItemKind; Word List(0) NameValue(0)); spanless_eq_enum!(MetaItemInner; MetaItem(0) Lit(0)); -spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded); +spanless_eq_enum!(ModKind; Loaded(0 1 2 3) Unloaded); spanless_eq_enum!(Movability; 
Static Movable); spanless_eq_enum!(Mutability; Mut Not); -spanless_eq_enum!(PatFieldsRest; Rest None); +spanless_eq_enum!(PatFieldsRest; Rest Recovered(0) None); spanless_eq_enum!(PreciseCapturingArg; Lifetime(0) Arg(0 1)); spanless_eq_enum!(RangeEnd; Included(0) Excluded); spanless_eq_enum!(RangeLimits; HalfOpen Closed); @@ -616,11 +620,12 @@ spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2 3)); spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None); spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128); spanless_eq_enum!(UnOp; Deref Not Neg); +spanless_eq_enum!(UnsafeBinderCastKind; Wrap Unwrap); spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided); spanless_eq_enum!(UseTreeKind; Simple(0) Nested(items span) Glob); spanless_eq_enum!(VariantData; Struct(fields recovered) Tuple(0 1) Unit(0)); spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited); -spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0)); +spanless_eq_enum!(WherePredicateKind; BoundPredicate(0) RegionPredicate(0) EqPredicate(0)); spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id) Gen(span closure_id return_impl_trait_id) AsyncGen(span closure_id return_impl_trait_id)); @@ -631,7 +636,8 @@ spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0) Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) - Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0) Err(0) Dummy); + Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0) + UnsafeBinderCast(0 1 2) Err(0) Dummy); spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr) InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const) Sym(sym) Label(block)); @@ -642,11 +648,12 @@ spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0) Const(0) Fn(0) spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0 1) CStr(0 1) Byte(0) Char(0) Int(0 1) Float(0 1) Bool(0) Err(0)); spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2) - Or(0) Path(0 1) Tuple(0) Box(0) Deref(0) Ref(0 1) Lit(0) Range(0 1 2) - Slice(0) Rest Never Paren(0) MacCall(0) Err(0)); + Or(0) Path(0 1) Tuple(0) Box(0) Deref(0) Ref(0 1) Expr(0) Range(0 1 2) + Slice(0) Rest Never Guard(0 1) Paren(0) MacCall(0) Err(0)); spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) PinnedRef(0 1) - BareFn(0) Never Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) - Typeof(0) Infer ImplicitSelf MacCall(0) CVarArgs Pat(0 1) Dummy Err(0)); + BareFn(0) UnsafeBinder(0) Never Tup(0) Path(0 1) TraitObject(0 1) + ImplTrait(0 1) Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) CVarArgs + Pat(0 1) Dummy Err(0)); impl SpanlessEq for Ident { fn eq(&self, other: &Self) -> bool { @@ -715,8 +722,8 @@ impl SpanlessEq for TokenKind { impl SpanlessEq for TokenStream { fn eq(&self, other: &Self) -> bool { - let mut this_trees = self.trees(); - let mut other_trees = other.trees(); + let mut this_trees = self.iter(); + let mut other_trees = other.iter(); loop { let this = match this_trees.next() { None => return other_trees.next().is_none(), @@ -774,7 +781,7 @@ fn doc_comment<'a>( Some(TokenTree::Delimited(_span, _spacing, Delimiter::Bracket, stream)) => stream, _ => return false, }; - let mut trees = stream.trees(); + let mut trees = stream.iter(); match trees.next() { Some(TokenTree::Token( Token { @@ -826,16 +833,6 
@@ fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool { } } -fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool { - match value { - AttrArgsEq::Ast(expr) => match &expr.kind { - ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped), - _ => false, - }, - AttrArgsEq::Hir(lit) => is_escaped_literal_meta_item_lit(lit, unescaped), - } -} - fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool { match lit { MetaItemLit { @@ -896,9 +893,10 @@ impl SpanlessEq for AttrKind { SpanlessEq::eq(&path, &normal2.item.path) && match &normal2.item.args { AttrArgs::Empty | AttrArgs::Delimited(_) => false, - AttrArgs::Eq(_span, value) => { - is_escaped_literal_attr_args(value, *unescaped) - } + AttrArgs::Eq { eq_span: _, expr } => match &expr.kind { + ExprKind::Lit(lit) => is_escaped_lit(lit, *unescaped), + _ => false, + }, } } (AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self), diff --git a/tests/common/mod.rs b/tests/common/mod.rs index c85ac0b4c9..ead830f811 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -3,3 +3,4 @@ pub mod eq; pub mod parse; +pub mod visit; diff --git a/tests/common/visit.rs b/tests/common/visit.rs new file mode 100644 index 0000000000..b4ae5ecd55 --- /dev/null +++ b/tests/common/visit.rs @@ -0,0 +1,94 @@ +use proc_macro2::{Delimiter, Group, TokenStream, TokenTree}; +use std::mem; +use syn::visit_mut::{self, VisitMut}; +use syn::{Expr, File, Generics, LifetimeParam, MacroDelimiter, Stmt, StmtMacro, TypeParam}; + +pub struct FlattenParens; + +impl FlattenParens { + pub fn visit_token_stream_mut(tokens: &mut TokenStream) { + *tokens = mem::take(tokens) + .into_iter() + .flat_map(|tt| { + if let TokenTree::Group(group) = tt { + let delimiter = group.delimiter(); + let mut content = group.stream(); + Self::visit_token_stream_mut(&mut content); + if let Delimiter::Parenthesis = delimiter { + content + } else { + TokenStream::from(TokenTree::Group(Group::new(delimiter, content))) + } + } else { + TokenStream::from(tt) + } + }) + .collect(); + } +} + +impl VisitMut for FlattenParens { + fn visit_expr_mut(&mut self, e: &mut Expr) { + while let Expr::Paren(paren) = e { + *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER); + } + visit_mut::visit_expr_mut(self, e); + } +} + +pub struct AsIfPrinted; + +impl VisitMut for AsIfPrinted { + fn visit_file_mut(&mut self, file: &mut File) { + file.shebang = None; + visit_mut::visit_file_mut(self, file); + } + + fn visit_generics_mut(&mut self, generics: &mut Generics) { + if generics.params.is_empty() { + generics.lt_token = None; + generics.gt_token = None; + } + if let Some(where_clause) = &generics.where_clause { + if where_clause.predicates.is_empty() { + generics.where_clause = None; + } + } + visit_mut::visit_generics_mut(self, generics); + } + + fn visit_lifetime_param_mut(&mut self, param: &mut LifetimeParam) { + if param.bounds.is_empty() { + param.colon_token = None; + } + visit_mut::visit_lifetime_param_mut(self, param); + } + + fn visit_stmt_mut(&mut self, stmt: &mut Stmt) { + if let Stmt::Expr(expr, semi) = stmt { + if let Expr::Macro(e) = expr { + if match e.mac.delimiter { + MacroDelimiter::Brace(_) => true, + MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => semi.is_some(), + } { + let Expr::Macro(expr) = mem::replace(expr, Expr::PLACEHOLDER) else { + unreachable!(); + }; + *stmt = Stmt::Macro(StmtMacro { + attrs: expr.attrs, + mac: expr.mac, + semi_token: *semi, + }); + } + } + } + visit_mut::visit_stmt_mut(self, 
stmt); + } + + fn visit_type_param_mut(&mut self, param: &mut TypeParam) { + if param.bounds.is_empty() { + param.colon_token = None; + } + visit_mut::visit_type_param_mut(self, param); + } +} diff --git a/tests/repo/mod.rs b/tests/repo/mod.rs index 96f039cb35..81b29e2a0c 100644 --- a/tests/repo/mod.rs +++ b/tests/repo/mod.rs @@ -15,7 +15,7 @@ use std::path::{Path, PathBuf}; use tar::Archive; use walkdir::{DirEntry, WalkDir}; -const REVISION: &str = "86d69c705a552236a622eee3fdea94bf13c5f102"; +const REVISION: &str = "0aeaa5eb22180fdf12a8489e63c4daa18da6f236"; #[rustfmt::skip] static EXCLUDE_FILES: &[&str] = &[ @@ -29,11 +29,32 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/rustdoc/inline_cross/auxiliary/non_lifetime_binders.rs", "tests/rustdoc/non_lifetime_binders.rs", + // TODO: unsafe binders: `unsafe<'a> &'a T` + // https://github.com/dtolnay/syn/issues/1791 + "src/tools/rustfmt/tests/source/unsafe-binders.rs", + "src/tools/rustfmt/tests/target/unsafe-binders.rs", + + // TODO: unsafe fields: `struct S { unsafe field: T }` + // https://github.com/dtolnay/syn/issues/1792 + "src/tools/rustfmt/tests/source/unsafe-field.rs", + "src/tools/rustfmt/tests/target/unsafe-field.rs", + "tests/ui/unsafe-fields/auxiliary/unsafe-fields-crate-dep.rs", + + // TODO: guard patterns: `match expr { (A if f()) | (B if g()) => {} }` + // https://github.com/dtolnay/syn/issues/1793 + "src/tools/rustfmt/tests/target/guard_patterns.rs", + + // TODO: struct field default: `struct S { field: i32 = 1 }` + // https://github.com/dtolnay/syn/issues/1774 + "tests/ui/structs/auxiliary/struct_field_default.rs", + "tests/ui/structs/default-field-values-support.rs", + // TODO: return type notation: `where T: Trait` and `where T::method(..): Send` // https://github.com/dtolnay/syn/issues/1434 - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rs", "src/tools/rustfmt/tests/target/return-type-notation.rs", + "tests/ui/associated-type-bounds/all-generics-lookup.rs", + "tests/ui/associated-type-bounds/implied-from-self-where-clause.rs", "tests/ui/associated-type-bounds/return-type-notation/basic.rs", "tests/ui/associated-type-bounds/return-type-notation/higher-ranked-bound-works.rs", "tests/ui/associated-type-bounds/return-type-notation/namespace-conflict.rs", @@ -169,6 +190,15 @@ static EXCLUDE_FILES: &[&str] = &[ // https://github.com/dtolnay/syn/issues/1632 "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_trait_bound.rs", "tests/ui/generic-const-items/const-trait-impl.rs", + "tests/ui/traits/const-traits/const-bound-in-host.rs", + "tests/ui/traits/const-traits/const-drop.rs", + "tests/ui/traits/const-traits/const-impl-trait.rs", + "tests/ui/traits/const-traits/const-in-closure.rs", + "tests/ui/traits/const-traits/dont-ice-on-const-pred-for-bounds.rs", + "tests/ui/traits/const-traits/effects/auxiliary/minicore.rs", + "tests/ui/traits/const-traits/effects/dont-prefer-param-env-for-infer-self-ty.rs", + "tests/ui/traits/const-traits/effects/minicore-const-fn-early-bound.rs", + "tests/ui/traits/const-traits/predicate-entailment-passes.rs", "tests/ui/traits/const-traits/tilde-const-syntax.rs", // TODO: unparenthesized half-open range pattern inside slice pattern: `[1..]` @@ -180,7 +210,7 @@ static EXCLUDE_FILES: &[&str] = &[ // https://github.com/dtolnay/syn/issues/1770 "src/tools/rustfmt/tests/source/pin_sugar.rs", "src/tools/rustfmt/tests/target/pin_sugar.rs", - 
"tests/ui/async-await/pin-sugar.rs", + "tests/ui/async-await/pin-ergonomics/sugar.rs", // TODO: `|| .. .method()` "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_range_method_call.rs", @@ -231,10 +261,8 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/rustdoc/generic-associated-types/gats.rs", // Deprecated trait object syntax with parenthesized generic arguments and no dyn keyword - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/bare_dyn_types_with_paren_as_generic_args.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_fn_trait_args.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/typepathfn_with_coloncolon.rs", - "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/value_parameters_no_patterns.rs", "src/tools/rustfmt/tests/source/attrib.rs", "src/tools/rustfmt/tests/source/closure.rs", "src/tools/rustfmt/tests/source/existential_type.rs", @@ -268,7 +296,10 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/ui/parser/bounds-obj-parens.rs", // Various extensions to Rust syntax made up by rust-analyzer + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_bound.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_path.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/field_expr.rs", + "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_arg_bounds.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_abs_star.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs", "src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs", @@ -305,6 +336,7 @@ static EXCLUDE_FILES: &[&str] = &[ "tests/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs", "tests/ui/editions/edition-keywords-2015-2015.rs", "tests/ui/editions/edition-keywords-2015-2018.rs", + "tests/ui/lint/keyword-idents/auxiliary/multi_file_submod.rs", "tests/ui/lint/lint_pre_expansion_extern_module_aux.rs", "tests/ui/macros/macro-comma-support-rpass.rs", "tests/ui/macros/try-macro.rs", diff --git a/tests/test_expr.rs b/tests/test_expr.rs index f01fcb8ce2..67d5f0dd41 100644 --- a/tests/test_expr.rs +++ b/tests/test_expr.rs @@ -1,18 +1,36 @@ +#![cfg(not(miri))] +#![recursion_limit = "1024"] +#![feature(rustc_private)] #![allow( + clippy::match_like_matches_macro, clippy::needless_lifetimes, clippy::single_element_loop, - clippy::uninlined_format_args + clippy::too_many_lines, + clippy::uninlined_format_args, + clippy::unreadable_literal )] #[macro_use] mod macros; -use proc_macro2::{Delimiter, Group}; +mod common; + +use crate::common::visit::{AsIfPrinted, FlattenParens}; +use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream}; use quote::{quote, ToTokens as _}; -use std::mem; +use std::process::ExitCode; use syn::punctuated::Punctuated; -use syn::visit_mut::{self, VisitMut}; -use syn::{parse_quote, token, Expr, ExprRange, ExprTuple, Stmt, Token}; +use syn::visit_mut::VisitMut as _; +use syn::{ + parse_quote, token, AngleBracketedGenericArguments, Arm, BinOp, Block, Expr, ExprArray, + ExprAssign, ExprAsync, ExprAwait, ExprBinary, ExprBlock, ExprBreak, ExprCall, ExprCast, + ExprClosure, ExprConst, ExprContinue, ExprField, ExprForLoop, ExprIf, ExprIndex, ExprLet, + ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, ExprPath, ExprRange, ExprRawAddr, + ExprReference, ExprReturn, ExprStruct, ExprTry, ExprTryBlock, ExprTuple, ExprUnary, ExprUnsafe, + ExprWhile, 
ExprYield, GenericArgument, Label, Lifetime, Lit, LitInt, Macro, MacroDelimiter, + Member, Pat, PatWild, Path, PathArguments, PathSegment, PointerMutability, QSelf, RangeLimits, + ReturnType, Stmt, Token, Type, TypePath, UnOp, +}; #[test] fn test_expr_parse() { @@ -385,6 +403,19 @@ fn test_range_precedence() { } "#); + snapshot!("() = .. + ()" as Expr, @r" + Expr::Binary { + left: Expr::Assign { + left: Expr::Tuple, + right: Expr::Range { + limits: RangeLimits::HalfOpen, + }, + }, + op: BinOp::Add, + right: Expr::Tuple, + } + "); + // A range with a lower bound cannot be the upper bound of another range, // and a range with an upper bound cannot be the lower bound of another // range. @@ -392,6 +423,100 @@ fn test_range_precedence() { syn::parse_str::("x .. x ..").unwrap_err(); } +#[test] +fn test_ranges_bailout() { + syn::parse_str::(".. ?").unwrap_err(); + syn::parse_str::(".. .field").unwrap_err(); + + snapshot!("return .. ?" as Expr, @r" + Expr::Try { + expr: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + } + "); + + snapshot!("break .. ?" as Expr, @r" + Expr::Try { + expr: Expr::Break { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + } + "); + + snapshot!("|| .. ?" as Expr, @r" + Expr::Try { + expr: Expr::Closure { + output: ReturnType::Default, + body: Expr::Range { + limits: RangeLimits::HalfOpen, + }, + }, + } + "); + + snapshot!("return .. .field" as Expr, @r#" + Expr::Field { + base: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + member: Member::Named("field"), + } + "#); + + snapshot!("break .. .field" as Expr, @r#" + Expr::Field { + base: Expr::Break { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + member: Member::Named("field"), + } + "#); + + snapshot!("|| .. .field" as Expr, @r#" + Expr::Field { + base: Expr::Closure { + output: ReturnType::Default, + body: Expr::Range { + limits: RangeLimits::HalfOpen, + }, + }, + member: Member::Named("field"), + } + "#); + + snapshot!("return .. = ()" as Expr, @r" + Expr::Assign { + left: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + right: Expr::Tuple, + } + "); + + snapshot!("return .. += ()" as Expr, @r" + Expr::Binary { + left: Expr::Return { + expr: Some(Expr::Range { + limits: RangeLimits::HalfOpen, + }), + }, + op: BinOp::AddAssign, + right: Expr::Tuple, + } + "); +} + #[test] fn test_ambiguous_label() { for stmt in [ @@ -665,17 +790,6 @@ fn test_chained_comparison() { #[test] fn test_fixup() { - struct FlattenParens; - - impl VisitMut for FlattenParens { - fn visit_expr_mut(&mut self, e: &mut Expr) { - while let Expr::Paren(paren) = e { - *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER); - } - visit_mut::visit_expr_mut(self, e); - } - } - for tokens in [ quote! { 2 * (1 + 1) }, quote! { 0 + (0 + 0) }, @@ -685,23 +799,40 @@ fn test_fixup() { quote! { (1 + 1).abs() }, quote! { (lo..hi)[..] }, quote! { (a..b)..(c..d) }, + quote! { (x > ..) > x }, quote! { (&mut fut).await }, quote! { &mut (x as i32) }, quote! { -(x as i32) }, - quote! { if (S {} == 1) {} }, + quote! { if (S {}) == 1 {} }, quote! { { (m! {}) - 1 } }, quote! { match m { _ => ({}) - 1 } }, quote! { if let _ = (a && b) && c {} }, quote! { if let _ = (S {}) {} }, + quote! { if (S {}) == 0 && let Some(_) = x {} }, quote! { break ('a: loop { break 'a 1 } + 1) }, quote! { a + (|| b) + c }, quote! { if let _ = ((break) - 1 || true) {} }, quote! { if let _ = (break + 1 || true) {} }, + quote! 
{ if break (break) {} }, + quote! { if break break {} {} }, + quote! { if return (..) {} }, + quote! { if return .. {} {} }, + quote! { if || (Struct {}) {} }, + quote! { if || (Struct {}).await {} }, + quote! { if break || Struct {}.await {} }, + quote! { if break 'outer 'block: {} {} }, + quote! { if ..'block: {} {} }, + quote! { if break ({}).await {} }, quote! { (break)() }, quote! { (..) = () }, quote! { (..) += () }, quote! { (1 < 2) == (3 < 4) }, quote! { { (let _ = ()) } }, + quote! { (#[attr] thing).field }, + quote! { (self.f)() }, + quote! { (return)..=return }, + quote! { 1 + (return)..=1 + return }, + quote! { .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. .. }, ] { let original: Expr = syn::parse2(tokens).unwrap(); @@ -714,9 +845,802 @@ fn test_fixup() { assert!( original == reconstructed, - "original: {}\nreconstructed: {}", + "original: {}\n{:#?}\nreconstructed: {}\n{:#?}", original.to_token_stream(), + crate::macros::debug::Lite(&original), reconstructed.to_token_stream(), + crate::macros::debug::Lite(&reconstructed), ); } } + +#[test] +fn test_permutations() -> ExitCode { + fn iter(depth: usize, f: &mut dyn FnMut(Expr)) { + let span = Span::call_site(); + + // Expr::Path + f(Expr::Path(ExprPath { + // `x` + attrs: Vec::new(), + qself: None, + path: Path::from(Ident::new("x", span)), + })); + if false { + f(Expr::Path(ExprPath { + // `x::` + attrs: Vec::new(), + qself: None, + path: Path { + leading_colon: None, + segments: Punctuated::from_iter([PathSegment { + ident: Ident::new("x", span), + arguments: PathArguments::AngleBracketed(AngleBracketedGenericArguments { + colon2_token: Some(Token![::](span)), + lt_token: Token![<](span), + args: Punctuated::from_iter([GenericArgument::Type(Type::Path( + TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + }, + ))]), + gt_token: Token![>](span), + }), + }]), + }, + })); + f(Expr::Path(ExprPath { + // `::CONST` + attrs: Vec::new(), + qself: Some(QSelf { + lt_token: Token![<](span), + ty: Box::new(Type::Path(TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + })), + position: 1, + as_token: Some(Token![as](span)), + gt_token: Token![>](span), + }), + path: Path { + leading_colon: None, + segments: Punctuated::from_iter([ + PathSegment::from(Ident::new("Trait", span)), + PathSegment::from(Ident::new("CONST", span)), + ]), + }, + })); + } + + let Some(depth) = depth.checked_sub(1) else { + return; + }; + + // Expr::Assign + iter(depth, &mut |expr| { + iter(0, &mut |simple| { + f(Expr::Assign(ExprAssign { + // `x = $expr` + attrs: Vec::new(), + left: Box::new(simple.clone()), + eq_token: Token![=](span), + right: Box::new(expr.clone()), + })); + f(Expr::Assign(ExprAssign { + // `$expr = x` + attrs: Vec::new(), + left: Box::new(expr.clone()), + eq_token: Token![=](span), + right: Box::new(simple), + })); + }); + }); + + // Expr::Binary + iter(depth, &mut |expr| { + iter(0, &mut |simple| { + for op in [ + BinOp::Add(Token![+](span)), + //BinOp::Sub(Token![-](span)), + //BinOp::Mul(Token![*](span)), + //BinOp::Div(Token![/](span)), + //BinOp::Rem(Token![%](span)), + //BinOp::And(Token![&&](span)), + //BinOp::Or(Token![||](span)), + //BinOp::BitXor(Token![^](span)), + //BinOp::BitAnd(Token![&](span)), + //BinOp::BitOr(Token![|](span)), + //BinOp::Shl(Token![<<](span)), + //BinOp::Shr(Token![>>](span)), + //BinOp::Eq(Token![==](span)), + BinOp::Lt(Token![<](span)), + //BinOp::Le(Token![<=](span)), + //BinOp::Ne(Token![!=](span)), + 
//BinOp::Ge(Token![>=](span)), + //BinOp::Gt(Token![>](span)), + BinOp::ShlAssign(Token![<<=](span)), + ] { + f(Expr::Binary(ExprBinary { + // `x + $expr` + attrs: Vec::new(), + left: Box::new(simple.clone()), + op, + right: Box::new(expr.clone()), + })); + f(Expr::Binary(ExprBinary { + // `$expr + x` + attrs: Vec::new(), + left: Box::new(expr.clone()), + op, + right: Box::new(simple.clone()), + })); + } + }); + }); + + // Expr::Block + f(Expr::Block(ExprBlock { + // `{}` + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Break + f(Expr::Break(ExprBreak { + // `break` + attrs: Vec::new(), + break_token: Token![break](span), + label: None, + expr: None, + })); + iter(depth, &mut |expr| { + f(Expr::Break(ExprBreak { + // `break $expr` + attrs: Vec::new(), + break_token: Token![break](span), + label: None, + expr: Some(Box::new(expr)), + })); + }); + + // Expr::Call + iter(depth, &mut |expr| { + f(Expr::Call(ExprCall { + // `$expr()` + attrs: Vec::new(), + func: Box::new(expr), + paren_token: token::Paren(span), + args: Punctuated::new(), + })); + }); + + // Expr::Cast + iter(depth, &mut |expr| { + f(Expr::Cast(ExprCast { + // `$expr as T` + attrs: Vec::new(), + expr: Box::new(expr), + as_token: Token![as](span), + ty: Box::new(Type::Path(TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + })), + })); + }); + + // Expr::Closure + iter(depth, &mut |expr| { + f(Expr::Closure(ExprClosure { + // `|| $expr` + attrs: Vec::new(), + lifetimes: None, + constness: None, + movability: None, + asyncness: None, + capture: None, + or1_token: Token![|](span), + inputs: Punctuated::new(), + or2_token: Token![|](span), + output: ReturnType::Default, + body: Box::new(expr), + })); + }); + + // Expr::Field + iter(depth, &mut |expr| { + f(Expr::Field(ExprField { + // `$expr.field` + attrs: Vec::new(), + base: Box::new(expr), + dot_token: Token![.](span), + member: Member::Named(Ident::new("field", span)), + })); + }); + + // Expr::If + iter(depth, &mut |expr| { + f(Expr::If(ExprIf { + // `if $expr {}` + attrs: Vec::new(), + if_token: Token![if](span), + cond: Box::new(expr), + then_branch: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + else_branch: None, + })); + }); + + // Expr::Let + iter(depth, &mut |expr| { + f(Expr::Let(ExprLet { + attrs: Vec::new(), + let_token: Token![let](span), + pat: Box::new(Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + })), + eq_token: Token![=](span), + expr: Box::new(expr), + })); + }); + + // Expr::Range + f(Expr::Range(ExprRange { + // `..` + attrs: Vec::new(), + start: None, + limits: RangeLimits::HalfOpen(Token![..](span)), + end: None, + })); + iter(depth, &mut |expr| { + f(Expr::Range(ExprRange { + // `..$expr` + attrs: Vec::new(), + start: None, + limits: RangeLimits::HalfOpen(Token![..](span)), + end: Some(Box::new(expr.clone())), + })); + f(Expr::Range(ExprRange { + // `$expr..` + attrs: Vec::new(), + start: Some(Box::new(expr)), + limits: RangeLimits::HalfOpen(Token![..](span)), + end: None, + })); + }); + + // Expr::Reference + iter(depth, &mut |expr| { + f(Expr::Reference(ExprReference { + // `&$expr` + attrs: Vec::new(), + and_token: Token![&](span), + mutability: None, + expr: Box::new(expr), + })); + }); + + // Expr::Return + f(Expr::Return(ExprReturn { + // `return` + attrs: Vec::new(), + return_token: Token![return](span), + expr: None, + })); + iter(depth, &mut |expr| { + f(Expr::Return(ExprReturn { + // 
`return $expr` + attrs: Vec::new(), + return_token: Token![return](span), + expr: Some(Box::new(expr)), + })); + }); + + // Expr::Try + iter(depth, &mut |expr| { + f(Expr::Try(ExprTry { + // `$expr?` + attrs: Vec::new(), + expr: Box::new(expr), + question_token: Token![?](span), + })); + }); + + // Expr::Unary + iter(depth, &mut |expr| { + for op in [ + UnOp::Deref(Token![*](span)), + //UnOp::Not(Token![!](span)), + //UnOp::Neg(Token![-](span)), + ] { + f(Expr::Unary(ExprUnary { + // `*$expr` + attrs: Vec::new(), + op, + expr: Box::new(expr.clone()), + })); + } + }); + + if false { + // Expr::Array + f(Expr::Array(ExprArray { + // `[]` + attrs: Vec::new(), + bracket_token: token::Bracket(span), + elems: Punctuated::new(), + })); + + // Expr::Async + f(Expr::Async(ExprAsync { + // `async {}` + attrs: Vec::new(), + async_token: Token![async](span), + capture: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Await + iter(depth, &mut |expr| { + f(Expr::Await(ExprAwait { + // `$expr.await` + attrs: Vec::new(), + base: Box::new(expr), + dot_token: Token![.](span), + await_token: Token![await](span), + })); + }); + + // Expr::Block + f(Expr::Block(ExprBlock { + // `'a: {}` + attrs: Vec::new(), + label: Some(Label { + name: Lifetime::new("'a", span), + colon_token: Token![:](span), + }), + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + iter(depth, &mut |expr| { + f(Expr::Block(ExprBlock { + // `{ $expr }` + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::from([Stmt::Expr(expr.clone(), None)]), + }, + })); + f(Expr::Block(ExprBlock { + // `{ $expr; }` + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::from([Stmt::Expr(expr, Some(Token![;](span)))]), + }, + })); + }); + + // Expr::Break + f(Expr::Break(ExprBreak { + // `break 'a` + attrs: Vec::new(), + break_token: Token![break](span), + label: Some(Lifetime::new("'a", span)), + expr: None, + })); + iter(depth, &mut |expr| { + f(Expr::Break(ExprBreak { + // `break 'a $expr` + attrs: Vec::new(), + break_token: Token![break](span), + label: Some(Lifetime::new("'a", span)), + expr: Some(Box::new(expr)), + })); + }); + + // Expr::Closure + f(Expr::Closure(ExprClosure { + // `|| -> T {}` + attrs: Vec::new(), + lifetimes: None, + constness: None, + movability: None, + asyncness: None, + capture: None, + or1_token: Token![|](span), + inputs: Punctuated::new(), + or2_token: Token![|](span), + output: ReturnType::Type( + Token![->](span), + Box::new(Type::Path(TypePath { + qself: None, + path: Path::from(Ident::new("T", span)), + })), + ), + body: Box::new(Expr::Block(ExprBlock { + attrs: Vec::new(), + label: None, + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })), + })); + + // Expr::Const + f(Expr::Const(ExprConst { + // `const {}` + attrs: Vec::new(), + const_token: Token![const](span), + block: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Continue + f(Expr::Continue(ExprContinue { + // `continue` + attrs: Vec::new(), + continue_token: Token![continue](span), + label: None, + })); + f(Expr::Continue(ExprContinue { + // `continue 'a` + attrs: Vec::new(), + continue_token: Token![continue](span), + label: Some(Lifetime::new("'a", span)), + })); + + // Expr::ForLoop + iter(depth, &mut |expr| { + f(Expr::ForLoop(ExprForLoop { + // `for _ in $expr {}` + attrs: Vec::new(), + label: None, + 
for_token: Token![for](span), + pat: Box::new(Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + })), + in_token: Token![in](span), + expr: Box::new(expr.clone()), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + f(Expr::ForLoop(ExprForLoop { + // `'a: for _ in $expr {}` + attrs: Vec::new(), + label: Some(Label { + name: Lifetime::new("'a", span), + colon_token: Token![:](span), + }), + for_token: Token![for](span), + pat: Box::new(Pat::Wild(PatWild { + attrs: Vec::new(), + underscore_token: Token![_](span), + })), + in_token: Token![in](span), + expr: Box::new(expr), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + }); + + // Expr::Index + iter(depth, &mut |expr| { + f(Expr::Index(ExprIndex { + // `$expr[0]` + attrs: Vec::new(), + expr: Box::new(expr), + bracket_token: token::Bracket(span), + index: Box::new(Expr::Lit(ExprLit { + attrs: Vec::new(), + lit: Lit::Int(LitInt::new("0", span)), + })), + })); + }); + + // Expr::Loop + f(Expr::Loop(ExprLoop { + // `loop {}` + attrs: Vec::new(), + label: None, + loop_token: Token![loop](span), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + f(Expr::Loop(ExprLoop { + // `'a: loop {}` + attrs: Vec::new(), + label: Some(Label { + name: Lifetime::new("'a", span), + colon_token: Token![:](span), + }), + loop_token: Token![loop](span), + body: Block { + brace_token: token::Brace(span), + stmts: Vec::new(), + }, + })); + + // Expr::Macro + f(Expr::Macro(ExprMacro { + // `m!()` + attrs: Vec::new(), + mac: Macro { + path: Path::from(Ident::new("m", span)), + bang_token: Token![!](span), + delimiter: MacroDelimiter::Paren(token::Paren(span)), + tokens: TokenStream::new(), + }, + })); + f(Expr::Macro(ExprMacro { + // `m! 
+                attrs: Vec::new(),
+                mac: Macro {
+                    path: Path::from(Ident::new("m", span)),
+                    bang_token: Token![!](span),
+                    delimiter: MacroDelimiter::Brace(token::Brace(span)),
+                    tokens: TokenStream::new(),
+                },
+            }));
+
+            // Expr::Match
+            iter(depth, &mut |expr| {
+                f(Expr::Match(ExprMatch {
+                    // `match $expr {}`
+                    attrs: Vec::new(),
+                    match_token: Token![match](span),
+                    expr: Box::new(expr.clone()),
+                    brace_token: token::Brace(span),
+                    arms: Vec::new(),
+                }));
+                f(Expr::Match(ExprMatch {
+                    // `match x { _ => $expr }`
+                    attrs: Vec::new(),
+                    match_token: Token![match](span),
+                    expr: Box::new(Expr::Path(ExprPath {
+                        attrs: Vec::new(),
+                        qself: None,
+                        path: Path::from(Ident::new("x", span)),
+                    })),
+                    brace_token: token::Brace(span),
+                    arms: Vec::from([Arm {
+                        attrs: Vec::new(),
+                        pat: Pat::Wild(PatWild {
+                            attrs: Vec::new(),
+                            underscore_token: Token![_](span),
+                        }),
+                        guard: None,
+                        fat_arrow_token: Token![=>](span),
+                        body: Box::new(expr.clone()),
+                        comma: None,
+                    }]),
+                }));
+                f(Expr::Match(ExprMatch {
+                    // `match x { _ if $expr => {} }`
+                    attrs: Vec::new(),
+                    match_token: Token![match](span),
+                    expr: Box::new(Expr::Path(ExprPath {
+                        attrs: Vec::new(),
+                        qself: None,
+                        path: Path::from(Ident::new("x", span)),
+                    })),
+                    brace_token: token::Brace(span),
+                    arms: Vec::from([Arm {
+                        attrs: Vec::new(),
+                        pat: Pat::Wild(PatWild {
+                            attrs: Vec::new(),
+                            underscore_token: Token![_](span),
+                        }),
+                        guard: Some((Token![if](span), Box::new(expr))),
+                        fat_arrow_token: Token![=>](span),
+                        body: Box::new(Expr::Block(ExprBlock {
+                            attrs: Vec::new(),
+                            label: None,
+                            block: Block {
+                                brace_token: token::Brace(span),
+                                stmts: Vec::new(),
+                            },
+                        })),
+                        comma: None,
+                    }]),
+                }));
+            });
+
+            // Expr::MethodCall
+            iter(depth, &mut |expr| {
+                f(Expr::MethodCall(ExprMethodCall {
+                    // `$expr.method()`
+                    attrs: Vec::new(),
+                    receiver: Box::new(expr.clone()),
+                    dot_token: Token![.](span),
+                    method: Ident::new("method", span),
+                    turbofish: None,
+                    paren_token: token::Paren(span),
+                    args: Punctuated::new(),
+                }));
+                f(Expr::MethodCall(ExprMethodCall {
+                    // `$expr.method::<T>()`
+                    attrs: Vec::new(),
+                    receiver: Box::new(expr),
+                    dot_token: Token![.](span),
+                    method: Ident::new("method", span),
+                    turbofish: Some(AngleBracketedGenericArguments {
+                        colon2_token: Some(Token![::](span)),
+                        lt_token: Token![<](span),
+                        args: Punctuated::from_iter([GenericArgument::Type(Type::Path(
+                            TypePath {
+                                qself: None,
+                                path: Path::from(Ident::new("T", span)),
+                            },
+                        ))]),
+                        gt_token: Token![>](span),
+                    }),
+                    paren_token: token::Paren(span),
+                    args: Punctuated::new(),
+                }));
+            });
+
+            // Expr::RawAddr
+            iter(depth, &mut |expr| {
+                f(Expr::RawAddr(ExprRawAddr {
+                    // `&raw const $expr`
+                    attrs: Vec::new(),
+                    and_token: Token![&](span),
+                    raw: Token![raw](span),
+                    mutability: PointerMutability::Const(Token![const](span)),
+                    expr: Box::new(expr),
+                }));
+            });
+
+            // Expr::Struct
+            f(Expr::Struct(ExprStruct {
+                // `Struct {}`
+                attrs: Vec::new(),
+                qself: None,
+                path: Path::from(Ident::new("Struct", span)),
+                brace_token: token::Brace(span),
+                fields: Punctuated::new(),
+                dot2_token: None,
+                rest: None,
+            }));
+
+            // Expr::TryBlock
+            f(Expr::TryBlock(ExprTryBlock {
+                // `try {}`
+                attrs: Vec::new(),
+                try_token: Token![try](span),
+                block: Block {
+                    brace_token: token::Brace(span),
+                    stmts: Vec::new(),
+                },
+            }));
+
+            // Expr::Unsafe
+            f(Expr::Unsafe(ExprUnsafe {
+                // `unsafe {}`
+                attrs: Vec::new(),
+                unsafe_token: Token![unsafe](span),
+                block: Block {
+                    brace_token: token::Brace(span),
+                    stmts: Vec::new(),
+                },
+            }));
+
+            // Expr::While
+            iter(depth, &mut |expr| {
+                f(Expr::While(ExprWhile {
+                    // `while $expr {}`
+                    attrs: Vec::new(),
+                    label: None,
+                    while_token: Token![while](span),
+                    cond: Box::new(expr.clone()),
+                    body: Block {
+                        brace_token: token::Brace(span),
+                        stmts: Vec::new(),
+                    },
+                }));
+                f(Expr::While(ExprWhile {
+                    // `'a: while $expr {}`
+                    attrs: Vec::new(),
+                    label: Some(Label {
+                        name: Lifetime::new("'a", span),
+                        colon_token: Token![:](span),
+                    }),
+                    while_token: Token![while](span),
+                    cond: Box::new(expr),
+                    body: Block {
+                        brace_token: token::Brace(span),
+                        stmts: Vec::new(),
+                    },
+                }));
+            });
+
+            // Expr::Yield
+            f(Expr::Yield(ExprYield {
+                // `yield`
+                attrs: Vec::new(),
+                yield_token: Token![yield](span),
+                expr: None,
+            }));
+            iter(depth, &mut |expr| {
+                f(Expr::Yield(ExprYield {
+                    // `yield $expr`
+                    attrs: Vec::new(),
+                    yield_token: Token![yield](span),
+                    expr: Some(Box::new(expr)),
+                }));
+            });
+        }
+    }
+
+    let mut failures = 0;
+    macro_rules! fail {
+        ($($message:tt)*) => {{
+            eprintln!($($message)*);
+            failures += 1;
+            return;
+        }};
+    }
+    let mut assert = |mut original: Expr| {
+        let tokens = original.to_token_stream();
+        let Ok(mut parsed) = syn::parse2::<Expr>(tokens.clone()) else {
+            fail!(
+                "failed to parse: {}\n{:#?}",
+                tokens,
+                crate::macros::debug::Lite(&original),
+            );
+        };
+        AsIfPrinted.visit_expr_mut(&mut original);
+        FlattenParens.visit_expr_mut(&mut parsed);
+        if original != parsed {
+            fail!(
+                "before: {}\n{:#?}\nafter: {}\n{:#?}",
+                tokens,
+                crate::macros::debug::Lite(&original),
+                parsed.to_token_stream(),
+                crate::macros::debug::Lite(&parsed),
+            );
+        }
+        let mut tokens_no_paren = tokens.clone();
+        FlattenParens::visit_token_stream_mut(&mut tokens_no_paren);
+        if tokens.to_string() != tokens_no_paren.to_string() {
+            if let Ok(mut parsed2) = syn::parse2::<Expr>(tokens_no_paren) {
+                FlattenParens.visit_expr_mut(&mut parsed2);
+                if original == parsed2 {
+                    fail!("redundant parens: {}", tokens);
+                }
+            }
+        }
+    };
+
+    iter(4, &mut assert);
+    if failures > 0 {
+        eprintln!("FAILURES: {failures}");
+        ExitCode::FAILURE
+    } else {
+        ExitCode::SUCCESS
+    }
+}
diff --git a/tests/test_precedence.rs b/tests/test_precedence.rs
index 7231bd6409..c7b4aceca7 100644
--- a/tests/test_precedence.rs
+++ b/tests/test_precedence.rs
@@ -49,6 +49,7 @@ use std::fs;
 use std::path::Path;
 use std::process;
 use std::sync::atomic::{AtomicUsize, Ordering};
+use syn::parse::Parser as _;
 
 #[macro_use]
 mod macros;
@@ -56,6 +57,9 @@ mod macros;
 mod common;
 mod repo;
 
+#[path = "../src/scan_expr.rs"]
+mod scan_expr;
+
 #[test]
 fn test_rustc_precedence() {
     repo::rayon_init();
@@ -115,7 +119,8 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
 
     rustc_span::create_session_if_not_set_then(edition, |_| {
         for expr in exprs {
-            let source_code = expr.to_token_stream().to_string();
+            let expr_tokens = expr.to_token_stream();
+            let source_code = expr_tokens.to_string();
             let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
                 e
             } else {
@@ -173,6 +178,16 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
                 continue;
             }
 
+            if scan_expr::scan_expr.parse2(expr_tokens).is_err() {
+                failed += 1;
+                errorf!(
+                    "\nFAIL {} - failed to scan expr\n{}\n",
+                    path.display(),
+                    source_code,
+                );
+                continue;
+            }
+
             passed += 1;
         }
     });
@@ -190,12 +205,11 @@ fn librustc_parenthesize(mut librustc_expr: P<Expr>) -> P<Expr> {
         ExprKind, GenericArg, GenericBound, Local, LocalKind, Pat, PolyTraitRef, Stmt,
         StmtKind, StructExpr, StructRest, TraitBoundModifiers, Ty,
     };
-    use rustc_ast::mut_visit::{walk_flat_map_item, MutVisitor};
+    use rustc_ast::mut_visit::{visit_clobber, walk_flat_map_assoc_item, MutVisitor};
     use rustc_ast::visit::{AssocCtxt, BoundKind};
     use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
     use rustc_span::DUMMY_SP;
     use smallvec::SmallVec;
-    use std::mem;
     use std::ops::DerefMut;
     use thin_vec::ThinVec;
 
@@ -265,19 +279,13 @@ fn librustc_parenthesize(mut librustc_expr: P<Expr>) -> P<Expr> {
             match e.kind {
                 ExprKind::Block(..) | ExprKind::If(..) | ExprKind::Let(..) => {}
                 ExprKind::Binary(..) if contains_let_chain(e) => {}
-                _ => {
-                    let inner = mem::replace(
-                        e,
-                        P(Expr {
-                            id: ast::DUMMY_NODE_ID,
-                            kind: ExprKind::Dummy,
-                            span: DUMMY_SP,
-                            attrs: ThinVec::new(),
-                            tokens: None,
-                        }),
-                    );
-                    e.kind = ExprKind::Paren(inner);
-                }
+                _ => visit_clobber(&mut **e, |inner| Expr {
+                    id: ast::DUMMY_NODE_ID,
+                    kind: ExprKind::Paren(P(inner)),
+                    span: DUMMY_SP,
+                    attrs: ThinVec::new(),
+                    tokens: None,
+                }),
             }
         }
 
@@ -334,7 +342,7 @@ fn librustc_parenthesize(mut librustc_expr: P<Expr>) -> P<Expr> {
         fn flat_map_assoc_item(
             &mut self,
             item: P<AssocItem>,
-            _ctxt: AssocCtxt,
+            ctxt: AssocCtxt,
         ) -> SmallVec<[P<AssocItem>; 1]> {
             match &item.kind {
                 AssocItemKind::Const(const_item)
@@ -343,7 +351,7 @@ fn librustc_parenthesize(mut librustc_expr: P<Expr>) -> P<Expr> {
                 {
                     SmallVec::from([item])
                 }
-                _ => walk_flat_map_item(self, item),
+                _ => walk_flat_map_assoc_item(self, item, ctxt),
             }
         }
 
diff --git a/tests/test_unparenthesize.rs b/tests/test_unparenthesize.rs
index 5db2e57a52..1330aa2bad 100644
--- a/tests/test_unparenthesize.rs
+++ b/tests/test_unparenthesize.rs
@@ -1,22 +1,25 @@
 #![cfg(not(miri))]
+#![recursion_limit = "1024"]
+#![feature(rustc_private)]
 #![allow(
     clippy::manual_assert,
+    clippy::match_like_matches_macro,
     clippy::needless_lifetimes,
     clippy::uninlined_format_args
 )]
 
+use crate::common::visit::{AsIfPrinted, FlattenParens};
 use quote::ToTokens as _;
 use std::fs;
-use std::mem;
 use std::panic;
 use std::path::Path;
 use std::sync::atomic::{AtomicUsize, Ordering};
-use syn::visit_mut::{self, VisitMut};
-use syn::Expr;
+use syn::visit_mut::VisitMut as _;
 
 #[macro_use]
 mod macros;
 
+mod common;
 mod repo;
 
 #[test]
@@ -34,24 +37,23 @@ fn test_unparenthesize() {
     }
 }
 
-struct FlattenParens;
-
-impl VisitMut for FlattenParens {
-    fn visit_expr_mut(&mut self, e: &mut Expr) {
-        while let Expr::Paren(paren) = e {
-            *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER);
-        }
-        visit_mut::visit_expr_mut(self, e);
-    }
-}
-
 fn test(path: &Path, failed: &AtomicUsize) {
     let content = fs::read_to_string(path).unwrap();
     match panic::catch_unwind(|| -> syn::Result<()> {
-        let mut syntax_tree = syn::parse_file(&content)?;
-        FlattenParens.visit_file_mut(&mut syntax_tree);
-        syn::parse2::<syn::File>(syntax_tree.to_token_stream())?;
+        let mut before = syn::parse_file(&content)?;
+        FlattenParens.visit_file_mut(&mut before);
+        let printed = before.to_token_stream();
+        let mut after = syn::parse2::<syn::File>(printed.clone())?;
+        FlattenParens.visit_file_mut(&mut after);
+        // Normalize features that we expect Syn not to print.
+        AsIfPrinted.visit_file_mut(&mut before);
+        if before != after {
+            errorf!("=== {}\n", path.display());
+            if failed.fetch_add(1, Ordering::Relaxed) == 0 {
+                errorf!("BEFORE:\n{:#?}\nAFTER:\n{:#?}\n", before, after);
+            }
+        }
         Ok(())
     }) {
         Err(_) => {