diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7ce2146da0..6ab024b5a8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,7 +37,7 @@ jobs: strategy: fail-fast: false matrix: - rust: [stable, beta, 1.63.0, 1.60.0] + rust: [stable, beta, 1.63.0, 1.61.0] include: - rust: nightly components: rustc-dev @@ -77,7 +77,7 @@ jobs: - run: cargo check ${{env.target}} --no-default-features --features 'full fold visit visit-mut parsing printing' - if: matrix.components == 'rustc-dev' run: cargo check --benches --all-features --release - - if: matrix.rust != '1.60.0' + - if: matrix.rust != '1.61.0' run: cargo check ${{env.target}} --manifest-path json/Cargo.toml --no-default-features examples: diff --git a/Cargo.toml b/Cargo.toml index edced6541a..8af231a039 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "syn" -version = "2.0.66" +version = "2.0.72" authors = ["David Tolnay "] categories = ["development-tools::procedural-macro-helpers", "parser-implementations"] description = "Parser for Rust source code" @@ -18,7 +18,7 @@ include = [ keywords = ["macros", "syn"] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/syn" -rust-version = "1.60" +rust-version = "1.61" [features] default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"] @@ -42,15 +42,17 @@ unicode-ident = "1" [dev-dependencies] anyhow = "1" automod = "1" -flate2 = "1" insta = "1" -rayon = "1" ref-cast = "1" -reqwest = { version = "0.12", features = ["blocking"] } rustversion = "1" syn-test-suite = { version = "0", path = "tests/features" } -tar = "0.4.16" termcolor = "1" + +[target.'cfg(not(miri))'.dev-dependencies] +flate2 = "1" +rayon = "1" +reqwest = { version = "0.12", features = ["blocking"] } +tar = "0.4.16" walkdir = "2.3.2" [lib] diff --git a/README.md b/README.md index 04f9bf6cb1..16a393b9f0 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ contains some APIs that may be useful more generally. 
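Reviewer note on the Cargo.toml change above: the heavy test-only dependencies (flate2, rayon, reqwest, tar) now sit under `[target.'cfg(not(miri))'.dev-dependencies]`, so `cargo miri test` no longer has to build them. A minimal sketch of how test code is typically gated to match; the module and function names here are hypothetical, not taken from syn's test suite:

```rust
// Hypothetical helper gated the same way as the dev-dependencies above, so
// the test crate still compiles when reqwest/flate2/tar are unavailable.
#[cfg(not(miri))]
mod corpus {
    pub fn download_and_unpack() {
        // would use reqwest + flate2 + tar here
    }
}

#[test]
fn parse_large_corpus() {
    // The expensive setup is skipped entirely under Miri.
    #[cfg(miri)]
    return;

    #[cfg(not(miri))]
    {
        corpus::download_and_unpack();
        // ... run the parser over the downloaded sources ...
    }
}
```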
[`syn::DeriveInput`]: https://docs.rs/syn/2.0/syn/struct.DeriveInput.html [parser functions]: https://docs.rs/syn/2.0/syn/parse/index.html -*Version requirement: Syn supports rustc 1.60 and up.* +*Version requirement: Syn supports rustc 1.61 and up.* [*Release notes*](https://github.com/dtolnay/syn/releases) diff --git a/benches/rust.rs b/benches/rust.rs index bfa3a17f4a..09917e7cbe 100644 --- a/benches/rust.rs +++ b/benches/rust.rs @@ -24,20 +24,24 @@ mod macros; mod repo; use std::fs; +use std::path::Path; use std::time::{Duration, Instant}; #[cfg(not(syn_only))] mod tokenstream_parse { use proc_macro2::TokenStream; + use std::path::Path; use std::str::FromStr; - pub fn bench(content: &str) -> Result<(), ()> { + pub fn bench(_path: &Path, content: &str) -> Result<(), ()> { TokenStream::from_str(content).map(drop).map_err(drop) } } mod syn_parse { - pub fn bench(content: &str) -> Result<(), ()> { + use std::path::Path; + + pub fn bench(_path: &Path, content: &str) -> Result<(), ()> { syn::parse_file(content).map(drop).map_err(drop) } } @@ -52,14 +56,16 @@ mod librustc_parse { extern crate rustc_session; extern crate rustc_span; + use crate::repo; use rustc_data_structures::sync::Lrc; use rustc_error_messages::FluentBundle; use rustc_errors::{emitter::Emitter, translation::Translate, DiagCtxt, DiagInner}; use rustc_session::parse::ParseSess; use rustc_span::source_map::{FilePathMapping, SourceMap}; - use rustc_span::{edition::Edition, FileName}; + use rustc_span::FileName; + use std::path::Path; - pub fn bench(content: &str) -> Result<(), ()> { + pub fn bench(path: &Path, content: &str) -> Result<(), ()> { struct SilentEmitter; impl Emitter for SilentEmitter { @@ -78,16 +84,16 @@ mod librustc_parse { } } - rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| { + let edition = repo::edition(path).parse().unwrap(); + rustc_span::create_session_if_not_set_then(edition, |_| { let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty())); let emitter = Box::new(SilentEmitter); let handler = DiagCtxt::new(emitter); let sess = ParseSess::with_dcx(handler, source_map); - if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str( - FileName::Custom("bench".to_owned()), - content.to_owned(), - &sess, - ) { + let name = FileName::Custom("bench".to_owned()); + let mut parser = + rustc_parse::new_parser_from_source_str(&sess, name, content.to_owned()).unwrap(); + if let Err(diagnostic) = parser.parse_crate_mod() { diagnostic.cancel(); return Err(()); }; @@ -98,13 +104,15 @@ mod librustc_parse { #[cfg(not(syn_only))] mod read_from_disk { - pub fn bench(content: &str) -> Result<(), ()> { + use std::path::Path; + + pub fn bench(_path: &Path, content: &str) -> Result<(), ()> { let _ = content; Ok(()) } } -fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration { +fn exec(mut codepath: impl FnMut(&Path, &str) -> Result<(), ()>) -> Duration { let begin = Instant::now(); let mut success = 0; let mut total = 0; @@ -123,7 +131,7 @@ fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration { return; } let content = fs::read_to_string(path).unwrap(); - let ok = codepath(&content).is_ok(); + let ok = codepath(path, &content).is_ok(); success += ok as usize; total += 1; if !ok { @@ -143,7 +151,7 @@ fn main() { [ $( $(#[$cfg])* - (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>), + (stringify!($name), $name::bench as fn(&Path, &str) -> Result<(), ()>), )* ] }; @@ -153,7 +161,7 @@ fn main() { { let mut lines = 0; let mut files = 0; - exec(|content| 
{ + exec(|_path, content| { lines += content.lines().count(); files += 1; Ok(()) diff --git a/src/buffer.rs b/src/buffer.rs index 1686e28209..b7657bebc2 100644 --- a/src/buffer.rs +++ b/src/buffer.rs @@ -20,8 +20,9 @@ enum Entry { Ident(Ident), Punct(Punct), Literal(Literal), - // End entries contain the offset (negative) to the start of the buffer. - End(isize), + // End entries contain the offset (negative) to the start of the buffer, and + // offset (negative) to the matching Group entry. + End(isize, isize), } /// A buffer that can be efficiently traversed multiple times, unlike @@ -42,12 +43,15 @@ impl TokenBuffer { TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)), TokenTree::Group(group) => { let group_start_index = entries.len(); - entries.push(Entry::End(0)); // we replace this below + entries.push(Entry::End(0, 0)); // we replace this below Self::recursive_new(entries, group.stream()); let group_end_index = entries.len(); - entries.push(Entry::End(-(group_end_index as isize))); - let group_end_offset = group_end_index - group_start_index; - entries[group_start_index] = Entry::Group(group, group_end_offset); + let group_offset = group_end_index - group_start_index; + entries.push(Entry::End( + -(group_end_index as isize), + -(group_offset as isize), + )); + entries[group_start_index] = Entry::Group(group, group_offset); } } } @@ -66,7 +70,7 @@ impl TokenBuffer { pub fn new2(stream: TokenStream) -> Self { let mut entries = Vec::new(); Self::recursive_new(&mut entries, stream); - entries.push(Entry::End(-(entries.len() as isize))); + entries.push(Entry::End(-(entries.len() as isize), 0)); Self { entries: entries.into_boxed_slice(), } @@ -111,7 +115,7 @@ impl<'a> Cursor<'a> { // object in global storage. struct UnsafeSyncEntry(Entry); unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0)); + static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0, 0)); Cursor { ptr: &EMPTY_ENTRY.0, @@ -128,7 +132,7 @@ impl<'a> Cursor<'a> { // past it, unless `ptr == scope`, which means that we're at the edge of // our cursor's scope. We should only have `ptr != scope` at the exit // from None-delimited groups entered with `ignore_none`. - while let Entry::End(_) = unsafe { &*ptr } { + while let Entry::End(..) = unsafe { &*ptr } { if ptr == scope { break; } @@ -300,7 +304,7 @@ impl<'a> Cursor<'a> { Entry::Literal(literal) => (literal.clone().into(), 1), Entry::Ident(ident) => (ident.clone().into(), 1), Entry::Punct(punct) => (punct.clone().into(), 1), - Entry::End(_) => return None, + Entry::End(..) => return None, }; let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) }; @@ -309,13 +313,20 @@ impl<'a> Cursor<'a> { /// Returns the `Span` of the current token, or `Span::call_site()` if this /// cursor points to eof. 
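Reviewer note on the src/buffer.rs change above: `Entry::End` now records a second negative offset pointing back at its matching `Group` entry, which is what lets `Cursor::span` and `prev_span` below avoid the old backward scan. A toy model of the indexing idea, using made-up entry types rather than syn's internal ones:

```rust
// Toy model of End(offset_to_buffer_start, offset_to_matching_group): from an
// End entry, one offset addition lands on the Group it closes.
enum Entry {
    Group(char, usize), // delimiter, forward offset to the matching End
    Token(char),
    End(isize, isize),  // negative offsets: to the buffer start, to the Group
}

fn matching_group(entries: &[Entry], end_index: usize) -> Option<char> {
    match entries[end_index] {
        Entry::End(_, to_group) => {
            let group_index = (end_index as isize + to_group) as usize;
            match entries[group_index] {
                Entry::Group(delimiter, _) => Some(delimiter),
                _ => None,
            }
        }
        _ => None,
    }
}

fn main() {
    // Models the stream `( a )`: Group at index 0, its End at index 2.
    let entries = [Entry::Group('(', 2), Entry::Token('a'), Entry::End(-2, -2)];
    assert_eq!(matching_group(&entries, 2), Some('('));
}
```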
- pub fn span(self) -> Span { + pub fn span(mut self) -> Span { match self.entry() { Entry::Group(group, _) => group.span(), Entry::Literal(literal) => literal.span(), Entry::Ident(ident) => ident.span(), Entry::Punct(punct) => punct.span(), - Entry::End(_) => Span::call_site(), + Entry::End(_, offset) => { + self.ptr = unsafe { self.ptr.offset(*offset) }; + if let Entry::Group(group, _) = self.entry() { + group.span_close() + } else { + Span::call_site() + } + } } } @@ -325,23 +336,6 @@ impl<'a> Cursor<'a> { pub(crate) fn prev_span(mut self) -> Span { if start_of_buffer(self) < self.ptr { self.ptr = unsafe { self.ptr.offset(-1) }; - if let Entry::End(_) = self.entry() { - // Locate the matching Group begin token. - let mut depth = 1; - loop { - self.ptr = unsafe { self.ptr.offset(-1) }; - match self.entry() { - Entry::Group(group, _) => { - depth -= 1; - if depth == 0 { - return group.span(); - } - } - Entry::End(_) => depth += 1, - Entry::Literal(_) | Entry::Ident(_) | Entry::Punct(_) => {} - } - } - } } self.span() } @@ -354,7 +348,7 @@ impl<'a> Cursor<'a> { self.ignore_none(); let len = match self.entry() { - Entry::End(_) => return None, + Entry::End(..) => return None, // Treat lifetimes as a single tt for the purposes of 'skip'. Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { @@ -409,7 +403,7 @@ pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool { fn start_of_buffer(cursor: Cursor) -> *const Entry { unsafe { match &*cursor.scope { - Entry::End(offset) => cursor.scope.offset(*offset), + Entry::End(offset, _) => cursor.scope.offset(*offset), _ => unreachable!(), } } @@ -425,10 +419,3 @@ pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { _ => cursor.span(), } } - -pub(crate) fn close_span_of_group(cursor: Cursor) -> Span { - match cursor.entry() { - Entry::Group(group, _) => group.span_close(), - _ => cursor.span(), - } -} diff --git a/src/classify.rs b/src/classify.rs index 1b0ff30040..42732e6d8d 100644 --- a/src/classify.rs +++ b/src/classify.rs @@ -1,10 +1,16 @@ +#[cfg(feature = "full")] use crate::expr::Expr; +#[cfg(any(feature = "printing", feature = "full"))] use crate::generics::TypeParamBound; +#[cfg(any(feature = "printing", feature = "full"))] use crate::path::{Path, PathArguments}; +#[cfg(any(feature = "printing", feature = "full"))] use crate::punctuated::Punctuated; +#[cfg(any(feature = "printing", feature = "full"))] use crate::ty::{ReturnType, Type}; #[cfg(feature = "full")] use proc_macro2::{Delimiter, TokenStream, TokenTree}; +#[cfg(any(feature = "printing", feature = "full"))] use std::ops::ControlFlow; #[cfg(feature = "full")] @@ -146,106 +152,112 @@ pub(crate) fn confusable_with_adjacent_block(mut expr: &Expr) -> bool { } #[cfg(feature = "printing")] -pub(crate) fn confusable_with_adjacent_lt(mut expr: &Expr) -> bool { +pub(crate) fn trailing_unparameterized_path(mut ty: &Type) -> bool { + loop { + match ty { + Type::BareFn(t) => match &t.output { + ReturnType::Default => return false, + ReturnType::Type(_, ret) => ty = ret, + }, + Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) { + ControlFlow::Break(trailing_path) => return trailing_path, + ControlFlow::Continue(t) => ty = t, + }, + Type::Path(t) => match last_type_in_path(&t.path) { + ControlFlow::Break(trailing_path) => return trailing_path, + ControlFlow::Continue(t) => ty = t, + }, + Type::Ptr(t) => ty = &t.elem, + Type::Reference(t) => ty = &t.elem, + Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) { + 
ControlFlow::Break(trailing_path) => return trailing_path, + ControlFlow::Continue(t) => ty = t, + }, + + Type::Array(_) + | Type::Group(_) + | Type::Infer(_) + | Type::Macro(_) + | Type::Never(_) + | Type::Paren(_) + | Type::Slice(_) + | Type::Tuple(_) + | Type::Verbatim(_) => return false, + } + } + + fn last_type_in_path(path: &Path) -> ControlFlow<bool, &Type> { + match &path.segments.last().unwrap().arguments { + PathArguments::None => ControlFlow::Break(true), + PathArguments::AngleBracketed(_) => ControlFlow::Break(false), + PathArguments::Parenthesized(arg) => match &arg.output { + ReturnType::Default => ControlFlow::Break(false), + ReturnType::Type(_, ret) => ControlFlow::Continue(ret), + }, + } + } + + fn last_type_in_bounds( + bounds: &Punctuated<TypeParamBound, Token![+]>, + ) -> ControlFlow<bool, &Type> { + match bounds.last().unwrap() { + TypeParamBound::Trait(t) => last_type_in_path(&t.path), + TypeParamBound::Lifetime(_) | TypeParamBound::Verbatim(_) => ControlFlow::Break(false), + } + } +} + +/// Whether the expression's first token is the label of a loop/block. +#[cfg(all(feature = "printing", feature = "full"))] +pub(crate) fn expr_leading_label(mut expr: &Expr) -> bool { loop { match expr { - Expr::Binary(e) => expr = &e.right, - Expr::Cast(e) => return trailing_unparameterized_path(&e.ty), - Expr::Reference(e) => expr = &e.expr, - Expr::Unary(e) => expr = &e.expr, + Expr::Block(e) => return e.label.is_some(), + Expr::ForLoop(e) => return e.label.is_some(), + Expr::Loop(e) => return e.label.is_some(), + Expr::While(e) => return e.label.is_some(), + + Expr::Assign(e) => expr = &e.left, + Expr::Await(e) => expr = &e.base, + Expr::Binary(e) => expr = &e.left, + Expr::Call(e) => expr = &e.func, + Expr::Cast(e) => expr = &e.expr, + Expr::Field(e) => expr = &e.base, + Expr::Index(e) => expr = &e.expr, + Expr::MethodCall(e) => expr = &e.receiver, + Expr::Range(e) => match &e.start { + Some(start) => expr = start, + None => return false, + }, + Expr::Try(e) => expr = &e.expr, Expr::Array(_) - | Expr::Assign(_) | Expr::Async(_) - | Expr::Await(_) - | Expr::Block(_) | Expr::Break(_) - | Expr::Call(_) | Expr::Closure(_) | Expr::Const(_) | Expr::Continue(_) - | Expr::Field(_) - | Expr::ForLoop(_) | Expr::Group(_) | Expr::If(_) - | Expr::Index(_) | Expr::Infer(_) | Expr::Let(_) | Expr::Lit(_) - | Expr::Loop(_) | Expr::Macro(_) | Expr::Match(_) - | Expr::MethodCall(_) | Expr::Paren(_) | Expr::Path(_) - | Expr::Range(_) + | Expr::Reference(_) | Expr::Repeat(_) | Expr::Return(_) | Expr::Struct(_) - | Expr::Try(_) | Expr::TryBlock(_) | Expr::Tuple(_) + | Expr::Unary(_) | Expr::Unsafe(_) | Expr::Verbatim(_) - | Expr::While(_) | Expr::Yield(_) => return false, } } } - - fn trailing_unparameterized_path(mut ty: &Type) -> bool { - loop { - match ty { - Type::BareFn(t) => match &t.output { - ReturnType::Default => return false, - ReturnType::Type(_, ret) => ty = ret, - }, - Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) { - ControlFlow::Break(trailing_path) => return trailing_path, - ControlFlow::Continue(t) => ty = t, - }, - Type::Path(t) => match last_type_in_path(&t.path) { - ControlFlow::Break(trailing_path) => return trailing_path, - ControlFlow::Continue(t) => ty = t, - }, - Type::Ptr(t) => ty = &t.elem, - Type::Reference(t) => ty = &t.elem, - Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) { - ControlFlow::Break(trailing_path) => return trailing_path, - ControlFlow::Continue(t) => ty = t, - }, - - Type::Array(_) - | Type::Group(_) - | Type::Infer(_) - | Type::Macro(_) - | Type::Never(_) - | Type::Paren(_) - |
Type::Slice(_) - | Type::Tuple(_) - | Type::Verbatim(_) => return false, - } - } - } - - fn last_type_in_path(path: &Path) -> ControlFlow { - match &path.segments.last().unwrap().arguments { - PathArguments::None => ControlFlow::Break(true), - PathArguments::AngleBracketed(_) => ControlFlow::Break(false), - PathArguments::Parenthesized(arg) => match &arg.output { - ReturnType::Default => ControlFlow::Break(false), - ReturnType::Type(_, ret) => ControlFlow::Continue(ret), - }, - } - } - - fn last_type_in_bounds( - bounds: &Punctuated, - ) -> ControlFlow { - match bounds.last().unwrap() { - TypeParamBound::Trait(t) => last_type_in_path(&t.path), - TypeParamBound::Lifetime(_) | TypeParamBound::Verbatim(_) => ControlFlow::Break(false), - } - } } /// Whether the expression's last token is `}`. diff --git a/src/discouraged.rs b/src/discouraged.rs index 4109c670e7..71c78c8c65 100644 --- a/src/discouraged.rs +++ b/src/discouraged.rs @@ -212,7 +212,7 @@ impl<'a> AnyDelimiter for ParseBuffer<'a> { fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> { self.step(|cursor| { if let Some((content, delimiter, span, rest)) = cursor.any_group() { - let scope = crate::buffer::close_span_of_group(*cursor); + let scope = span.close(); let nested = crate::parse::advance_step_cursor(cursor, content); let unexpected = crate::parse::get_unexpected(self); let content = crate::parse::new_parse_buffer(scope, nested, unexpected); diff --git a/src/expr.rs b/src/expr.rs index c60bcf4771..62ebdad857 100644 --- a/src/expr.rs +++ b/src/expr.rs @@ -1216,7 +1216,7 @@ pub(crate) mod parsing { expr.replace_attrs(attrs); let allow_struct = AllowStruct(true); - return parse_expr(input, expr, allow_struct, Precedence::Any); + return parse_expr(input, expr, allow_struct, Precedence::MIN); } if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) { @@ -1226,7 +1226,7 @@ pub(crate) mod parsing { expr.replace_attrs(attrs); let allow_struct = AllowStruct(true); - return parse_expr(input, expr, allow_struct, Precedence::Any); + return parse_expr(input, expr, allow_struct, Precedence::MIN); } attrs.extend(expr.replace_attrs(Vec::new())); @@ -1413,7 +1413,7 @@ pub(crate) mod parsing { } else if input.peek(Token![as]) { Precedence::Cast } else { - Precedence::Any + Precedence::MIN } } @@ -1432,7 +1432,7 @@ pub(crate) mod parsing { lhs, #[cfg(feature = "full")] allow_struct, - Precedence::Any, + Precedence::MIN, ) } @@ -2981,6 +2981,7 @@ pub(crate) mod printing { use crate::attr::Attribute; #[cfg(feature = "full")] use crate::attr::FilterAttrs; + #[cfg(feature = "full")] use crate::classify; #[cfg(feature = "full")] use crate::expr::{ @@ -2994,7 +2995,6 @@ pub(crate) mod printing { ExprMethodCall, ExprParen, ExprPath, ExprReference, ExprStruct, ExprUnary, FieldValue, Index, Member, }; - #[cfg(feature = "full")] use crate::fixup::FixupContext; use crate::op::BinOp; use crate::path; @@ -3032,13 +3032,9 @@ pub(crate) mod printing { expr: &Expr, needs_group: bool, tokens: &mut TokenStream, - #[cfg(feature = "full")] mut fixup: FixupContext, + mut fixup: FixupContext, ) { - #[cfg(not(feature = "full"))] - let do_print_expr = |tokens: &mut TokenStream| expr.to_tokens(tokens); - - #[cfg(feature = "full")] - let do_print_expr = { + if needs_group { // If we are surrounding the whole cond in parentheses, such as: // // if (return Struct {}) {} @@ -3050,11 +3046,10 @@ pub(crate) mod printing { // // if x == (Struct {}) {} // - if needs_group { - fixup = FixupContext::NONE; - } - |tokens: &mut 
TokenStream| print_expr(expr, tokens, fixup) - }; + fixup = FixupContext::NONE; + } + + let do_print_expr = |tokens: &mut TokenStream| print_expr(expr, tokens, fixup); if needs_group { token::Paren::default().surround(tokens, do_print_expr); @@ -3063,53 +3058,84 @@ pub(crate) mod printing { } } - #[cfg(feature = "full")] pub(crate) fn print_expr(expr: &Expr, tokens: &mut TokenStream, mut fixup: FixupContext) { + #[cfg(feature = "full")] let needs_group = fixup.would_cause_statement_boundary(expr); + #[cfg(not(feature = "full"))] + let needs_group = false; + if needs_group { fixup = FixupContext::NONE; } let do_print_expr = |tokens: &mut TokenStream| match expr { + #[cfg(feature = "full")] Expr::Array(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Assign(e) => print_expr_assign(e, tokens, fixup), + #[cfg(feature = "full")] Expr::Async(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Await(e) => print_expr_await(e, tokens, fixup), Expr::Binary(e) => print_expr_binary(e, tokens, fixup), + #[cfg(feature = "full")] Expr::Block(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Break(e) => print_expr_break(e, tokens, fixup), Expr::Call(e) => print_expr_call(e, tokens, fixup), Expr::Cast(e) => print_expr_cast(e, tokens, fixup), + #[cfg(feature = "full")] Expr::Closure(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Const(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Continue(e) => e.to_tokens(tokens), Expr::Field(e) => print_expr_field(e, tokens, fixup), + #[cfg(feature = "full")] Expr::ForLoop(e) => e.to_tokens(tokens), Expr::Group(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::If(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Index(e) => print_expr_index(e, tokens, fixup), + #[cfg(feature = "full")] Expr::Infer(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Let(e) => print_expr_let(e, tokens, fixup), Expr::Lit(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Loop(e) => e.to_tokens(tokens), Expr::Macro(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Match(e) => e.to_tokens(tokens), Expr::MethodCall(e) => print_expr_method_call(e, tokens, fixup), Expr::Paren(e) => e.to_tokens(tokens), Expr::Path(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Range(e) => print_expr_range(e, tokens, fixup), Expr::Reference(e) => print_expr_reference(e, tokens, fixup), + #[cfg(feature = "full")] Expr::Repeat(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Return(e) => print_expr_return(e, tokens, fixup), Expr::Struct(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Try(e) => print_expr_try(e, tokens, fixup), + #[cfg(feature = "full")] Expr::TryBlock(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Tuple(e) => e.to_tokens(tokens), Expr::Unary(e) => print_expr_unary(e, tokens, fixup), + #[cfg(feature = "full")] Expr::Unsafe(e) => e.to_tokens(tokens), Expr::Verbatim(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::While(e) => e.to_tokens(tokens), + #[cfg(feature = "full")] Expr::Yield(e) => print_expr_yield(e, tokens, fixup), + + #[cfg(not(feature = "full"))] + _ => unreachable!(), }; if needs_group { @@ -3143,14 +3169,14 @@ pub(crate) mod printing { outer_attrs_to_tokens(&e.attrs, tokens); print_subexpression( &e.left, - Precedence::of(&e.left) <= Precedence::Assign, + Precedence::of(&e.left) <= Precedence::Range, tokens, fixup.leftmost_subexpression(), ); e.eq_token.to_tokens(tokens); print_subexpression( &e.right, - 
Precedence::of_rhs(&e.right) < Precedence::Assign, + fixup.trailing_precedence(&e.right) < Precedence::Assign, tokens, fixup.subsequent_subexpression(), ); @@ -3180,7 +3206,7 @@ pub(crate) mod printing { outer_attrs_to_tokens(&e.attrs, tokens); print_subexpression( &e.base, - Precedence::of(&e.base) < Precedence::Postfix, + Precedence::of(&e.base) < Precedence::Unambiguous, tokens, fixup.leftmost_subexpression_with_dot(), ); @@ -3191,71 +3217,47 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprBinary { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_binary( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_binary(self, tokens, FixupContext::NONE); } } - fn print_expr_binary( - e: &ExprBinary, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_binary(e: &ExprBinary, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); - let binop_prec = Precedence::of_binop(&e.op); - let left_prec = Precedence::of(&e.left); - let right_prec = Precedence::of_rhs(&e.right); - let (mut left_needs_group, right_needs_group) = if let Precedence::Assign = binop_prec { - (left_prec <= binop_prec, right_prec < binop_prec) - } else { - (left_prec < binop_prec, right_prec <= binop_prec) - }; - - // These cases require parenthesization independently of precedence. - match (&*e.left, &e.op) { - // `x as i32 < y` has the parser thinking that `i32 < y` is the - // beginning of a path type. It starts trying to parse `x as (i32 < - // y ...` instead of `(x as i32) < ...`. We need to convince it - // _not_ to do that. - (_, BinOp::Lt(_) | BinOp::Shl(_)) if classify::confusable_with_adjacent_lt(&e.left) => { - left_needs_group = true; - } - - // We are given `(let _ = a) OP b`. - // - // - When `OP <= LAnd` we should print `let _ = a OP b` to avoid - // redundant parens as the parser will interpret this as `(let _ = - // a) OP b`. - // - // - Otherwise, e.g. when we have `(let a = b) < c` in AST, parens - // are required since the parser would interpret `let a = b < c` - // as `let a = (b < c)`. To achieve this, we force parens. 
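Reviewer note on the surrounding print_expr_binary rewrite: the special cases spelled out in the removed comments above are now driven by `leftmost_subexpression_with_begin_operator` together with the classify/fixup helpers. A small illustration of the `x as i32 < y` hazard (my example, not from the diff), assuming syn 2.0.72 with the non-default "full" feature enabled on top of the defaults, plus the quote and proc-macro2 crates:

```rust
use proc_macro2::Span;
use quote::ToTokens;
use syn::{parse_quote, BinOp, Expr, ExprBinary, Token};

fn main() {
    // Hand-built AST: a cast sits directly to the left of `<`, with no
    // ExprParen node recorded in the tree.
    let expr = Expr::Binary(ExprBinary {
        attrs: Vec::new(),
        left: Box::new(parse_quote!(x as u8)),
        op: BinOp::Lt(Token![<](Span::call_site())),
        right: Box::new(parse_quote!(T)),
    });
    // The printer parenthesizes the cast so a reparse does not try to read
    // `u8<T>` as a generic type: the output is `(x as u8) < T`.
    println!("{}", expr.to_token_stream());
}
```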
+ let left_fixup = fixup.leftmost_subexpression_with_begin_operator( #[cfg(feature = "full")] - (Expr::Let(_), _) if binop_prec > Precedence::And => { - left_needs_group = true; - } + match &e.op { + BinOp::Sub(_) + | BinOp::Mul(_) + | BinOp::And(_) + | BinOp::Or(_) + | BinOp::BitAnd(_) + | BinOp::BitOr(_) + | BinOp::Shl(_) + | BinOp::Lt(_) => true, + _ => false, + }, + match &e.op { + BinOp::Shl(_) | BinOp::Lt(_) => true, + _ => false, + }, + ); - _ => {} - } + let binop_prec = Precedence::of_binop(&e.op); + let left_prec = left_fixup.leading_precedence(&e.left); + let right_prec = fixup.trailing_precedence(&e.right); + let (left_needs_group, right_needs_group) = match binop_prec { + Precedence::Assign => (left_prec <= Precedence::Range, right_prec < binop_prec), + Precedence::Compare => (left_prec <= binop_prec, right_prec <= binop_prec), + _ => (left_prec < binop_prec, right_prec <= binop_prec), + }; - print_subexpression( - &e.left, - left_needs_group, - tokens, - #[cfg(feature = "full")] - fixup.leftmost_subexpression(), - ); + print_subexpression(&e.left, left_needs_group, tokens, left_fixup); e.op.to_tokens(tokens); print_subexpression( &e.right, right_needs_group, tokens, - #[cfg(feature = "full")] fixup.subsequent_subexpression(), ); } @@ -3286,41 +3288,43 @@ pub(crate) mod printing { outer_attrs_to_tokens(&e.attrs, tokens); e.break_token.to_tokens(tokens); e.label.to_tokens(tokens); - if let Some(expr) = &e.expr { - print_expr(expr, tokens, fixup.subsequent_subexpression()); + if let Some(value) = &e.expr { + print_subexpression( + value, + // Parenthesize `break 'inner: loop { break 'inner 1 } + 1` + // ^---------------------------------^ + e.label.is_none() && classify::expr_leading_label(value), + tokens, + fixup.subsequent_subexpression(), + ); } } #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprCall { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_call( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_call(self, tokens, FixupContext::NONE); } } - fn print_expr_call( - e: &ExprCall, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_call(e: &ExprCall, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); - let precedence = if let Expr::Field(_) = &*e.func { - Precedence::Any + let call_precedence = if let Expr::Field(_) = &*e.func { + Precedence::MIN } else { - Precedence::Postfix + Precedence::Unambiguous }; + let func_fixup = fixup.leftmost_subexpression_with_begin_operator( + #[cfg(feature = "full")] + true, + false, + ); print_subexpression( &e.func, - Precedence::of(&e.func) < precedence, + func_fixup.leading_precedence(&e.func) < call_precedence, tokens, - #[cfg(feature = "full")] - fixup.leftmost_subexpression(), + func_fixup, ); e.paren_token.surround(tokens, |tokens| { @@ -3331,26 +3335,16 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprCast { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_cast( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_cast(self, tokens, FixupContext::NONE); } } - fn print_expr_cast( - e: &ExprCast, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_cast(e: &ExprCast, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); print_subexpression( &e.expr, Precedence::of(&e.expr) < Precedence::Cast, tokens, 
- #[cfg(feature = "full")] fixup.leftmost_subexpression(), ); e.as_token.to_tokens(tokens); @@ -3407,26 +3401,16 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprField { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_field( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_field(self, tokens, FixupContext::NONE); } } - fn print_expr_field( - e: &ExprField, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_field(e: &ExprField, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); print_subexpression( &e.base, - Precedence::of(&e.base) < Precedence::Postfix, + Precedence::of(&e.base) < Precedence::Unambiguous, tokens, - #[cfg(feature = "full")] fixup.leftmost_subexpression_with_dot(), ); e.dot_token.to_tokens(tokens); @@ -3502,27 +3486,22 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprIndex { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_index( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_index(self, tokens, FixupContext::NONE); } } - fn print_expr_index( - e: &ExprIndex, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_index(e: &ExprIndex, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); + let obj_fixup = fixup.leftmost_subexpression_with_begin_operator( + #[cfg(feature = "full")] + true, + false, + ); print_subexpression( &e.expr, - Precedence::of(&e.expr) < Precedence::Postfix, + obj_fixup.leading_precedence(&e.expr) < Precedence::Unambiguous, tokens, - #[cfg(feature = "full")] - fixup.leftmost_subexpression(), + obj_fixup, ); e.bracket_token.surround(tokens, |tokens| { e.index.to_tokens(tokens); @@ -3618,26 +3597,16 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprMethodCall { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_method_call( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_method_call(self, tokens, FixupContext::NONE); } } - fn print_expr_method_call( - e: &ExprMethodCall, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_method_call(e: &ExprMethodCall, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); print_subexpression( &e.receiver, - Precedence::of(&e.receiver) < Precedence::Postfix, + Precedence::of(&e.receiver) < Precedence::Unambiguous, tokens, - #[cfg(feature = "full")] fixup.leftmost_subexpression_with_dot(), ); e.dot_token.to_tokens(tokens); @@ -3689,7 +3658,7 @@ pub(crate) mod printing { if let Some(end) = &e.end { print_subexpression( end, - Precedence::of_rhs(end) <= Precedence::Range, + fixup.trailing_precedence(end) <= Precedence::Range, tokens, fixup.subsequent_subexpression(), ); @@ -3699,28 +3668,18 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprReference { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_reference( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_reference(self, tokens, FixupContext::NONE); } } - fn print_expr_reference( - e: &ExprReference, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_reference(e: &ExprReference, 
tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); e.and_token.to_tokens(tokens); e.mutability.to_tokens(tokens); print_subexpression( &e.expr, - Precedence::of_rhs(&e.expr) < Precedence::Prefix, + fixup.trailing_precedence(&e.expr) < Precedence::Prefix, tokens, - #[cfg(feature = "full")] fixup.subsequent_subexpression(), ); } @@ -3785,7 +3744,7 @@ pub(crate) mod printing { outer_attrs_to_tokens(&e.attrs, tokens); print_subexpression( &e.expr, - Precedence::of(&e.expr) < Precedence::Postfix, + Precedence::of(&e.expr) < Precedence::Unambiguous, tokens, fixup.leftmost_subexpression_with_dot(), ); @@ -3821,27 +3780,17 @@ pub(crate) mod printing { #[cfg_attr(docsrs, doc(cfg(feature = "printing")))] impl ToTokens for ExprUnary { fn to_tokens(&self, tokens: &mut TokenStream) { - print_expr_unary( - self, - tokens, - #[cfg(feature = "full")] - FixupContext::NONE, - ); + print_expr_unary(self, tokens, FixupContext::NONE); } } - fn print_expr_unary( - e: &ExprUnary, - tokens: &mut TokenStream, - #[cfg(feature = "full")] fixup: FixupContext, - ) { + fn print_expr_unary(e: &ExprUnary, tokens: &mut TokenStream, fixup: FixupContext) { outer_attrs_to_tokens(&e.attrs, tokens); e.op.to_tokens(tokens); print_subexpression( &e.expr, - Precedence::of_rhs(&e.expr) < Precedence::Prefix, + fixup.trailing_precedence(&e.expr) < Precedence::Prefix, tokens, - #[cfg(feature = "full")] fixup.subsequent_subexpression(), ); } diff --git a/src/fixup.rs b/src/fixup.rs index 5407c9fdf3..58ed9e73d4 100644 --- a/src/fixup.rs +++ b/src/fixup.rs @@ -13,6 +13,7 @@ pub(crate) struct FixupContext { // // match x {}; // not when its own statement // + #[cfg(feature = "full")] stmt: bool, // This is the difference between: @@ -44,6 +45,7 @@ pub(crate) struct FixupContext { // Example: `$match;` // // No parentheses required. + #[cfg(feature = "full")] leftmost_subexpression_in_stmt: bool, // Print expression such that it can be parsed as a match arm. @@ -59,6 +61,7 @@ pub(crate) struct FixupContext { // _ => m! {} - 1, // binary subtraction operator // } // + #[cfg(feature = "full")] match_arm: bool, // This is almost equivalent to `leftmost_subexpression_in_stmt`, other than @@ -74,6 +77,7 @@ pub(crate) struct FixupContext { // _ => m! {} - 1, // no parens // } // + #[cfg(feature = "full")] leftmost_subexpression_in_match_arm: bool, // This is the difference between: @@ -84,22 +88,61 @@ pub(crate) struct FixupContext { // () if let _ = Struct {} => {} // no parens // } // + #[cfg(feature = "full")] parenthesize_exterior_struct_lit: bool, + + // This is the difference between: + // + // let _ = 1 + return 1; // no parens if rightmost subexpression + // + // let _ = 1 + (return 1) + 1; // needs parens + // + #[cfg(feature = "full")] + parenthesize_exterior_jump: bool, + + // This is the difference between: + // + // let _ = (return) - 1; // without paren, this would return -1 + // + // let _ = return + 1; // no paren because '+' cannot begin expr + // + #[cfg(feature = "full")] + next_operator_can_begin_expr: bool, + + // This is the difference between: + // + // let _ = x as u8 + T; + // + // let _ = (x as u8) < T; + // + // Without parens, the latter would want to parse `u8 Self { FixupContext { stmt: true, @@ -109,6 +152,7 @@ impl FixupContext { /// Create the initial fixup for printing an expression as the right-hand /// side of a match arm. 
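Stepping outside the hunk for a moment: the new `parenthesize_exterior_jump` and `next_operator_can_begin_expr` fields encode the `(return) - 1` versus `return + 1` distinction described in the comments above. A sketch of the observable effect (my example, not from the diff), assuming syn 2.0.72 with the non-default "full" feature plus quote and proc-macro2:

```rust
use proc_macro2::Span;
use quote::ToTokens;
use syn::{parse_quote, BinOp, Expr, ExprBinary, Token};

fn binary(left: Expr, op: BinOp, right: Expr) -> Expr {
    Expr::Binary(ExprBinary {
        attrs: Vec::new(),
        left: Box::new(left),
        op,
        right: Box::new(right),
    })
}

fn main() {
    let sub = BinOp::Sub(Token![-](Span::call_site()));
    let add = BinOp::Add(Token![+](Span::call_site()));
    let minus = binary(parse_quote!(return), sub, parse_quote!(1));
    let plus = binary(parse_quote!(return), add, parse_quote!(1));
    // `-` could begin an expression, so the value-less `return` gets parens:
    println!("{}", minus.to_token_stream()); // (return) - 1
    // `+` cannot begin an expression, so no parentheses are needed:
    println!("{}", plus.to_token_stream()); // return + 1
}
```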
+ #[cfg(feature = "full")] pub fn new_match_arm() -> Self { FixupContext { match_arm: true, @@ -120,6 +164,7 @@ impl FixupContext { /// of an `if` or `while`. There are a few other positions which are /// grammatically equivalent and also use this, such as the iterator /// expression in `for` and the scrutinee in `match`. + #[cfg(feature = "full")] pub fn new_condition() -> Self { FixupContext { parenthesize_exterior_struct_lit: true, @@ -140,11 +185,17 @@ impl FixupContext { /// `-$a` nor `[$a]` have one. pub fn leftmost_subexpression(self) -> Self { FixupContext { + #[cfg(feature = "full")] stmt: false, + #[cfg(feature = "full")] leftmost_subexpression_in_stmt: self.stmt || self.leftmost_subexpression_in_stmt, + #[cfg(feature = "full")] match_arm: false, + #[cfg(feature = "full")] leftmost_subexpression_in_match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm, + #[cfg(feature = "full")] + parenthesize_exterior_jump: true, ..self } } @@ -155,14 +206,36 @@ impl FixupContext { /// subexpressions. pub fn leftmost_subexpression_with_dot(self) -> Self { FixupContext { + #[cfg(feature = "full")] stmt: self.stmt || self.leftmost_subexpression_in_stmt, + #[cfg(feature = "full")] leftmost_subexpression_in_stmt: false, + #[cfg(feature = "full")] match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm, + #[cfg(feature = "full")] leftmost_subexpression_in_match_arm: false, + #[cfg(feature = "full")] + parenthesize_exterior_jump: true, ..self } } + /// Transform this fixup into the one that should apply when printing a + /// leftmost subexpression followed by punctuation that is legal as the + /// first token of an expression. + pub fn leftmost_subexpression_with_begin_operator( + self, + #[cfg(feature = "full")] next_operator_can_begin_expr: bool, + next_operator_can_begin_generics: bool, + ) -> Self { + FixupContext { + #[cfg(feature = "full")] + next_operator_can_begin_expr, + next_operator_can_begin_generics, + ..self.leftmost_subexpression() + } + } + /// Transform this fixup into the one that should apply when printing any /// subexpression that is neither a leftmost subexpression nor surrounded in /// delimiters. @@ -173,9 +246,13 @@ impl FixupContext { /// `$a.f($b)`. pub fn subsequent_subexpression(self) -> Self { FixupContext { + #[cfg(feature = "full")] stmt: false, + #[cfg(feature = "full")] leftmost_subexpression_in_stmt: false, + #[cfg(feature = "full")] match_arm: false, + #[cfg(feature = "full")] leftmost_subexpression_in_match_arm: false, ..self } @@ -186,8 +263,10 @@ impl FixupContext { /// /// The documentation on `FixupContext::leftmost_subexpression_in_stmt` has /// examples. + #[cfg(feature = "full")] pub fn would_cause_statement_boundary(self, expr: &Expr) -> bool { (self.leftmost_subexpression_in_stmt && !classify::requires_semi_to_be_stmt(expr)) + || ((self.stmt || self.leftmost_subexpression_in_stmt) && matches!(expr, Expr::Let(_))) || (self.leftmost_subexpression_in_match_arm && !classify::requires_comma_to_be_match_arm(expr)) } @@ -203,9 +282,59 @@ impl FixupContext { /// /// - `true && false`, because otherwise this would be misinterpreted as a /// "let chain". + #[cfg(feature = "full")] pub fn needs_group_as_let_scrutinee(self, expr: &Expr) -> bool { self.parenthesize_exterior_struct_lit && classify::confusable_with_adjacent_block(expr) - || Precedence::of_rhs(expr) <= Precedence::And + || self.trailing_precedence(expr) < Precedence::Let + } + + /// Determines the effective precedence of a left subexpression. 
Some + /// expressions have lower precedence when adjacent to particular operators. + pub fn leading_precedence(self, expr: &Expr) -> Precedence { + #[cfg(feature = "full")] + if self.next_operator_can_begin_expr { + // Decrease precedence of value-less jumps when followed by an + // operator that would otherwise get interpreted as beginning a + // value for the jump. + if let Expr::Break(_) | Expr::Return(_) | Expr::Yield(_) = expr { + return Precedence::Jump; + } + } + self.precedence(expr) + } + + /// Determines the effective precedence of a right subexpression. Some + /// expressions have higher precedence on the right side of a binary + /// operator than on the left. + pub fn trailing_precedence(self, expr: &Expr) -> Precedence { + #[cfg(feature = "full")] + if !self.parenthesize_exterior_jump { + match expr { + // Increase precedence of expressions that extend to the end of + // current statement or group. + Expr::Break(_) + | Expr::Closure(_) + | Expr::Let(_) + | Expr::Return(_) + | Expr::Yield(_) => { + return Precedence::Prefix; + } + Expr::Range(e) if e.start.is_none() => return Precedence::Prefix, + _ => {} + } + } + self.precedence(expr) + } + + fn precedence(self, expr: &Expr) -> Precedence { + if self.next_operator_can_begin_generics { + if let Expr::Cast(cast) = expr { + if classify::trailing_unparameterized_path(&cast.ty) { + return Precedence::MIN; + } + } + } + Precedence::of(expr) + } } diff --git a/src/generics.rs b/src/generics.rs index c755151d4b..9a5d9b3bc3 100644 --- a/src/generics.rs +++ b/src/generics.rs @@ -772,6 +772,33 @@ pub(crate) mod parsing { let begin = input.fork(); + if cfg!(feature = "full") && input.peek(Token![use]) { + input.parse::<Token![use]>()?; + input.parse::<Token![<]>()?; + loop { + let lookahead = input.lookahead1(); + if lookahead.peek(Lifetime) { + input.parse::<Lifetime>()?; + } else if lookahead.peek(Ident) { + input.parse::<Ident>()?; + } else if lookahead.peek(Token![>]) { + break; + } else { + return Err(lookahead.error()); + } + let lookahead = input.lookahead1(); + if lookahead.peek(Token![,]) { + input.parse::<Token![,]>()?; + } else if lookahead.peek(Token![>]) { + break; + } else { + return Err(lookahead.error()); + } + } + input.parse::<Token![>]>()?; + return Ok(TypeParamBound::Verbatim(verbatim::between(&begin, input))); + } + let content; let (paren_token, content) = if input.peek(token::Paren) { (Some(parenthesized!(content in input)), &content) diff --git a/src/group.rs b/src/group.rs index b742927eef..1534ae995d 100644 --- a/src/group.rs +++ b/src/group.rs @@ -82,7 +82,7 @@ fn parse_delimited<'a>( ) -> Result<(DelimSpan, ParseBuffer<'a>)> { input.step(|cursor| { if let Some((content, span, rest)) = cursor.group(delimiter) { - let scope = crate::buffer::close_span_of_group(*cursor); + let scope = span.close(); let nested = crate::parse::advance_step_cursor(cursor, content); let unexpected = crate::parse::get_unexpected(input); let content = crate::parse::new_parse_buffer(scope, nested, unexpected); diff --git a/src/lib.rs b/src/lib.rs index a8372e8079..516a68662b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -249,7 +249,7 @@ //! dynamic library libproc_macro from rustc toolchain. // Syn types in rustdoc of other crates get linked to here.
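Reviewer note on the src/generics.rs hunk above: with the "full" feature, a precise-capturing `use<..>` bound is now consumed and surfaced as `TypeParamBound::Verbatim` instead of being a parse error. A quick way to observe that (my example, not from the diff), assuming syn 2.0.72 with the "full" and "parsing" features:

```rust
use syn::{Type, TypeParamBound};

fn main() -> syn::Result<()> {
    let ty: Type = syn::parse_str("impl Iterator<Item = u32> + use<'a, T>")?;
    if let Type::ImplTrait(impl_trait) = ty {
        for bound in &impl_trait.bounds {
            match bound {
                TypeParamBound::Trait(_) => println!("trait bound"),
                TypeParamBound::Verbatim(tokens) => println!("verbatim bound: {}", tokens),
                _ => println!("other bound"),
            }
        }
    }
    Ok(())
}
```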
-#![doc(html_root_url = "https://docs.rs/syn/2.0.66")] +#![doc(html_root_url = "https://docs.rs/syn/2.0.72")] #![cfg_attr(docsrs, feature(doc_cfg))] #![deny(unsafe_op_in_unsafe_fn)] #![allow(non_camel_case_types)] @@ -285,6 +285,7 @@ clippy::must_use_candidate, clippy::needless_doctest_main, clippy::needless_pass_by_value, + clippy::needless_update, clippy::never_loop, clippy::range_plus_one, clippy::redundant_else, @@ -386,7 +387,7 @@ mod file; #[cfg_attr(docsrs, doc(cfg(feature = "full")))] pub use crate::file::File; -#[cfg(all(feature = "full", feature = "printing"))] +#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))] mod fixup; #[cfg(any(feature = "full", feature = "derive"))] @@ -536,6 +537,7 @@ mod verbatim; #[cfg(all(feature = "parsing", feature = "full"))] mod whitespace; +#[rustfmt::skip] // https://github.com/rust-lang/rustfmt/issues/6176 mod gen { /// Syntax tree traversal to transform the nodes of an owned syntax tree. /// diff --git a/src/lit.rs b/src/lit.rs index a37aa2a9e7..103c0f6ed7 100644 --- a/src/lit.rs +++ b/src/lit.rs @@ -856,8 +856,11 @@ pub(crate) mod parsing { value, Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitFloatRepr, LitInt, LitIntRepr, LitStr, }; - use crate::parse::{Parse, ParseStream}; - use proc_macro2::{Literal, Punct}; + use crate::parse::{Parse, ParseStream, Unexpected}; + use crate::token::{self, Token}; + use proc_macro2::{Literal, Punct, Span}; + use std::cell::Cell; + use std::rc::Rc; #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))] impl Parse for Lit { @@ -1017,6 +1020,42 @@ pub(crate) mod parsing { } } } + + fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool { + let scope = Span::call_site(); + let unexpected = Rc::new(Cell::new(Unexpected::None)); + let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected); + peek(&buffer) + } + + macro_rules! impl_token { + ($display:literal $name:ty) => { + impl Token for $name { + fn peek(cursor: Cursor) -> bool { + fn peek(input: ParseStream) -> bool { + <$name as Parse>::parse(input).is_ok() + } + peek_impl(cursor, peek) + } + + fn display() -> &'static str { + $display + } + } + + impl token::private::Sealed for $name {} + }; + } + + impl_token!("literal" Lit); + impl_token!("string literal" LitStr); + impl_token!("byte string literal" LitByteStr); + impl_token!("C-string literal" LitCStr); + impl_token!("byte literal" LitByte); + impl_token!("character literal" LitChar); + impl_token!("integer literal" LitInt); + impl_token!("floating point literal" LitFloat); + impl_token!("boolean literal" LitBool); } #[cfg(feature = "printing")] diff --git a/src/lookahead.rs b/src/lookahead.rs index 2ca1471472..75e3a658a3 100644 --- a/src/lookahead.rs +++ b/src/lookahead.rs @@ -3,7 +3,7 @@ use crate::error::{self, Error}; use crate::sealed::lookahead::Sealed; use crate::span::IntoSpans; use crate::token::Token; -use proc_macro2::{Delimiter, Span}; +use proc_macro2::Span; use std::cell::RefCell; /// Support for checking the next token in a stream to decide how to parse. 
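Reviewer note on the src/lit.rs hunk above: the `Token` impls for `Lit` and its variants now live next to the literal types (while `Group` and `Lifetime` become plain `impl_low_level_token!` entries in src/token.rs further down). These impls are what make literals usable with `peek`; a small parser sketch with hypothetical type names, assuming syn's default features:

```rust
use syn::parse::{Parse, ParseStream};
use syn::{LitInt, LitStr, Result};

// Accepts either `42` or `"auto"`, deciding with a literal peek.
enum Width {
    Pixels(LitInt),
    Named(LitStr),
}

impl Parse for Width {
    fn parse(input: ParseStream) -> Result<Self> {
        if input.peek(LitInt) {
            input.parse().map(Width::Pixels)
        } else {
            input.parse().map(Width::Named)
        }
    }
}

fn main() -> Result<()> {
    let w: Width = syn::parse_str("42")?;
    assert!(matches!(w, Width::Pixels(_)));
    Ok(())
}
```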
@@ -162,8 +162,4 @@ impl IntoSpans for TokenMarker { } } -pub(crate) fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool { - cursor.group(delimiter).is_some() -} - impl T, T: Token> Sealed for F {} diff --git a/src/parse.rs b/src/parse.rs index a80a914760..9495c6fc02 100644 --- a/src/parse.rs +++ b/src/parse.rs @@ -743,10 +743,12 @@ impl<'a> ParseBuffer<'a> { Punctuated::parse_terminated_with(self, parser) } - /// Returns whether there are tokens remaining in this stream. + /// Returns whether there are no more tokens remaining to be parsed from + /// this stream. /// - /// This method returns true at the end of the content of a set of - /// delimiters, as well as at the very end of the complete macro input. + /// This method returns true upon reaching the end of the content within a + /// set of delimiters, as well as at the end of the tokens provided to the + /// outermost parsing entry point. /// /// # Example /// diff --git a/src/parse_quote.rs b/src/parse_quote.rs index c4f47e16d1..22cd98effb 100644 --- a/src/parse_quote.rs +++ b/src/parse_quote.rs @@ -113,6 +113,7 @@ use proc_macro2::TokenStream; // Not public API. #[doc(hidden)] +#[track_caller] pub fn parse(token_stream: TokenStream) -> T { let parser = T::parse; match parser.parse2(token_stream) { diff --git a/src/precedence.rs b/src/precedence.rs index 450958ad34..1a26f195db 100644 --- a/src/precedence.rs +++ b/src/precedence.rs @@ -1,5 +1,7 @@ #[cfg(feature = "printing")] use crate::expr::Expr; +#[cfg(all(feature = "printing", feature = "full"))] +use crate::expr::{ExprBreak, ExprReturn, ExprYield}; use crate::op::BinOp; #[cfg(all(feature = "printing", feature = "full"))] use crate::ty::ReturnType; @@ -8,7 +10,7 @@ use std::cmp::Ordering; // Reference: https://doc.rust-lang.org/reference/expressions.html#expression-precedence pub(crate) enum Precedence { // return, break, closures - Any, + Jump, // = += -= *= /= %= &= |= ^= <<= >>= Assign, // .. ..= @@ -17,6 +19,9 @@ pub(crate) enum Precedence { Or, // && And, + // let + #[cfg(feature = "printing")] + Let, // == != < > <= >= Compare, // | @@ -28,27 +33,26 @@ pub(crate) enum Precedence { // << >> Shift, // + - - Arithmetic, + Sum, // * / % - Term, + Product, // as Cast, // unary - * ! & &mut #[cfg(feature = "printing")] Prefix, - // function calls, array indexing, field expressions, method calls, ? - #[cfg(feature = "printing")] - Postfix, - // paths, loops + // paths, loops, function calls, array indexing, field expressions, method calls #[cfg(feature = "printing")] Unambiguous, } impl Precedence { + pub(crate) const MIN: Self = Precedence::Jump; + pub(crate) fn of_binop(op: &BinOp) -> Self { match op { - BinOp::Add(_) | BinOp::Sub(_) => Precedence::Arithmetic, - BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Term, + BinOp::Add(_) | BinOp::Sub(_) => Precedence::Sum, + BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Product, BinOp::And(_) => Precedence::And, BinOp::Or(_) => Precedence::Or, BinOp::BitXor(_) => Precedence::BitXor, @@ -81,41 +85,48 @@ impl Precedence { match e { #[cfg(feature = "full")] Expr::Closure(e) => match e.output { - ReturnType::Default => Precedence::Any, + ReturnType::Default => Precedence::Jump, ReturnType::Type(..) => Precedence::Unambiguous, }, - Expr::Break(_) | Expr::Return(_) | Expr::Yield(_) => Precedence::Any, + #[cfg(feature = "full")] + Expr::Break(ExprBreak { expr, .. }) + | Expr::Return(ExprReturn { expr, .. }) + | Expr::Yield(ExprYield { expr, .. 
}) => match expr { + Some(_) => Precedence::Jump, + None => Precedence::Unambiguous, + }, + Expr::Assign(_) => Precedence::Assign, Expr::Range(_) => Precedence::Range, Expr::Binary(e) => Precedence::of_binop(&e.op), + Expr::Let(_) => Precedence::Let, Expr::Cast(_) => Precedence::Cast, - Expr::Let(_) | Expr::Reference(_) | Expr::Unary(_) => Precedence::Prefix, - - Expr::Await(_) - | Expr::Call(_) - | Expr::MethodCall(_) - | Expr::Field(_) - | Expr::Index(_) - | Expr::Try(_) => Precedence::Postfix, + Expr::Reference(_) | Expr::Unary(_) => Precedence::Prefix, Expr::Array(_) | Expr::Async(_) + | Expr::Await(_) | Expr::Block(_) + | Expr::Call(_) | Expr::Const(_) | Expr::Continue(_) + | Expr::Field(_) | Expr::ForLoop(_) | Expr::Group(_) | Expr::If(_) + | Expr::Index(_) | Expr::Infer(_) | Expr::Lit(_) | Expr::Loop(_) | Expr::Macro(_) | Expr::Match(_) + | Expr::MethodCall(_) | Expr::Paren(_) | Expr::Path(_) | Expr::Repeat(_) | Expr::Struct(_) + | Expr::Try(_) | Expr::TryBlock(_) | Expr::Tuple(_) | Expr::Unsafe(_) @@ -123,19 +134,7 @@ impl Precedence { | Expr::While(_) => Precedence::Unambiguous, #[cfg(not(feature = "full"))] - Expr::Closure(_) => unreachable!(), - } - } - - #[cfg(feature = "printing")] - pub(crate) fn of_rhs(e: &Expr) -> Self { - match e { - Expr::Break(_) | Expr::Closure(_) | Expr::Return(_) | Expr::Yield(_) => { - Precedence::Prefix - } - #[cfg(feature = "full")] - Expr::Range(e) if e.start.is_none() => Precedence::Prefix, - _ => Precedence::of(e), + Expr::Break(_) | Expr::Closure(_) | Expr::Return(_) | Expr::Yield(_) => unreachable!(), } } } diff --git a/src/punctuated.rs b/src/punctuated.rs index 29e8dce15f..b26ed647f1 100644 --- a/src/punctuated.rs +++ b/src/punctuated.rs @@ -92,6 +92,29 @@ impl Punctuated { self.iter_mut().next_back() } + /// Borrows the element at the given index. + pub fn get(&self, index: usize) -> Option<&T> { + if let Some((value, _punct)) = self.inner.get(index) { + Some(value) + } else if index == self.inner.len() { + self.last.as_deref() + } else { + None + } + } + + /// Mutably borrows the element at the given index. + pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { + let inner_len = self.inner.len(); + if let Some((value, _punct)) = self.inner.get_mut(index) { + Some(value) + } else if index == inner_len { + self.last.as_deref_mut() + } else { + None + } + } + /// Returns an iterator over borrowed syntax tree nodes of type `&T`. pub fn iter(&self) -> Iter { Iter { diff --git a/src/token.rs b/src/token.rs index e04f105747..061e8806ca 100644 --- a/src/token.rs +++ b/src/token.rs @@ -98,10 +98,6 @@ use crate::error::Result; #[cfg(feature = "parsing")] use crate::lifetime::Lifetime; #[cfg(feature = "parsing")] -use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr}; -#[cfg(feature = "parsing")] -use crate::lookahead; -#[cfg(feature = "parsing")] use crate::parse::{Parse, ParseStream}; use crate::span::IntoSpans; use proc_macro2::extra::DelimSpan; @@ -164,54 +160,10 @@ pub(crate) mod private { #[cfg(feature = "parsing")] impl private::Sealed for Ident {} -#[cfg(feature = "parsing")] -fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool { - use crate::parse::Unexpected; - use std::cell::Cell; - use std::rc::Rc; - - let scope = Span::call_site(); - let unexpected = Rc::new(Cell::new(Unexpected::None)); - let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected); - peek(&buffer) -} - -macro_rules! 
impl_token { - ($display:literal $name:ty) => { - #[cfg(feature = "parsing")] - impl Token for $name { - fn peek(cursor: Cursor) -> bool { - fn peek(input: ParseStream) -> bool { - <$name as Parse>::parse(input).is_ok() - } - peek_impl(cursor, peek) - } - - fn display() -> &'static str { - $display - } - } - - #[cfg(feature = "parsing")] - impl private::Sealed for $name {} - }; -} - -impl_token!("lifetime" Lifetime); -impl_token!("literal" Lit); -impl_token!("string literal" LitStr); -impl_token!("byte string literal" LitByteStr); -impl_token!("byte literal" LitByte); -impl_token!("character literal" LitChar); -impl_token!("integer literal" LitInt); -impl_token!("floating point literal" LitFloat); -impl_token!("boolean literal" LitBool); -impl_token!("group token" proc_macro2::Group); - macro_rules! impl_low_level_token { - ($display:literal $ty:ident $get:ident) => { + ($display:literal $($path:ident)::+ $get:ident) => { #[cfg(feature = "parsing")] - impl Token for $ty { + impl Token for $($path)::+ { fn peek(cursor: Cursor) -> bool { cursor.$get().is_some() } @@ -222,13 +174,15 @@ macro_rules! impl_low_level_token { } #[cfg(feature = "parsing")] - impl private::Sealed for $ty {} + impl private::Sealed for $($path)::+ {} }; } impl_low_level_token!("punctuation token" Punct punct); impl_low_level_token!("literal" Literal literal); impl_low_level_token!("token" TokenTree token_tree); +impl_low_level_token!("group token" proc_macro2::Group any_group); +impl_low_level_token!("lifetime" Lifetime lifetime); #[cfg(feature = "parsing")] impl private::Sealed for T {} @@ -692,7 +646,7 @@ impl private::Sealed for Group {} #[cfg(feature = "parsing")] impl Token for Paren { fn peek(cursor: Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::Parenthesis) + cursor.group(Delimiter::Parenthesis).is_some() } fn display() -> &'static str { @@ -703,7 +657,7 @@ impl Token for Paren { #[cfg(feature = "parsing")] impl Token for Brace { fn peek(cursor: Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::Brace) + cursor.group(Delimiter::Brace).is_some() } fn display() -> &'static str { @@ -714,7 +668,7 @@ impl Token for Brace { #[cfg(feature = "parsing")] impl Token for Bracket { fn peek(cursor: Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::Bracket) + cursor.group(Delimiter::Bracket).is_some() } fn display() -> &'static str { @@ -725,7 +679,7 @@ impl Token for Bracket { #[cfg(feature = "parsing")] impl Token for Group { fn peek(cursor: Cursor) -> bool { - lookahead::is_delimiter(cursor, Delimiter::None) + cursor.group(Delimiter::None).is_some() } fn display() -> &'static str { diff --git a/syn.json b/syn.json index 19e97f4f1b..08f2366a5f 100644 --- a/syn.json +++ b/syn.json @@ -1,5 +1,5 @@ { - "version": "2.0.66", + "version": "2.0.72", "types": [ { "ident": "Abi", diff --git a/tests/common/eq.rs b/tests/common/eq.rs index b44ea3660d..11d391d80e 100644 --- a/tests/common/eq.rs +++ b/tests/common/eq.rs @@ -10,8 +10,8 @@ use rustc_ast::ast::AngleBracketedArg; use rustc_ast::ast::AngleBracketedArgs; use rustc_ast::ast::AnonConst; use rustc_ast::ast::Arm; -use rustc_ast::ast::AssocConstraint; -use rustc_ast::ast::AssocConstraintKind; +use rustc_ast::ast::AssocItemConstraint; +use rustc_ast::ast::AssocItemConstraintKind; use rustc_ast::ast::AssocItemKind; use rustc_ast::ast::AttrArgs; use rustc_ast::ast::AttrArgsEq; @@ -102,6 +102,8 @@ use rustc_ast::ast::MacCallStmt; use rustc_ast::ast::MacStmtStyle; use rustc_ast::ast::MacroDef; use rustc_ast::ast::MatchKind; +use 
rustc_ast::ast::MetaItem;
+use rustc_ast::ast::MetaItemKind;
 use rustc_ast::ast::MetaItemLit;
 use rustc_ast::ast::MethodCall;
 use rustc_ast::ast::ModKind;
@@ -109,6 +111,7 @@ use rustc_ast::ast::ModSpans;
 use rustc_ast::ast::Movability;
 use rustc_ast::ast::MutTy;
 use rustc_ast::ast::Mutability;
+use rustc_ast::ast::NestedMetaItem;
 use rustc_ast::ast::NodeId;
 use rustc_ast::ast::NormalAttr;
 use rustc_ast::ast::Param;
@@ -127,7 +130,6 @@ use rustc_ast::ast::RangeLimits;
 use rustc_ast::ast::RangeSyntax;
 use rustc_ast::ast::Recovered;
 use rustc_ast::ast::Safety;
-use rustc_ast::ast::StaticForeignItem;
 use rustc_ast::ast::StaticItem;
 use rustc_ast::ast::Stmt;
 use rustc_ast::ast::StmtKind;
@@ -164,7 +166,7 @@ use rustc_ast::token::{
     self, CommentKind, Delimiter, IdentIsRaw, Lit, Nonterminal, Token, TokenKind,
 };
 use rustc_ast::tokenstream::{
-    AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing, DelimSpan, LazyAttrTokenStream,
+    AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream,
     Spacing, TokenStream, TokenTree,
 };
 use rustc_data_structures::packed::Pu128;
@@ -464,18 +466,18 @@ macro_rules! spanless_eq_enum {
 spanless_eq_struct!(AngleBracketedArgs; span args);
 spanless_eq_struct!(AnonConst; id value);
 spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
-spanless_eq_struct!(AssocConstraint; id ident gen_args kind span);
-spanless_eq_struct!(AttrItem; path args tokens);
+spanless_eq_struct!(AssocItemConstraint; id ident gen_args kind span);
+spanless_eq_struct!(AttrItem; unsafety path args tokens);
 spanless_eq_struct!(AttrTokenStream; 0);
 spanless_eq_struct!(Attribute; kind id style span);
-spanless_eq_struct!(AttributesData; attrs tokens);
+spanless_eq_struct!(AttrsTarget; attrs tokens);
 spanless_eq_struct!(BareFnTy; safety ext generic_params decl decl_span);
 spanless_eq_struct!(BindingMode; 0 1);
 spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
 spanless_eq_struct!(Closure; binder capture_clause constness coroutine_kind movability fn_decl body !fn_decl_span !fn_arg_span);
 spanless_eq_struct!(ConstItem; defaultness generics ty expr);
 spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
-spanless_eq_struct!(Delegation; id qself path rename body);
+spanless_eq_struct!(Delegation; id qself path rename body from_glob);
 spanless_eq_struct!(DelegationMac; qself prefix suffixes body);
 spanless_eq_struct!(DelimArgs; dspan delim tokens);
 spanless_eq_struct!(DelimSpacing; open close);
@@ -506,6 +508,7 @@ spanless_eq_struct!(Local; id pat ty kind span colon_sp attrs !tokens);
 spanless_eq_struct!(MacCall; path args);
 spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
 spanless_eq_struct!(MacroDef; body macro_rules);
+spanless_eq_struct!(MetaItem; unsafety path kind span);
 spanless_eq_struct!(MetaItemLit; symbol suffix kind span);
 spanless_eq_struct!(MethodCall; seg receiver args !span);
 spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
@@ -518,8 +521,7 @@ spanless_eq_struct!(Path; span segments tokens);
 spanless_eq_struct!(PathSegment; ident id args);
 spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
 spanless_eq_struct!(QSelf; ty path_span position);
-spanless_eq_struct!(StaticForeignItem; ty mutability expr);
-spanless_eq_struct!(StaticItem; ty mutability expr);
+spanless_eq_struct!(StaticItem; ty safety mutability expr);
 spanless_eq_struct!(Stmt; id kind span);
 spanless_eq_struct!(StrLit; symbol suffix symbol_unescaped style span);
 spanless_eq_struct!(StructExpr; qself path fields rest);
@@ -539,12 +541,12 @@ spanless_eq_struct!(WhereClause; has_where_token predicates span);
 spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
 spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
 spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
-spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
+spanless_eq_enum!(AssocItemConstraintKind; Equality(term) Bound(bounds));
 spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0) Delegation(0) DelegationMac(0));
 spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1));
 spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0));
 spanless_eq_enum!(AttrStyle; Outer Inner);
-spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) Attributes(0));
+spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) AttrsTarget(0));
 spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
 spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
 spanless_eq_enum!(BorrowKind; Ref Raw);
@@ -571,8 +573,8 @@ spanless_eq_enum!(FormatSign; Plus Minus);
 spanless_eq_enum!(FormatTrait; Display Debug LowerExp UpperExp Octal Pointer Binary LowerHex UpperHex);
 spanless_eq_enum!(GenBlockKind; Async Gen AsyncGen);
 spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
-spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
-spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
+spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0) ParenthesizedElided(0));
+spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0) Use(0 1));
 spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default));
 spanless_eq_enum!(ImplPolarity; Positive Negative(0));
 spanless_eq_enum!(Inline; Yes No);
@@ -585,15 +587,17 @@ spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
 spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
 spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
 spanless_eq_enum!(MatchKind; Prefix Postfix);
+spanless_eq_enum!(MetaItemKind; Word List(0) NameValue(0));
 spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
 spanless_eq_enum!(Movability; Static Movable);
 spanless_eq_enum!(Mutability; Mut Not);
+spanless_eq_enum!(NestedMetaItem; MetaItem(0) Lit(0));
 spanless_eq_enum!(PatFieldsRest; Rest None);
 spanless_eq_enum!(PreciseCapturingArg; Lifetime(0) Arg(0 1));
 spanless_eq_enum!(RangeEnd; Included(0) Excluded);
 spanless_eq_enum!(RangeLimits; HalfOpen Closed);
 spanless_eq_enum!(Recovered; No Yes(0));
-spanless_eq_enum!(Safety; Unsafe(0) Default);
+spanless_eq_enum!(Safety; Unsafe(0) Safe(0) Default);
 spanless_eq_enum!(StmtKind; Let(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
 spanless_eq_enum!(StrStyle; Cooked Raw(0));
 spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
@@ -613,7 +617,7 @@ spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id)
 spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0)
     Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3)
     If(0 1 2) While(0 1 2) ForLoop(pat iter body label kind) Loop(0 1 2)
-    Match(0 1 2) Closure(0) Block(0 1) Gen(0 1 2) Await(0 1) TryBlock(0)
+    Match(0 1 2) Closure(0) Block(0 1) Gen(0 1 2 3) Await(0 1) TryBlock(0)
     Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore
     Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
     InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0)
@@ -632,7 +636,7 @@ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
     Slice(0) Rest Never Paren(0) MacCall(0) Err(0));
 spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) BareFn(0) Never
     Tup(0) AnonStruct(0 1) AnonUnion(0 1) Path(0 1) TraitObject(0 1)
-    ImplTrait(0 1 2) Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) CVarArgs
+    ImplTrait(0 1) Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) CVarArgs
     Pat(0 1) Dummy Err(0));

 impl SpanlessEq for Ident {
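The spanless_eq_struct! and spanless_eq_enum! entries above register rustc_ast types with the test suite's SpanlessEq comparison, which checks two parse trees structurally while treating source positions as equal. As a rough, self-contained sketch of the kind of impl such an entry stands for (not the macro's literal expansion, and with toy stand-in field types rather than the real rustc_ast ones), spanless_eq_struct!(MethodCall; seg receiver args !span) corresponds to comparing the listed fields and skipping the !-prefixed span field:

#![allow(dead_code)]

// Toy stand-ins; in the real test these are rustc_ast / rustc_span types.
trait SpanlessEq {
    fn eq(&self, other: &Self) -> bool;
}

impl SpanlessEq for String {
    fn eq(&self, other: &Self) -> bool {
        self == other
    }
}

impl<T: SpanlessEq> SpanlessEq for Vec<T> {
    fn eq(&self, other: &Self) -> bool {
        self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
    }
}

struct Span;

struct MethodCall {
    seg: String,
    receiver: String,
    args: Vec<String>,
    span: Span,
}

// Roughly what spanless_eq_struct!(MethodCall; seg receiver args !span) asks for:
// compare the listed fields structurally, ignore the `!`-prefixed span field.
impl SpanlessEq for MethodCall {
    fn eq(&self, other: &Self) -> bool {
        SpanlessEq::eq(&self.seg, &other.seg)
            && SpanlessEq::eq(&self.receiver, &other.receiver)
            && SpanlessEq::eq(&self.args, &other.args)
        // `span` is deliberately not compared.
    }
}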
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
index f29d800c72..c85ac0b4c9 100644
--- a/tests/common/mod.rs
+++ b/tests/common/mod.rs
@@ -1,28 +1,5 @@
 #![allow(dead_code)]
 #![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)]

-use rayon::ThreadPoolBuilder;
-use std::env;
-
 pub mod eq;
 pub mod parse;
-
-/// Read the `ABORT_AFTER_FAILURE` environment variable, and parse it.
-pub fn abort_after() -> usize {
-    match env::var("ABORT_AFTER_FAILURE") {
-        Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
-        Err(_) => usize::MAX,
-    }
-}
-
-/// Configure Rayon threadpool.
-pub fn rayon_init() {
-    let stack_size = match env::var("RUST_MIN_STACK") {
-        Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
-        Err(_) => 1024 * 1024 * if cfg!(debug_assertions) { 40 } else { 20 },
-    };
-    ThreadPoolBuilder::new()
-        .stack_size(stack_size)
-        .build_global()
-        .unwrap();
-}
diff --git a/tests/common/parse.rs b/tests/common/parse.rs
index 2d00a62f46..cc724c4258 100644
--- a/tests/common/parse.rs
+++ b/tests/common/parse.rs
@@ -15,13 +15,10 @@ pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
     match panic::catch_unwind(|| {
         let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
         let sess = ParseSess::new(locale_resources);
-        let e = parse::new_parser_from_source_str(
-            &sess,
-            FileName::Custom("test_precedence".to_string()),
-            input.to_string(),
-        )
-        .parse_expr();
-        match e {
+        let name = FileName::Custom("test_precedence".to_string());
+        let mut parser = parse::new_parser_from_source_str(&sess, name, input.to_string()).unwrap();
+        let presult = parser.parse_expr();
+        match presult {
             Ok(expr) => Some(expr),
             Err(diagnostic) => {
                 diagnostic.emit();
diff --git a/tests/repo/mod.rs b/tests/repo/mod.rs
index c8400288d9..26f4c4886b 100644
--- a/tests/repo/mod.rs
+++ b/tests/repo/mod.rs
@@ -6,20 +6,50 @@ use self::progress::Progress;
 use anyhow::Result;
 use flate2::read::GzDecoder;
 use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
+use rayon::ThreadPoolBuilder;
 use std::collections::BTreeSet;
+use std::env;
 use std::ffi::OsStr;
 use std::fs;
 use std::path::{Path, PathBuf};
 use tar::Archive;
 use walkdir::{DirEntry, WalkDir};

-const REVISION: &str = "becebb3158149a115cad8a402612e25436a7e37b";
+const REVISION: &str = "5069856495870486134dd2ca0b0e2516308c5c2a";

 #[rustfmt::skip]
 static EXCLUDE_FILES: &[&str] = &[
+    // TODO: parenthesization of `{ (match () {})() }`
+    "compiler/rustc_lint/src/context/diagnostics.rs",
+
+    // TODO: `unsafe static`, `safe fn`
+    // https://github.com/dtolnay/syn/issues/1675
+    "src/tools/rustfmt/tests/target/unsafe_extern_blocks.rs",
+    "tests/rustdoc/unsafe-extern-blocks.rs",
+    "tests/ui/rust-2024/unsafe-extern-blocks/safe-items.rs",
+
+    // TODO: unsafe attributes: `#[unsafe(path::to)]`
+    // https://github.com/dtolnay/syn/issues/1710
+    "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0213_metas.rs",
+    "src/tools/rustfmt/tests/target/unsafe_attributes.rs",
+    "tests/ui/attributes/unsafe/unsafe-attributes.rs",
+    "tests/ui/rust-2024/unsafe-attributes/unsafe-attribute-marked.rs",
+
+    // TODO: vararg in function pointer type: `extern fn(_: *mut _, _: ...)`
+    // https://github.com/dtolnay/syn/issues/1711
+    "library/std/src/sys/pal/uefi/helpers.rs",
+
     // TODO: explicit tail calls: `become _g()`
     // https://github.com/dtolnay/syn/issues/1501
+    "src/tools/miri/tests/fail/tail_calls/cc-mismatch.rs",
+    "src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs",
+    "src/tools/miri/tests/pass/tail_call.rs",
     "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs",
+    "tests/mir-opt/tail_call_drops.rs",
+    "tests/ui/explicit-tail-calls/ctfe-arg-good-borrow.rs",
+    "tests/ui/explicit-tail-calls/ctfe-arg-move.rs",
+    "tests/ui/explicit-tail-calls/ctfe-collatz-multi-rec.rs",
+    "tests/ui/explicit-tail-calls/drop-order.rs",
     "tests/ui/explicit-tail-calls/return-lifetime-sub.rs",

     // TODO: non-lifetime binders: `where for<'a, T> &'a Struct: Trait`
@@ -35,6 +65,7 @@ static EXCLUDE_FILES: &[&str] = &[
     // TODO: return type notation: `where T: Trait<method(): Send>`
     // https://github.com/dtolnay/syn/issues/1434
     "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs",
+    "src/tools/rustfmt/tests/target/return-type-notation.rs",
     "tests/ui/associated-type-bounds/return-type-notation/basic.rs",
     "tests/ui/associated-type-bounds/return-type-notation/unpretty-parenthesized.rs",
     "tests/ui/feature-gates/feature-gate-return_type_notation.rs",
@@ -60,7 +91,9 @@ static EXCLUDE_FILES: &[&str] = &[

     // TODO: `!` as a pattern
     // https://github.com/dtolnay/syn/issues/1546
+    "tests/mir-opt/building/match/never_patterns.rs",
     "tests/ui/rfcs/rfc-0000-never_patterns/diverges.rs",
+    "tests/ui/rfcs/rfc-0000-never_patterns/use-bindings.rs",

     // TODO: async trait bounds: `impl async Fn()`
     // https://github.com/dtolnay/syn/issues/1628
@@ -77,6 +110,7 @@ static EXCLUDE_FILES: &[&str] = &[
     "tests/ui/async-await/async-closures/captures.rs",
     "tests/ui/async-await/async-closures/constrained-but-no-upvars-yet.rs",
     "tests/ui/async-await/async-closures/drop.rs",
+    "tests/ui/async-await/async-closures/force-move-due-to-inferred-kind.rs",
     "tests/ui/async-await/async-closures/mangle.rs",
     "tests/ui/async-await/async-closures/moro-example.rs",
     "tests/ui/async-await/async-closures/move-is-async-fn.rs",
@@ -102,7 +136,7 @@ static EXCLUDE_FILES: &[&str] = &[
     // https://github.com/dtolnay/syn/issues/1630
     "src/tools/rustfmt/tests/source/postfix-match/pf-match.rs",
     "src/tools/rustfmt/tests/target/postfix-match/pf-match.rs",
-    "tests/pretty/postfix-match.rs",
+    "tests/pretty/postfix-match/simple-matches.rs",
     "tests/ui/match/postfix-match/no-unused-parens.rs",
     "tests/ui/match/postfix-match/pf-match-chain.rs",
     "tests/ui/match/postfix-match/postfix-match.rs",
@@ -110,10 +144,23 @@ static EXCLUDE_FILES: &[&str] = &[
     // TODO: delegation
     // https://github.com/dtolnay/syn/issues/1580
     "tests/pretty/delegation.rs",
+    "tests/ui/delegation/body-identity-glob.rs",
+    "tests/ui/delegation/body-identity-list.rs",
     "tests/ui/delegation/explicit-paths-in-traits-pass.rs",
     "tests/ui/delegation/explicit-paths-pass.rs",
     "tests/ui/delegation/explicit-paths-signature-pass.rs",
+    "tests/ui/delegation/fn-header.rs",
+    "tests/ui/delegation/glob-glob.rs",
+    "tests/ui/delegation/glob-override.rs",
+    "tests/ui/delegation/glob.rs",
+    "tests/ui/delegation/impl-trait.rs",
+    "tests/ui/delegation/list.rs",
+    "tests/ui/delegation/macro-inside-glob.rs",
+    "tests/ui/delegation/macro-inside-list.rs",
+    "tests/ui/delegation/method-call-priority.rs",
     "tests/ui/delegation/parse.rs",
+    "tests/ui/delegation/rename.rs",
+    "tests/ui/delegation/self-coercion.rs",

     // TODO: for await
     // https://github.com/dtolnay/syn/issues/1631
@@ -132,6 +179,9 @@ static EXCLUDE_FILES: &[&str] = &[
     "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_closure_range_method_call.rs",
     "src/tools/rustfmt/tests/source/issue-4808.rs",

+    // Several of the above
+    "tests/ui/unpretty/expanded-exhaustive.rs",
+
     // Compile-fail expr parameter in const generic position: f::<1 + 2>()
     "tests/ui/const-generics/early/closing-args-token.rs",
     "tests/ui/const-generics/early/const-expression-parameter.rs",
@@ -232,7 +282,7 @@ static EXCLUDE_FILES: &[&str] = &[
     // Placeholder syntax for "throw expressions"
     "compiler/rustc_errors/src/translation.rs",
     "compiler/rustc_expand/src/module.rs",
-    "compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs",
+    "compiler/rustc_infer/src/infer/need_type_info.rs",
     "src/tools/clippy/tests/ui/needless_return.rs",
     "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs",
     "tests/pretty/yeet-expr.rs",
@@ -384,6 +434,25 @@ pub fn edition(path: &Path) -> &'static str {
     }
 }

+#[allow(dead_code)]
+pub fn abort_after() -> usize {
+    match env::var("ABORT_AFTER_FAILURE") {
+        Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
+        Err(_) => usize::MAX,
+    }
+}
+
+pub fn rayon_init() {
+    let stack_size = match env::var("RUST_MIN_STACK") {
+        Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
+        Err(_) => 1024 * 1024 * if cfg!(debug_assertions) { 40 } else { 20 },
+    };
+    ThreadPoolBuilder::new()
+        .stack_size(stack_size)
+        .build_global()
+        .unwrap();
+}
+
 pub fn clone_rust() {
     let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
         Err(_) => true,
diff --git a/tests/test_expr.rs b/tests/test_expr.rs
index 961e3c4ea3..d20cce8d6a 100644
--- a/tests/test_expr.rs
+++ b/tests/test_expr.rs
@@ -660,7 +660,7 @@ fn test_fixup() {
         quote! { 0 + (0 + 0) },
         quote! { (a = b) = c },
         quote! { (x as i32) < 0 },
-        quote! { (1 + x as i32) < 0 },
+        quote! { 1 + (x as i32) < 0 },
         quote! { (1 + 1).abs() },
         quote! { (lo..hi)[..] },
         quote! { (a..b)..(c..d) },
@@ -672,6 +672,15 @@ fn test_fixup() {
         quote! { match m { _ => ({}) - 1 } },
         quote! { if let _ = (a && b) && c {} },
         quote! { if let _ = (S {}) {} },
+        quote! { break ('a: loop { break 'a 1 } + 1) },
+        quote! { a + (|| b) + c },
+        quote! { if let _ = ((break) - 1 || true) {} },
+        quote! { if let _ = (break + 1 || true) {} },
+        quote! { (break)() },
+        quote! { (..) = () },
+        quote! { (..) += () },
+        quote! { (1 < 2) == (3 < 4) },
+        quote! { { (let _ = ()) } },
     ] {
         let original: Expr = syn::parse2(tokens).unwrap();

diff --git a/tests/test_precedence.rs b/tests/test_precedence.rs
index 02b87f37d8..40b0bfad03 100644
--- a/tests/test_precedence.rs
+++ b/tests/test_precedence.rs
@@ -57,9 +57,9 @@ mod repo;

 #[test]
 fn test_rustc_precedence() {
-    common::rayon_init();
+    repo::rayon_init();
     repo::clone_rust();
-    let abort_after = common::abort_after();
+    let abort_after = repo::abort_after();
     if abort_after == 0 {
         panic!("skipping all precedence tests");
     }
@@ -97,8 +97,8 @@ fn test_rustc_precedence() {
         }
     });

-    let passed = passed.load(Ordering::Relaxed);
-    let failed = failed.load(Ordering::Relaxed);
+    let passed = passed.into_inner();
+    let failed = failed.into_inner();

     errorf!("\n===== Precedence Test Results =====\n");
     errorf!("{} passed | {} failed\n", passed, failed);
@@ -300,9 +300,10 @@ fn librustc_parenthesize(mut librustc_expr: P<ast::Expr>) -> P<ast::Expr> {
                         constness: BoundConstness::Maybe(_),
                         ..
                     },
-                ) => {}
+                )
+                | GenericBound::Outlives(..)
+                | GenericBound::Use(..) => {}
                 GenericBound::Trait(ty, _modifier) => self.visit_poly_trait_ref(ty),
-                GenericBound::Outlives(_lifetime) => {}
             }
         }

diff --git a/tests/test_round_trip.rs b/tests/test_round_trip.rs
index 9b089036d0..9d459d9e1f 100644
--- a/tests/test_round_trip.rs
+++ b/tests/test_round_trip.rs
@@ -50,9 +50,9 @@ mod repo;

 #[test]
 fn test_round_trip() {
-    common::rayon_init();
+    repo::rayon_init();
     repo::clone_rust();
-    let abort_after = common::abort_after();
+    let abort_after = repo::abort_after();
     if abort_after == 0 {
         panic!("skipping all round_trip tests");
     }
@@ -61,7 +61,7 @@ fn test_round_trip() {

     repo::for_each_rust_file(|path| test(path, &failed, abort_after));

-    let failed = failed.load(Ordering::Relaxed);
+    let failed = failed.into_inner();
     if failed > 0 {
         panic!("{} failures", failed);
     }
@@ -160,7 +161,8 @@ fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
     static COUNTER: AtomicUsize = AtomicUsize::new(0);
     let counter = COUNTER.fetch_add(1, Ordering::Relaxed);
     let name = FileName::Custom(format!("test_round_trip{}", counter));
-    parse::parse_crate_from_source_str(name, content, sess)
+    let mut parser = parse::new_parser_from_source_str(sess, name, content).unwrap();
+    parser.parse_crate_mod()
 }

 fn translate_message(diagnostic: &Diag) -> Cow<'static, str> {
@@ -220,7 +221,9 @@ fn normalize(krate: &mut Crate) {
             for arg in &mut e.args {
                 match arg {
                     AngleBracketedArg::Arg(arg) => self.visit_generic_arg(arg),
-                    AngleBracketedArg::Constraint(constraint) => self.visit_constraint(constraint),
+                    AngleBracketedArg::Constraint(constraint) => {
+                        self.visit_assoc_item_constraint(constraint);
+                    }
                 }
             }
         }
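Both test entry points above now finish by calling AtomicUsize::into_inner() on their counters instead of load(Ordering::Relaxed): once the parallel walk over the rust checkout has completed, the counter is exclusively owned again, so it can simply be consumed. A minimal standalone sketch of that pattern using only std (the thread count and failure condition are made up):

use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;

fn main() {
    let failed = AtomicUsize::new(0);

    // Share the counter by reference while worker threads run.
    thread::scope(|s| {
        for i in 0..8 {
            let failed = &failed;
            s.spawn(move || {
                if i % 3 == 0 {
                    failed.fetch_add(1, Ordering::Relaxed);
                }
            });
        }
    });

    // Every worker has joined, so no synchronization is needed to read the total.
    let failed = failed.into_inner();
    println!("{} failures", failed);
}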
diff --git a/tests/test_size.rs b/tests/test_size.rs
index 32e6119c98..29fd43589d 100644
--- a/tests/test_size.rs
+++ b/tests/test_size.rs
@@ -1,35 +1,53 @@
 // Assumes proc-macro2's "span-locations" feature is off.

-#![cfg(target_pointer_width = "64")]
-
 use std::mem;
 use syn::{Expr, Item, Lit, Pat, Type};

-#[rustversion::attr(before(2022-11-24), ignore)]
+#[rustversion::attr(before(2022-11-24), ignore = "requires nightly-2022-11-24 or newer")]
+#[rustversion::attr(
+    since(2022-11-24),
+    cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
+)]
 #[test]
 fn test_expr_size() {
     assert_eq!(mem::size_of::<Expr>(), 176);
 }

-#[rustversion::attr(before(2022-09-09), ignore)]
+#[rustversion::attr(before(2022-09-09), ignore = "requires nightly-2022-09-09 or newer")]
+#[rustversion::attr(
+    since(2022-09-09),
+    cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
+)]
 #[test]
 fn test_item_size() {
     assert_eq!(mem::size_of::<Item>(), 352);
 }

-#[rustversion::attr(before(2023-04-29), ignore)]
+#[rustversion::attr(before(2023-04-29), ignore = "requires nightly-2023-04-29 or newer")]
+#[rustversion::attr(
+    since(2023-04-29),
+    cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
+)]
 #[test]
 fn test_type_size() {
     assert_eq!(mem::size_of::<Type>(), 224);
 }

-#[rustversion::attr(before(2023-04-29), ignore)]
+#[rustversion::attr(before(2023-04-29), ignore = "requires nightly-2023-04-29 or newer")]
+#[rustversion::attr(
+    since(2023-04-29),
+    cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
+)]
 #[test]
 fn test_pat_size() {
     assert_eq!(mem::size_of::<Pat>(), 184);
 }

-#[rustversion::attr(before(2023-12-20), ignore)]
+#[rustversion::attr(before(2023-12-20), ignore = "requires nightly-2023-12-20 or newer")]
+#[rustversion::attr(
+    since(2023-12-20),
+    cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
+)]
 #[test]
 fn test_lit_size() {
     assert_eq!(mem::size_of::<Lit>(), 24);
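The crate-level #![cfg(target_pointer_width = "64")] is removed above; instead each size assertion carries a pair of rustversion attributes, so on older nightlies or non-64-bit targets the test is presumably ignored with a visible reason rather than compiled out. The same pattern in isolation, on a hypothetical test (the nightly date is copied from the diff purely as an example; requires the rustversion dev-dependency):

use std::mem;

#[rustversion::attr(before(2023-04-29), ignore = "requires nightly-2023-04-29 or newer")]
#[rustversion::attr(
    since(2023-04-29),
    cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
)]
#[test]
fn example_size_check() {
    // Option<&u8> benefits from the null-pointer niche, so it is pointer-sized.
    assert_eq!(mem::size_of::<Option<&u8>>(), 8);
}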
diff --git a/tests/test_ty.rs b/tests/test_ty.rs
index f1ced9ea37..d85479de45 100644
--- a/tests/test_ty.rs
+++ b/tests/test_ty.rs
@@ -395,3 +395,50 @@ fn test_tuple_comma() {
     }
     "###);
 }
+
+#[test]
+fn test_impl_trait_use() {
+    let tokens = quote! {
+        impl Sized + use<'_, 'a, A, Test>
+    };
+
+    snapshot!(tokens as Type, @r###"
+    Type::ImplTrait {
+        bounds: [
+            TypeParamBound::Trait(TraitBound {
+                path: Path {
+                    segments: [
+                        PathSegment {
+                            ident: "Sized",
+                        },
+                    ],
+                },
+            }),
+            Token![+],
+            TypeParamBound::Verbatim(`use < '_ , 'a , A , Test >`),
+        ],
+    }
+    "###);
+
+    let trailing = quote! {
+        impl Sized + use<'_,>
+    };
+
+    snapshot!(trailing as Type, @r###"
+    Type::ImplTrait {
+        bounds: [
+            TypeParamBound::Trait(TraitBound {
+                path: Path {
+                    segments: [
+                        PathSegment {
+                            ident: "Sized",
+                        },
+                    ],
+                },
+            }),
+            Token![+],
+            TypeParamBound::Verbatim(`use < '_ , >`),
+        ],
+    }
+    "###);
+}
diff --git a/tests/test_unparenthesize.rs b/tests/test_unparenthesize.rs
new file mode 100644
index 0000000000..69166aca20
--- /dev/null
+++ b/tests/test_unparenthesize.rs
@@ -0,0 +1,63 @@
+#![cfg(not(miri))]
+#![allow(clippy::manual_assert, clippy::uninlined_format_args)]
+
+use quote::ToTokens as _;
+use std::fs;
+use std::mem;
+use std::panic;
+use std::path::Path;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use syn::visit_mut::{self, VisitMut};
+use syn::Expr;
+
+#[macro_use]
+mod macros;
+
+mod repo;
+
+#[test]
+fn test_unparenthesize() {
+    repo::rayon_init();
+    repo::clone_rust();
+
+    let failed = AtomicUsize::new(0);
+
+    repo::for_each_rust_file(|path| test(path, &failed));
+
+    let failed = failed.into_inner();
+    if failed > 0 {
+        panic!("{} failures", failed);
+    }
+}
+
+struct FlattenParens;
+
+impl VisitMut for FlattenParens {
+    fn visit_expr_mut(&mut self, e: &mut Expr) {
+        while let Expr::Paren(paren) = e {
+            *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER);
+        }
+        visit_mut::visit_expr_mut(self, e);
+    }
+}
+
+fn test(path: &Path, failed: &AtomicUsize) {
+    let content = fs::read_to_string(path).unwrap();
+
+    match panic::catch_unwind(|| -> syn::Result<()> {
+        let mut syntax_tree = syn::parse_file(&content)?;
+        FlattenParens.visit_file_mut(&mut syntax_tree);
+        syn::parse2::<syn::File>(syntax_tree.to_token_stream())?;
+        Ok(())
+    }) {
+        Err(_) => {
+            errorf!("=== {}: syn panic\n", path.display());
+            failed.fetch_add(1, Ordering::Relaxed);
+        }
+        Ok(Err(msg)) => {
+            errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
+            failed.fetch_add(1, Ordering::Relaxed);
+        }
+        Ok(Ok(())) => {}
+    }
+}
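To experiment with what the new test does on a single snippet instead of a full rustc checkout, the same visitor can be applied to one expression and the printed result re-parsed. The sketch below duplicates FlattenParens from tests/test_unparenthesize.rs and assumes syn with the "full" and "visit-mut" features plus quote; the input expression is arbitrary:

use quote::ToTokens as _;
use std::mem;
use syn::visit_mut::{self, VisitMut};
use syn::Expr;

// Same shape as the visitor in the new test: peel off every Expr::Paren layer
// before recursing into what is left.
struct FlattenParens;

impl VisitMut for FlattenParens {
    fn visit_expr_mut(&mut self, e: &mut Expr) {
        while let Expr::Paren(paren) = e {
            *e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER);
        }
        visit_mut::visit_expr_mut(self, e);
    }
}

fn main() -> syn::Result<()> {
    let mut expr: Expr = syn::parse_str("(x as i32) < (y + 1)")?;
    FlattenParens.visit_expr_mut(&mut expr);
    // With the Paren nodes gone, the printed tokens must still parse as an Expr;
    // that is the same property the test checks for every file in the rust repo.
    let printed = expr.to_token_stream();
    syn::parse2::<Expr>(printed.clone())?;
    println!("{}", printed);
    Ok(())
}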