
Commit f5400f1

Unrolled build for rust-lang#125815
Rollup merge of rust-lang#125815 - nnethercote:rustc_parse-top-level-cleanups, r=spastorino

`rustc_parse` top-level cleanups

A bunch of improvements in and around `compiler/rustc_parse/src/lib.rs`. Many of the changes streamline the API in that file from this (12 functions and one macro):

```
name                                     args                      return type
----                                     ----                      -----------
panictry_buffer!                         Result<T, Vec<Diag>>      T
pub parse_crate_from_file                path                      PResult<Crate>
pub parse_crate_attrs_from_file          path                      PResult<AttrVec>
pub parse_crate_from_source_str          name,src                  PResult<Crate>
pub parse_crate_attrs_from_source_str    name,src                  PResult<AttrVec>
pub new_parser_from_source_str           name,src                  Parser
pub maybe_new_parser_from_source_str     name,src                  Result<Parser, Vec<Diag>>
pub new_parser_from_file                 path,error_sp             Parser
maybe_source_file_to_parser              srcfile                   Result<Parser, Vec<Diag>>
pub parse_stream_from_source_str         name,src,override_sp      TokenStream
pub source_file_to_stream                srcfile,override_sp       TokenStream
maybe_file_to_stream                     srcfile,override_sp       Result<TokenStream, Vec<Diag>>
pub stream_to_parser                     stream,subparser_name     Parser
```

to this:

```
name                              args                      return type
----                              ----                      -----------
unwrap_or_emit_fatal              Result<T, Vec<Diag>>      T
pub new_parser_from_source_str    name,src                  Result<Parser, Vec<Diag>>
pub new_parser_from_file          path,error_sp             Result<Parser, Vec<Diag>>
new_parser_from_source_file       srcfile                   Result<Parser, Vec<Diag>>
pub source_str_to_stream          name,src,override_sp      Result<TokenStream, Vec<Diag>>
source_file_to_stream             srcfile,override_sp       Result<TokenStream, Vec<Diag>>
```

I found the old API quite confusing, with lots of similar-sounding function names and no clear structure. I think the new API is much better.

r? `@spastorino`
2 parents 5ee2dfd + 2d4e7df commit f5400f1
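
Not part of the commit itself, but as an illustration of how call sites adapt: under the new API both parser constructors are fallible, so callers either feed the result through `unwrap_or_emit_fatal` (the common pattern in this diff) or match on the `Err(Vec<Diag>)` case themselves (as `rustc_interface` does when parsing `--cfg` strings). A minimal sketch under that assumption; it only builds inside the compiler workspace, and the helper names `parser_or_fatal`/`parser_or_cancel` are invented for the example:

```rust
use rustc_parse::parser::Parser;
use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_span::FileName;

// Fatal path: emit any buffered lexer diagnostics as fatal errors and get the
// parser back, matching the behaviour of the old infallible constructor.
fn parser_or_fatal<'a>(psess: &'a ParseSess, name: FileName, src: String) -> Parser<'a> {
    unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, src))
}

// Recoverable path: handle the `Err(Vec<Diag>)` case explicitly, here by
// cancelling the diagnostics, as the cfg parsing in `rustc_interface` does.
fn parser_or_cancel<'a>(psess: &'a ParseSess, name: FileName, src: String) -> Option<Parser<'a>> {
    match new_parser_from_source_str(psess, name, src) {
        Ok(parser) => Some(parser),
        Err(errs) => {
            errs.into_iter().for_each(|err| err.cancel());
            None
        }
    }
}
```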

24 files changed: +213 -255 lines

compiler/rustc_builtin_macros/src/cfg_eval.rs

+1 -1

@@ -196,7 +196,7 @@ impl CfgEval<'_, '_> {
         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
         // to the captured `AttrTokenStream` (specifically, we capture
         // `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
-        let mut parser = rustc_parse::stream_to_parser(&self.cfg.sess.psess, orig_tokens, None);
+        let mut parser = Parser::new(&self.cfg.sess.psess, orig_tokens, None);
         parser.capture_cfg = true;
         match parse_annotatable_with(&mut parser) {
             Ok(a) => annotatable = a,

compiler/rustc_builtin_macros/src/cmdline_attrs.rs

+3 -2

@@ -4,16 +4,17 @@ use crate::errors;
 use rustc_ast::attr::mk_attr;
 use rustc_ast::token;
 use rustc_ast::{self as ast, AttrItem, AttrStyle};
+use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal};
 use rustc_session::parse::ParseSess;
 use rustc_span::FileName;
 
 pub fn inject(krate: &mut ast::Crate, psess: &ParseSess, attrs: &[String]) {
     for raw_attr in attrs {
-        let mut parser = rustc_parse::new_parser_from_source_str(
+        let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
             psess,
             FileName::cli_crate_attr_source_code(raw_attr),
             raw_attr.clone(),
-        );
+        ));
 
         let start_span = parser.token.span;
         let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item(false) {

compiler/rustc_builtin_macros/src/source_util.rs

+2 -2

@@ -12,8 +12,8 @@ use rustc_expand::base::{
 };
 use rustc_expand::module::DirOwnership;
 use rustc_lint_defs::BuiltinLintDiag;
-use rustc_parse::new_parser_from_file;
 use rustc_parse::parser::{ForceCollect, Parser};
+use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal};
 use rustc_session::lint::builtin::INCOMPLETE_INCLUDE;
 use rustc_span::source_map::SourceMap;
 use rustc_span::symbol::Symbol;
@@ -126,7 +126,7 @@ pub(crate) fn expand_include<'cx>(
             return ExpandResult::Ready(DummyResult::any(sp, guar));
         }
     };
-    let p = new_parser_from_file(cx.psess(), &file, Some(sp));
+    let p = unwrap_or_emit_fatal(new_parser_from_file(cx.psess(), &file, Some(sp)));
 
     // If in the included file we have e.g., `mod bar;`,
    // then the path of `bar.rs` should be relative to the directory of `file`.

compiler/rustc_driver_impl/src/lib.rs

+6 -4

@@ -32,6 +32,7 @@ use rustc_interface::{interface, Queries};
 use rustc_lint::unerased_lint_store;
 use rustc_metadata::creader::MetadataLoader;
 use rustc_metadata::locator;
+use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
 use rustc_session::config::{nightly_options, CG_OPTIONS, Z_OPTIONS};
 use rustc_session::config::{ErrorOutputType, Input, OutFileName, OutputType};
 use rustc_session::getopts::{self, Matches};
@@ -1264,12 +1265,13 @@ pub fn handle_options(early_dcx: &EarlyDiagCtxt, args: &[String]) -> Option<geto
 }
 
 fn parse_crate_attrs<'a>(sess: &'a Session) -> PResult<'a, ast::AttrVec> {
-    match &sess.io.input {
-        Input::File(ifile) => rustc_parse::parse_crate_attrs_from_file(ifile, &sess.psess),
+    let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
+        Input::File(file) => new_parser_from_file(&sess.psess, file, None),
         Input::Str { name, input } => {
-            rustc_parse::parse_crate_attrs_from_source_str(name.clone(), input.clone(), &sess.psess)
+            new_parser_from_source_str(&sess.psess, name.clone(), input.clone())
         }
-    }
+    });
+    parser.parse_inner_attributes()
 }
 
 /// Runs a closure and catches unwinds triggered by fatal errors.

compiler/rustc_expand/src/base.rs

+3 -3

@@ -15,7 +15,7 @@ use rustc_data_structures::sync::{self, Lrc};
 use rustc_errors::{DiagCtxt, ErrorGuaranteed, PResult};
 use rustc_feature::Features;
 use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools};
-use rustc_parse::{parser, MACRO_ARGUMENTS};
+use rustc_parse::{parser::Parser, MACRO_ARGUMENTS};
 use rustc_session::config::CollapseMacroDebuginfo;
 use rustc_session::{parse::ParseSess, Limit, Session};
 use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
@@ -1149,8 +1149,8 @@ impl<'a> ExtCtxt<'a> {
     pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
         expand::MacroExpander::new(self, true)
     }
-    pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
-        rustc_parse::stream_to_parser(&self.sess.psess, stream, MACRO_ARGUMENTS)
+    pub fn new_parser_from_tts(&self, stream: TokenStream) -> Parser<'a> {
+        Parser::new(&self.sess.psess, stream, MACRO_ARGUMENTS)
     }
     pub fn source_map(&self) -> &'a SourceMap {
         self.sess.psess.source_map()

compiler/rustc_expand/src/module.rs

+3 -2

@@ -5,8 +5,8 @@ use crate::errors::{
 use rustc_ast::ptr::P;
 use rustc_ast::{token, AttrVec, Attribute, Inline, Item, ModSpans};
 use rustc_errors::{Diag, ErrorGuaranteed};
-use rustc_parse::new_parser_from_file;
 use rustc_parse::validate_attr;
+use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal};
 use rustc_session::parse::ParseSess;
 use rustc_session::Session;
 use rustc_span::symbol::{sym, Ident};
@@ -66,7 +66,8 @@ pub(crate) fn parse_external_mod(
     }
 
     // Actually parse the external file as a module.
-    let mut parser = new_parser_from_file(&sess.psess, &mp.file_path, Some(span));
+    let mut parser =
+        unwrap_or_emit_fatal(new_parser_from_file(&sess.psess, &mp.file_path, Some(span)));
     let (inner_attrs, items, inner_span) =
         parser.parse_mod(&token::Eof).map_err(|err| ModError::ParserError(err))?;
     attrs.extend(inner_attrs);

compiler/rustc_expand/src/proc_macro.rs

+2 -3

@@ -6,7 +6,7 @@ use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_errors::ErrorGuaranteed;
-use rustc_parse::parser::ForceCollect;
+use rustc_parse::parser::{ForceCollect, Parser};
 use rustc_session::config::ProcMacroExecutionStrategy;
 use rustc_span::profiling::SpannedEventArgRecorder;
 use rustc_span::Span;
@@ -154,8 +154,7 @@ impl MultiItemModifier for DeriveProcMacro {
         };
 
         let error_count_before = ecx.dcx().err_count();
-        let mut parser =
-            rustc_parse::stream_to_parser(&ecx.sess.psess, stream, Some("proc-macro derive"));
+        let mut parser = Parser::new(&ecx.sess.psess, stream, Some("proc-macro derive"));
         let mut items = vec![];
 
         loop {

compiler/rustc_expand/src/proc_macro_server.rs

+8 -10

@@ -13,7 +13,8 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
 use rustc_parse::lexer::nfc_normalize;
-use rustc_parse::parse_stream_from_source_str;
+use rustc_parse::parser::Parser;
+use rustc_parse::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
 use rustc_session::parse::ParseSess;
 use rustc_span::def_id::CrateNum;
 use rustc_span::symbol::{self, sym, Symbol};
@@ -466,7 +467,8 @@ impl server::FreeFunctions for Rustc<'_, '_> {
 
     fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, ()> {
         let name = FileName::proc_macro_source_code(s);
-        let mut parser = rustc_parse::new_parser_from_source_str(self.psess(), name, s.to_owned());
+        let mut parser =
+            unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned()));
 
         let first_span = parser.token.span.data();
         let minus_present = parser.eat(&token::BinOp(token::Minus));
@@ -538,12 +540,12 @@ impl server::TokenStream for Rustc<'_, '_> {
     }
 
     fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        parse_stream_from_source_str(
+        unwrap_or_emit_fatal(source_str_to_stream(
+            self.psess(),
             FileName::proc_macro_source_code(src),
             src.to_string(),
-            self.psess(),
             Some(self.call_site),
-        )
+        ))
     }
 
     fn to_string(&mut self, stream: &Self::TokenStream) -> String {
@@ -553,11 +555,7 @@ impl server::TokenStream for Rustc<'_, '_> {
     fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
         // Parse the expression from our tokenstream.
         let expr: PResult<'_, _> = try {
-            let mut p = rustc_parse::stream_to_parser(
-                self.psess(),
-                stream.clone(),
-                Some("proc_macro expand expr"),
-            );
+            let mut p = Parser::new(self.psess(), stream.clone(), Some("proc_macro expand expr"));
             let expr = p.parse_expr()?;
             if p.token != token::Eof {
                 p.unexpected()?;

compiler/rustc_interface/src/interface.rs

+3 -3

@@ -14,7 +14,7 @@ use rustc_lint::LintStore;
 use rustc_middle::ty;
 use rustc_middle::ty::CurrentGcx;
 use rustc_middle::util::Providers;
-use rustc_parse::maybe_new_parser_from_source_str;
+use rustc_parse::new_parser_from_source_str;
 use rustc_query_impl::QueryCtxt;
 use rustc_query_system::query::print_query_stack;
 use rustc_session::config::{self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName};
@@ -67,7 +67,7 @@ pub(crate) fn parse_cfg(dcx: &DiagCtxt, cfgs: Vec<String>) -> Cfg {
                 };
             }
 
-            match maybe_new_parser_from_source_str(&psess, filename, s.to_string()) {
+            match new_parser_from_source_str(&psess, filename, s.to_string()) {
                 Ok(mut parser) => match parser.parse_meta_item() {
                     Ok(meta_item) if parser.token == token::Eof => {
                         if meta_item.path.segments.len() != 1 {
@@ -166,7 +166,7 @@ pub(crate) fn parse_check_cfg(dcx: &DiagCtxt, specs: Vec<String>) -> CheckCfg {
            error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
        };
 
-        let mut parser = match maybe_new_parser_from_source_str(&psess, filename, s.to_string()) {
+        let mut parser = match new_parser_from_source_str(&psess, filename, s.to_string()) {
            Ok(parser) => parser,
            Err(errs) => {
                errs.into_iter().for_each(|err| err.cancel());

compiler/rustc_interface/src/passes.rs

+11 -6

@@ -19,7 +19,9 @@ use rustc_middle::arena::Arena;
 use rustc_middle::dep_graph::DepGraph;
 use rustc_middle::ty::{self, GlobalCtxt, RegisteredTools, TyCtxt};
 use rustc_middle::util::Providers;
-use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str, validate_attr};
+use rustc_parse::{
+    new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal, validate_attr,
+};
 use rustc_passes::{abi_test, hir_stats, layout_test};
 use rustc_resolve::Resolver;
 use rustc_session::code_stats::VTableSizeInfo;
@@ -42,11 +44,14 @@ use std::{env, fs, iter};
 use tracing::{info, instrument};
 
 pub fn parse<'a>(sess: &'a Session) -> PResult<'a, ast::Crate> {
-    let krate = sess.time("parse_crate", || match &sess.io.input {
-        Input::File(file) => parse_crate_from_file(file, &sess.psess),
-        Input::Str { input, name } => {
-            parse_crate_from_source_str(name.clone(), input.clone(), &sess.psess)
-        }
+    let krate = sess.time("parse_crate", || {
+        let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
+            Input::File(file) => new_parser_from_file(&sess.psess, file, None),
+            Input::Str { input, name } => {
+                new_parser_from_source_str(&sess.psess, name.clone(), input.clone())
+            }
+        });
+        parser.parse_crate_mod()
     })?;
 
     if sess.opts.unstable_opts.input_stats {

compiler/rustc_parse/src/lexer/mod.rs

+2 -2

@@ -42,7 +42,7 @@ pub(crate) struct UnmatchedDelim {
     pub candidate_span: Option<Span>,
 }
 
-pub(crate) fn parse_token_trees<'psess, 'src>(
+pub(crate) fn lex_token_trees<'psess, 'src>(
     psess: &'psess ParseSess,
     mut src: &'src str,
     mut start_pos: BytePos,
@@ -66,7 +66,7 @@ pub(crate) fn parse_token_trees<'psess, 'src>(
         last_lifetime: None,
    };
    let (stream, res, unmatched_delims) =
-        tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
+        tokentrees::TokenTreesReader::lex_all_token_trees(string_reader);
    match res {
        Ok(()) if unmatched_delims.is_empty() => Ok(stream),
        _ => {

compiler/rustc_parse/src/lexer/tokentrees.rs

+24 -26

@@ -2,6 +2,7 @@ use super::diagnostics::report_suspicious_mismatch_block;
 use super::diagnostics::same_indentation_level;
 use super::diagnostics::TokenTreeDiagInfo;
 use super::{StringReader, UnmatchedDelim};
+use crate::Parser;
 use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
@@ -17,22 +18,21 @@ pub(super) struct TokenTreesReader<'psess, 'src> {
 }
 
 impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
-    pub(super) fn parse_all_token_trees(
+    pub(super) fn lex_all_token_trees(
         string_reader: StringReader<'psess, 'src>,
     ) -> (TokenStream, Result<(), Vec<PErr<'psess>>>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
             string_reader,
             token: Token::dummy(),
             diag_info: TokenTreeDiagInfo::default(),
         };
-        let (_open_spacing, stream, res) =
-            tt_reader.parse_token_trees(/* is_delimited */ false);
+        let (_open_spacing, stream, res) = tt_reader.lex_token_trees(/* is_delimited */ false);
         (stream, res, tt_reader.diag_info.unmatched_delims)
     }
 
-    // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
-    // the result is that of the opening delimiter.
-    fn parse_token_trees(
+    // Lex into a token stream. The `Spacing` in the result is that of the
+    // opening delimiter.
+    fn lex_token_trees(
         &mut self,
         is_delimited: bool,
     ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'psess>>>) {
@@ -42,12 +42,10 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
-                token::OpenDelim(delim) => {
-                    buf.push(match self.parse_token_tree_open_delim(delim) {
-                        Ok(val) => val,
-                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
-                    })
-                }
+                token::OpenDelim(delim) => buf.push(match self.lex_token_tree_open_delim(delim) {
+                    Ok(val) => val,
+                    Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
+                }),
                 token::CloseDelim(delim) => {
                     return (
                         open_spacing,
@@ -95,24 +93,24 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
         err
     }
 
-    fn parse_token_tree_open_delim(
+    fn lex_token_tree_open_delim(
         &mut self,
         open_delim: Delimiter,
     ) -> Result<TokenTree, Vec<PErr<'psess>>> {
-        // The span for beginning of the delimited section
+        // The span for beginning of the delimited section.
         let pre_span = self.token.span;
 
         self.diag_info.open_braces.push((open_delim, self.token.span));
 
-        // Parse the token trees within the delimiters.
+        // Lex the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
+        let (open_spacing, tts, res) = self.lex_token_trees(/* is_delimited */ true);
         if let Err(errs) = res {
             return Err(self.unclosed_delim_err(tts, errs));
         }
 
-        // Expand to cover the entire delimited token tree
+        // Expand to cover the entire delimited token tree.
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
         let sm = self.string_reader.psess.source_map();
 
@@ -150,7 +148,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
                 self.diag_info.last_unclosed_found_span = Some(self.token.span);
                 // This is a conservative error: only report the last unclosed
                 // delimiter. The previous unclosed delimiters could actually be
-                // closed! The parser just hasn't gotten to them yet.
+                // closed! The lexer just hasn't gotten to them yet.
                 if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
                     unclosed_delimiter = Some(sp);
                 };
@@ -234,11 +232,11 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
    ) -> Vec<PErr<'psess>> {
        // If there are unclosed delims, see if there are diff markers and if so, point them
        // out instead of complaining about the unclosed delims.
-        let mut parser = crate::stream_to_parser(self.string_reader.psess, tts, None);
+        let mut parser = Parser::new(self.string_reader.psess, tts, None);
        let mut diff_errs = vec![];
-        // Suggest removing a `{` we think appears in an `if`/`while` condition
-        // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
-        // we have no way of tracking this in the lexer itself, so we piggyback on the parser
+        // Suggest removing a `{` we think appears in an `if`/`while` condition.
+        // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition,
+        // but we have no way of tracking this in the lexer itself, so we piggyback on the parser.
        let mut in_cond = false;
        while parser.token != token::Eof {
            if let Err(diff_err) = parser.err_vcs_conflict_marker() {
@@ -249,14 +247,15 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
                    parser.token.kind,
                    token::CloseDelim(Delimiter::Brace) | token::FatArrow
                ) {
-                // end of the `if`/`while` body, or the end of a `match` guard
+                // End of the `if`/`while` body, or the end of a `match` guard.
                in_cond = false;
            } else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
                // Store the `&&` and `let` to use their spans later when creating the diagnostic
                let maybe_andand = parser.look_ahead(1, |t| t.clone());
                let maybe_let = parser.look_ahead(2, |t| t.clone());
                if maybe_andand == token::OpenDelim(Delimiter::Brace) {
-                    // This might be the beginning of the `if`/`while` body (i.e., the end of the condition)
+                    // This might be the beginning of the `if`/`while` body (i.e., the end of the
+                    // condition).
                    in_cond = false;
                } else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
                    let mut err = parser.dcx().struct_span_err(
@@ -288,8 +287,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
    }
 
    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'psess> {
-        // An unexpected closing delimiter (i.e., there is no
-        // matching opening delimiter).
+        // An unexpected closing delimiter (i.e., there is no matching opening delimiter).
        let token_str = token_to_string(&self.token);
        let msg = format!("unexpected closing delimiter: `{token_str}`");
        let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
