Skip to content

Commit fcde6c9

Browse files
committed
Auto merge of #17333 - DropDemBits:extract-format-args-snippet-cap, r=Veykril
fix: Only generate snippets for `extract_expressions_from_format_string` if snippets are supported. Part of #17332. Fixes `extract_expressions_from_format_string` so that it doesn't generate snippets if the client doesn't support them.
2 parents 1bed783 + 0c7d5c6 commit fcde6c9

File tree

6 files changed

+189
-68
lines changed

6 files changed

+189
-68
lines changed

src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs

+91-54
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use crate::{AssistContext, Assists};
1+
use crate::{utils, AssistContext, Assists};
22
use hir::DescendPreference;
33
use ide_db::{
44
assists::{AssistId, AssistKind},
@@ -8,8 +8,12 @@ use ide_db::{
88
},
99
};
1010
use itertools::Itertools;
11-
use stdx::format_to;
12-
use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange};
11+
use syntax::{
12+
ast::{self, make},
13+
ted, AstNode, AstToken, NodeOrToken,
14+
SyntaxKind::WHITESPACE,
15+
T,
16+
};
1317

1418
// Assist: extract_expressions_from_format_string
1519
//
@@ -34,6 +38,7 @@ pub(crate) fn extract_expressions_from_format_string(
3438
) -> Option<()> {
3539
let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
3640
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
41+
let tt_delimiter = tt.left_delimiter_token()?.kind();
3742

3843
let expanded_t = ast::String::cast(
3944
ctx.sema
@@ -61,81 +66,95 @@ pub(crate) fn extract_expressions_from_format_string(
6166
"Extract format expressions",
6267
tt.syntax().text_range(),
6368
|edit| {
64-
let fmt_range = fmt_string.syntax().text_range();
65-
66-
// Replace old format string with new format string whose arguments have been extracted
67-
edit.replace(fmt_range, new_fmt);
68-
69-
// Insert cursor at end of format string
70-
edit.insert(fmt_range.end(), "$0");
69+
let tt = edit.make_mut(tt);
7170

7271
// Extract existing arguments in macro
73-
let tokens =
74-
tt.token_trees_and_tokens().collect_vec();
75-
76-
let mut existing_args: Vec<String> = vec![];
72+
let tokens = tt.token_trees_and_tokens().collect_vec();
7773

78-
let mut current_arg = String::new();
79-
if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] =
74+
let existing_args = if let [_opening_bracket, NodeOrToken::Token(_format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(_end_bracket)] =
8075
tokens.as_slice()
8176
{
82-
for t in tokens {
83-
match t {
84-
NodeOrToken::Node(n) => {
85-
format_to!(current_arg, "{n}");
86-
},
87-
NodeOrToken::Token(t) if t.kind() == COMMA => {
88-
existing_args.push(current_arg.trim().into());
89-
current_arg.clear();
90-
},
91-
NodeOrToken::Token(t) => {
92-
current_arg.push_str(t.text());
93-
},
94-
}
95-
}
96-
existing_args.push(current_arg.trim().into());
77+
let args = tokens.split(|it| matches!(it, NodeOrToken::Token(t) if t.kind() == T![,])).map(|arg| {
78+
// Strip off leading and trailing whitespace tokens
79+
let arg = match arg.split_first() {
80+
Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
81+
_ => arg,
82+
};
83+
let arg = match arg.split_last() {
84+
Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
85+
_ => arg,
86+
};
87+
arg
88+
});
9789

98-
// delete everything after the format string till end bracket
99-
// we're going to insert the new arguments later
100-
edit.delete(TextRange::new(
101-
format_string.text_range().end(),
102-
end_bracket.text_range().start(),
103-
));
104-
}
90+
args.collect()
91+
} else {
92+
vec![]
93+
};
10594

10695
// Start building the new args
10796
let mut existing_args = existing_args.into_iter();
108-
let mut args = String::new();
97+
let mut new_tt_bits = vec![NodeOrToken::Token(make::tokens::literal(&new_fmt))];
98+
let mut placeholder_indexes = vec![];
10999

110-
let mut placeholder_idx = 1;
100+
for arg in extracted_args {
101+
if matches!(arg, Arg::Expr(_) | Arg::Placeholder) {
102+
// insert ", " before each arg
103+
new_tt_bits.extend_from_slice(&[
104+
NodeOrToken::Token(make::token(T![,])),
105+
NodeOrToken::Token(make::tokens::single_space()),
106+
]);
107+
}
111108

112-
for extracted_args in extracted_args {
113-
match extracted_args {
114-
Arg::Expr(s)=> {
115-
args.push_str(", ");
109+
match arg {
110+
Arg::Expr(s) => {
116111
// insert arg
117-
args.push_str(&s);
112+
// FIXME: use the crate's edition for parsing
113+
let expr = ast::Expr::parse(&s, syntax::Edition::CURRENT).syntax_node();
114+
let mut expr_tt = utils::tt_from_syntax(expr);
115+
new_tt_bits.append(&mut expr_tt);
118116
}
119117
Arg::Placeholder => {
120-
args.push_str(", ");
121118
// try matching with existing argument
122119
match existing_args.next() {
123-
Some(ea) => {
124-
args.push_str(&ea);
120+
Some(arg) => {
121+
new_tt_bits.extend_from_slice(arg);
125122
}
126123
None => {
127-
// insert placeholder
128-
args.push_str(&format!("${placeholder_idx}"));
129-
placeholder_idx += 1;
124+
placeholder_indexes.push(new_tt_bits.len());
125+
new_tt_bits.push(NodeOrToken::Token(make::token(T![_])));
130126
}
131127
}
132128
}
133129
Arg::Ident(_s) => (),
134130
}
135131
}
136132

133+
137134
// Insert new args
138-
edit.insert(fmt_range.end(), args);
135+
let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update();
136+
ted::replace(tt.syntax(), new_tt.syntax());
137+
138+
if let Some(cap) = ctx.config.snippet_cap {
139+
// Add placeholder snippets over placeholder args
140+
for pos in placeholder_indexes {
141+
// Skip the opening delimiter
142+
let Some(NodeOrToken::Token(placeholder)) =
143+
new_tt.token_trees_and_tokens().skip(1).nth(pos)
144+
else {
145+
continue;
146+
};
147+
148+
if stdx::always!(placeholder.kind() == T![_]) {
149+
edit.add_placeholder_snippet_token(cap, placeholder);
150+
}
151+
}
152+
153+
// Add the final tabstop after the format literal
154+
if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) {
155+
edit.add_tabstop_after_token(cap, literal);
156+
}
157+
}
139158
},
140159
);
141160

@@ -145,7 +164,7 @@ pub(crate) fn extract_expressions_from_format_string(
145164
#[cfg(test)]
146165
mod tests {
147166
use super::*;
148-
use crate::tests::check_assist;
167+
use crate::tests::{check_assist, check_assist_no_snippet_cap};
149168

150169
#[test]
151170
fn multiple_middle_arg() {
@@ -195,7 +214,7 @@ fn main() {
195214
"#,
196215
r#"
197216
fn main() {
198-
print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1);
217+
print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, ${1:_});
199218
}
200219
"#,
201220
);
@@ -292,4 +311,22 @@ fn main() {
292311
"#,
293312
);
294313
}
314+
315+
#[test]
316+
fn without_snippets() {
317+
check_assist_no_snippet_cap(
318+
extract_expressions_from_format_string,
319+
r#"
320+
//- minicore: fmt
321+
fn main() {
322+
print!("{} {x + 1:b} {} {}$0", y + 2, 2);
323+
}
324+
"#,
325+
r#"
326+
fn main() {
327+
print!("{} {:b} {} {}", y + 2, x + 1, 2, _);
328+
}
329+
"#,
330+
);
331+
}
295332
}

src/tools/rust-analyzer/crates/ide-assists/src/utils.rs

+45-2
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,9 @@ use syntax::{
1414
edit_in_place::{AttrsOwnerEdit, Indent, Removable},
1515
make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
1616
},
17-
ted, AstNode, AstToken, Direction, SourceFile,
17+
ted, AstNode, AstToken, Direction, NodeOrToken, SourceFile,
1818
SyntaxKind::*,
19-
SyntaxNode, TextRange, TextSize, T,
19+
SyntaxNode, SyntaxToken, TextRange, TextSize, T,
2020
};
2121

2222
use crate::assist_context::{AssistContext, SourceChangeBuilder};
@@ -916,3 +916,46 @@ pub(crate) fn replace_record_field_expr(
916916
edit.replace(file_range.range, initializer.syntax().text());
917917
}
918918
}
919+
920+
/// Creates a token tree list from a syntax node, creating the needed delimited sub token trees.
921+
/// Assumes that the input syntax node is a valid syntax tree.
922+
pub(crate) fn tt_from_syntax(node: SyntaxNode) -> Vec<NodeOrToken<ast::TokenTree, SyntaxToken>> {
923+
let mut tt_stack = vec![(None, vec![])];
924+
925+
for element in node.descendants_with_tokens() {
926+
let NodeOrToken::Token(token) = element else { continue };
927+
928+
match token.kind() {
929+
T!['('] | T!['{'] | T!['['] => {
930+
// Found an opening delimiter, start a new sub token tree
931+
tt_stack.push((Some(token.kind()), vec![]));
932+
}
933+
T![')'] | T!['}'] | T![']'] => {
934+
// Closing a subtree
935+
let (delimiter, tt) = tt_stack.pop().expect("unbalanced delimiters");
936+
let (_, parent_tt) = tt_stack
937+
.last_mut()
938+
.expect("parent token tree was closed before it was completed");
939+
let closing_delimiter = delimiter.map(|it| match it {
940+
T!['('] => T![')'],
941+
T!['{'] => T!['}'],
942+
T!['['] => T![']'],
943+
_ => unreachable!(),
944+
});
945+
stdx::always!(
946+
closing_delimiter == Some(token.kind()),
947+
"mismatched opening and closing delimiters"
948+
);
949+
950+
let sub_tt = make::token_tree(delimiter.expect("unbalanced delimiters"), tt);
951+
parent_tt.push(NodeOrToken::Node(sub_tt));
952+
}
953+
_ => {
954+
let (_, current_tt) = tt_stack.last_mut().expect("unmatched delimiters");
955+
current_tt.push(NodeOrToken::Token(token))
956+
}
957+
}
958+
}
959+
960+
tt_stack.pop().expect("parent token tree was closed before it was completed").1
961+
}

src/tools/rust-analyzer/crates/ide-db/src/source_change.rs

+6
Original file line numberDiff line numberDiff line change
@@ -338,6 +338,12 @@ impl SourceChangeBuilder {
338338
self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into()))
339339
}
340340

341+
/// Adds a snippet to move the cursor selected over `token`
342+
pub fn add_placeholder_snippet_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
343+
assert!(token.parent().is_some());
344+
self.add_snippet(PlaceSnippet::Over(token.into()))
345+
}
346+
341347
/// Adds a snippet to move the cursor selected over `nodes`
342348
///
343349
/// This allows for renaming newly generated items without having to go

src/tools/rust-analyzer/crates/syntax/src/ast/make.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -1159,7 +1159,7 @@ pub mod tokens {
11591159

11601160
pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
11611161
SourceFile::parse(
1162-
"const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
1162+
"const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT,
11631163
)
11641164
});
11651165

src/tools/rust-analyzer/crates/syntax/src/lib.rs

+33-11
Original file line numberDiff line numberDiff line change
@@ -107,14 +107,22 @@ impl<T> Parse<T> {
107107
}
108108

109109
impl<T: AstNode> Parse<T> {
110+
/// Converts this parse result into a parse result for an untyped syntax tree.
110111
pub fn to_syntax(self) -> Parse<SyntaxNode> {
111112
Parse { green: self.green, errors: self.errors, _ty: PhantomData }
112113
}
113114

115+
/// Gets the parsed syntax tree as a typed ast node.
116+
///
117+
/// # Panics
118+
///
119+
/// Panics if the root node cannot be casted into the typed ast node
120+
/// (e.g. if it's an `ERROR` node).
114121
pub fn tree(&self) -> T {
115122
T::cast(self.syntax_node()).unwrap()
116123
}
117124

125+
/// Converts from `Parse<T>` to [`Result<T, Vec<SyntaxError>>`].
118126
pub fn ok(self) -> Result<T, Vec<SyntaxError>> {
119127
match self.errors() {
120128
errors if !errors.is_empty() => Err(errors),
@@ -167,6 +175,29 @@ impl Parse<SourceFile> {
167175
}
168176
}
169177

178+
impl ast::Expr {
179+
/// Parses an `ast::Expr` from `text`.
180+
///
181+
/// Note that if the parsed root node is not a valid expression, [`Parse::tree`] will panic.
182+
/// For example:
183+
/// ```rust,should_panic
184+
/// # use syntax::{ast, Edition};
185+
/// ast::Expr::parse("let fail = true;", Edition::CURRENT).tree();
186+
/// ```
187+
pub fn parse(text: &str, edition: Edition) -> Parse<ast::Expr> {
188+
let _p = tracing::span!(tracing::Level::INFO, "Expr::parse").entered();
189+
let (green, errors) = parsing::parse_text_at(text, parser::TopEntryPoint::Expr, edition);
190+
let root = SyntaxNode::new_root(green.clone());
191+
192+
assert!(
193+
ast::Expr::can_cast(root.kind()) || root.kind() == SyntaxKind::ERROR,
194+
"{:?} isn't an expression",
195+
root.kind()
196+
);
197+
Parse::new(green, errors)
198+
}
199+
}
200+
170201
/// `SourceFile` represents a parse tree for a single Rust file.
171202
pub use crate::ast::SourceFile;
172203

@@ -177,11 +208,7 @@ impl SourceFile {
177208
let root = SyntaxNode::new_root(green.clone());
178209

179210
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
180-
Parse {
181-
green,
182-
errors: if errors.is_empty() { None } else { Some(errors.into()) },
183-
_ty: PhantomData,
184-
}
211+
Parse::new(green, errors)
185212
}
186213
}
187214

@@ -290,12 +317,7 @@ impl ast::TokenTree {
290317
}
291318

292319
let (green, errors) = builder.finish_raw();
293-
294-
Parse {
295-
green,
296-
errors: if errors.is_empty() { None } else { Some(errors.into()) },
297-
_ty: PhantomData,
298-
}
320+
Parse::new(green, errors)
299321
}
300322
}
301323

src/tools/rust-analyzer/crates/syntax/src/parsing.rs

+13
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,19 @@ pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Ve
1818
(node, errors)
1919
}
2020

21+
pub(crate) fn parse_text_at(
22+
text: &str,
23+
entry: parser::TopEntryPoint,
24+
edition: parser::Edition,
25+
) -> (GreenNode, Vec<SyntaxError>) {
26+
let _p = tracing::span!(tracing::Level::INFO, "parse_text_at").entered();
27+
let lexed = parser::LexedStr::new(text);
28+
let parser_input = lexed.to_input();
29+
let parser_output = entry.parse(&parser_input, edition);
30+
let (node, errors, _eof) = build_tree(lexed, parser_output);
31+
(node, errors)
32+
}
33+
2134
pub(crate) fn build_tree(
2235
lexed: parser::LexedStr<'_>,
2336
parser_output: parser::Output,

0 commit comments

Comments
 (0)