Skip to content

Commit 0b4d7d5

Browse files
authored
Run pyupgrade on blib2to3 and src (#3771)
1 parent 114e835 commit 0b4d7d5

12 files changed

Lines changed: 102 additions & 112 deletions

File tree

src/black/files.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@
4242
import colorama # noqa: F401
4343

4444

45-
@lru_cache()
45+
@lru_cache
4646
def find_project_root(
4747
srcs: Sequence[str], stdin_filename: Optional[str] = None
4848
) -> Tuple[Path, str]:
@@ -212,7 +212,7 @@ def strip_specifier_set(specifier_set: SpecifierSet) -> SpecifierSet:
212212
return SpecifierSet(",".join(str(s) for s in specifiers))
213213

214214

215-
@lru_cache()
215+
@lru_cache
216216
def find_user_pyproject_toml() -> Path:
217217
r"""Return the path to the top-level user configuration for black.
218218
@@ -232,7 +232,7 @@ def find_user_pyproject_toml() -> Path:
232232
return user_config_path.resolve()
233233

234234

235-
@lru_cache()
235+
@lru_cache
236236
def get_gitignore(root: Path) -> PathSpec:
237237
"""Return a PathSpec matching gitignore content if present."""
238238
gitignore = root / ".gitignore"

src/black/handle_ipynb_magics.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ class Replacement:
5555
src: str
5656

5757

58-
@lru_cache()
58+
@lru_cache
5959
def jupyter_dependencies_are_installed(*, verbose: bool, quiet: bool) -> bool:
6060
try:
6161
# isort: off

src/blib2to3/pgen2/conv.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def parse_graminit_h(self, filename):
6363
try:
6464
f = open(filename)
6565
except OSError as err:
66-
print("Can't open %s: %s" % (filename, err))
66+
print(f"Can't open {filename}: {err}")
6767
return False
6868
self.symbol2number = {}
6969
self.number2symbol = {}
@@ -72,7 +72,7 @@ def parse_graminit_h(self, filename):
7272
lineno += 1
7373
mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
7474
if not mo and line.strip():
75-
print("%s(%s): can't parse %s" % (filename, lineno, line.strip()))
75+
print(f"{filename}({lineno}): can't parse {line.strip()}")
7676
else:
7777
symbol, number = mo.groups()
7878
number = int(number)
@@ -113,7 +113,7 @@ def parse_graminit_c(self, filename):
113113
try:
114114
f = open(filename)
115115
except OSError as err:
116-
print("Can't open %s: %s" % (filename, err))
116+
print(f"Can't open {filename}: {err}")
117117
return False
118118
# The code below essentially uses f's iterator-ness!
119119
lineno = 0

src/blib2to3/pgen2/driver.py

Lines changed: 13 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -28,11 +28,8 @@
2828
Iterable,
2929
List,
3030
Optional,
31-
Text,
3231
Iterator,
3332
Tuple,
34-
TypeVar,
35-
Generic,
3633
Union,
3734
)
3835
from contextlib import contextmanager
@@ -116,7 +113,7 @@ def can_advance(self, to: int) -> bool:
116113
return True
117114

118115

119-
class Driver(object):
116+
class Driver:
120117
def __init__(self, grammar: Grammar, logger: Optional[Logger] = None) -> None:
121118
self.grammar = grammar
122119
if logger is None:
@@ -189,30 +186,30 @@ def parse_tokens(self, tokens: Iterable[GoodTokenInfo], debug: bool = False) ->
189186
assert p.rootnode is not None
190187
return p.rootnode
191188

192-
def parse_stream_raw(self, stream: IO[Text], debug: bool = False) -> NL:
189+
def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> NL:
193190
"""Parse a stream and return the syntax tree."""
194191
tokens = tokenize.generate_tokens(stream.readline, grammar=self.grammar)
195192
return self.parse_tokens(tokens, debug)
196193

197-
def parse_stream(self, stream: IO[Text], debug: bool = False) -> NL:
194+
def parse_stream(self, stream: IO[str], debug: bool = False) -> NL:
198195
"""Parse a stream and return the syntax tree."""
199196
return self.parse_stream_raw(stream, debug)
200197

201198
def parse_file(
202-
self, filename: Path, encoding: Optional[Text] = None, debug: bool = False
199+
self, filename: Path, encoding: Optional[str] = None, debug: bool = False
203200
) -> NL:
204201
"""Parse a file and return the syntax tree."""
205-
with io.open(filename, "r", encoding=encoding) as stream:
202+
with open(filename, encoding=encoding) as stream:
206203
return self.parse_stream(stream, debug)
207204

208-
def parse_string(self, text: Text, debug: bool = False) -> NL:
205+
def parse_string(self, text: str, debug: bool = False) -> NL:
209206
"""Parse a string and return the syntax tree."""
210207
tokens = tokenize.generate_tokens(
211208
io.StringIO(text).readline, grammar=self.grammar
212209
)
213210
return self.parse_tokens(tokens, debug)
214211

215-
def _partially_consume_prefix(self, prefix: Text, column: int) -> Tuple[Text, Text]:
212+
def _partially_consume_prefix(self, prefix: str, column: int) -> Tuple[str, str]:
216213
lines: List[str] = []
217214
current_line = ""
218215
current_column = 0
@@ -240,7 +237,7 @@ def _partially_consume_prefix(self, prefix: Text, column: int) -> Tuple[Text, Te
240237
return "".join(lines), current_line
241238

242239

243-
def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> Text:
240+
def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> str:
244241
head, tail = os.path.splitext(gt)
245242
if tail == ".txt":
246243
tail = ""
@@ -252,8 +249,8 @@ def _generate_pickle_name(gt: Path, cache_dir: Optional[Path] = None) -> Text:
252249

253250

254251
def load_grammar(
255-
gt: Text = "Grammar.txt",
256-
gp: Optional[Text] = None,
252+
gt: str = "Grammar.txt",
253+
gp: Optional[str] = None,
257254
save: bool = True,
258255
force: bool = False,
259256
logger: Optional[Logger] = None,
@@ -276,7 +273,7 @@ def load_grammar(
276273
return g
277274

278275

279-
def _newer(a: Text, b: Text) -> bool:
276+
def _newer(a: str, b: str) -> bool:
280277
"""Inquire whether file a was written since file b."""
281278
if not os.path.exists(a):
282279
return False
@@ -286,7 +283,7 @@ def _newer(a: Text, b: Text) -> bool:
286283

287284

288285
def load_packaged_grammar(
289-
package: str, grammar_source: Text, cache_dir: Optional[Path] = None
286+
package: str, grammar_source: str, cache_dir: Optional[Path] = None
290287
) -> grammar.Grammar:
291288
"""Normally, loads a pickled grammar by doing
292289
pkgutil.get_data(package, pickled_grammar)
@@ -309,7 +306,7 @@ def load_packaged_grammar(
309306
return g
310307

311308

312-
def main(*args: Text) -> bool:
309+
def main(*args: str) -> bool:
313310
"""Main program, when run as a script: produce grammar pickle files.
314311
315312
Calls load_grammar for each argument, a path to a grammar text file.

src/blib2to3/pgen2/grammar.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,19 +16,19 @@
1616
import os
1717
import pickle
1818
import tempfile
19-
from typing import Any, Dict, List, Optional, Text, Tuple, TypeVar, Union
19+
from typing import Any, Dict, List, Optional, Tuple, TypeVar, Union
2020

2121
# Local imports
2222
from . import token
2323

2424
_P = TypeVar("_P", bound="Grammar")
25-
Label = Tuple[int, Optional[Text]]
25+
Label = Tuple[int, Optional[str]]
2626
DFA = List[List[Tuple[int, int]]]
2727
DFAS = Tuple[DFA, Dict[int, int]]
2828
Path = Union[str, "os.PathLike[str]"]
2929

3030

31-
class Grammar(object):
31+
class Grammar:
3232
"""Pgen parsing tables conversion class.
3333
3434
Once initialized, this class supplies the grammar tables for the

src/blib2to3/pgen2/literals.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,10 @@
55

66
import re
77

8-
from typing import Dict, Match, Text
8+
from typing import Dict, Match
99

1010

11-
simple_escapes: Dict[Text, Text] = {
11+
simple_escapes: Dict[str, str] = {
1212
"a": "\a",
1313
"b": "\b",
1414
"f": "\f",
@@ -22,7 +22,7 @@
2222
}
2323

2424

25-
def escape(m: Match[Text]) -> Text:
25+
def escape(m: Match[str]) -> str:
2626
all, tail = m.group(0, 1)
2727
assert all.startswith("\\")
2828
esc = simple_escapes.get(tail)
@@ -44,7 +44,7 @@ def escape(m: Match[Text]) -> Text:
4444
return chr(i)
4545

4646

47-
def evalString(s: Text) -> Text:
47+
def evalString(s: str) -> str:
4848
assert s.startswith("'") or s.startswith('"'), repr(s[:1])
4949
q = s[0]
5050
if s[:3] == q * 3:

src/blib2to3/pgen2/parse.py

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
how this parsing engine works.
1010
1111
"""
12-
import copy
1312
from contextlib import contextmanager
1413

1514
# Local imports
@@ -18,7 +17,6 @@
1817
cast,
1918
Any,
2019
Optional,
21-
Text,
2220
Union,
2321
Tuple,
2422
Dict,
@@ -35,7 +33,7 @@
3533
from blib2to3.pgen2.driver import TokenProxy
3634

3735

38-
Results = Dict[Text, NL]
36+
Results = Dict[str, NL]
3937
Convert = Callable[[Grammar, RawNode], Union[Node, Leaf]]
4038
DFA = List[List[Tuple[int, int]]]
4139
DFAS = Tuple[DFA, Dict[int, int]]
@@ -100,7 +98,7 @@ def backtrack(self) -> Iterator[None]:
10098
finally:
10199
self.parser.is_backtracking = is_backtracking
102100

103-
def add_token(self, tok_type: int, tok_val: Text, raw: bool = False) -> None:
101+
def add_token(self, tok_type: int, tok_val: str, raw: bool = False) -> None:
104102
func: Callable[..., Any]
105103
if raw:
106104
func = self.parser._addtoken
@@ -114,7 +112,7 @@ def add_token(self, tok_type: int, tok_val: Text, raw: bool = False) -> None:
114112
args.insert(0, ilabel)
115113
func(*args)
116114

117-
def determine_route(self, value: Optional[Text] = None, force: bool = False) -> Optional[int]:
115+
def determine_route(self, value: Optional[str] = None, force: bool = False) -> Optional[int]:
118116
alive_ilabels = self.ilabels
119117
if len(alive_ilabels) == 0:
120118
*_, most_successful_ilabel = self._dead_ilabels
@@ -131,18 +129,18 @@ class ParseError(Exception):
131129
"""Exception to signal the parser is stuck."""
132130

133131
def __init__(
134-
self, msg: Text, type: Optional[int], value: Optional[Text], context: Context
132+
self, msg: str, type: Optional[int], value: Optional[str], context: Context
135133
) -> None:
136134
Exception.__init__(
137-
self, "%s: type=%r, value=%r, context=%r" % (msg, type, value, context)
135+
self, f"{msg}: type={type!r}, value={value!r}, context={context!r}"
138136
)
139137
self.msg = msg
140138
self.type = type
141139
self.value = value
142140
self.context = context
143141

144142

145-
class Parser(object):
143+
class Parser:
146144
"""Parser engine.
147145
148146
The proper usage sequence is:
@@ -236,7 +234,7 @@ def setup(self, proxy: "TokenProxy", start: Optional[int] = None) -> None:
236234
self.used_names: Set[str] = set()
237235
self.proxy = proxy
238236

239-
def addtoken(self, type: int, value: Text, context: Context) -> bool:
237+
def addtoken(self, type: int, value: str, context: Context) -> bool:
240238
"""Add a token; return True iff this is the end of the program."""
241239
# Map from token to label
242240
ilabels = self.classify(type, value, context)
@@ -284,7 +282,7 @@ def addtoken(self, type: int, value: Text, context: Context) -> bool:
284282

285283
return self._addtoken(ilabel, type, value, context)
286284

287-
def _addtoken(self, ilabel: int, type: int, value: Text, context: Context) -> bool:
285+
def _addtoken(self, ilabel: int, type: int, value: str, context: Context) -> bool:
288286
# Loop until the token is shifted; may raise exceptions
289287
while True:
290288
dfa, state, node = self.stack[-1]
@@ -329,7 +327,7 @@ def _addtoken(self, ilabel: int, type: int, value: Text, context: Context) -> bo
329327
# No success finding a transition
330328
raise ParseError("bad input", type, value, context)
331329

332-
def classify(self, type: int, value: Text, context: Context) -> List[int]:
330+
def classify(self, type: int, value: str, context: Context) -> List[int]:
333331
"""Turn a token into a label. (Internal)
334332
335333
Depending on whether the value is a soft-keyword or not,
@@ -352,7 +350,7 @@ def classify(self, type: int, value: Text, context: Context) -> List[int]:
352350
raise ParseError("bad token", type, value, context)
353351
return [ilabel]
354352

355-
def shift(self, type: int, value: Text, newstate: int, context: Context) -> None:
353+
def shift(self, type: int, value: str, newstate: int, context: Context) -> None:
356354
"""Shift a token. (Internal)"""
357355
if self.is_backtracking:
358356
dfa, state, _ = self.stack[-1]

0 commit comments

Comments
 (0)