Bump tokenize-rt from 5.2.0 to 6.0.0 (#2376)

* Bump tokenize-rt from 5.2.0 to 6.0.0

Bumps [tokenize-rt](https://github.com/asottile/tokenize-rt) from 5.2.0 to 6.0.0.
- [Commits](https://github.com/asottile/tokenize-rt/compare/v5.2.0...v6.0.0)

---
updated-dependencies:
- dependency-name: tokenize-rt
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update tokenize-rt==6.0.0

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>

[skip ci]
dependabot[bot] authored 2024-08-10 19:19:28 -07:00, committed by GitHub
parent 509d18801b
commit 7d4efac75d
2 changed files with 7 additions and 2 deletions

@@ -99,6 +99,11 @@ def src_to_tokens(src: str) -> list[Token]:
             end_offset += len(newtok.encode())
 
         tok_name = tokenize.tok_name[tok_type]
+
+        if tok_name == 'FSTRING_MIDDLE':  # pragma: >=3.12 cover
+            ecol += tok_text.count('{') + tok_text.count('}')
+            tok_text = tok_text.replace('{', '{{').replace('}', '}}')
+
         tokens.append(Token(tok_name, tok_text, sline, end_offset))
         last_line, last_col = eline, ecol
         if sline != eline:
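
The added branch compensates for how the tokenize module reports f-strings on Python 3.12+: braces escaped as {{ or }} in the source reach FSTRING_MIDDLE token text as single characters, so tokenize-rt re-doubles them and widens the end column to keep the token stream lossless. A minimal round-trip sketch, assuming tokenize-rt 6.0.0 is importable (src_to_tokens and tokens_to_src are its public API; the sample source line is invented):

# On 3.12+ the f-string below yields FSTRING_START/FSTRING_MIDDLE/FSTRING_END
# tokens, and the doubled braces survive the round trip only because of the
# re-escaping above; on older interpreters it is a single STRING token and
# the branch never runs.
from tokenize_rt import src_to_tokens, tokens_to_src

src = "greeting = f'{{hello}} {name}!'\n"
tokens = src_to_tokens(src)
assert tokens_to_src(tokens) == src
print([tok.name for tok in tokens])
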
@@ -115,7 +120,7 @@ def tokens_to_src(tokens: Iterable[Token]) -> str:
 
 def reversed_enumerate(
         tokens: Sequence[Token],
-) -> Generator[tuple[int, Token], None, None]:
+) -> Generator[tuple[int, Token]]:
     for i in reversed(range(len(tokens))):
         yield i, tokens[i]
 
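
The annotation change relies on PEP 696 type-parameter defaults: Generator's send and return types default to None in current typeshed, so Generator[tuple[int, Token]] reads the same as Generator[tuple[int, Token], None, None] to a type checker with PEP 696 support, and under from __future__ import annotations the shorter spelling is never evaluated at runtime on older interpreters. A small sketch of the equivalence (the countdown functions are invented for illustration, not part of tokenize-rt):

# Both annotations describe the same generator to a PEP 696-aware checker;
# with the __future__ import they remain strings at runtime, so the
# one-argument form is safe even before Python 3.13.
from __future__ import annotations

from collections.abc import Generator


def countdown_verbose(n: int) -> Generator[int, None, None]:
    yield from range(n, 0, -1)


def countdown_terse(n: int) -> Generator[int]:  # send/return default to None
    yield from range(n, 0, -1)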