Skip to content

Commit 8d17721

Browse files
committed
Corrected code style
1 parent 9eb841b commit 8d17721

File tree

1 file changed

+10
-11
lines changed

1 file changed

+10
-11
lines changed

discord_markdown_ast_parser/lexer.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import re
2-
from dataclasses import dataclass, InitVar, field
2+
from dataclasses import dataclass, field
33
from enum import Enum
44
from typing import Optional, List, Generator, Dict
55
import itertools
@@ -8,10 +8,10 @@
88
class Lexing:
99
def __init__(self, pattern: Optional[str] = None, flags: re.RegexFlag = re.NOFLAG):
1010
self.regex = re.compile(pattern, flags=flags) if pattern else None
11-
11+
1212
def __call__(self, text: str):
1313
return self.regex and self.regex.match(text)
14-
14+
1515
def __repr__(self):
1616
return f"{self.__class__.__name__}({self.regex and self.regex.pattern!r})"
1717

@@ -22,16 +22,16 @@ def __repr__(self):
2222
class LexingRule(Lexing, Enum):
2323
USER_MENTION = r"<@!?(\d{15,20})>"
2424
ROLE_MENTION = r"<@&(\d{15,20})>"
25-
SLASH_COMMAND_MENTION = r"</([a-zA-Z0-9_ ]{2,}):(\d{15,20})>"
25+
SLASH_COMMAND_MENTION = r"</([a-zA-Z0-9_ ]{2,}):(\d{15,20})>"
2626
CHANNEL_MENTION = r"<#(\d{15,20})>"
2727
TIMESTAMP = r"<t:(-?\d+)(?::([tTdDfFR]))?>"
2828
EMOJI_CUSTOM = r"<:([a-zA-Z0-9_]{2,}):(\d{15,20})>"
2929
EMOJI_CUSTOM_ANIMATED = r"<a:([a-zA-Z0-9_]{2,}):(\d{15,20})>"
3030
EMOJI_UNICODE = r"(\u00a9|\u00ae|[\u2000-\u3300]|\ud83c[\ud000-\udfff]|\ud83d[\ud000-\udfff]|\ud83e[\ud000-\udfff])"
3131
EMOJI_UNICODE_ENCODED = r":([a-zA-Z0-9_]+):"
32-
URL_WITHOUT_PREVIEW_EMBEDDED = f"\[([^\]]+)\]\(<({URL_REGEX})>\)"
33-
URL_WITH_PREVIEW_EMBEDDED = f"\[([^\]]+)\]\(({URL_REGEX})\)"
34-
URL_WITHOUT_PREVIEW = f"<{URL_REGEX}>"
32+
URL_WITHOUT_PREVIEW_EMBEDDED = fr"\[([^\]]+)\]\(<({URL_REGEX})>\)"
33+
URL_WITH_PREVIEW_EMBEDDED = fr"\[([^\]]+)\]\(({URL_REGEX})\)"
34+
URL_WITHOUT_PREVIEW = fr"<{URL_REGEX}>"
3535
URL_WITH_PREVIEW = URL_REGEX
3636
QUOTE_LINE_PREFIX = r"(>>)?> "
3737
TILDE = r"~"
@@ -49,12 +49,11 @@ class Token:
4949
value: str = ""
5050
lexing_rule: Lexing = LexingRule.TEXT_INLINE
5151
groups: List[str] = field(default_factory=list)
52-
52+
5353
def __contains__(self, rule: Lexing):
5454
return self.lexing_rule == rule
5555

5656

57-
5857
def lex(input_text: str, custom: Optional[Dict[str, List[Lexing]]] = None) -> Generator[Token, None, None]:
5958
"""Lexes the input text and returns a generator of tokens.
6059
The generator will yield a token for each lexing rule that matches the input text.
@@ -67,7 +66,7 @@ def lex(input_text: str, custom: Optional[Dict[str, List[Lexing]]] = None) -> Ge
6766
"""
6867
seen_simple_text = ""
6968
custom = custom or {}
70-
69+
7170
while input_text:
7271
for rule in itertools.chain(*custom.values(), LexingRule):
7372
match = rule(input_text)
@@ -80,7 +79,7 @@ def lex(input_text: str, custom: Optional[Dict[str, List[Lexing]]] = None) -> Ge
8079
continue # don't yield a token in this run
8180

8281
# cut off matched part
83-
input_text = input_text[len(match[0]) :]
82+
input_text = input_text[len(match[0]):]
8483

8584
# yield inline text if we have some left
8685
if len(seen_simple_text) > 0:

0 commit comments

Comments
 (0)