Make the style-aware length more accurate
parent 2d66e9c0b3
commit 32bc73b6f9

1 changed file with 10 additions and 6 deletions
@@ -56,24 +56,25 @@ def wc_wrap(text: str, length: int) -> Generator[str, None, None]:
     words = re.split(r"\s+", text.strip())
     stack = []  # stack to ensure enclosure of style tags
     temp_length = length
     for word in words:
         word_len = wcswidth(word)
         matches = re.findall(STYLE_TAG_PATTERN, word)
         for match in matches:
             full, end, name = match
             if name in STYLES:
                 word_len -= len(full)
                 temp_length += len(full)
                 if end != '/':
                     stack.append(name)
                 elif len(stack) and name == stack[-1]:
                     stack.pop()
-        if line_words and line_len + word_len > length:
+        if line_words and line_len + word_len > temp_length:
             line = " ".join(line_words)
+            temp_length = length
             for style in reversed(stack):
                 line += '</' + style + '>'
-                temp_length += 3 + len(style)
                 line_len += len(style) + 3
+                temp_length += len(style) + 3
             if line_len <= temp_length:
                 yield line
             else:
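A hand check of the arithmetic in this hunk. The constants are not shown in the diff, so the values below are assumptions: STYLE_TAG_PATTERN is taken to capture (full, end, name) groups for tags like <red> and </red>, and 'red' is taken to be in STYLES. Tags cost raw characters but no visible columns, so each match shrinks word_len and grows temp_length by the same amount:

    import re
    from wcwidth import wcswidth

    STYLE_TAG_PATTERN = r"(<(/?)([a-z]+)>)"  # assumed shape, not the module's own
    word = "<red>hi</red>"
    length = 10

    word_len = wcswidth(word)  # 13: wcswidth counts the tag characters too
    temp_length = length
    for full, end, name in re.findall(STYLE_TAG_PATTERN, word):
        word_len -= len(full)     # 13 - 5 - 6 -> 2 visible columns
        temp_length += len(full)  # 10 + 5 + 6 -> 21 allowed raw characters

    assert (word_len, temp_length) == (2, 21)

The added reset temp_length = length when a line is emitted is the heart of the fix: the allowance earned by tags on the finished line must not leak into the next one.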
@@ -81,11 +82,14 @@ def wc_wrap(text: str, length: int) -> Generator[str, None, None]:
             line_len = 0
             line_words = []
+            temp_length = length
 
         if len(line_words) == 0:
             styles = ''
             for style in stack:
                 styles += '<' + style + '>'
+            temp_length += len(styles)
+            line_len += len(styles)
             line_words = [styles + word]
             stack = []
         else:
 
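The two additions in this hunk preserve a simple invariant: the tags re-opened at the start of a new line add the same number of raw characters to line_len as they add to the allowance temp_length, so the visible budget stays at length. A standalone check, assuming two pending styles on the stack:

    stack = ["red", "b"]
    length = 10
    line_len, temp_length = 0, length

    styles = ""
    for style in stack:
        styles += "<" + style + ">"  # '<red><b>', 8 invisible characters

    line_len += len(styles)
    temp_length += len(styles)

    assert temp_length - line_len == length  # visible budget unchanged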
@@ -94,10 +98,10 @@ def wc_wrap(text: str, length: int) -> Generator[str, None, None]:
 
     if line_words:
         line = " ".join(line_words)
-        if line_len <= length:
+        if line_len <= temp_length:
             yield line
         else:
-            yield from _wc_hard_wrap(line, length)
+            yield from _wc_hard_wrap(line, temp_length)
 
 
 def trunc(text: str, length: int) -> str:
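Taken together, the three hunks mean the generator can be driven with styled input without the tags eating into the wrap width. A usage sketch (illustrative; it assumes 'red' is a registered style in STYLES):

    for line in wc_wrap("Lorem <red>ipsum dolor</red> sit amet", 20):
        print(line)

Every yielded line should occupy at most 20 visible columns; a style still open at a line break is closed at the end of that line (first hunk) and re-opened at the start of the next (second hunk), and the final flush (third hunk) now compares against temp_length, so trailing tags on the last line no longer force a hard wrap.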