
Commit

jackdewinter authored Oct 23, 2024
1 parent 3335836 commit 911e871
Showing 16 changed files with 6,220 additions and 477 deletions.
8 changes: 4 additions & 4 deletions publish/coverage.json
@@ -2,12 +2,12 @@
"projectName": "pymarkdown",
"reportSource": "pytest",
"branchLevel": {
"totalMeasured": 5367,
"totalCovered": 5367
"totalMeasured": 5385,
"totalCovered": 5385
},
"lineLevel": {
"totalMeasured": 21037,
"totalCovered": 21037
"totalMeasured": 21081,
"totalCovered": 21081
}
}

4 changes: 2 additions & 2 deletions publish/pylint_suppression.json
@@ -489,7 +489,7 @@
"too-few-public-methods": 1,
"too-many-arguments": 19,
"too-many-locals": 3,
"too-many-boolean-expressions": 2
"too-many-boolean-expressions": 3
},
"pymarkdown/transform_markdown/transform_list_block.py": {
"too-many-arguments": 4
Expand All @@ -511,7 +511,7 @@
"too-many-arguments": 253,
"too-many-locals": 50,
"chained-comparison": 2,
"too-many-boolean-expressions": 3,
"too-many-boolean-expressions": 4,
"protected-access": 25,
"deprecated-decorator": 3,
"broad-exception-caught": 3,
Expand Down
8 changes: 4 additions & 4 deletions publish/test-results.json
@@ -1364,10 +1364,10 @@
   },
   {
     "name": "test.rules.test_md031",
-    "totalTests": 379,
+    "totalTests": 653,
     "failedTests": 0,
     "errorTests": 0,
-    "skippedTests": 0,
+    "skippedTests": 84,
     "elapsedTimeInMilliseconds": 0
   },
   {
@@ -1620,10 +1620,10 @@
   },
   {
     "name": "test.test_markdown_extra",
-    "totalTests": 177,
+    "totalTests": 203,
     "failedTests": 0,
     "errorTests": 0,
-    "skippedTests": 1,
+    "skippedTests": 2,
     "elapsedTimeInMilliseconds": 0
   },
   {
15 changes: 7 additions & 8 deletions pymarkdown/container_blocks/container_helper.py
@@ -83,16 +83,14 @@ def __reduce_containers_if_required_bq_list(
     def __xx(parser_state: ParserState, extra_bqs: int) -> List[MarkdownToken]:
         until_index = len(parser_state.token_stack) - 1
         needed_bqs = extra_bqs
-        while (
-            until_index > 0
-            and needed_bqs > 0
-            and parser_state.token_stack[until_index].is_block_quote
-        ):
+        while until_index > 0 and needed_bqs > 0:
+            if parser_state.token_stack[until_index].is_block_quote:
+                needed_bqs -= 1
             until_index -= 1
-            needed_bqs -= 1
         x_tokens, _ = parser_state.close_open_blocks_fn(
             parser_state,
             include_block_quotes=True,
+            include_lists=True,
             was_forced=True,
             until_this_index=until_index + 1,
         )
@@ -111,9 +109,10 @@ def __reduce_containers_if_required_bq(
         extra_bqs: int,
     ) -> Tuple[bool, str, Optional[str]]:
         x_tokens = ContainerHelper.__xx(parser_state, extra_bqs)
+        count_block_quotes = sum(bool(i.is_block_quote_end) for i in x_tokens)
         assert (
-            len(x_tokens) == extra_bqs
-        ), "Should have generated the requested number of tokens."
+            count_block_quotes == extra_bqs
+        ), "Should have generated the requested number of block quote tokens."

         first_new_token = cast(EndMarkdownToken, x_tokens[0])

3 changes: 3 additions & 0 deletions pymarkdown/leaf_blocks/atx_leaf_block_processor.py
@@ -187,6 +187,7 @@ def __parse_atx_heading_found(
             extracted_whitespace_at_start,
             extracted_whitespace_at_end,
             extracted_whitespace_before_end,
+            block_quote_data,
         )
         return new_tokens

@@ -208,6 +209,7 @@ def __parse_atx_heading_add_tokens(
         extracted_whitespace_at_start: str,
         extracted_whitespace_at_end: str,
         extracted_whitespace_before_end: str,
+        block_quote_data: BlockQuoteData,
     ) -> List[MarkdownToken]:
         start_token = AtxHeadingMarkdownToken(
             hash_count,
@@ -222,6 +224,7 @@ def __parse_atx_heading_add_tokens(
             position_marker.index_indent,
             old_top_of_stack,
             new_tokens,
+            block_quote_data,
             was_token_already_added_to_stack=False,
             delay_tab_match=delay_tab_match,
         )
1 change: 1 addition & 0 deletions pymarkdown/leaf_blocks/fenced_leaf_block_processor.py
@@ -438,6 +438,7 @@ def __process_fenced_start(
             position_marker.index_indent,
             old_top_of_stack,
             new_tokens,
+            block_quote_data,
             alt_removed_chars_at_start=removed_char_length,
             original_line=original_line,
         )
90 changes: 62 additions & 28 deletions pymarkdown/leaf_blocks/leaf_block_helper.py
@@ -5,6 +5,7 @@
 import logging
 from typing import List, Optional, Tuple, cast

+from pymarkdown.block_quotes.block_quote_data import BlockQuoteData
 from pymarkdown.general.parser_helper import ParserHelper
 from pymarkdown.general.parser_logger import ParserLogger
 from pymarkdown.general.parser_state import ParserState
@@ -30,6 +31,7 @@ def correct_for_leaf_block_start_in_list(
         removed_chars_at_start: int,
         old_top_of_stack_token: StackToken,
         html_tokens: List[MarkdownToken],
+        block_quote_data: BlockQuoteData,
         was_token_already_added_to_stack: bool = True,
         delay_tab_match: bool = False,
         alt_removed_chars_at_start: Optional[int] = None,
@@ -83,6 +85,7 @@ def correct_for_leaf_block_start_in_list(
             delay_tab_match,
             alt_removed_chars_at_start,
             is_html,
+            block_quote_data,
             original_line,
         )

@@ -280,22 +283,65 @@ def __detect_list_already_added_to(parser_state: ParserState) -> bool:
             removed_tokens.append(cast(ListStartMarkdownToken, current_token))
             add_index += 1

-        if original_removed_tokens:
-            assert len(original_removed_tokens) == 1
-            assert original_removed_tokens[0].is_list_start
+        assert original_removed_tokens
+        assert len(original_removed_tokens) == 1
+        assert original_removed_tokens[0].is_list_start

-            original_leading_spaces = original_removed_tokens[0].leading_spaces
-            current_leading_spaces = removed_tokens[0].leading_spaces
-            if (
+        original_leading_spaces = original_removed_tokens[0].leading_spaces
+        current_leading_spaces = removed_tokens[0].leading_spaces
+        return bool(
+            (
                 original_leading_spaces
                 and current_leading_spaces
                 and original_leading_spaces != current_leading_spaces
-            ):
-                return True
-        return False
+            )
+        )

     # pylint: enable=too-many-locals

+    @staticmethod
+    def __handle_leaf_start_inner(
+        parser_state: ParserState,
+        removed_chars_at_start: int,
+        ws_count: int,
+        html_tokens: List[MarkdownToken],
+        stack_delta: int,
+    ) -> Tuple[Optional[ListStackToken], int]:
+        assert parser_state.token_stack[
+            -1
+        ].is_list, "Token at the end of the stack must be a list token."
+        list_stack_token = cast(ListStackToken, parser_state.token_stack[-1])
+
+        POGGER.debug(">>removed_chars_at_start>>$>>", removed_chars_at_start)
+        POGGER.debug(">>stack indent>>$>>", list_stack_token.indent_level)
+        if (removed_chars_at_start + ws_count) >= list_stack_token.indent_level:
+            return None, stack_delta
+        tokens_from_close, _ = parser_state.close_open_blocks_fn(
+            parser_state,
+            until_this_index=(len(parser_state.token_stack) - 1),
+            include_lists=True,
+        )
+        POGGER.debug(
+            ">>correct_for_leaf_block_start_in_list>>tokens_from_close>>$>>",
+            tokens_from_close,
+        )
+        html_tokens.extend(tokens_from_close)
+        remaining_stack_index = len(parser_state.token_stack) - 1
+        while (
+            stack_delta > 0
+            and parser_state.token_stack[remaining_stack_index].is_block_quote
+        ):
+            stack_delta -= 1
+            remaining_stack_index -= 1
+        if remaining_stack_index != len(parser_state.token_stack) - 1:
+            tokens_from_close, _ = parser_state.close_open_blocks_fn(
+                parser_state,
+                until_this_index=remaining_stack_index + 1,
+                include_block_quotes=True,
+            )
+            html_tokens.extend(tokens_from_close)
+        return list_stack_token, stack_delta
+
     # pylint: disable=too-many-arguments
     @staticmethod
     def __handle_leaf_start(
@@ -305,6 +351,7 @@ def __handle_leaf_start(
         delay_tab_match: bool,
         alt_removed_chars_at_start: Optional[int],
         is_html: bool,
+        block_quote_data: BlockQuoteData,
         original_line: Optional[str] = None,
     ) -> None:
         POGGER.debug(
@@ -320,30 +367,17 @@ def __handle_leaf_start(
             parser_state.original_line_to_parse[removed_chars_at_start:], 0
         )

+        stack_delta = block_quote_data.stack_count - block_quote_data.current_count
+
         adjust_with_leading_spaces = False
         is_remaining_list_token = True
         while is_remaining_list_token:
-            assert parser_state.token_stack[
-                -1
-            ].is_list, "Token at the end of the stack must be a list token."
-            list_stack_token = cast(ListStackToken, parser_state.token_stack[-1])
-
-            POGGER.debug(">>removed_chars_at_start>>$>>", removed_chars_at_start)
-            POGGER.debug(">>stack indent>>$>>", list_stack_token.indent_level)
-            if (removed_chars_at_start + ws_count) >= list_stack_token.indent_level:
-                break  # pragma: no cover
-
-            tokens_from_close, _ = parser_state.close_open_blocks_fn(
-                parser_state,
-                until_this_index=(len(parser_state.token_stack) - 1),
-                include_lists=True,
+            list_stack_token, stack_delta = LeafBlockHelper.__handle_leaf_start_inner(
+                parser_state, removed_chars_at_start, ws_count, html_tokens, stack_delta
             )
+            if list_stack_token is None:
+                break
             adjust_with_leading_spaces = True
-            POGGER.debug(
-                ">>correct_for_leaf_block_start_in_list>>tokens_from_close>>$>>",
-                tokens_from_close,
-            )
-            html_tokens.extend(tokens_from_close)
             is_remaining_list_token = parser_state.token_stack[-1].is_list

         POGGER.debug("is_remaining_list_token=$", is_remaining_list_token)
2 changes: 2 additions & 0 deletions pymarkdown/leaf_blocks/leaf_block_processor.py
@@ -79,6 +79,7 @@ def is_paragraph_ending_leaf_block_start(
             extracted_whitespace,
             original_line,
             index_indent,
+            skip_whitespace_check=True,
         )
         POGGER.debug(
             "is_paragraph_ending_leaf_block_start>>is_fenced_code_block>>$",
@@ -141,6 +142,7 @@ def handle_html_block(
             position_marker.index_indent,
             old_top_of_stack,
             html_tokens,
+            grab_bag.block_quote_data,
             alt_removed_chars_at_start=alt_removed_chars_at_start,
             is_html=True,
             original_line=grab_bag.original_line,
5 changes: 5 additions & 0 deletions pymarkdown/leaf_blocks/setext_leaf_block_processor.py
@@ -130,6 +130,7 @@ def parse_setext_headings(
                 ex_ws_l,
                 split_tab_with_block_quote_suffix,
                 extra_whitespace_prefix,
+                block_quote_data,
             )
         else:
             POGGER.debug(
@@ -150,6 +151,7 @@ def __prepare_and_create_setext_token(
         ex_ws_l: int,
         split_tab_with_block_quote_suffix: bool,
         extra_whitespace_prefix: Optional[str],
+        block_quote_data: BlockQuoteData,
     ) -> Tuple[int, int, str]:
         _, collected_to_index = ParserHelper.collect_while_character_verified(
             line_to_parse,
@@ -189,6 +191,7 @@ def __prepare_and_create_setext_token(
             extra_whitespace_prefix,
             old_top_of_stack,
             new_tokens,
+            block_quote_data,
         )
         return collected_to_index, after_whitespace_index, extra_whitespace_after_setext

@@ -203,6 +206,7 @@ def __prepare_and_create_setext_token_list_adjust(
         extra_whitespace_prefix: Optional[str],
         old_top_of_stack: StackToken,
         new_tokens: List[MarkdownToken],
+        block_quote_data: BlockQuoteData,
     ) -> None:
         POGGER.debug("parser_state.token_stack[-1]>>:$:<", parser_state.token_stack[-1])
         POGGER.debug("parser_state.token_stack>>:$:<", parser_state.token_stack)
@@ -221,6 +225,7 @@ def __prepare_and_create_setext_token_list_adjust(
             position_marker.index_indent,
             old_top_of_stack,
             new_tokens,
+            block_quote_data,
             was_token_already_added_to_stack=False,
             delay_tab_match=True,
         )
36 changes: 21 additions & 15 deletions pymarkdown/leaf_blocks/thematic_leaf_block_processor.py
@@ -176,21 +176,24 @@ def __handle_special_case(
             last_list_markdown_token = cast(
                 ListStartMarkdownToken, list_end_token.start_markdown_token
             )
-            inner_list_markdown_token = cast(
-                ListStartMarkdownToken,
-                parser_state.token_stack[stack_index].matching_markdown_token,
-            )
-            leading_space_to_move = (
-                last_list_markdown_token.remove_last_leading_space()
-            )
-            assert leading_space_to_move is not None
-            POGGER.debug(
-                "__handle_special_case>>list_token>>$", inner_list_markdown_token
-            )
-            inner_list_markdown_token.add_leading_spaces(leading_space_to_move)
-            POGGER.debug(
-                "__handle_special_case>>list_token>>$", inner_list_markdown_token
-            )
+            if last_list_markdown_token.leading_spaces is not None:
+                inner_list_markdown_token = cast(
+                    ListStartMarkdownToken,
+                    parser_state.token_stack[stack_index].matching_markdown_token,
+                )
+                leading_space_to_move = (
+                    last_list_markdown_token.remove_last_leading_space()
+                )
+                assert leading_space_to_move is not None
+                POGGER.debug(
+                    "__handle_special_case>>list_token>>$",
+                    inner_list_markdown_token,
+                )
+                inner_list_markdown_token.add_leading_spaces(leading_space_to_move)
+                POGGER.debug(
+                    "__handle_special_case>>list_token>>$",
+                    inner_list_markdown_token,
+                )

     @staticmethod
     def parse_thematic_break(
@@ -303,6 +306,7 @@ def __perform_adjusts(
                 new_tokens,
                 start_char,
                 token_text,
+                block_quote_data,
             )
         else:
             split_tab, extracted_whitespace, whitespace_prefix = (
@@ -330,6 +334,7 @@ def __parse_thematic_break_with_suffix(
         new_tokens: List[MarkdownToken],
         start_char: str,
         token_text: str,
+        block_quote_data: BlockQuoteData,
     ) -> None:
         POGGER.debug("parser_state.token_stack[-1]>>:$:<", parser_state.token_stack[-1])
         POGGER.debug("parser_state.token_stack>>:$:<", parser_state.token_stack)
@@ -350,6 +355,7 @@ def __parse_thematic_break_with_suffix(
             position_marker.index_indent,
             old_top_of_stack,
             new_tokens,
+            block_quote_data,
             was_token_already_added_to_stack=False,
             delay_tab_match=True,
         )
