Skip to content

Commit

Permalink
Merge pull request #149 from jackdewinter/issue-143
Browse files Browse the repository at this point in the history
#145 : First pass
  • Loading branch information
jackdewinter authored Nov 21, 2021
2 parents b971e26 + a69eebf commit 5ab751d
Show file tree
Hide file tree
Showing 17 changed files with 2,419 additions and 1,625 deletions.
4 changes: 3 additions & 1 deletion changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
- refactoring: identified and removed unused pylint suppressions
- [Changed - Issue 112](https://github.com/jackdewinter/pymarkdown/issues/112)
- refactoring: finding and applying all Sourcery recommended issues
- Module Refactoring to reduce complexity
- Changed - Module Refactoring to reduce complexity
- [plugin_manager.py](https://github.com/jackdewinter/pymarkdown/issues/115)
- [list_block_processor.py](https://github.com/jackdewinter/pymarkdown/issues/117)
- [coalesce_processor.py + emphasis_helper.py](https://github.com/jackdewinter/pymarkdown/issues/119)
Expand All @@ -31,6 +31,8 @@
- [main.py + inline_helper.py + rule md027](https://github.com/jackdewinter/pymarkdown/issues/138)
- [rest of main directory](https://github.com/jackdewinter/pymarkdown/issues/140)
- [extensions and rules](https://github.com/jackdewinter/pymarkdown/issues/143)
- [Changed - Issue 145](https://github.com/jackdewinter/pymarkdown/issues/145)
- refactoring: either implemented each TODO item or filed a new issue to address it later

### Fixed

Expand Down
2 changes: 1 addition & 1 deletion publish/coverage.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"projectName": "pymarkdown", "reportSource": "pytest", "branchLevel": {"totalMeasured": 2956, "totalCovered": 2956}, "lineLevel": {"totalMeasured": 10164, "totalCovered": 10164}}
{"projectName": "pymarkdown", "reportSource": "pytest", "branchLevel": {"totalMeasured": 2960, "totalCovered": 2960}, "lineLevel": {"totalMeasured": 10160, "totalCovered": 10160}}
32 changes: 3 additions & 29 deletions pymarkdown/block_quote_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,6 @@ def __handle_block_quote_block_kludges(
last_block_quote_index,
adjusted_index_number,
):
# TODO for nesting, may need to augment with this_bq_count already set.
this_bq_count = alt_this_bq_count
POGGER.debug(">>this_bq_count>>$", this_bq_count)
POGGER.debug(">>did_process>>$", did_process)
Expand Down Expand Up @@ -363,8 +362,7 @@ def __check_if_really_start_list(
was_forced=True,
)
parser_state.token_document.extend(container_level_tokens)
lines_to_requeue = [position_marker.text_to_parse]
requeue_line_info = RequeueLineInfo(lines_to_requeue, False)
requeue_line_info = RequeueLineInfo([position_marker.text_to_parse], False)
return current_indent, requeue_line_info

@staticmethod
Expand Down Expand Up @@ -1091,33 +1089,9 @@ def __ensure_stack_at_level(
],
was_forced=True,
caller_can_handle_requeue=True,
requeue_reset=True,
)
if requeue_line_info and requeue_line_info.lines_to_requeue:
# TODO is this common?
POGGER.debug(
"__ensure_stack_at_level>>lines_to_requeue>>$",
requeue_line_info.lines_to_requeue,
)
POGGER.debug(
"__close_required_lists_after_start>>parser_state.original_line_to_parse>>$",
parser_state.original_line_to_parse,
)
POGGER.debug(
"__ensure_stack_at_level>>token_stack>>$",
parser_state.token_stack,
)
POGGER.debug(
"__ensure_stack_at_level>>token_document>>$",
parser_state.token_document,
)
assert not requeue_line_info.lines_to_requeue[0]
requeue_line_info.lines_to_requeue[
0
] = parser_state.original_line_to_parse
POGGER.debug(
"__close_required_lists_after_start>>lines_to_requeue>>$",
requeue_line_info.lines_to_requeue,
)
if requeue_line_info:
return None, None, requeue_line_info

BlockQuoteProcessor.__decrease_stack(
Expand Down
40 changes: 17 additions & 23 deletions pymarkdown/container_block_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -548,7 +548,6 @@ def __handle_leaf_tokens(
# POGGER.debug_with_visible_whitespace("text>>$>>", line_to_parse)
# POGGER.debug("container_level_tokens>>$>>", container_level_tokens)

# TODO refactor to make indent unnecessary?
calculated_indent = len(parser_state.original_line_to_parse) - len(
line_to_parse
)
Expand Down Expand Up @@ -692,7 +691,6 @@ def __get_list_start_index(
Note: This is one of the more heavily traffic functions in the
parser. Debugging should be uncommented only if needed.
"""
# TODO refactor so it doesn't need this!
new_position_marker = PositionMarker(
position_marker.line_number, start_index, line_to_parse
)
Expand Down Expand Up @@ -1433,7 +1431,6 @@ def __look_for_container_blocks(
init_bq=this_bq_count,
)
assert not requeue_line_info or not requeue_line_info.lines_to_requeue
# TODO will need to deal with force_ignore_first_as_lrd

POGGER.debug("\ncheck next container_start>recursed")
POGGER.debug("check next container_start>stack>>$", parser_state.token_stack)
Expand Down Expand Up @@ -1705,21 +1702,22 @@ def __handle_html_block(
@staticmethod
def __handle_block_leaf_tokens(
parser_state,
xposition_marker,
incoming_position_marker,
new_tokens,
ignore_link_definition_start,
):
remaining_line_to_parse = xposition_marker.text_to_parse[
xposition_marker.index_number :
remaining_line_to_parse = incoming_position_marker.text_to_parse[
incoming_position_marker.index_number :
]
(new_index_number, extracted_whitespace,) = ParserHelper.extract_whitespace(
xposition_marker.text_to_parse, xposition_marker.index_number
incoming_position_marker.text_to_parse,
incoming_position_marker.index_number,
)
position_marker = PositionMarker(
xposition_marker.line_number,
incoming_position_marker.line_number,
new_index_number,
xposition_marker.text_to_parse,
index_indent=xposition_marker.index_indent,
incoming_position_marker.text_to_parse,
index_indent=incoming_position_marker.index_indent,
)

pre_tokens = ContainerBlockProcessor.__close_indented_block_if_indent_not_there(
Expand Down Expand Up @@ -1770,7 +1768,7 @@ def __handle_block_leaf_tokens(
@staticmethod
def __parse_line_for_leaf_blocks(
parser_state,
xposition_marker,
position_marker,
this_bq_count,
removed_chars_at_start,
ignore_link_definition_start,
Expand All @@ -1785,34 +1783,32 @@ def __parse_line_for_leaf_blocks(
Note: This is one of the more heavily traffic functions in the
parser. Debugging should be uncommented only if needed.
"""
POGGER.debug("Leaf Line:$:", xposition_marker.text_to_parse)
POGGER.debug("Leaf Line:$:", position_marker.text_to_parse)
# POGGER.debug("this_bq_count:$:", this_bq_count)
new_tokens = []

# TODO rename to avoid collision with parameter

(
pre_tokens,
outer_processed,
requeue_line_info,
position_marker,
leaf_block_position_marker,
extracted_whitespace,
) = ContainerBlockProcessor.__handle_block_leaf_tokens(
parser_state,
xposition_marker,
position_marker,
new_tokens,
ignore_link_definition_start,
)

if not outer_processed:
assert not new_tokens
new_tokens = LeafBlockProcessor.parse_atx_headings(
parser_state, position_marker, extracted_whitespace
parser_state, leaf_block_position_marker, extracted_whitespace
)
if not new_tokens:
new_tokens = LeafBlockProcessor.parse_indented_code_block(
parser_state,
position_marker,
leaf_block_position_marker,
extracted_whitespace,
removed_chars_at_start,
last_block_quote_index,
Expand All @@ -1822,25 +1818,23 @@ def __parse_line_for_leaf_blocks(
stack_bq_count = parser_state.count_of_block_quotes_on_stack()
new_tokens = LeafBlockProcessor.parse_setext_headings(
parser_state,
position_marker,
leaf_block_position_marker,
extracted_whitespace,
this_bq_count,
stack_bq_count,
)
if not new_tokens:
stack_bq_count = parser_state.count_of_block_quotes_on_stack()
new_tokens = LeafBlockProcessor.parse_thematic_break(
parser_state,
position_marker,
leaf_block_position_marker,
extracted_whitespace,
this_bq_count,
stack_bq_count,
)
if not new_tokens:
stack_bq_count = parser_state.count_of_block_quotes_on_stack()
new_tokens = LeafBlockProcessor.parse_paragraph(
parser_state,
position_marker,
leaf_block_position_marker,
extracted_whitespace,
this_bq_count,
stack_bq_count,
Expand Down
14 changes: 5 additions & 9 deletions pymarkdown/leaf_block_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -567,7 +567,6 @@ def __create_indented_block(
extracted_whitespace,
)

# TODO revisit with tabs
line_number = position_marker.line_number
column_number = (
position_marker.index_number
Expand Down Expand Up @@ -764,19 +763,16 @@ def parse_thematic_break(
extracted_whitespace,
)
if start_char:
# TODO why not use close?
if parser_state.token_stack[-1].is_paragraph:
force_paragraph_close_if_present = (
this_bq_count == 0 and stack_bq_count > 0
)
new_tokens.append(
parser_state.token_stack[
-1
].generate_close_markdown_token_from_stack_token(
was_forced=force_paragraph_close_if_present
)
new_tokens, _ = parser_state.close_open_blocks_fn(
parser_state,
destination_array=new_tokens,
only_these_blocks=[ParagraphStackToken],
was_forced=force_paragraph_close_if_present,
)
del parser_state.token_stack[-1]
if this_bq_count == 0 and stack_bq_count > 0:
new_tokens, _ = parser_state.close_open_blocks_fn(
parser_state,
Expand Down
2 changes: 1 addition & 1 deletion pymarkdown/link_reference_definition_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -729,7 +729,7 @@ def handle_link_reference_definition_leaf_block(
parser_state.original_stack_depth,
parser_state.original_document_depth,
)
if requeue_line_info and requeue_line_info.lines_to_requeue:
if requeue_line_info:
outer_processed = True
POGGER.debug(
"plflb-process_link_reference_definition>>outer_processed>>$<lines_to_requeue<$<$",
Expand Down
61 changes: 19 additions & 42 deletions pymarkdown/list_block_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -345,6 +345,18 @@ def __calculate_starts_within_paragraph(
is_sub_list = start_index >= parser_state.token_stack[-2].indent_level
return is_first_item_in_list, is_sub_list

@staticmethod
def __get_list_functions(is_ulist):
    """
    Return the pair of functions used to process the requested list type.

    is_ulist: True to select the unordered-list functions, False for the
        ordered-list functions.
    Returns a `(is_start_fn, create_token_fn)` tuple: the predicate that
    detects the start of the list and the handler that creates its token.
    """
    if is_ulist:
        POGGER.debug("hlb>>searching for ulist")
        return (
            ListBlockProcessor.is_ulist_start,
            ListBlockProcessor.__handle_list_block_unordered,
        )
    POGGER.debug("hlb>>searching for olist")
    return (
        ListBlockProcessor.is_olist_start,
        ListBlockProcessor.__handle_list_block_ordered,
    )

# pylint: disable=too-many-locals, too-many-arguments
@staticmethod
def handle_list_block(
Expand Down Expand Up @@ -377,14 +389,9 @@ def handle_list_block(

if not did_process:

if is_ulist:
POGGER.debug("hlb>>searching for ulist")
is_start_fn = ListBlockProcessor.is_ulist_start
create_token_fn = ListBlockProcessor.__handle_list_block_unordered
else:
POGGER.debug("hlb>>searching for olist")
is_start_fn = ListBlockProcessor.is_olist_start
create_token_fn = ListBlockProcessor.__handle_list_block_ordered
is_start_fn, create_token_fn = ListBlockProcessor.__get_list_functions(
is_ulist
)

(
started_ulist,
Expand Down Expand Up @@ -1163,30 +1170,9 @@ def __close_required_lists_after_start(
parser_state,
until_this_index=last_list_index + 1,
caller_can_handle_requeue=True,
requeue_reset=True,
)
if requeue_line_info and requeue_line_info.lines_to_requeue:
POGGER.debug(
"__close_required_lists_after_start>>lines_to_requeue>>$",
requeue_line_info.lines_to_requeue,
)
POGGER.debug(
"__close_required_lists_after_start>>parser_state.original_line_to_parse>>$",
parser_state.original_line_to_parse,
)
POGGER.debug(
"__close_required_lists_after_start>>token_stack>>$",
parser_state.token_stack,
)
POGGER.debug(
"__close_required_lists_after_start>>token_document>>$",
parser_state.token_document,
)
assert not requeue_line_info.lines_to_requeue[0]
requeue_line_info.lines_to_requeue[0] = parser_state.original_line_to_parse
POGGER.debug(
"__close_required_lists_after_start>>lines_to_requeue>>$",
requeue_line_info.lines_to_requeue,
)
if requeue_line_info:
return None, None, requeue_line_info

POGGER.debug("old-stack>>$<<", container_level_tokens)
Expand Down Expand Up @@ -1568,18 +1554,9 @@ def __check_for_list_closures(
until_this_index=search_index,
include_lists=True,
caller_can_handle_requeue=True,
requeue_reset=True,
)
POGGER.debug("container_level_tokens>$>", container_level_tokens)
if requeue_line_info:
POGGER.debug("requeue_line_info>$>", requeue_line_info.lines_to_requeue)
POGGER.debug(
"original_line_to_parse>$>", parser_state.original_line_to_parse
)
assert len(requeue_line_info.lines_to_requeue) > 1
assert not requeue_line_info.lines_to_requeue[0]
requeue_line_info.lines_to_requeue[
0
] = parser_state.original_line_to_parse

return container_level_tokens, requeue_line_info

# pylint: enable=too-many-arguments
1 change: 0 additions & 1 deletion pymarkdown/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -621,7 +621,6 @@ def main(self):
if new_handler:
new_handler.close()

# TODO self.__plugins.number_of_pragma_failures
if self.__plugins.number_of_scan_failures or total_error_count:
sys.exit(1)

Expand Down
1 change: 0 additions & 1 deletion pymarkdown/plugins/rule_md_027.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,6 @@ def __handle_fenced_code_block_end(
self.__bq_line_index[num_container_tokens] += 1

def __handle_link_reference_definition(self, context, token, num_container_tokens):
# TODO - https://github.com/jackdewinter/pymarkdown/issues/100
scoped_block_quote_token = self.__container_tokens[-1]
if token.extracted_whitespace:
column_number_delta = -(
Expand Down
3 changes: 2 additions & 1 deletion pymarkdown/requeue_line_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ class RequeueLineInfo:
Class to provide a container for lines that need to be requeued.
"""

def __init__(self, lines_to_requeue=None, force_ignore_first_as_lrd=None):
def __init__(self, lines_to_requeue, force_ignore_first_as_lrd=None):
    """
    Initialize the container with the lines that must be requeued.

    lines_to_requeue: non-empty list of lines to push back for reparsing.
    force_ignore_first_as_lrd: presumably controls whether the first
        requeued line is ignored as a link reference definition —
        TODO confirm against link_reference_definition_helper.py callers.
    """
    # An instance must always carry at least one line to requeue.
    # NOTE: assert is stripped under `python -O`; relies on callers honoring this.
    assert lines_to_requeue
    self.__lines_to_requeue = lines_to_requeue
    self.__force_ignore_first_as_lrd = force_ignore_first_as_lrd

Expand Down
Loading

0 comments on commit 5ab751d

Please sign in to comment.