Current Dir: /opt/alt/python311/lib/python3.11/site-packages/markdown_it/rules_inline
     Name               Size       Modified                Perms
📁   ..                 -          05/14/2024 03:18:15 PM  rwxr-xr-x
📄   __init__.py        696 bytes  05/14/2024 03:18:15 PM  rw-r--r--
📁   __pycache__        -          05/14/2024 03:18:15 PM  rwxr-xr-x
📄   autolink.py        2.03 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   backticks.py       1.99 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   balance_pairs.py   4.74 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   emphasis.py        3.05 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   entity.py          1.61 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   escape.py          1.62 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   fragments_join.py  1.46 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   html_inline.py     1.1 KB     05/14/2024 03:18:15 PM  rw-r--r--
📄   image.py           4.04 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   link.py            4.22 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   linkify.py         1.66 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   newline.py         1.27 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   state_inline.py    4.98 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   strikethrough.py   3.14 KB    05/14/2024 03:18:15 PM  rw-r--r--
📄   text.py            901 bytes  05/14/2024 03:18:15 PM  rw-r--r--
File: strikethrough.py
# ~~strike through~~
from __future__ import annotations

from .state_inline import Delimiter, StateInline


def tokenize(state: StateInline, silent: bool) -> bool:
    """Insert each marker as a separate text token, and add it to delimiter list"""
    start = state.pos
    ch = state.src[start]

    if silent:
        return False

    if ch != "~":
        return False

    scanned = state.scanDelims(state.pos, True)

    length = scanned.length

    if length < 2:
        return False

    if length % 2:
        token = state.push("text", "", 0)
        token.content = ch
        length -= 1

    i = 0
    while i < length:
        token = state.push("text", "", 0)
        token.content = ch + ch
        state.delimiters.append(
            Delimiter(
                marker=ord(ch),
                length=0,  # disable "rule of 3" length checks meant for emphasis
                token=len(state.tokens) - 1,
                end=-1,
                open=scanned.can_open,
                close=scanned.can_close,
            )
        )

        i += 2

    state.pos += scanned.length

    return True


def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
    loneMarkers = []
    maximum = len(delimiters)

    i = 0
    while i < maximum:
        startDelim = delimiters[i]

        if startDelim.marker != 0x7E:  # /* ~ */
            i += 1
            continue

        if startDelim.end == -1:
            i += 1
            continue

        endDelim = delimiters[startDelim.end]

        token = state.tokens[startDelim.token]
        token.type = "s_open"
        token.tag = "s"
        token.nesting = 1
        token.markup = "~~"
        token.content = ""

        token = state.tokens[endDelim.token]
        token.type = "s_close"
        token.tag = "s"
        token.nesting = -1
        token.markup = "~~"
        token.content = ""

        if (
            state.tokens[endDelim.token - 1].type == "text"
            and state.tokens[endDelim.token - 1].content == "~"
        ):
            loneMarkers.append(endDelim.token - 1)

        i += 1

    # If a marker sequence has an odd number of characters, it's split
    # like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
    # start of the sequence.
    #
    # So, we have to move all those markers after subsequent s_close tags.
    #
    while loneMarkers:
        i = loneMarkers.pop()
        j = i + 1

        while (j < len(state.tokens)) and (state.tokens[j].type == "s_close"):
            j += 1

        j -= 1

        if i != j:
            token = state.tokens[j]
            state.tokens[j] = state.tokens[i]
            state.tokens[i] = token


def postProcess(state: StateInline) -> None:
    """Walk through delimiter list and replace text tokens with tags."""
    tokens_meta = state.tokens_meta
    maximum = len(state.tokens_meta)
    _postProcess(state, state.delimiters)

    curr = 0
    while curr < maximum:
        try:
            curr_meta = tokens_meta[curr]
        except IndexError:
            pass
        else:
            if curr_meta and "delimiters" in curr_meta:
                _postProcess(state, curr_meta["delimiters"])
        curr += 1
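A minimal sketch of this rule in action, assuming markdown-it-py is installed (pip install markdown-it-py); the default "commonmark" preset ships with strikethrough disabled, so the rule is enabled by name before rendering. The second render exercises the lone-marker handling described in the comment inside _postProcess: an odd run of tildes is split into a lone "~" text token plus "~~" delimiters, and postProcess moves the trailing lone marker past the s_close tag.

# A quick check of the strikethrough rule above; outputs are what the
# tokenize/postProcess logic should produce, not verified transcripts.
from markdown_it import MarkdownIt

md = MarkdownIt().enable("strikethrough")

# An even run of tildes becomes a single <s> open/close pair.
print(md.render("~~deleted~~"))
# expected: <p><s>deleted</s></p>

# An odd run: `~~~` -> `~` + `~~`, so the extra tildes survive as
# literal text on either side of the <s> element.
print(md.render("~~~deleted~~~"))
# expected: <p>~<s>deleted</s>~</p>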