1212import subprocess
1313import sys
1414import threading
15- from collections .abc import Callable , Iterable
15+ from collections .abc import (
16+ Callable ,
17+ Iterable ,
18+ )
1619from difflib import SequenceMatcher
1720from enum import Enum
18- from typing import TYPE_CHECKING , Any , TextIO , TypeVar , Union , cast
21+ from typing import (
22+ TYPE_CHECKING ,
23+ Any ,
24+ TextIO ,
25+ TypeVar ,
26+ Union ,
27+ cast ,
28+ )
1929
2030from . import constants
21- from .argparse_custom import ChoicesProviderFunc , CompleterFunc
31+ from . import string_utils as su
32+ from .argparse_custom import (
33+ ChoicesProviderFunc ,
34+ CompleterFunc ,
35+ )
2236
2337if TYPE_CHECKING : # pragma: no cover
2438 import cmd2 # noqa: F401
def alphabetical_sort(list_to_sort: Iterable[str]) -> list[str]:
    """Return a new list with the strings sorted alphabetically.

    Sorting uses ``su.norm_fold`` as the key, so ordering is
    case-insensitive (per that helper's normalization — defined in
    string_utils, not visible here).

    :param list_to_sort: the list being sorted
    :return: the sorted list
    """
    # The lambda defers the attribute lookup on the string_utils module
    # until a key is actually needed; results are identical.
    return sorted(list_to_sort, key=lambda text: su.norm_fold(text))
194206
195207
196208def try_int_or_force_to_lower_case (input_str : str ) -> int | str :
@@ -199,12 +211,10 @@ def try_int_or_force_to_lower_case(input_str: str) -> int | str:
199211 :param input_str: string to convert
200212 :return: the string as an integer or a lower case version of the string.
201213 """
202- from .string_utils import norm_fold
203-
204214 try :
205215 return int (input_str )
206216 except ValueError :
207- return norm_fold (input_str )
217+ return su . norm_fold (input_str )
208218
209219
210220def natural_keys (input_str : str ) -> list [int | str ]:
def quote_specific_tokens(tokens: list[str], tokens_to_quote: list[str]) -> None:
    """Quote, in place, any token that appears in ``tokens_to_quote``.

    :param tokens: token list being edited
    :param tokens_to_quote: the tokens, which if present in tokens, to quote
    """
    for index, candidate in enumerate(tokens):
        if candidate in tokens_to_quote:
            # Replace the entry with its quoted form (quoting rules live in string_utils).
            tokens[index] = su.quote(candidate)
247255
def unquote_specific_tokens(tokens: list[str], tokens_to_unquote: list[str]) -> None:
    """Unquote, in place, any token whose unquoted form appears in ``tokens_to_unquote``.

    :param tokens: token list being edited
    :param tokens_to_unquote: the tokens, which if present in tokens, to unquote
    """
    for index, candidate in enumerate(tokens):
        stripped = su.strip_quotes(candidate)
        # Only rewrite the entry when its unquoted form is one we were asked to unquote.
        if stripped in tokens_to_unquote:
            tokens[index] = stripped
@@ -264,12 +270,10 @@ def expand_user(token: str) -> str:
264270
265271 :param token: the string to expand
266272 """
267- from .string_utils import is_quoted , strip_quotes
268-
269273 if token :
270- if is_quoted (token ):
274+ if su . is_quoted (token ):
271275 quote_char = token [0 ]
272- token = strip_quotes (token )
276+ token = su . strip_quotes (token )
273277 else :
274278 quote_char = ''
275279
0 commit comments