diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5969c53a..a47b8a36 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,12 @@ Changes ======= + +5.0.2 +~~~~~ +* ENH: improved type annotations and moved them inline + + 5.0.1 ~~~~~ * FIX: Prevented duplicate or inconsistent profiler output under Python 3.14 when multiprocessing is used. diff --git a/line_profiler/__main__.py b/line_profiler/__main__.py index c626c205..33831ea3 100644 --- a/line_profiler/__main__.py +++ b/line_profiler/__main__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from .line_profiler import main if __name__ == '__main__': diff --git a/line_profiler/__main__.pyi b/line_profiler/__main__.pyi deleted file mode 100644 index 8b137891..00000000 --- a/line_profiler/__main__.pyi +++ /dev/null @@ -1 +0,0 @@ - diff --git a/line_profiler/_diagnostics.py b/line_profiler/_diagnostics.py index cbf3d24b..a2639746 100644 --- a/line_profiler/_diagnostics.py +++ b/line_profiler/_diagnostics.py @@ -71,10 +71,10 @@ def _boolean_environ( """ # (TODO: migrate to `line_profiler.cli_utils.boolean()` after # merging #335) - try: - value = os.environ.get(envvar).casefold() - except AttributeError: # None + value = os.environ.get(envvar) + if value is None: return default + value = value.casefold() non_default_values = falsy if default else truey if value in {v.casefold() for v in non_default_values}: return not default diff --git a/line_profiler/_line_profiler.pyi b/line_profiler/_line_profiler.pyi new file mode 100644 index 00000000..a51cfffd --- /dev/null +++ b/line_profiler/_line_profiler.pyi @@ -0,0 +1,25 @@ +from __future__ import annotations + +from typing import Any, Mapping + + +class LineStats: + timings: Mapping[tuple[str, int, str], list[tuple[int, int, int]]] + unit: float + + def __init__( + self, + timings: Mapping[tuple[str, int, str], list[tuple[int, int, int]]], + unit: float, + ) -> None: ... + + +class LineProfiler: + def enable_by_count(self) -> None: ... 
+ def disable_by_count(self) -> None: ... + def add_function(self, func: Any) -> None: ... + def get_stats(self) -> LineStats: ... + def dump_stats(self, filename: str) -> None: ... + + +def label(code: Any) -> Any: ... diff --git a/line_profiler/autoprofile/ast_profile_transformer.py b/line_profiler/autoprofile/ast_profile_transformer.py index 1f4655d8..95827aeb 100644 --- a/line_profiler/autoprofile/ast_profile_transformer.py +++ b/line_profiler/autoprofile/ast_profile_transformer.py @@ -1,7 +1,12 @@ +from __future__ import annotations + import ast +from typing import cast, Union, List -def ast_create_profile_node(modname, profiler_name='profile', attr='add_imported_function_or_module'): +def ast_create_profile_node( + modname: str, profiler_name: str = 'profile', + attr: str = 'add_imported_function_or_module') -> ast.Expr: """Create an abstract syntax tree node that adds an object to the profiler to be profiled. An abstract syntax tree node is created which calls the attr method from profile and @@ -29,7 +34,7 @@ def ast_create_profile_node(modname, profiler_name='profile', attr='add_imported """ func = ast.Attribute(value=ast.Name(id=profiler_name, ctx=ast.Load()), attr=attr, ctx=ast.Load()) names = modname.split('.') - value = ast.Name(id=names[0], ctx=ast.Load()) + value: ast.expr = ast.Name(id=names[0], ctx=ast.Load()) for name in names[1:]: value = ast.Attribute(attr=name, ctx=ast.Load(), value=value) expr = ast.Expr(value=ast.Call(func=func, args=[value], keywords=[])) @@ -45,7 +50,9 @@ class AstProfileTransformer(ast.NodeTransformer): immediately after the import. """ - def __init__(self, profile_imports=False, profiled_imports=None, profiler_name='profile'): + def __init__(self, profile_imports: bool = False, + profiled_imports: list[str] | None = None, + profiler_name: str = 'profile') -> None: """Initializes the AST transformer with the profiler name. 
Args: @@ -63,7 +70,9 @@ def __init__(self, profile_imports=False, profiled_imports=None, profiler_name=' self._profiled_imports = profiled_imports if profiled_imports is not None else [] self._profiler_name = profiler_name - def _visit_func_def(self, node): + def _visit_func_def( + self, node: ast.FunctionDef | ast.AsyncFunctionDef + ) -> ast.FunctionDef | ast.AsyncFunctionDef: """Decorate functions/methods with profiler. Checks if the function/method already has a profile_name decorator, if not, it will append @@ -81,17 +90,19 @@ def _visit_func_def(self, node): """ decor_ids = set() for decor in node.decorator_list: - try: + if isinstance(decor, ast.Name): decor_ids.add(decor.id) - except AttributeError: - ... if self._profiler_name not in decor_ids: node.decorator_list.append(ast.Name(id=self._profiler_name, ctx=ast.Load())) - return self.generic_visit(node) + self.generic_visit(node) + return node visit_FunctionDef = visit_AsyncFunctionDef = _visit_func_def - def _visit_import(self, node): + def _visit_import( + self, node: ast.Import | ast.ImportFrom + ) -> (ast.Import | ast.ImportFrom + | list[ast.Import | ast.ImportFrom | ast.Expr]): """Add a node that profiles an import If profile_imports is True and the import is not in profiled_imports, @@ -110,8 +121,10 @@ def _visit_import(self, node): returns list containing the import node and the profiling node """ if not self._profile_imports: - return self.generic_visit(node) - visited = [self.generic_visit(node)] + self.generic_visit(node) + return node + this_visit = cast(Union[ast.Import, ast.ImportFrom], self.generic_visit(node)) + visited: list[ast.Import | ast.ImportFrom | ast.Expr] = [this_visit] for names in node.names: node_name = names.name if names.asname is None else names.asname if node_name in self._profiled_imports: @@ -121,7 +134,9 @@ def _visit_import(self, node): visited.append(expr) return visited - def visit_Import(self, node): + def visit_Import( + self, node: ast.Import + ) -> ast.Import | 
list[ast.Import | ast.Expr]: """Add a node that profiles an object imported using the "import foo" sytanx Args: @@ -135,9 +150,12 @@ def visit_Import(self, node): if profile_imports is True: returns list containing the import node and the profiling node """ - return self._visit_import(node) + return cast(Union[ast.Import, List[Union[ast.Import, ast.Expr]]], + self._visit_import(node)) - def visit_ImportFrom(self, node): + def visit_ImportFrom( + self, node: ast.ImportFrom + ) -> ast.ImportFrom | list[ast.ImportFrom | ast.Expr]: """Add a node that profiles an object imported using the "from foo import bar" syntax Args: @@ -151,4 +169,5 @@ def visit_ImportFrom(self, node): if profile_imports is True: returns list containing the import node and the profiling node """ - return self._visit_import(node) + return cast(Union[ast.ImportFrom, List[Union[ast.ImportFrom, ast.Expr]]], + self._visit_import(node)) diff --git a/line_profiler/autoprofile/ast_profile_transformer.pyi b/line_profiler/autoprofile/ast_profile_transformer.pyi deleted file mode 100644 index 9d64182c..00000000 --- a/line_profiler/autoprofile/ast_profile_transformer.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List -import _ast -import ast -from typing import Union - - -def ast_create_profile_node(modname, - profiler_name: str = ..., - attr: str = ...) -> (_ast.Expr): - ... - - -class AstProfileTransformer(ast.NodeTransformer): - - def __init__(self, - profile_imports: bool = False, - profiled_imports: List[str] | None = None, - profiler_name: str = 'profile') -> None: - ... - - def visit_FunctionDef(self, node: _ast.FunctionDef) -> (_ast.FunctionDef): - ... - - def visit_AsyncFunctionDef( - self, node: _ast.AsyncFunctionDef) -> (_ast.AsyncFunctionDef): - ... - - def visit_Import( - self, node: _ast.Import - ) -> (Union[_ast.Import, List[Union[_ast.Import, _ast.Expr]]]): - ... 
- - def visit_ImportFrom( - self, node: _ast.ImportFrom - ) -> (Union[_ast.ImportFrom, List[Union[_ast.ImportFrom, _ast.Expr]]]): - ... diff --git a/line_profiler/autoprofile/ast_tree_profiler.py b/line_profiler/autoprofile/ast_tree_profiler.py index 892ebfbc..994074f6 100644 --- a/line_profiler/autoprofile/ast_tree_profiler.py +++ b/line_profiler/autoprofile/ast_tree_profiler.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import ast import os +from typing import Type from .ast_profile_transformer import (AstProfileTransformer, ast_create_profile_node) @@ -20,11 +23,11 @@ class AstTreeProfiler: """ def __init__(self, - script_file, - prof_mod, - profile_imports, - ast_transformer_class_handler=AstProfileTransformer, - profmod_extractor_class_handler=ProfmodExtractor): + script_file: str, + prof_mod: list[str], + profile_imports: bool, + ast_transformer_class_handler: Type = AstProfileTransformer, + profmod_extractor_class_handler: Type = ProfmodExtractor) -> None: """Initializes the AST tree profiler instance with the script file path Args: @@ -52,7 +55,8 @@ def __init__(self, self._profmod_extractor_class_handler = profmod_extractor_class_handler @staticmethod - def _check_profile_full_script(script_file, prof_mod): + def _check_profile_full_script( + script_file: str, prof_mod: list[str]) -> bool: """Check whether whole script should be profiled. Checks whether path to script has been passed to prof_mod indicating that @@ -76,7 +80,7 @@ def _check_profile_full_script(script_file, prof_mod): return profile_full_script @staticmethod - def _get_script_ast_tree(script_file): + def _get_script_ast_tree(script_file: str) -> ast.Module: """Generate an abstract syntax from a script file. 
Args: @@ -93,10 +97,10 @@ def _get_script_ast_tree(script_file): return tree def _profile_ast_tree(self, - tree, - tree_imports_to_profile_dict, - profile_full_script=False, - profile_imports=False): + tree: ast.Module, + tree_imports_to_profile_dict: dict[int, str], + profile_full_script: bool = False, + profile_imports: bool = False) -> ast.Module: """Add profiling to an abstract syntax tree. Adds nodes to the AST that adds the specified objects to the profiler. @@ -139,7 +143,7 @@ def _profile_ast_tree(self, ast.fix_missing_locations(tree) return tree - def profile(self): + def profile(self) -> ast.Module: """Create an abstract syntax tree of a script and add profiling to it. Reads a script file and generates an abstract syntax tree. diff --git a/line_profiler/autoprofile/ast_tree_profiler.pyi b/line_profiler/autoprofile/ast_tree_profiler.pyi deleted file mode 100644 index fc533e86..00000000 --- a/line_profiler/autoprofile/ast_tree_profiler.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from typing import List -from typing import Type -import _ast - -from .ast_profile_transformer import AstProfileTransformer -from .profmod_extractor import ProfmodExtractor - -__docstubs__: str - - -class AstTreeProfiler: - - def __init__( - self, - script_file: str, - prof_mod: List[str], - profile_imports: bool, - ast_transformer_class_handler: Type = AstProfileTransformer, - profmod_extractor_class_handler: Type = ProfmodExtractor) -> None: - ... - - def profile(self) -> (_ast.Module): - ... 
diff --git a/line_profiler/autoprofile/autoprofile.py b/line_profiler/autoprofile/autoprofile.py index 5985a84b..57b118bf 100644 --- a/line_profiler/autoprofile/autoprofile.py +++ b/line_profiler/autoprofile/autoprofile.py @@ -44,12 +44,16 @@ def main(): python -m kernprof -p demo.py -l demo.py python -m line_profiler -rmt demo.py.lprof """ +from __future__ import annotations import contextlib import functools import importlib.util import operator import sys import types +from collections.abc import MutableMapping +from typing import Any, cast, Dict, Mapping +from typing import ContextManager from .ast_tree_profiler import AstTreeProfiler from .run_module import AstTreeModuleProfiler from .line_profiler_utils import add_imported_function_or_module @@ -58,7 +62,7 @@ def main(): PROFILER_LOCALS_NAME = 'prof' -def _extend_line_profiler_for_profiling_imports(prof): +def _extend_line_profiler_for_profiling_imports(prof: Any) -> None: """Allow profiler to handle functions/methods, classes & modules with a single call. Add a method to LineProfiler that can identify whether the object is a @@ -73,7 +77,9 @@ def _extend_line_profiler_for_profiling_imports(prof): prof.add_imported_function_or_module = types.MethodType(add_imported_function_or_module, prof) -def run(script_file, ns, prof_mod, profile_imports=False, as_module=False): +def run(script_file: str, ns: MutableMapping[str, Any], + prof_mod: list[str], profile_imports: bool = False, + as_module: bool = False) -> None: """Automatically profile a script and run it. 
Profile functions, classes & modules specified in prof_mod without needing to add @@ -98,21 +104,26 @@ def run(script_file, ns, prof_mod, profile_imports=False, as_module=False): Whether we're running script_file as a module """ class restore_dict: - def __init__(self, d, target=None): + def __init__(self, d: MutableMapping[str, Any], target=None): self.d = d self.target = target - self.copy = None + self.copy: Mapping[str, Any] | None = None def __enter__(self): assert self.copy is None - self.copy = self.d.copy() + self.copy = dict(self.d) return self.target def __exit__(self, *_, **__): self.d.clear() - self.d.update(self.copy) + if self.copy is not None: + self.d.update(self.copy) self.copy = None + Profiler: type[AstTreeModuleProfiler] | type[AstTreeProfiler] + namespace: MutableMapping[str, Any] + ctx: ContextManager + if as_module: Profiler = AstTreeModuleProfiler module_name = modpath_to_modname(script_file) @@ -144,4 +155,4 @@ def __exit__(self, *_, **__): code_obj = compile(tree_profiled, script_file, 'exec') with ctx as callback: callback() - exec(code_obj, namespace, namespace) + exec(code_obj, cast(Dict[str, Any], namespace), namespace) diff --git a/line_profiler/autoprofile/autoprofile.pyi b/line_profiler/autoprofile/autoprofile.pyi deleted file mode 100644 index 65ddbf2b..00000000 --- a/line_profiler/autoprofile/autoprofile.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from typing import List - -PROFILER_LOCALS_NAME: str - - -def run(script_file: str, - ns: dict, - prof_mod: List[str], - profile_imports: bool = False, - as_module: bool = False) -> None: - ... diff --git a/line_profiler/autoprofile/eager_preimports.py b/line_profiler/autoprofile/eager_preimports.py index 1e8444df..d03f617b 100644 --- a/line_profiler/autoprofile/eager_preimports.py +++ b/line_profiler/autoprofile/eager_preimports.py @@ -2,16 +2,18 @@ Tools for eagerly pre-importing everything as specified in ``line_profiler.autoprof.run(prof_mod=...)``. 
""" +from __future__ import annotations + import ast import functools import itertools -from collections import namedtuple from collections.abc import Collection from keyword import iskeyword from importlib.util import find_spec from pkgutil import walk_packages from textwrap import dedent, indent as indent_ from warnings import warn +from typing import Any, Generator, NamedTuple, TextIO from .util_static import ( modname_to_modpath, modpath_to_modname, package_modpaths) @@ -20,7 +22,7 @@ 'resolve_profiling_targets', 'write_eager_import_module') -def is_dotted_path(obj): +def is_dotted_path(obj: Any) -> bool: """ Example: >>> assert not is_dotted_path(object()) @@ -37,7 +39,7 @@ def is_dotted_path(obj): return True -def get_expression(obj): +def get_expression(obj: Any) -> ast.Expression | None: """ Example: >>> assert not get_expression(object()) @@ -55,7 +57,8 @@ def get_expression(obj): return None -def split_dotted_path(dotted_path, static=True): +def split_dotted_path( + dotted_path: str, static: bool = True) -> tuple[str, str | None]: """ Arguments: dotted_path (str): @@ -133,7 +136,7 @@ def split_dotted_path(dotted_path, static=True): f'module: {checked_locs!r}') -def strip(s): +def strip(s: str) -> str: return dedent(s).strip('\n') @@ -163,18 +166,20 @@ class LoadedNameFinder(ast.NodeVisitor): >>> names = LoadedNameFinder.find(ast.parse(module)) >>> assert names == {'bar', 'foobar', 'a', 'str'}, names """ - def __init__(self): - self.names = set() - self.contexts = [] + def __init__(self) -> None: + self.names: set[str] = set() + self.contexts: list[set[str]] = [] - def visit_Name(self, node): + def visit_Name(self, node: ast.Name) -> None: if not isinstance(node.ctx, ast.Load): return name = node.id if not any(name in ctx for ctx in self.contexts): self.names.add(node.id) - def _visit_func_def(self, node): + def _visit_func_def( + self, node: ast.FunctionDef | ast.AsyncFunctionDef | ast.Lambda + ) -> None: args = node.args arg_names = { arg.arg @@ -191,13 
+196,13 @@ def _visit_func_def(self, node): visit_FunctionDef = visit_AsyncFunctionDef = visit_Lambda = _visit_func_def @classmethod - def find(cls, node): + def find(cls, node: ast.AST) -> set[str]: finder = cls() finder.visit(node) return finder.names -def propose_names(prefixes): +def propose_names(prefixes: Collection[str]) -> Generator[str, None, None]: """ Generate names based on prefixes. @@ -235,7 +240,9 @@ def propose_names(prefixes): yield pattern(prefix, i) -def resolve_profiling_targets(dotted_paths, static=True, recurse=False): +def resolve_profiling_targets( + dotted_paths: Collection[str], static: bool = True, + recurse: Collection[str] | bool = False) -> ResolvedResult: """ Arguments: dotted_paths (Collection[str]): @@ -308,7 +315,7 @@ def walk_packages_import_sys(pkg): dotted_paths |= recurse indirect_submods = set() - all_targets = {} + all_targets: dict[str, set[str | None]] = {} unknown_locs = [] split_path = functools.partial(split_dotted_path, static=static) walk = walk_packages_static if static else walk_packages_import_sys @@ -327,11 +334,12 @@ def walk_packages_import_sys(pkg): return ResolvedResult(all_targets, indirect_submods, unknown_locs) -def write_eager_import_module(dotted_paths, stream=None, *, - static=True, - recurse=False, - adder='profile.add_imported_function_or_module', - indent=' '): +def write_eager_import_module( + dotted_paths: Collection[str], stream: TextIO | None = None, *, + static: bool = True, + recurse: Collection[str] | bool = False, + adder: str = 'profile.add_imported_function_or_module', + indent: str = ' ') -> None: r""" Write a module which autoprofiles all its imports. 
@@ -472,6 +480,7 @@ def write_eager_import_module(dotted_paths, stream=None, *, # Get the names loaded by `adder`; # these names are not allowed in the namespace + assert expr is not None forbidden_names = LoadedNameFinder.find(expr) # We need three free names: # - One for `adder` @@ -539,7 +548,9 @@ def write_eager_import_module(dotted_paths, stream=None, *, chunks = [] if profile_whole_module: chunks.append(f'{adder_name}({module_name})') - for target in sorted(targets): + + targets_ = sorted(t for t in targets if t is not None) + for target in sorted(targets_): path = f'{module}.{target}' chunks.append(strip(f""" try: @@ -564,5 +575,7 @@ def write_eager_import_module(dotted_paths, stream=None, *, """)) -ResolvedResult = namedtuple('ResolvedResult', - ('targets', 'indirect', 'unresolved')) +class ResolvedResult(NamedTuple): + targets: dict[str, set[str | None]] + indirect: set[str] + unresolved: list[str] diff --git a/line_profiler/autoprofile/eager_preimports.pyi b/line_profiler/autoprofile/eager_preimports.pyi deleted file mode 100644 index 756a6b7b..00000000 --- a/line_profiler/autoprofile/eager_preimports.pyi +++ /dev/null @@ -1,67 +0,0 @@ -import ast -from typing import ( - Any, Union, - Collection, Dict, Generator, List, NamedTuple, Set, Tuple, - TextIO) - - -def is_dotted_path(obj: Any) -> bool: - ... - - -def get_expression(obj: Any) -> Union[ast.Expression, None]: - ... - - -def split_dotted_path( - dotted_path: str, static: bool = True) -> Tuple[str, Union[str, None]]: - ... - - -def strip(s: str) -> str: - ... - - -class LoadedNameFinder(ast.NodeVisitor): - names: Set[str] - contexts: List[Set[str]] - - def visit_Name(self, node: ast.Name) -> None: - ... - - def visit_FunctionDef(self, - node: Union[ast.FunctionDef, ast.AsyncFunctionDef, - ast.Lambda]) -> None: - ... - - visit_AsyncFunctionDef = visit_Lambda = visit_FunctionDef - - @classmethod - def find(cls, node: ast.AST) -> Set[str]: - ... 
- - -def propose_names(prefixes: Collection[str]) -> Generator[str, None, None]: - ... - - -def resolve_profiling_targets( - dotted_paths: Collection[str], - static: bool = True, - recurse: Union[Collection[str], bool] = False) -> 'ResolvedResult': - ... - - -def write_eager_import_module( - dotted_paths: Collection[str], stream: Union[TextIO, None] = None, *, - static: bool = True, - recurse: Union[Collection[str], bool] = False, - adder: str = 'profile.add_imported_function_or_module', - indent: str = ' ') -> None: - ... - - -class ResolvedResult(NamedTuple): - targets: Dict[str, Set[Union[str, None]]] - indirect: Set[str] - unresolved: List[str] diff --git a/line_profiler/autoprofile/line_profiler_utils.py b/line_profiler/autoprofile/line_profiler_utils.py index c4e736d1..a84811ec 100644 --- a/line_profiler/autoprofile/line_profiler_utils.py +++ b/line_profiler/autoprofile/line_profiler_utils.py @@ -1,8 +1,38 @@ +from __future__ import annotations + import inspect +from functools import cached_property, partial, partialmethod +from types import FunctionType, MethodType, ModuleType +from typing import TYPE_CHECKING, Any, Literal, overload + +if TYPE_CHECKING: # pragma: no cover + from ..profiler_mixin import CLevelCallable, CythonCallable + from ..scoping_policy import ScopingPolicy, ScopingPolicyDict + + +@overload +def add_imported_function_or_module( + self, item: CLevelCallable | Any, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0]: + ... + + +@overload +def add_imported_function_or_module( + self, + item: (FunctionType | CythonCallable | type | partial | property + | cached_property | MethodType | staticmethod | classmethod + | partialmethod | ModuleType), + *, scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0, 1]: + ... 
-def add_imported_function_or_module(self, item, *, - scoping_policy=None, wrap=False): +def add_imported_function_or_module( + self, item: object, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0, 1]: """ Method to add an object to :py:class:`~.line_profiler.LineProfiler` to be profiled. @@ -15,8 +45,7 @@ def add_imported_function_or_module(self, item, *, Args: item (Union[Callable, Type, ModuleType]): Object to be profiled. - scoping_policy (Union[ScopingPolicy, str, ScopingPolicyDict, \ -None]): + scoping_policy (Union[ScopingPolicy, str, ScopingPolicyDict, None]): Whether (and how) to match the scope of members and decide on whether to add them: diff --git a/line_profiler/autoprofile/line_profiler_utils.pyi b/line_profiler/autoprofile/line_profiler_utils.pyi deleted file mode 100644 index 2d114b34..00000000 --- a/line_profiler/autoprofile/line_profiler_utils.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from functools import partial, partialmethod, cached_property -from types import FunctionType, MethodType, ModuleType -from typing import overload, Any, Literal, TypeVar, TYPE_CHECKING - -if TYPE_CHECKING: # Stub-only annotations - from ..profiler_mixin import CLevelCallable, CythonCallable - from ..scoping_policy import ScopingPolicy, ScopingPolicyDict - - - - -@overload -def add_imported_function_or_module( - self, item: CLevelCallable | Any, - scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, - wrap: bool = False) -> Literal[0]: - ... - - -@overload -def add_imported_function_or_module( - self, - item: (FunctionType | CythonCallable - | type | partial | property | cached_property - | MethodType | staticmethod | classmethod | partialmethod - | ModuleType), - scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, - wrap: bool = False) -> Literal[0, 1]: - ... 
diff --git a/line_profiler/autoprofile/profmod_extractor.py b/line_profiler/autoprofile/profmod_extractor.py index 929a1e9e..f19319dc 100644 --- a/line_profiler/autoprofile/profmod_extractor.py +++ b/line_profiler/autoprofile/profmod_extractor.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import ast import os import sys - +from typing import List, cast, Any, Union from .util_static import (modname_to_modpath, modpath_to_modname, package_modpaths) @@ -13,7 +15,8 @@ class ProfmodExtractor: abstract syntax tree. """ - def __init__(self, tree, script_file, prof_mod): + def __init__(self, tree: ast.Module, script_file: str, + prof_mod: list[str]) -> None: """Initializes the AST tree profiler instance with the AST, script file path and prof_mod Args: @@ -33,7 +36,7 @@ def __init__(self, tree, script_file, prof_mod): self._prof_mod = prof_mod @staticmethod - def _is_path(text): + def _is_path(text: str) -> bool: """Check whether a string is a path. Checks if a string contains a slash or ends with .py indicating it is a path. @@ -50,7 +53,8 @@ def _is_path(text): return ret @classmethod - def _get_modnames_to_profile_from_prof_mod(cls, script_file, prof_mod): + def _get_modnames_to_profile_from_prof_mod( + cls, script_file: str, prof_mod: list[str]) -> list[str]: """Grab the valid paths and all dotted paths in prof_mod and their subpackages and submodules, in the form of dotted paths. @@ -95,7 +99,7 @@ def _get_modnames_to_profile_from_prof_mod(cls, script_file, prof_mod): if it fails, the item may point to an installed module rather than local script so we check if the item is path and whether that path exists, else skip the item. 
""" - modpath = modname_to_modpath(mod, sys_path=new_sys_path) + modpath = modname_to_modpath(mod, sys_path=cast(List[Union[str, os.PathLike]], new_sys_path)) if modpath is None: """if cannot convert to modpath, check if already path and if invalid""" if not os.path.exists(mod): @@ -127,7 +131,8 @@ def _get_modnames_to_profile_from_prof_mod(cls, script_file, prof_mod): return modnames_to_profile @staticmethod - def _ast_get_imports_from_tree(tree): + def _ast_get_imports_from_tree( + tree: ast.Module) -> list[dict[str, str | int | None]]: """Get all imports in an abstract syntax tree. Args: @@ -160,8 +165,10 @@ def _ast_get_imports_from_tree(tree): module_dict_list.append(module_dict) modname_list.append(modname) elif isinstance(node, ast.ImportFrom): + if node.module is None: + continue for name in node.names: - modname = node.module + '.' + name.name + modname = f'{node.module}.{name.name}' if modname not in modname_list: alias = name.asname or name.name module_dict = { @@ -174,7 +181,10 @@ def _ast_get_imports_from_tree(tree): return module_dict_list @staticmethod - def _find_modnames_in_tree_imports(modnames_to_profile, module_dict_list): + def _find_modnames_in_tree_imports( + modnames_to_profile: list[str], + module_dict_list: list[dict[str, str | int | None]] + ) -> dict[int, str]: """Map modnames to imports from an abstract sytax tree. 
Find imports in modue_dict_list, created from an abstract syntax tree, that match @@ -200,21 +210,28 @@ def _find_modnames_in_tree_imports(modnames_to_profile, module_dict_list): value (str): alias (or name if no alias used) of import """ - modnames_found_in_tree = {} + modnames_found_in_tree: dict[int, str] = {} modname_added_list = [] for i, module_dict in enumerate(module_dict_list): modname = module_dict['name'] + if not isinstance(modname, str): + continue if modname in modname_added_list: continue """check if either the parent module or submodule are in modnames_to_profile""" if modname not in modnames_to_profile and modname.rsplit('.', 1)[0] not in modnames_to_profile: continue name = module_dict['alias'] or modname + if not isinstance(name, str): + raise TypeError('should have gotten a str') modname_added_list.append(modname) - modnames_found_in_tree[module_dict['tree_index']] = name + tree_index = module_dict['tree_index'] + if not isinstance(tree_index, int): + raise TypeError('should have gotten an int') + modnames_found_in_tree[tree_index] = name return modnames_found_in_tree - def run(self): + def run(self) -> dict[int, str]: """Map prof_mod to imports in an abstract syntax tree. Takes the paths and dotted paths in prod_mod and finds their respective imports in an diff --git a/line_profiler/autoprofile/profmod_extractor.pyi b/line_profiler/autoprofile/profmod_extractor.pyi deleted file mode 100644 index ebaf7526..00000000 --- a/line_profiler/autoprofile/profmod_extractor.pyi +++ /dev/null @@ -1,13 +0,0 @@ -import _ast -from typing import List -from typing import Dict - - -class ProfmodExtractor: - - def __init__(self, tree: _ast.Module, script_file: str, - prof_mod: List[str]) -> None: - ... - - def run(self) -> (Dict[int, str]): - ... 
diff --git a/line_profiler/autoprofile/run_module.py b/line_profiler/autoprofile/run_module.py index f4461409..6e545126 100644 --- a/line_profiler/autoprofile/run_module.py +++ b/line_profiler/autoprofile/run_module.py @@ -1,11 +1,14 @@ +from __future__ import annotations + import ast import os +from typing import cast from .ast_tree_profiler import AstTreeProfiler from .util_static import modname_to_modpath, modpath_to_modname -def get_module_from_importfrom(node, module): +def get_module_from_importfrom(node: ast.ImportFrom, module: str) -> str: r"""Resolve the full path of a relative import. Args: @@ -44,7 +47,7 @@ def get_module_from_importfrom(node, module): """ level = node.level if not level: - return node.module + return node.module or '' chunks = module.split('.')[:-level] if node.module: chunks.append(node.module) @@ -53,16 +56,18 @@ def get_module_from_importfrom(node, module): class ImportFromTransformer(ast.NodeTransformer): """Turn all the relative imports into absolute imports.""" - def __init__(self, module): + def __init__(self, module: str) -> None: self.module = module - def visit_ImportFrom(self, node): + def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.ImportFrom: level = node.level if not level: - return self.generic_visit(node) + self.generic_visit(node) + return node module = get_module_from_importfrom(node, self.module) new_node = ast.ImportFrom(module=module, names=node.names, level=0) - return self.generic_visit(ast.copy_location(new_node, node)) + visited = self.generic_visit(ast.copy_location(new_node, node)) + return cast(ast.ImportFrom, visited) class AstTreeModuleProfiler(AstTreeProfiler): @@ -74,7 +79,7 @@ class AstTreeModuleProfiler(AstTreeProfiler): classes & modules in prof_mod to the profiler to be profiled. 
""" @classmethod - def _get_script_ast_tree(cls, script_file): + def _get_script_ast_tree(cls, script_file: str) -> ast.Module: tree = super()._get_script_ast_tree(script_file) # Note: don't drop the `.__init__` or `.__main__` suffix, lest # the relative imports fail @@ -83,11 +88,12 @@ def _get_script_ast_tree(cls, script_file): return ImportFromTransformer(module).visit(tree) @staticmethod - def _is_main(fname): + def _is_main(fname: str) -> bool: return os.path.basename(fname) == '__main__.py' @classmethod - def _check_profile_full_script(cls, script_file, prof_mod): + def _check_profile_full_script( + cls, script_file: str, prof_mod: list[str]) -> bool: rp = os.path.realpath paths_to_check = {rp(script_file)} if cls._is_main(script_file): diff --git a/line_profiler/autoprofile/run_module.pyi b/line_profiler/autoprofile/run_module.pyi deleted file mode 100644 index 6d63a6d5..00000000 --- a/line_profiler/autoprofile/run_module.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import ast - -from .ast_tree_profiler import AstTreeProfiler - - -def get_module_from_importfrom(node: ast.ImportFrom, module: str) -> str: - ... - - -class ImportFromTransformer(ast.NodeTransformer): - def __init__(self, module: str) -> None: - ... - - def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.ImportFrom: - ... - - module: str - - -class AstTreeModuleProfiler(AstTreeProfiler): - ... diff --git a/line_profiler/autoprofile/util_static.py b/line_profiler/autoprofile/util_static.py index 117eef4e..4eaf224b 100644 --- a/line_profiler/autoprofile/util_static.py +++ b/line_profiler/autoprofile/util_static.py @@ -3,7 +3,7 @@ :py:mod:`xdoctest` via dev/maintain/port_utilities.py in the line_profiler repo. 
""" - +from __future__ import annotations from os.path import abspath from os.path import dirname from os.path import exists @@ -15,6 +15,7 @@ from os.path import join import os from os.path import split +import typing from os.path import isfile from os.path import realpath import sys @@ -93,52 +94,51 @@ def package_modpaths( break -IS_PY_GE_308 = sys.version_info[0:2] >= (3, 8) +IS_PY_LT_314: bool = sys.version_info[0:2] < (3, 14) + + +IS_PY_GE_308: bool = sys.version_info[0:2] >= (3, 8) def _parse_static_node_value(node): """ Extract a constant value from a node if possible - - Args: - node (ast.AST): input node - - Returns: - Any: parsed value """ import ast - from collections import OrderedDict import numbers + from collections import OrderedDict - if ( - (isinstance(node, ast.Constant) and isinstance(node.value, numbers.Number)) - if IS_PY_GE_308 - else isinstance(node, ast.Num) - ): - value = node.value if IS_PY_GE_308 else node.n - elif ( - (isinstance(node, ast.Constant) and isinstance(node.value, str)) - if IS_PY_GE_308 - else isinstance(node, ast.Str) - ): - value = node.value if IS_PY_GE_308 else node.s - elif isinstance(node, ast.List): - value = list(map(_parse_static_node_value, node.elts)) - elif isinstance(node, ast.Tuple): - value = tuple(map(_parse_static_node_value, node.elts)) - elif isinstance(node, ast.Dict): + if IS_PY_GE_308: + if isinstance(node, ast.Constant) and isinstance(node.value, numbers.Number): + return node.value + if isinstance(node, ast.Constant) and isinstance(node.value, str): + return node.value + else: + num_type = getattr(ast, "Num", None) + str_type = getattr(ast, "Str", None) + if (num_type is not None) and isinstance(node, num_type): + return node.n + if (str_type is not None) and isinstance(node, str_type): + return node.s + if isinstance(node, ast.List): + return list(map(_parse_static_node_value, node.elts)) + if isinstance(node, ast.Tuple): + return tuple(map(_parse_static_node_value, node.elts)) + if 
isinstance(node, ast.Dict): keys = map(_parse_static_node_value, node.keys) values = map(_parse_static_node_value, node.values) - value = OrderedDict(zip(keys, values)) - elif isinstance(node, ast.Constant): - value = node.value - else: - raise TypeError( - """Cannot parse a static value from non-static node of type: {!r}""".format( - type(node) - ) + return OrderedDict(zip(keys, values)) + if IS_PY_LT_314: + nameconst_type = getattr(ast, "NameConstant", None) + if (nameconst_type is not None) and isinstance(node, nameconst_type): + return node.value + if isinstance(node, ast.Constant): + return node.value + raise TypeError( + "Cannot parse a static value from non-static node of type: {!r}".format( + type(node) ) - return value + ) def _extension_module_tags(): @@ -184,8 +184,6 @@ def _static_parse(varname, fpath): >>> assert _static_parse('a', fpath) == ("3", 5, 6) >>> fpath.write_text('b = 10' + chr(10) + 'a = None') >>> assert _static_parse('a', fpath) is None - >>> fpath.write_text('a = None') - >>> assert _static_parse('a', fpath) is None >>> import pytest >>> with pytest.raises(TypeError): >>> fpath.write_text('a = list(range(10))') @@ -250,7 +248,7 @@ def _platform_pylib_exts(): return tuple(valid_exts) -def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None): +def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None) -> str | None: """ syspath version of modname_to_modpath @@ -266,7 +264,7 @@ def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None): Defaults to None. Returns: - str: path to the module. + str | None: path to the module or None if it does not exist. Note: This is much slower than the pkgutil mechanisms. @@ -283,12 +281,11 @@ def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None): ...static_analysis.py >>> print(_syspath_modname_to_modpath('xdoctest')) ...xdoctest - >>> # xdoctest: +REQUIRES(CPython) - >>> print(_syspath_modname_to_modpath('_ctypes')) - ..._ctypes... 
+ >>> print(_syspath_modname_to_modpath('json')) + ...json >>> assert _syspath_modname_to_modpath('xdoctest', sys_path=[]) is None >>> assert _syspath_modname_to_modpath('xdoctest.static_analysis', sys_path=[]) is None - >>> assert _syspath_modname_to_modpath('_ctypes', sys_path=[]) is None + >>> assert _syspath_modname_to_modpath('json', sys_path=[]) is None >>> assert _syspath_modname_to_modpath('this', sys_path=[]) is None Example: @@ -328,14 +325,15 @@ def _isvalid(modpath, base): if sys_path is None: sys_path = sys.path candidate_dpaths = [("." if (p == "") else p) for p in sys_path] - if exclude: - def normalize(p): - if sys.platform.startswith("win32"): - return realpath(p).lower() - else: - return realpath(p) + def normalize(p): + if sys.platform.startswith("win32"): + return realpath(p).lower() + else: + return realpath(p) + real_exclude = set() + if exclude: real_exclude = {normalize(p) for p in exclude} candidate_dpaths = [ p for p in candidate_dpaths if (normalize(p) not in real_exclude) @@ -392,8 +390,8 @@ def check_dpath(dpath): import pathlib for editable_pth in new_editable_pth_paths: - editable_pth = pathlib.Path(editable_pth) - target = editable_pth.read_text().strip().split("\n")[(-1)] + editable_pth_ = pathlib.Path(editable_pth) + target = editable_pth_.read_text().strip().split("\n")[(-1)] if (not exclude) or (normalize(target) not in real_exclude): modpath = check_dpath(target) if modpath: @@ -416,10 +414,17 @@ def check_dpath(dpath): if modpath: found_modpath = modpath break + if typing.TYPE_CHECKING: + found_modpath = typing.cast((str | None), found_modpath) return found_modpath -def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None): +def modname_to_modpath( + modname: str, + hide_init: bool = True, + hide_main: bool = False, + sys_path: list[(str | os.PathLike)] | None = None, +) -> str | None: """ Finds the path to a python module from its name. 
@@ -456,9 +461,8 @@ def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None): >>> modname = 'xdoctest' >>> modpath = modname_to_modpath(modname, hide_init=False) >>> assert modpath.endswith('__init__.py') - >>> # xdoctest: +REQUIRES(CPython) - >>> modpath = basename(modname_to_modpath('_ctypes')) - >>> assert 'ctypes' in modpath + >>> modpath = modname_to_modpath('json') + >>> assert 'json' in modpath """ if hide_main or sys_path: modpath = _syspath_modname_to_modpath(modname, sys_path) @@ -466,11 +470,13 @@ def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None): modpath = _syspath_modname_to_modpath(modname, sys_path) if modpath is None: return None - modpath = normalize_modpath(modpath, hide_init=hide_init, hide_main=hide_main) - return modpath + modpath_ = normalize_modpath(modpath, hide_init=hide_init, hide_main=hide_main) + if typing.TYPE_CHECKING: + modpath_ = typing.cast(str, modpath_) + return modpath_ -def split_modpath(modpath, check=True): +def split_modpath(modpath: str | os.PathLike, check: bool = True) -> tuple[(str, str)]: """ Splits the modpath into the dir that must be in PYTHONPATH for the module to be imported and the modulepath relative to this directory. @@ -514,7 +520,9 @@ def split_modpath(modpath, check=True): return (dpath, rel_modpath) -def normalize_modpath(modpath, hide_init=True, hide_main=False): +def normalize_modpath( + modpath: str | os.PathLike, hide_init: bool = True, hide_main: bool = False +) -> str | os.PathLike: """ Normalizes __init__ and __main__ paths. 
@@ -567,8 +575,12 @@ def normalize_modpath(modpath, hide_init=True, hide_main=False): def modpath_to_modname( - modpath, hide_init=True, hide_main=False, check=True, relativeto=None -): + modpath: str, + hide_init: bool = True, + hide_main: bool = False, + check: bool = True, + relativeto: str | None = None, +) -> str: """ Determines importable name from file path @@ -620,10 +632,9 @@ def modpath_to_modname( >>> assert modpath_to_modname(dirname(xdoctest.__file__.replace('.pyc', '.py'))) == 'xdoctest' Example: - >>> # xdoctest: +REQUIRES(CPython) - >>> modpath = modname_to_modpath('_ctypes') + >>> modpath = modname_to_modpath('json') >>> modname = modpath_to_modname(modpath) - >>> assert modname == '_ctypes' + >>> assert modname == 'json' Example: >>> modpath = '/foo/libfoobar.linux-x86_64-3.6.so' @@ -633,8 +644,8 @@ def modpath_to_modname( if check and (relativeto is None): if not exists(modpath): raise ValueError("modpath={} does not exist".format(modpath)) - modpath_ = abspath(expanduser(modpath)) - modpath_ = normalize_modpath(modpath_, hide_init=hide_init, hide_main=hide_main) + modpath__ = abspath(expanduser(modpath)) + modpath_ = normalize_modpath(modpath__, hide_init=hide_init, hide_main=hide_main) if relativeto: dpath = dirname(abspath(expanduser(relativeto))) rel_modpath = relpath(modpath_, dpath) diff --git a/line_profiler/autoprofile/util_static.pyi b/line_profiler/autoprofile/util_static.pyi deleted file mode 100644 index 42ccd84e..00000000 --- a/line_profiler/autoprofile/util_static.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from typing import List -from os import PathLike -from typing import Tuple -from collections.abc import Generator -from typing import Any - - -def package_modpaths(pkgpath, - with_pkg: bool = ..., - with_mod: bool = ..., - followlinks: bool = ..., - recursive: bool = ..., - with_libs: bool = ..., - check: bool = ...) -> Generator[Any, None, None]: - ... 
- - -def modname_to_modpath( - modname: str, - hide_init: bool = True, - hide_main: bool = False, - sys_path: None | List[str | PathLike] = None) -> str | None: - ... - - -def split_modpath(modpath: str, check: bool = True) -> Tuple[str, str]: - ... - - -def normalize_modpath(modpath: str | PathLike, - hide_init: bool = True, - hide_main: bool = False) -> str | PathLike: - ... - - -def modpath_to_modname(modpath: str, - hide_init: bool = True, - hide_main: bool = False, - check: bool = True, - relativeto: str | None = None) -> str: - ... diff --git a/line_profiler/cli_utils.py b/line_profiler/cli_utils.py index 6344b6fa..0238c602 100644 --- a/line_profiler/cli_utils.py +++ b/line_profiler/cli_utils.py @@ -2,12 +2,16 @@ Shared utilities between the :command:`python -m line_profiler` and :command:`kernprof` CLI tools. """ +from __future__ import annotations + import argparse import functools import os import pathlib import shutil import sys +from os import PathLike +from typing import Protocol, Sequence, TypeVar, cast from .toml_config import ConfigSource @@ -17,8 +21,9 @@ for k in ('1', 'on', 'True', 'T', 'yes', 'Y')}} -def add_argument(parser_like, arg, /, *args, - hide_complementary_options=True, **kwargs): +def add_argument(parser_like, arg: str, /, *args: str, + hide_complementary_options: bool = True, + **kwargs: object) -> argparse.Action: """ Override the ``'store_true'`` and ``'store_false'`` actions so that they are turned into options which: @@ -70,7 +75,7 @@ def negated(*args, **kwargs): return negated # Make sure there's at least one positional argument - args = [arg, *args] + args = (arg, *args) if kwargs.get('action') not in ('store_true', 'store_false'): return parser_like.add_argument(*args, **kwargs) @@ -96,10 +101,9 @@ def negated(*args, **kwargs): kwargs.setdefault(key, value) long_kwargs = kwargs.copy() short_kwargs = {**kwargs, 'action': 'store_const'} - for key, value in dict( - nargs='?', - type=functools.partial(boolean, invert=not 
const)).items(): - long_kwargs.setdefault(key, value) + + long_kwargs.setdefault('nargs', '?') + long_kwargs.setdefault('type', functools.partial(boolean, invert=not const)) # Mention the short options in the long options' documentation, and # suppress the short options in the help @@ -111,7 +115,9 @@ def negated(*args, **kwargs): 'form' if len(short_flags) == 1 else 'forms', ', '.join(short_flags)) if long_kwargs.get('help'): - help_text = long_kwargs['help'].strip() + raw_help = long_kwargs['help'] + help_text = raw_help if isinstance(raw_help, str) else str(raw_help) + help_text = help_text.strip() if help_text.endswith((')', ']')): # Interpolate into existing parenthetical help_text = '{}; {}{}{}'.format( @@ -126,7 +132,8 @@ def negated(*args, **kwargs): long_kwargs['help'] = f'({additional_msg})' short_kwargs['help'] = argparse.SUPPRESS - long_action = short_action = None + long_action = None + short_action = None if long_flags: long_action = parser_like.add_argument(*long_flags, **long_kwargs) short_kwargs['dest'] = long_action.dest @@ -158,7 +165,10 @@ def negated(*args, **kwargs): return action -def get_cli_config(subtable, /, *args, **kwargs): +def get_cli_config( + subtable: str, /, + config: str | PathLike[str] | bool | None = None, + *, read_env: bool = True) -> ConfigSource: """ Get the ``tool.line_profiler.`` configs and normalize its keys (``some-key`` -> ``some_key``). 
@@ -175,28 +185,32 @@ def get_cli_config(subtable, /, *args, **kwargs): New :py:class:`~.line_profiler.toml_config.ConfigSource` instance """ - config = ConfigSource.from_config(*args, **kwargs).get_subconfig(subtable) - config.conf_dict = {key.replace('-', '_'): value - for key, value in config.conf_dict.items()} - return config + config_source = ConfigSource.from_config( + config, read_env=read_env).get_subconfig(subtable) + config_source.conf_dict = { + key.replace('-', '_'): value + for key, value in config_source.conf_dict.items()} + return config_source -def get_python_executable(): +def get_python_executable() -> str: """ Returns: str: command Command or path thereto corresponding to :py:data:`sys.executable`. """ - if os.path.samefile(shutil.which('python'), sys.executable): + python_path = shutil.which('python') + python3_path = shutil.which('python3') + if python_path and os.path.samefile(python_path, sys.executable): return 'python' - elif os.path.samefile(shutil.which('python3'), sys.executable): + elif python3_path and os.path.samefile(python3_path, sys.executable): return 'python3' else: return short_string_path(sys.executable) -def positive_float(value): +def positive_float(value: str) -> float: """ Arguments: value (str) @@ -214,7 +228,8 @@ def positive_float(value): return val -def boolean(value, *, fallback=None, invert=False): +def boolean(value: str, *, fallback: bool | None = None, + invert: bool = False) -> bool: """ Arguments: value (str) @@ -275,7 +290,7 @@ def boolean(value, *, fallback=None, invert=False): return fallback -def short_string_path(path): +def short_string_path(path: str | PathLike[str]) -> str: """ Arguments: path (str | os.PathLike[str]): @@ -288,11 +303,12 @@ def short_string_path(path): current directory. 
""" path = pathlib.Path(path) - paths = {str(path)} + paths: set[str] = {str(path)} abspath = path.absolute() paths.add(str(abspath)) try: paths.add(str(abspath.relative_to(path.cwd().absolute()))) except ValueError: # Not relative to the curdir pass - return min(paths, key=len) + paths_list = list(paths) + return cast(str, min(paths_list, key=len)) diff --git a/line_profiler/cli_utils.pyi b/line_profiler/cli_utils.pyi deleted file mode 100644 index 182efe98..00000000 --- a/line_profiler/cli_utils.pyi +++ /dev/null @@ -1,60 +0,0 @@ -""" -Shared utilities between the :command:`python -m line_profiler` and -:command:`kernprof` CLI tools. -""" -import argparse -import pathlib -from os import PathLike -from typing import Protocol, Sequence, Tuple, TypeVar - -from line_profiler.toml_config import ConfigSource - - -P_con = TypeVar('P_con', bound='ParserLike', contravariant=True) -A_co = TypeVar('A_co', bound='ActionLike', covariant=True) - - -class ActionLike(Protocol[P_con]): - def __call__(self, parser: P_con, - namespace: argparse.Namespace, - values: str | Sequence | None, - option_string: str | None = None) -> None: - ... - - def format_usage(self) -> str: - ... - - -class ParserLike(Protocol[A_co]): - def add_argument(self, arg: str, /, *args: str, **kwargs) -> A_co: - ... - - @property - def prefix_chars(self) -> str: - ... - - -def add_argument(parser_like: ParserLike[A_co], arg: str, /, *args: str, - hide_complementary_options: bool = True, **kwargs) -> A_co: - ... - - -def get_cli_config(subtable: str, /, *args, **kwargs) -> ConfigSource: - ... - - -def get_python_executable() -> str: - ... - - -def positive_float(value: str) -> float: - ... - - -def boolean(value: str, *, - fallback: bool | None = None, invert: bool = False) -> bool: - ... - - -def short_string_path(path: str | PathLike[str]) -> str: - ... 
diff --git a/line_profiler/explicit_profiler.py b/line_profiler/explicit_profiler.py index 787c4ac2..0d88ca42 100644 --- a/line_profiler/explicit_profiler.py +++ b/line_profiler/explicit_profiler.py @@ -170,7 +170,7 @@ def func4(): import pathlib import sys import typing -from typing import Any, Callable +from typing import Any, Callable, TypeVar if typing.TYPE_CHECKING: ConfigArg = str | pathlib.PurePath | bool | None @@ -181,6 +181,8 @@ def func4(): from .line_profiler import LineProfiler from .toml_config import ConfigSource +F = TypeVar('F', bound=Callable[..., Any]) + # The first process that enables profiling records its PID here. Child processes # created via multiprocessing (spawn/forkserver) inherit this environment value, # which helps prevent helper processes from claiming ownership and clobbering @@ -291,6 +293,7 @@ def __init__(self, config: ConfigArg = None) -> None: self._profile = None self._owner_pid = None self.enabled = None + # Configs: # - How to toggle the profiler self.setup_config = config_source.conf_dict['setup'] diff --git a/line_profiler/ipython_extension.py b/line_profiler/ipython_extension.py index 17a58867..4e99213f 100644 --- a/line_profiler/ipython_extension.py +++ b/line_profiler/ipython_extension.py @@ -32,6 +32,7 @@ .. |lprun_all| replace:: :py:data:`%%lprun_all ` .. 
|builtins| replace:: :py:mod:`__builtins__ ` """ +from __future__ import annotations import ast import builtins @@ -530,7 +531,7 @@ def lprun_all(self, parameter_s="", cell=""): # - `prof.add_function()` might have replaced the code # object, so retrieve it back from the dummy function mock_func = types.SimpleNamespace(__code__=code) - prof.add_function(mock_func) # type: ignore[arg-type] + prof.add_function(mock_func) code = mock_func.__code__ # Notes: # - We don't define `ip.user_global_ns` and `ip.user_ns` diff --git a/line_profiler/ipython_extension.pyi b/line_profiler/ipython_extension.pyi deleted file mode 100644 index 8bba105b..00000000 --- a/line_profiler/ipython_extension.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from IPython.core.magic import Magics -from . import LineProfiler - - -class LineProfilerMagics(Magics): - def lprun(self, parameter_s: str = ...) -> LineProfiler | None: - ... - - def lprun_all(self, - parameter_s: str = "", - cell: str = "") -> LineProfiler | None: - ... diff --git a/line_profiler/line_profiler.py b/line_profiler/line_profiler.py index 7bada629..399ebfd2 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -4,7 +4,10 @@ inspect its output. This depends on the :py:mod:`line_profiler._line_profiler` Cython backend. 
""" +from __future__ import annotations + import functools +import io import inspect import linecache import operator @@ -16,6 +19,9 @@ import tokenize from argparse import ArgumentParser from datetime import datetime +from os import PathLike +from typing import (TYPE_CHECKING, IO, Callable, Literal, Mapping, Protocol, + Sequence, TypeVar, cast, Tuple) try: from ._line_profiler import (LineProfiler as CLineProfiler, @@ -29,19 +35,35 @@ from .cli_utils import ( add_argument, get_cli_config, positive_float, short_string_path) from .profiler_mixin import ByCountProfilerMixin, is_c_level_callable -from .scoping_policy import ScopingPolicy +from .scoping_policy import ScopingPolicy, ScopingPolicyDict from .toml_config import ConfigSource +if TYPE_CHECKING: # pragma: no cover + from typing_extensions import ParamSpec, Self + + class _IPythonLike(Protocol): + def register_magics(self, magics: type) -> None: + ... + + PS = ParamSpec('PS') + _TimingsMap = Mapping[Tuple[str, int, str], list[Tuple[int, int, int]]] + T = TypeVar('T') + T_co = TypeVar('T_co', covariant=True) + +ColumnLiterals = Literal['line', 'hits', 'time', 'perhit', 'percent'] + # NOTE: This needs to be in sync with ../kernprof.py and __init__.py __version__ = '5.0.2' @functools.lru_cache() -def get_column_widths(config=False): +def get_column_widths( + config: bool | str | None = False +) -> Mapping[ColumnLiterals, int]: """ Arguments - config (bool | str | pathlib.PurePath | None) + config (bool | str | None) Passed to :py:meth:`.ConfigSource.from_config`. Note: * Results are cached. 
@@ -50,17 +72,20 @@ def get_column_widths(config=False): """ subconf = (ConfigSource.from_config(config) .get_subconfig('show', 'column_widths')) - return types.MappingProxyType(subconf.conf_dict) + return types.MappingProxyType( + cast(Mapping[ColumnLiterals, int], subconf.conf_dict)) -def load_ipython_extension(ip): +def load_ipython_extension(ip: object) -> None: """ API for IPython to recognize this module as an IPython extension. """ from .ipython_extension import LineProfilerMagics + if TYPE_CHECKING: + ip = cast(_IPythonLike, ip) ip.register_magics(LineProfilerMagics) -def get_code_block(filename, lineno): +def get_code_block(filename: os.PathLike[str] | str, lineno: int) -> list[str]: """ Get the lines in the code block in a file starting from required line number; understands Cython code. @@ -81,8 +106,7 @@ def get_code_block(filename, lineno): this repo since 2008 (`fb60664`_), so we will continue using it until we can't. - .. _fb60664: https://github.com/pyutils/line_profiler/commit/\ -fb60664135296ba6061cfaa2bb66d4ba77964c53 + .. _fb60664: https://github.com/pyutils/line_profiler/commit/fb60664135296ba6061cfaa2bb66d4ba77964c53 Example: @@ -147,7 +171,7 @@ def get_code_block(filename, lineno): namespace = inspect.getblock.__globals__ namespace['BlockFinder'] = _CythonBlockFinder try: - return inspect.getblock(linecache.getlines(filename)[lineno - 1:]) + return inspect.getblock(linecache.getlines(os.fspath(filename))[lineno - 1:]) finally: namespace['BlockFinder'] = BlockFinder @@ -163,14 +187,17 @@ class _CythonBlockFinder(inspect.BlockFinder): is public but undocumented API. See similar caveat in :py:func:`~.get_code_block`. 
""" - def tokeneater(self, type, token, *args, **kwargs): + def tokeneater( + self, type: int, token: str, + srowcol: tuple[int, int], erowcol: tuple[int, int], + line: str) -> None: if ( not self.started and type == tokenize.NAME and token in ('cdef', 'cpdef', 'property')): # Fudge the token to get the desired 'scoping' behavior token = 'def' - return super().tokeneater(type, token, *args, **kwargs) + return super().tokeneater(type, token, srowcol, erowcol, line) class _WrapperInfo: @@ -183,22 +210,31 @@ class _WrapperInfo: profiler_id (int) ID of the `LineProfiler`. """ - def __init__(self, func, profiler_id): + def __init__(self, func: types.FunctionType, profiler_id: int) -> None: self.func = func self.profiler_id = profiler_id +class _StatsLike(Protocol): + timings: _TimingsMap + unit: float + + class LineStats(CLineStats): - def __repr__(self): + timings: _TimingsMap + unit: float + + def __init__(self, timings: _TimingsMap, unit: float) -> None: + super().__init__(timings, unit) + + def __repr__(self) -> str: return '{}({}, {:.2G})'.format( type(self).__name__, self.timings, self.unit) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: """ Example: >>> from copy import deepcopy - >>> - >>> >>> stats1 = LineStats( ... {('foo', 1, 'spam.py'): [(2, 10, 300)], ... 
('bar', 10, 'spam.py'): @@ -222,7 +258,7 @@ def __eq__(self, other): return NotImplemented return True - def __add__(self, other): + def __add__(self, other: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -246,7 +282,7 @@ def __add__(self, other): timings, unit = self._get_aggregated_timings([self, other]) return type(self)(timings, unit) - def __iadd__(self, other): + def __iadd__(self, other: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -273,17 +309,26 @@ def __iadd__(self, other): self.timings, self.unit = self._get_aggregated_timings([self, other]) return self - def print(self, stream=None, **kwargs): - show_text(self.timings, self.unit, stream=stream, **kwargs) - - def to_file(self, filename): + def print( + self, stream: io.TextIOBase | None = None, + output_unit: float | None = None, + stripzeros: bool = False, details: bool = True, + summarize: bool = False, sort: bool = False, rich: bool = False, + *, config: str | PathLike[str] | bool | None = None) -> None: + show_text(self.timings, self.unit, output_unit=output_unit, + stream=stream, stripzeros=stripzeros, details=details, + summarize=summarize, sort=sort, rich=rich, config=config) + + def to_file(self, filename: PathLike[str] | str) -> None: """ Pickle the instance to the given filename. """ with open(filename, 'wb') as f: pickle.dump(self, f, pickle.HIGHEST_PROTOCOL) @classmethod - def from_files(cls, file, /, *files): + def from_files( + cls, file: PathLike[str] | str, /, + *files: PathLike[str] | str) -> Self: """ Utility function to load an instance from the given filenames. 
""" @@ -294,7 +339,9 @@ def from_files(cls, file, /, *files): return cls.from_stats_objects(*stats_objs) @classmethod - def from_stats_objects(cls, stats, /, *more_stats): + def from_stats_objects( + cls, stats: _StatsLike, /, + *more_stats: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -330,7 +377,6 @@ def _get_aggregated_timings(stats_objs): # rounding errors stats_objs = sorted(stats_objs, key=operator.attrgetter('unit')) unit = stats_objs[-1].unit - # type: dict[tuple[str, int, int], dict[int, tuple[int, float]] timing_dict = {} for stats in stats_objs: factor = stats.unit / unit @@ -368,7 +414,8 @@ class LineProfiler(CLineProfiler, ByCountProfilerMixin): >>> func() >>> profile.print_stats() """ - def __call__(self, func): + + def __call__(self, func: Callable) -> Callable: """ Decorate a function, method, :py:class:`property`, :py:func:`~functools.partial` object etc. to start the profiler @@ -384,12 +431,15 @@ def __call__(self, func): self.add_callable(func) return self.wrap_callable(func) - def wrap_callable(self, func): + def wrap_callable(self, func: Callable) -> Callable: if is_c_level_callable(func): # Non-profilable return func return super().wrap_callable(func) - def add_callable(self, func, guard=None, name=None): + def add_callable( + self, func: object, + guard: Callable[[Callable], bool] | None = None, + name: str | None = None) -> Literal[0, 1]: """ Register a function, method, :py:class:`property`, :py:func:`~functools.partial` object, etc. with the underlying @@ -398,7 +448,7 @@ def add_callable(self, func, guard=None, name=None): Args: func (...): Function, class/static/bound method, property, etc. - guard (Optional[Callable[[types.FunctionType], bool]]) + guard (Optional[Callable[[Callable], bool]]) Optional checker callable, which takes a function object and returns true(-y) if it *should not* be passed to :py:meth:`.add_function()`. 
Defaults to checking @@ -461,18 +511,21 @@ def _debug(self, msg): msg = f'{self_repr}: {msg}' logger.debug(msg) - def get_stats(self): + def get_stats(self) -> LineStats: return LineStats.from_stats_objects(super().get_stats()) - def dump_stats(self, filename): + def dump_stats(self, filename: os.PathLike[str] | str) -> None: """ Dump a representation of the data to a file as a pickled :py:class:`~.LineStats` object from :py:meth:`~.get_stats()`. """ self.get_stats().to_file(filename) - def print_stats(self, stream=None, output_unit=None, stripzeros=False, - details=True, summarize=False, sort=False, rich=False, *, - config=None): + def print_stats( + self, stream: io.TextIOBase | None = None, + output_unit: float | None = None, stripzeros: bool = False, + details: bool = True, summarize: bool = False, + sort: bool = False, rich: bool = False, *, + config: str | PathLike[str] | bool | None = None) -> None: """ Show the gathered statistics. """ self.get_stats().print( @@ -481,13 +534,16 @@ def print_stats(self, stream=None, output_unit=None, stripzeros=False, sort=sort, rich=rich, config=config) def _add_namespace( - self, namespace, *, - seen=None, - func_scoping_policy=ScopingPolicy.NONE, - class_scoping_policy=ScopingPolicy.NONE, - module_scoping_policy=ScopingPolicy.NONE, - wrap=False, - name=None): + self, namespace: type | types.ModuleType, *, + seen: set[int] | None = None, + func_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.NONE), + class_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.NONE), + module_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.NONE), + wrap: bool = False, + name: str | None = None) -> int: def func_guard(func): return self._already_a_wrapper(func) or not func_check(func) @@ -546,7 +602,10 @@ def func_guard(func): self._repr_for_log(namespace, name))) return count - def add_class(self, cls, *, scoping_policy=None, wrap=False): + def add_class( + self, cls: type, *, + 
scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> int: """ Add the members (callables (wrappers), methods, classes, ...) in a class' local namespace and profile them. @@ -591,7 +650,10 @@ def add_class(self, cls, *, scoping_policy=None, wrap=False): module_scoping_policy=policies['module'], wrap=wrap) - def add_module(self, mod, *, scoping_policy=None, wrap=False): + def add_module( + self, mod: types.ModuleType, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> int: """ Add the members (callables (wrappers), methods, classes, ...) in a module's local namespace and profile them. @@ -658,7 +720,7 @@ def _mark_wrapper(self, wrapper): # This could be in the ipython_extension submodule, # but it doesn't depend on the IPython module so it's easier to just let it stay here. -def is_generated_code(filename): +def is_generated_code(filename: str) -> bool: """ Return True if a filename corresponds to generated code, such as a Jupyter Notebook cell. """ @@ -672,10 +734,13 @@ def is_generated_code(filename): ) -def show_func(filename, start_lineno, func_name, timings, unit, - output_unit=None, stream=None, stripzeros=False, rich=False, +def show_func(filename: str, start_lineno: int, func_name: str, + timings: Sequence[tuple[int, int, int | float]], unit: float, + output_unit: float | None = None, + stream: io.TextIOBase | None = None, + stripzeros: bool = False, rich: bool = False, *, - config=None): + config: str | PathLike[str] | bool | None = None) -> None: """ Show results for a single function. @@ -740,7 +805,7 @@ def show_func(filename, start_lineno, func_name, timings, unit, ... 
output_unit, stream, stripzeros, rich) """ if stream is None: - stream = sys.stdout + stream = cast(io.TextIOBase, sys.stdout) total_hits = sum(t[1] for t in timings) total_time = sum(t[2] for t in timings) @@ -752,13 +817,16 @@ def show_func(filename, start_lineno, func_name, timings, unit, # References: # https://github.com/Textualize/rich/discussions/3076 try: - from rich.syntax import Syntax - from rich.highlighter import ReprHighlighter - from rich.text import Text - from rich.console import Console - from rich.table import Table + import importlib + + Syntax = importlib.import_module('rich.syntax').Syntax + ReprHighlighter = importlib.import_module( + 'rich.highlighter').ReprHighlighter + Text = importlib.import_module('rich.text').Text + Console = importlib.import_module('rich.console').Console + Table = importlib.import_module('rich.table').Table except ImportError: - rich = 0 + rich = False if output_unit is None: output_unit = unit @@ -785,6 +853,8 @@ def show_func(filename, start_lineno, func_name, timings, unit, sublines = [''] * nlines # Define minimum column sizes so text fits and usually looks consistent + if isinstance(config, os.PathLike): + config = os.fspath(config) conf_column_sizes = get_column_widths(config) default_column_sizes = { col: max(width, conf_column_sizes.get(col, width)) @@ -824,18 +894,18 @@ def show_func(filename, start_lineno, func_name, timings, unit, column_sizes['time'] = max(column_sizes['time'], max_timelen) column_sizes['perhit'] = max(column_sizes['perhit'], max_perhitlen) - col_order = ['line', 'hits', 'time', 'perhit', 'percent'] + col_order: list[ColumnLiterals] = ['line', 'hits', 'time', 'perhit', 'percent'] lhs_template = ' '.join(['%' + str(column_sizes[k]) + 's' for k in col_order]) template = lhs_template + ' %-s' - linenos = range(start_lineno, start_lineno + len(sublines)) + linenos = list(range(start_lineno, start_lineno + len(sublines))) empty = ('', '', '', '') header = ('Line #', 'Hits', 'Time', 'Per Hit', 
'% Time', 'Line Contents') - header = template % header + header_line = template % header stream.write('\n') - stream.write(header) + stream.write(header_line) stream.write('\n') - stream.write('=' * len(header)) + stream.write('=' * len(header_line)) stream.write('\n') if rich: @@ -843,8 +913,8 @@ def show_func(filename, start_lineno, func_name, timings, unit, lhs_lines = [] rhs_lines = [] for lineno, line in zip(linenos, sublines): - nhits, time, per_hit, percent = display.get(lineno, empty) - txt = lhs_template % (lineno, nhits, time, per_hit, percent) + nhits_s, time_s, per_hit_s, percent_s = display.get(lineno, empty) + txt = lhs_template % (lineno, nhits_s, time_s, per_hit_s, percent_s) rhs_lines.append(line.rstrip('\n').rstrip('\r')) lhs_lines.append(txt) @@ -874,30 +944,36 @@ def show_func(filename, start_lineno, func_name, timings, unit, # Use a Console to render to the stream # Not sure if we should force-terminal or just specify the color system # write_console = Console(file=stream, force_terminal=True, soft_wrap=True) - write_console = Console(file=stream, soft_wrap=True, color_system='standard') + write_console = Console( + file=cast(IO[str], stream), + soft_wrap=True, + color_system='standard') write_console.print(table) stream.write('\n') else: for lineno, line in zip(linenos, sublines): - nhits, time, per_hit, percent = display.get(lineno, empty) + nhits_s, time_s, per_hit_s, percent_s = display.get(lineno, empty) line_ = line.rstrip('\n').rstrip('\r') - txt = template % (lineno, nhits, time, per_hit, percent, line_) + txt = template % (lineno, nhits_s, time_s, per_hit_s, percent_s, line_) try: stream.write(txt) except UnicodeEncodeError: # todo: better handling of windows encoding issue # for now just work around it line_ = 'UnicodeEncodeError - help wanted for a fix' - txt = template % (lineno, nhits, time, per_hit, percent, line_) + txt = template % (lineno, nhits_s, time_s, per_hit_s, percent_s, line_) stream.write(txt) stream.write('\n') 
stream.write('\n') -def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, - details=True, summarize=False, sort=False, rich=False, *, - config=None): +def show_text(stats: _TimingsMap, unit: float, + output_unit: float | None = None, + stream: io.TextIOBase | None = None, + stripzeros: bool = False, details: bool = True, + summarize: bool = False, sort: bool = False, rich: bool = False, + *, config: str | PathLike[str] | bool | None = None) -> None: """ Show text for the given timings. @@ -912,7 +988,7 @@ def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, python -m line_profiler -mtz "uuid.lprof" """ if stream is None: - stream = sys.stdout + stream = cast(io.TextIOBase, sys.stdout) if output_unit is not None: stream.write('Timer unit: %g s\n\n' % output_unit) @@ -924,7 +1000,7 @@ def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, stats_order = sorted(stats.items(), key=lambda kv: sum(t[2] for t in kv[1])) else: # Default ordering - stats_order = stats.items() + stats_order = list(stats.items()) # Pre-lookup the appropriate config file config = ConfigSource.from_config(config).path @@ -940,13 +1016,15 @@ def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, # Summarize the total time for each function if rich: try: - from rich.console import Console - from rich.markup import escape + import importlib + + Console = importlib.import_module('rich.console').Console + escape = importlib.import_module('rich.markup').escape except ImportError: - rich = 0 + rich = False line_template = '%6.2f seconds - %s:%s - %s' if rich: - write_console = Console(file=stream, soft_wrap=True, + write_console = Console(file=cast(IO[str], stream), soft_wrap=True, color_system='standard') for (fn, lineno, name), timings in stats_order: total_time = sum(t[2] for t in timings) * unit @@ -967,7 +1045,7 @@ def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, load_stats = 
LineStats.from_files -def main(): +def main() -> None: """ The line profiler CLI to view output from :command:`kernprof -l`. """ diff --git a/line_profiler/line_profiler.pyi b/line_profiler/line_profiler.pyi deleted file mode 100644 index 0a9bdbb0..00000000 --- a/line_profiler/line_profiler.pyi +++ /dev/null @@ -1,198 +0,0 @@ -import io -from functools import cached_property, partial, partialmethod -from os import PathLike -from types import FunctionType, ModuleType -from typing import (TYPE_CHECKING, - overload, - Callable, Mapping, - Literal, Self, - Protocol, TypeVar) -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - ParamSpec) -except ImportError: - from typing_extensions import ParamSpec # noqa: F401 -from _typeshed import Incomplete -from ._line_profiler import (LineProfiler as CLineProfiler, - LineStats as CLineStats) -from .profiler_mixin import ByCountProfilerMixin, CLevelCallable -from .scoping_policy import ScopingPolicy, ScopingPolicyDict - -if TYPE_CHECKING: - from .profiler_mixin import UnparametrizedCallableLike - - -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -PS = ParamSpec('PS') - - -def get_column_widths( - config: bool | str | PathLike[str] | None = False) -> Mapping[ - Literal['line', 'hits', 'time', 'perhit', 'percent'], int]: - ... - - -def load_ipython_extension(ip) -> None: - ... - - -class _StatsLike(Protocol): - timings: Mapping[tuple[str, int, str], # funcname, lineno, filename - list[tuple[int, int, int]]] # lineno, nhits, time - unit: float - - -class LineStats(CLineStats): - def to_file(self, filename: PathLike[str] | str) -> None: - ... - - def print(self, stream: Incomplete | None = None, **kwargs) -> None: - ... - - @classmethod - def from_files(cls, file: PathLike[str] | str, /, - *files: PathLike[str] | str) -> Self: - ... - - @classmethod - def from_stats_objects(cls, stats: _StatsLike, /, - *more_stats: _StatsLike) -> Self: - ... - - def __repr__(self) -> str: - ... 
- - def __eq__(self, other) -> bool: - ... - - def __add__(self, other: _StatsLike) -> Self: - ... - - def __iadd__(self, other: _StatsLike) -> Self: - ... - - -class LineProfiler(CLineProfiler, ByCountProfilerMixin): - @overload - def __call__(self, # type: ignore[overload-overlap] - func: CLevelCallable) -> CLevelCallable: - ... - - @overload - def __call__( # type: ignore[overload-overlap] - self, func: UnparametrizedCallableLike, - ) -> UnparametrizedCallableLike: - ... - - @overload - def __call__(self, # type: ignore[overload-overlap] - func: type[T]) -> type[T]: - ... - - @overload - def __call__(self, # type: ignore[overload-overlap] - func: partial[T]) -> partial[T]: - ... - - @overload - def __call__(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - @overload - def __call__(self, func: cached_property[T_co]) -> cached_property[T_co]: - ... - - @overload - def __call__(self, # type: ignore[overload-overlap] - func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - @overload - def __call__( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... - - # Fallback: just wrap the `.__call__()` of a generic callable - - @overload - def __call__(self, func: Callable) -> Callable: - ... - - def add_callable( - self, func, - guard: Callable[[FunctionType], bool] | None = None, - name: str | None = None) -> Literal[0, 1]: - ... - - def get_stats(self) -> LineStats: - ... - - def dump_stats(self, filename) -> None: - ... - - def print_stats(self, - stream: Incomplete | None = ..., - output_unit: Incomplete | None = ..., - stripzeros: bool = ..., - details: bool = ..., - summarize: bool = ..., - sort: bool = ..., - rich: bool = ..., - *, - config: str | PathLike[str] | bool | None = None) -> None: - ... - - def add_module( - self, mod: ModuleType, *, - scoping_policy: ( - ScopingPolicy | str | ScopingPolicyDict | None) = None, - wrap: bool = False) -> int: - ... 
- - def add_class( - self, cls: type, *, - scoping_policy: ( - ScopingPolicy | str | ScopingPolicyDict | None) = None, - wrap: bool = False) -> int: - ... - - -def is_generated_code(filename): - ... - - -def show_func(filename: str, - start_lineno: int, - func_name: str, - timings: list[tuple[int, int, float]], - unit: float, - output_unit: float | None = None, - stream: io.TextIOBase | None = None, - stripzeros: bool = False, - rich: bool = False, - *, - config: str | PathLike[str] | bool | None = None) -> None: - ... - - -def show_text(stats, - unit, - output_unit: Incomplete | None = ..., - stream: Incomplete | None = ..., - stripzeros: bool = ..., - details: bool = ..., - summarize: bool = ..., - sort: bool = ..., - rich: bool = ..., - *, - config: str | PathLike[str] | bool | None = None) -> None: - ... - - -load_stats = LineStats.from_files - - -def main(): - ... diff --git a/line_profiler/line_profiler_utils.py b/line_profiler/line_profiler_utils.py index 0d903888..8d15f060 100644 --- a/line_profiler/line_profiler_utils.py +++ b/line_profiler/line_profiler_utils.py @@ -1,7 +1,13 @@ """ Miscellaneous utilities that :py:mod:`line_profiler` uses. """ +from __future__ import annotations + import enum +import typing + +if typing.TYPE_CHECKING: + from typing_extensions import Self class _StrEnumBase(str, enum.Enum): @@ -28,17 +34,17 @@ class _StrEnumBase(str, enum.Enum): ValueError: 'baz' is not a valid MyEnum """ @staticmethod - def _generate_next_value_(name, *_, **__): + def _generate_next_value_(name: str, *_, **__) -> str: return name.lower() - def __eq__(self, other): + def __eq__(self, other: object) -> bool: return self.value == other - def __str__(self): + def __str__(self) -> str: return self.value -class StringEnum(getattr(enum, 'StrEnum', _StrEnumBase)): +class StringEnum(getattr(enum, 'StrEnum', _StrEnumBase)): # type: ignore[misc] """ Convenience wrapper around :py:class:`enum.StrEnum`. 
@@ -65,7 +71,7 @@ class StringEnum(getattr(enum, 'StrEnum', _StrEnumBase)): 'bar' """ @classmethod - def _missing_(cls, value): + def _missing_(cls, value: object) -> Self | None: if not isinstance(value, str): return None members = {name.casefold(): instance diff --git a/line_profiler/line_profiler_utils.pyi b/line_profiler/line_profiler_utils.pyi deleted file mode 100644 index a510cf94..00000000 --- a/line_profiler/line_profiler_utils.pyi +++ /dev/null @@ -1,26 +0,0 @@ -import enum -try: - from typing import Self # type: ignore[attr-defined] # noqa: F401 -except ImportError: # Python < 3.11 - from typing_extensions import Self # noqa: F401 - - -# Note: `mypy` tries to read this class as a free-standing enum -# (instead of an `enum.Enum` subclass that string enums are to inherit -# from), and complains that it has no members -- so silence that - - -class StringEnum(str, enum.Enum): # type: ignore[misc] - @staticmethod - def _generate_next_value_(name: str, *_, **__) -> str: - ... - - def __eq__(self, other) -> bool: - ... - - def __str__(self) -> str: - ... - - @classmethod - def _missing_(cls, value) -> Self | None: - ... 
diff --git a/line_profiler/profiler_mixin.py b/line_profiler/profiler_mixin.py index 3bc98c1d..1f66773d 100644 --- a/line_profiler/profiler_mixin.py +++ b/line_profiler/profiler_mixin.py @@ -1,8 +1,14 @@ +from __future__ import annotations + import functools import inspect import types +from functools import cached_property, partial, partialmethod from sys import version_info +from typing import (TYPE_CHECKING, Any, Callable, Mapping, Protocol, TypeVar, + cast, Sequence) from warnings import warn +from ._line_profiler import label from .scoping_policy import ScopingPolicy @@ -26,8 +32,109 @@ # https://cython.readthedocs.io/en/latest/src/tutorial/profiling_tutorial.html _CANNOT_LINE_TRACE_CYTHON = (3, 12) <= version_info < (3, 13, 0, 'beta', 1) - -def is_c_level_callable(func): +if TYPE_CHECKING: + from typing_extensions import ParamSpec, TypeIs + UnparametrizedCallableLike = TypeVar( + 'UnparametrizedCallableLike', + types.FunctionType, property, types.MethodType) + T = TypeVar('T') + T_co = TypeVar('T_co', covariant=True) + PS = ParamSpec('PS') + + class CythonCallable(Protocol[PS, T_co]): + def __call__(self, *args: PS.args, **kwargs: PS.kwargs) -> T_co: + ... + + @property + def __code__(self) -> types.CodeType: + ... + + @property + def func_code(self) -> types.CodeType: + ... + + @property + def __name__(self) -> str: + ... + + @property + def func_name(self) -> str: + ... + + @property + def __qualname__(self) -> str: + ... + + @property + def __doc__(self) -> str | None: + ... + + @__doc__.setter + def __doc__(self, doc: str | None) -> None: + ... + + @property + def func_doc(self) -> str | None: + ... + + @property + def __globals__(self) -> dict[str, Any]: + ... + + @property + def func_globals(self) -> dict[str, Any]: + ... + + @property + def __dict__(self) -> dict[str, Any]: + ... + + @__dict__.setter + def __dict__(self, dict: dict[str, Any]) -> None: + ... + + @property + def func_dict(self) -> dict[str, Any]: + ... 
+ + @property + def __annotations__(self) -> dict[str, Any]: + ... + + @__annotations__.setter + def __annotations__(self, annotations: dict[str, Any]) -> None: + ... + + @property + def __defaults__(self): + ... + + @property + def func_defaults(self): + ... + + @property + def __kwdefaults__(self): + ... + + @property + def __closure__(self): + ... + + @property + def func_closure(self): + ... +else: + CythonCallable = type(label) + +CLevelCallable = TypeVar( + 'CLevelCallable', + types.BuiltinFunctionType, types.BuiltinMethodType, + types.ClassMethodDescriptorType, types.MethodDescriptorType, + types.MethodWrapperType, types.WrapperDescriptorType) + + +def is_c_level_callable(func: Any) -> TypeIs[CLevelCallable]: """ Returns: func_is_c_level (bool): @@ -37,7 +144,7 @@ def is_c_level_callable(func): return isinstance(func, C_LEVEL_CALLABLE_TYPES) -def is_cython_callable(func): +def is_cython_callable(func: Any) -> TypeIs[CythonCallable]: if not callable(func): return False # Note: don't directly check against a Cython function type, since @@ -48,31 +155,31 @@ def is_cython_callable(func): in ('cython_function_or_method', 'fused_cython_function')) -def is_classmethod(f): +def is_classmethod(f: Any) -> TypeIs[classmethod]: return isinstance(f, classmethod) -def is_staticmethod(f): +def is_staticmethod(f: Any) -> TypeIs[staticmethod]: return isinstance(f, staticmethod) -def is_boundmethod(f): +def is_boundmethod(f: Any) -> TypeIs[types.MethodType]: return isinstance(f, types.MethodType) -def is_partialmethod(f): +def is_partialmethod(f: Any) -> TypeIs[partialmethod]: return isinstance(f, functools.partialmethod) -def is_partial(f): +def is_partial(f: Any) -> TypeIs[partial]: return isinstance(f, functools.partial) -def is_property(f): +def is_property(f: Any) -> TypeIs[property]: return isinstance(f, property) -def is_cached_property(f): +def is_cached_property(f: Any) -> TypeIs[cached_property]: return isinstance(f, functools.cached_property) @@ -86,7 +193,14 @@ 
class ByCountProfilerMixin: Used by :py:class:`line_profiler.line_profiler.LineProfiler` and :py:class:`kernprof.ContextualProfile`. """ - def wrap_callable(self, func): + + def enable_by_count(self) -> None: # pragma: no cover - implemented in C + raise NotImplementedError + + def disable_by_count(self) -> None: # pragma: no cover - implemented in C + raise NotImplementedError + + def wrap_callable(self, func: Callable) -> Callable: """ Decorate a function to start the profiler on function entry and stop it on function exit. @@ -119,7 +233,8 @@ def wrap_callable(self, func): 'callable wrapper') @classmethod - def get_underlying_functions(cls, func): + def get_underlying_functions( + cls, func: object) -> list[types.FunctionType | CythonCallable]: """ Get the underlying function objects of a callable or an adjacent object. @@ -127,27 +242,39 @@ def get_underlying_functions(cls, func): Returns: funcs (list[Callable]) """ - return cls._get_underlying_functions(func) + result = [] + for impl in cls._get_underlying_functions(func): + # Include FunctionType and CythonCallable, but not type objects + if isinstance(impl, types.FunctionType) or is_cython_callable(impl): + result.append(impl) + return result @classmethod - def _get_underlying_functions(cls, func, seen=None, stop_at_classes=False): + def _get_underlying_functions( + cls, func: object, seen: set[int] | None = None, + stop_at_classes: bool = False + ) -> Sequence[Callable]: if seen is None: seen = set() - kwargs = {'seen': seen, 'stop_at_classes': stop_at_classes} # Extract inner functions - if any(check(func) - for check in (is_boundmethod, is_classmethod, is_staticmethod)): - return cls._get_underlying_functions(func.__func__, **kwargs) - if any(check(func) - for check in (is_partial, is_partialmethod, is_cached_property)): - return cls._get_underlying_functions(func.func, **kwargs) + if is_boundmethod(func): + return cls._get_underlying_functions( + func.__func__, seen=seen, stop_at_classes=stop_at_classes) 
+ if is_classmethod(func) or is_staticmethod(func): + return cls._get_underlying_functions( + func.__func__, seen=seen, stop_at_classes=stop_at_classes) + if is_partial(func) or is_partialmethod(func) or is_cached_property(func): + return cls._get_underlying_functions( + func.func, seen=seen, stop_at_classes=stop_at_classes) # Dispatch to specific handlers if is_property(func): - return cls._get_underlying_functions_from_property(func, **kwargs) + return cls._get_underlying_functions_from_property( + func, seen, stop_at_classes) if isinstance(func, type): if stop_at_classes: return [func] - return cls._get_underlying_functions_from_type(func, **kwargs) + return cls._get_underlying_functions_from_type( + func, seen, stop_at_classes) # Otherwise, the object should either be a function... if not callable(func): raise TypeError(f'func = {func!r}: ' @@ -165,12 +292,14 @@ def _get_underlying_functions(cls, func, seen=None, stop_at_classes=False): func = type(func).__call__ if is_c_level_callable(func): # Can happen with builtin types return [] - return [func] + return [cast(types.FunctionType, func)] @classmethod def _get_underlying_functions_from_property( - cls, prop, seen, stop_at_classes): - result = [] + cls, prop: property, seen: set[int], + stop_at_classes: bool + ) -> Sequence[Callable]: + result: list[Callable] = [] for impl in prop.fget, prop.fset, prop.fdel: if impl is not None: result.extend( @@ -178,8 +307,11 @@ def _get_underlying_functions_from_property( return result @classmethod - def _get_underlying_functions_from_type(cls, kls, seen, stop_at_classes): - result = [] + def _get_underlying_functions_from_type( + cls, kls: type, seen: set[int], + stop_at_classes: bool + ) -> Sequence[Callable]: + result: list[Callable] = [] get_filter = cls._class_scoping_policy.get_filter func_check = get_filter(kls, 'func') cls_check = get_filter(kls, 'class') @@ -515,4 +647,5 @@ def __exit__(self, *_, **__): self.disable_by_count() _profiler_wrapped_marker = 
'__line_profiler_id__' - _class_scoping_policy = ScopingPolicy.CHILDREN + _class_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.CHILDREN) diff --git a/line_profiler/profiler_mixin.pyi b/line_profiler/profiler_mixin.pyi deleted file mode 100644 index ba7a9d3a..00000000 --- a/line_profiler/profiler_mixin.pyi +++ /dev/null @@ -1,271 +0,0 @@ -from functools import cached_property, partial, partialmethod -from types import (CodeType, FunctionType, MethodType, - BuiltinFunctionType, BuiltinMethodType, - ClassMethodDescriptorType, MethodDescriptorType, - MethodWrapperType, WrapperDescriptorType) -from typing import (TYPE_CHECKING, overload, - Any, Callable, Mapping, Protocol, TypeVar) -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - ParamSpec) -except ImportError: # Python < 3.10 - from typing_extensions import ParamSpec # noqa: F401 -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - Self) -except ImportError: # Python < 3.11 - from typing_extensions import Self # noqa: F401 -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - TypeIs) -except ImportError: # Python < 3.13 - from typing_extensions import TypeIs # noqa: F401 -from ._line_profiler import label - - -UnparametrizedCallableLike = TypeVar('UnparametrizedCallableLike', - FunctionType, property, MethodType) -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -PS = ParamSpec('PS') - -if TYPE_CHECKING: - class CythonCallable(Protocol[PS, T_co]): - def __call__(self, *args: PS.args, **kwargs: PS.kwargs) -> T_co: - ... - - @property - def __code__(self) -> CodeType: - ... - - @property - def func_code(self) -> CodeType: - ... - - @property - def __name__(self) -> str: - ... - - @property - def func_name(self) -> str: - ... - - @property - def __qualname__(self) -> str: - ... - - @property - def __doc__(self) -> str | None: - ... - - @__doc__.setter - def __doc__(self, doc: str | None) -> None: - ... 
- - @property - def func_doc(self) -> str | None: - ... - - @property - def __globals__(self) -> dict[str, Any]: - ... - - @property - def func_globals(self) -> dict[str, Any]: - ... - - @property - def __dict__(self) -> dict[str, Any]: - ... - - @__dict__.setter - def __dict__(self, dict: dict[str, Any]) -> None: - ... - - @property - def func_dict(self) -> dict[str, Any]: - ... - - @property - def __annotations__(self) -> dict[str, Any]: - ... - - @__annotations__.setter - def __annotations__(self, annotations: dict[str, Any]) -> None: - ... - - @property - def __defaults__(self): - ... - - @property - def func_defaults(self): - ... - - @property - def __kwdefaults__(self): - ... - - @property - def __closure__(self): - ... - - @property - def func_closure(self): - ... - - -else: - CythonCallable = type(label) - -CLevelCallable = TypeVar('CLevelCallable', - BuiltinFunctionType, BuiltinMethodType, - ClassMethodDescriptorType, MethodDescriptorType, - MethodWrapperType, WrapperDescriptorType) - - -def is_c_level_callable(func: Any) -> TypeIs[CLevelCallable]: - ... - - -def is_cython_callable(func: Any) -> TypeIs[CythonCallable]: - ... - - -def is_classmethod(f: Any) -> TypeIs[classmethod]: - ... - - -def is_staticmethod(f: Any) -> TypeIs[staticmethod]: - ... - - -def is_boundmethod(f: Any) -> TypeIs[MethodType]: - ... - - -def is_partialmethod(f: Any) -> TypeIs[partialmethod]: - ... - - -def is_partial(f: Any) -> TypeIs[partial]: - ... - - -def is_property(f: Any) -> TypeIs[property]: - ... - - -def is_cached_property(f: Any) -> TypeIs[cached_property]: - ... - - -class ByCountProfilerMixin: - def get_underlying_functions(self, func) -> list[FunctionType]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: CLevelCallable) -> CLevelCallable: - ... - - @overload - def wrap_callable( # type: ignore[overload-overlap] - self, func: UnparametrizedCallableLike, - ) -> UnparametrizedCallableLike: - ... 
- - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: type[T]) -> type[T]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: partial[T]) -> partial[T]: - ... - - @overload - def wrap_callable(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - @overload - def wrap_callable(self, - func: cached_property[T_co]) -> cached_property[T_co]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - @overload - def wrap_callable( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... - - # Fallback: just return a wrapper function around a generic callable - - @overload - def wrap_callable(self, func: Callable) -> FunctionType: - ... - - def wrap_classmethod( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... - - def wrap_staticmethod( - self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - def wrap_boundmethod(self, func: MethodType) -> MethodType: - ... - - def wrap_partialmethod(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - def wrap_partial(self, func: partial[T]) -> partial[T]: - ... - - def wrap_property(self, func: property) -> property: - ... - - def wrap_cached_property( - self, func: cached_property[T_co]) -> cached_property[T_co]: - ... - - def wrap_async_generator(self, func: FunctionType) -> FunctionType: - ... - - def wrap_coroutine(self, func: FunctionType) -> FunctionType: - ... - - def wrap_generator(self, func: FunctionType) -> FunctionType: - ... - - def wrap_function(self, func: Callable) -> FunctionType: - ... - - def wrap_class(self, func: type[T]) -> type[T]: - ... - - def run(self, cmd: str) -> Self: - ... - - def runctx(self, - cmd: str, - globals: dict[str, Any] | None, - locals: Mapping[str, Any] | None) -> Self: - ... 
- - def runcall(self, func: Callable[PS, T], /, - *args: PS.args, **kw: PS.kwargs) -> T: - ... - - def __enter__(self) -> Self: - ... - - def __exit__(self, *_, **__) -> None: - ... diff --git a/line_profiler/scoping_policy.py b/line_profiler/scoping_policy.py index cedf51e9..b9c9a579 100644 --- a/line_profiler/scoping_policy.py +++ b/line_profiler/scoping_policy.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from enum import auto -from types import MappingProxyType, ModuleType -from typing import Union, TypedDict +from types import FunctionType, MappingProxyType, ModuleType +from typing import Callable, Literal, TypedDict, cast, overload, Union from .line_profiler_utils import StringEnum @@ -11,8 +13,13 @@ #: * Descend ingo sibling and descendant classes #: (:py:attr:`ScopingPolicy.SIBLINGS`) #: * Don't descend into modules (:py:attr:`ScopingPolicy.EXACT`) -DEFAULT_SCOPING_POLICIES = MappingProxyType( - {'func': 'siblings', 'class': 'siblings', 'module': 'exact'}) +DEFAULT_SCOPING_POLICIES: ScopingPolicyDict = { + 'func': 'siblings', + 'class': 'siblings', + 'module': 'exact' +} + + class ScopingPolicy(StringEnum): @@ -97,7 +104,7 @@ class ScopingPolicy(StringEnum): # Verification - def __init_subclass__(cls, *args, **kwargs): + def __init_subclass__(cls, *args: object, **kwargs: object) -> None: """ Call :py:meth:`_check_class`. """ @@ -105,7 +112,7 @@ def __init_subclass__(cls, *args, **kwargs): cls._check_class() @classmethod - def _check_class(cls): + def _check_class(cls) -> None: """ Verify that :py:meth:`.get_filter` return a callable for all policy values and object types. @@ -122,7 +129,25 @@ class MockClass: # Filtering - def get_filter(self, namespace, obj_type): + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['func']) -> Callable[[Callable], bool]: + ... + + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['class']) -> Callable[[type], bool]: + ... 
+ + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['module']) -> Callable[[ModuleType], bool]: + ... + + def get_filter(self, namespace: type | ModuleType, obj_type: str): """ Args: namespace (Union[type, types.ModuleType]): @@ -149,22 +174,24 @@ def get_filter(self, namespace, obj_type): if obj_type == 'module': if is_class: return self._return_const(False) - return self._get_module_filter_in_module(namespace) + return self._get_module_filter_in_module(cast(ModuleType, namespace)) if is_class: - method = self._get_callable_filter_in_class - else: - method = self._get_callable_filter_in_module - return method(namespace, is_class=(obj_type == 'class')) + return self._get_callable_filter_in_class( + cast(type, namespace), is_class=(obj_type == 'class')) + return self._get_callable_filter_in_module( + cast(ModuleType, namespace), is_class=(obj_type == 'class')) @classmethod - def to_policies(cls, policies=None): + def to_policies( + cls, + policies: str | ScopingPolicy | ScopingPolicyDict | None = None + ) -> _ScopingPolicyDict: """ Normalize ``policies`` into a dictionary of policies for various object types. Args: - policies (Union[str, ScopingPolicy, \ -ScopingPolicyDict, None]): + policies (Union[str, ScopingPolicy, ScopingPolicyDict, None]): :py:class:`ScopingPolicy`, string convertible thereto (case-insensitive), or a mapping containing such values and the keys as outlined in the return value; @@ -172,8 +199,7 @@ def to_policies(cls, policies=None): :py:data:`DEFAULT_SCOPING_POLICIES`. 
Returns: - normalized_policies (dict[Literal['func', 'class', \ -'module'], ScopingPolicy]): + normalized_policies (dict[Literal['func', 'class', 'module'], ScopingPolicy]): Dictionary with the following key-value pairs: ``'func'`` @@ -215,97 +241,119 @@ def to_policies(cls, policies=None): policies = DEFAULT_SCOPING_POLICIES if isinstance(policies, str): policy = cls(policies) - return _ScopingPolicyDict( - dict.fromkeys(['func', 'class', 'module'], policy)) + return _ScopingPolicyDict({ + 'func': policy, + 'class': policy, + 'module': policy, + }) return _ScopingPolicyDict({'func': cls(policies['func']), 'class': cls(policies['class']), 'module': cls(policies['module'])}) @staticmethod - def _return_const(value): + def _return_const(value: bool) -> Callable[[object], bool]: def return_const(*_, **__): return value return return_const @staticmethod - def _match_prefix(s, prefix, sep='.'): + def _match_prefix(s: str, prefix: str, sep: str = '.') -> bool: return s == prefix or s.startswith(prefix + sep) - def _get_callable_filter_in_class(self, cls, is_class): - def func_is_child(other): + def _get_callable_filter_in_class( + self, cls: type, is_class: bool + ) -> Callable[[FunctionType | type], bool]: + def func_is_child(other: FunctionType | type): if not modules_are_equal(other): return False return other.__qualname__ == f'{cls.__qualname__}.{other.__name__}' - def modules_are_equal(other): # = sibling check + def modules_are_equal(other: FunctionType | type): # = sibling check return cls.__module__ == other.__module__ - def func_is_descdendant(other): + def func_is_descdendant(other: FunctionType | type): if not modules_are_equal(other): return False return other.__qualname__.startswith(cls.__qualname__ + '.') - return {'exact': (self._return_const(False) - if is_class else - func_is_child), - 'children': func_is_child, - 'descendants': func_is_descdendant, - 'siblings': modules_are_equal, - 'none': self._return_const(True)}[self.value] - - def 
_get_callable_filter_in_module(self, mod, is_class): - def func_is_child(other): + policies: dict[str, Callable[[FunctionType | type], bool]] = { + 'exact': (self._return_const(False) + if is_class else + func_is_child), + 'children': func_is_child, + 'descendants': func_is_descdendant, + 'siblings': modules_are_equal, + 'none': self._return_const(True), + } + return policies[self.value] + + def _get_callable_filter_in_module( + self, mod: ModuleType, is_class: bool + ) -> Callable[[FunctionType | type], bool]: + def func_is_child(other: FunctionType | type): return other.__module__ == mod.__name__ - def func_is_descdendant(other): + def func_is_descdendant(other: FunctionType | type): return self._match_prefix(other.__module__, mod.__name__) - def func_is_cousin(other): + def func_is_cousin(other: FunctionType | type): if func_is_descdendant(other): return True return self._match_prefix(other.__module__, parent) parent, _, basename = mod.__name__.rpartition('.') - return {'exact': (self._return_const(False) - if is_class else - func_is_child), - 'children': func_is_child, - 'descendants': func_is_descdendant, - 'siblings': (func_is_cousin # Only if a pkg - if basename else - func_is_descdendant), - 'none': self._return_const(True)}[self.value] - - def _get_module_filter_in_module(self, mod): - def module_is_descendant(other): + policies: dict[str, Callable[[FunctionType | type], bool]] = { + 'exact': (self._return_const(False) + if is_class else + func_is_child), + 'children': func_is_child, + 'descendants': func_is_descdendant, + 'siblings': (func_is_cousin # Only if a pkg + if basename else + func_is_descdendant), + 'none': self._return_const(True), + } + return policies[self.value] + + def _get_module_filter_in_module( + self, mod: ModuleType + ) -> Callable[[ModuleType], bool]: + def module_is_descendant(other: ModuleType): return other.__name__.startswith(mod.__name__ + '.') - def module_is_child(other): + def module_is_child(other: ModuleType): return 
other.__name__.rpartition('.')[0] == mod.__name__ - def module_is_sibling(other): + def module_is_sibling(other: ModuleType): return other.__name__.startswith(parent + '.') parent, _, basename = mod.__name__.rpartition('.') - return {'exact': self._return_const(False), - 'children': module_is_child, - 'descendants': module_is_descendant, - 'siblings': (module_is_sibling # Only if a pkg - if basename else - self._return_const(False)), - 'none': self._return_const(True)}[self.value] + policies: dict[str, Callable[[ModuleType], bool]] = { + 'exact': self._return_const(False), + 'children': module_is_child, + 'descendants': module_is_descendant, + 'siblings': (module_is_sibling # Only if a pkg + if basename else + self._return_const(False)), + 'none': self._return_const(True), + } + return policies[self.value] # Sanity check in case we extended `ScopingPolicy` and forgot to update # the corresponding methods ScopingPolicy._check_class() -ScopingPolicyDict = TypedDict('ScopingPolicyDict', - {'func': Union[str, ScopingPolicy], - 'class': Union[str, ScopingPolicy], - 'module': Union[str, ScopingPolicy]}) -_ScopingPolicyDict = TypedDict('_ScopingPolicyDict', - {'func': ScopingPolicy, - 'class': ScopingPolicy, - 'module': ScopingPolicy}) + +ScopingPolicyDict = TypedDict( + 'ScopingPolicyDict', + {'func': Union[str, ScopingPolicy], + 'class': Union[str, ScopingPolicy], + 'module': Union[str, ScopingPolicy]}) + +_ScopingPolicyDict = TypedDict( + '_ScopingPolicyDict', + {'func': ScopingPolicy, + 'class': ScopingPolicy, + 'module': ScopingPolicy}) diff --git a/line_profiler/scoping_policy.pyi b/line_profiler/scoping_policy.pyi deleted file mode 100644 index e6987289..00000000 --- a/line_profiler/scoping_policy.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from enum import auto -from types import FunctionType, ModuleType -from typing import overload, Literal, Callable, TypedDict -from .line_profiler_utils import StringEnum - - -class ScopingPolicy(StringEnum): - EXACT = auto() - CHILDREN 
= auto() - DESCENDANTS = auto() - SIBLINGS = auto() - NONE = auto() - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['func']) -> Callable[[FunctionType], bool]: - ... - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['class']) -> Callable[[type], bool]: - ... - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['module']) -> Callable[[ModuleType], bool]: - ... - - @classmethod - def to_policies( - cls, - policies: (str | 'ScopingPolicy' | 'ScopingPolicyDict' - | None) = None) -> '_ScopingPolicyDict': - ... - - -ScopingPolicyDict = TypedDict('ScopingPolicyDict', - {'func': str | ScopingPolicy, - 'class': str | ScopingPolicy, - 'module': str | ScopingPolicy}) -_ScopingPolicyDict = TypedDict('_ScopingPolicyDict', - {'func': str | ScopingPolicy, - 'class': str | ScopingPolicy, - 'module': str | ScopingPolicy}) diff --git a/line_profiler/toml_config.py b/line_profiler/toml_config.py index 06d0870c..61557276 100644 --- a/line_profiler/toml_config.py +++ b/line_profiler/toml_config.py @@ -2,9 +2,10 @@ Read and resolve user-supplied TOML files and combine them with the default to generate configurations. 
""" +from __future__ import annotations + import copy import dataclasses -import functools import importlib.resources import itertools import os @@ -14,16 +15,23 @@ except ImportError: # Python < 3.11 import tomli as tomllib # type: ignore[no-redef] # noqa: F811 from collections.abc import Mapping -from typing import Dict, List, Any +from os import PathLike +import typing +from typing import Any, Sequence, TypeVar, cast, Tuple, Dict + +if typing.TYPE_CHECKING: + Config = Tuple[Dict[str, Dict[str, Any]], pathlib.Path] + K = TypeVar('K') + V = TypeVar('V') __all__ = ['ConfigSource'] NAMESPACE = 'tool', 'line_profiler' -TARGETS = 'line_profiler.toml', 'pyproject.toml' +TARGETS = ['line_profiler.toml', 'pyproject.toml'] ENV_VAR = 'LINE_PROFILER_RC' -_DEFAULTS = None +_DEFAULTS: ConfigSource | None = None @dataclasses.dataclass @@ -33,7 +41,7 @@ class ConfigSource: read from. Attributes: - conf_dict (dict[str, Any]) + conf_dict (Mapping[str, Any]) The combination of the ``tool.line_profiler`` tables of the provided/looked-up config file (if any) and the default as a dictionary. @@ -45,11 +53,11 @@ class ConfigSource: :py:attr:`~.ConfigSource.path` :py:attr:`~.ConfigSource.conf_dict` can be found. """ - conf_dict: Dict[str, Any] + conf_dict: Mapping[str, Any] path: pathlib.Path - subtable: List[str] + subtable: list[str] - def copy(self): + def copy(self) -> ConfigSource: """ Returns: Copy of the object. @@ -57,7 +65,8 @@ def copy(self): return type(self)( copy.deepcopy(self.conf_dict), self.path, self.subtable.copy()) - def get_subconfig(self, *headers, allow_absence=False, copy=False): + def get_subconfig(self, *headers: str, allow_absence: bool = False, + copy: bool = False) -> ConfigSource: """ Arguments: headers (str): @@ -87,13 +96,14 @@ def get_subconfig(self, *headers, allow_absence=False, copy=False): >>> assert (display_widths.conf_dict ... 
is default.conf_dict['show']['column_widths']) """ - new_dict = get_subtable( - self.conf_dict, headers, allow_absence=allow_absence) + new_dict = cast( + Dict[str, Any], + get_subtable(self.conf_dict, headers, allow_absence=allow_absence)) new_subtable = [*self.subtable, *headers] return type(self)(new_dict, self.path, new_subtable) @classmethod - def from_default(cls, *, copy=True): + def from_default(cls, *, copy: bool = True) -> ConfigSource: """ Get the default TOML configuration that ships with the package. @@ -120,17 +130,27 @@ def find_file(anc, *chunks): global _DEFAULTS if _DEFAULTS is None: - package = __spec__.name.rpartition('.')[0] + if __spec__ is None: + package = __name__.rpartition('.')[0] + else: + package = __spec__.name.rpartition('.')[0] with find_file(package + '.rc', 'line_profiler.toml') as path: - conf_dict, source = find_and_read_config_file(config=path) - conf_dict = get_subtable(conf_dict, NAMESPACE, allow_absence=False) + result = find_and_read_config_file(config=path) + if result is None: + raise FileNotFoundError( + 'Default configuration file could not be read') + conf_dict, source = result + conf_dict = cast( + Dict[str, Any], + get_subtable(conf_dict, NAMESPACE, allow_absence=False)) _DEFAULTS = cls(conf_dict, source, list(NAMESPACE)) if not copy: return _DEFAULTS return _DEFAULTS.copy() @classmethod - def from_config(cls, config=None, *, read_env=True): + def from_config(cls, config: str | PathLike | bool | None = None, *, + read_env: bool = True) -> ConfigSource: """ Create an instance by loading from a config file. @@ -187,8 +207,8 @@ def from_config(cls, config=None, *, read_env=True): configuration (see :py:meth:`~.ConfigSource.from_default`). 
""" - def merge(template, supplied): - if not (isinstance(template, dict) and isinstance(supplied, dict)): + def merge(template: Mapping[str, Any], supplied: Mapping[str, Any]): + if not (isinstance(template, Mapping) and isinstance(supplied, Mapping)): return supplied result = {} for key, default in template.items(): @@ -204,27 +224,28 @@ def merge(template, supplied): config = None else: return default_instance + assert not isinstance(config, bool) if config is not None: # Promote to `Path` (and catch type errors) early config = pathlib.Path(config) if read_env: - get_conf = functools.partial(find_and_read_config_file, - config=config) - else: # Shield the lookup from the environment - get_conf = functools.partial(find_and_read_config_file, - config=config, env_var=None) - try: - content, source = get_conf() - except TypeError: # Got `None` + _result = find_and_read_config_file(config=config) + else: + # Shield the lookup from the environment + _result = find_and_read_config_file(config=config, env_var=None) + if _result is None: if config: if os.path.exists(config): - Error = ValueError - else: - Error = FileNotFoundError - raise Error( - f'Cannot load configurations from {config!r}') from None + raise ValueError( + f'Cannot load configurations from {config!r}' + ) from None + raise FileNotFoundError( + f'Cannot load configurations from {config!r}' + ) from None return default_instance - conf = {} + else: + content, source = _result + conf: dict[str, Mapping[str, Any]] = {} try: for header in get_headers(default_instance.conf_dict): # Get the top-level subtable @@ -258,7 +279,9 @@ def merge(template, supplied): def find_and_read_config_file( - *, config=None, env_var=ENV_VAR, targets=TARGETS): + *, config: str | PathLike | None = None, + env_var: str | None = ENV_VAR, + targets: Sequence[str | PathLike] = TARGETS) -> Config | None: """ Arguments: config (str | os.PathLike[str] | None): @@ -308,7 +331,8 @@ def iter_configs(dir_path): return None -def 
get_subtable(table, keys, *, allow_absence=True): +def get_subtable(table: Mapping[K, Mapping], keys: Sequence[K], *, + allow_absence: bool = True) -> Mapping: """ Arguments: table (Mapping): @@ -354,7 +378,8 @@ def get_subtable(table, keys, *, allow_absence=True): return subtable -def get_headers(table, *, include_implied=False): +def get_headers(table: Mapping[K, Any], *, + include_implied: bool = False) -> set[tuple[K, ...]]: """ Arguments: table (Mapping): @@ -381,7 +406,7 @@ def get_headers(table, *, include_implied=False): >>> assert get_headers({}) == set() >>> assert get_headers({'a': 1, 'b': 2}) == set() """ - results = set() + results: set[tuple[K, ...]] = set() for key, value in table.items(): if not isinstance(value, Mapping): continue diff --git a/line_profiler/toml_config.pyi b/line_profiler/toml_config.pyi deleted file mode 100644 index 93409341..00000000 --- a/line_profiler/toml_config.pyi +++ /dev/null @@ -1,54 +0,0 @@ -from dataclasses import dataclass -from os import PathLike -from pathlib import Path -from typing import Mapping, Sequence, Any, Self, TypeVar - - -TARGETS = 'line_profiler.toml', 'pyproject.toml' -ENV_VAR = 'LINE_PROFILER_RC' - -K = TypeVar('K') -V = TypeVar('V') -Config = tuple[dict[str, dict[str, Any]], Path] -NestedTable = Mapping[K, 'NestedTable[K, V]' | V] - - -@dataclass -class ConfigSource: - conf_dict: dict[str, Any] - path: Path - subtable: list[str] - - def copy(self) -> Self: - ... - - def get_subconfig(self, *headers: str, - allow_absence: bool = False, copy: bool = False) -> Self: - ... - - @classmethod - def from_default(cls, *, copy: bool = True) -> Self: - ... - - @classmethod - def from_config(cls, config: str | PathLike | bool | None = None, *, - read_env: bool = True) -> Self: - ... - - -def find_and_read_config_file( - *, - config: str | PathLike | None = None, - env_var: str | None = ENV_VAR, - targets: Sequence[str | PathLike] = TARGETS) -> Config: - ... 
- - -def get_subtable(table: NestedTable[K, V], keys: Sequence[K], *, - allow_absence: bool = True) -> NestedTable[K, V]: - ... - - -def get_headers(table: NestedTable[K, Any], *, - include_implied: bool = False) -> set[tuple[K, ...]]: - ... diff --git a/pyproject.toml b/pyproject.toml index 74ffac68..a63e487d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,10 @@ exclude_lines =[ "^ *pass *$", "if _debug:", "if __name__ == .__main__.:", + ".*if typing.TYPE_CHECKING:", + ".*if TYPE_CHECKING:", + "@typing.overload", + "@overload", ] omit =[ @@ -81,3 +85,10 @@ norecursedirs = ".git ignore build __pycache__ dev _skbuild docs agentx" filterwarnings = [ "default", ] + +[[tool.ty.overrides]] +# Apply the ignore unresolved rules to these files +include = [ + "line_profiler/line_profiler_utils.py", +] +rules = { unused-type-ignore-comment = "ignore" } diff --git a/requirements/build.txt b/requirements/build.txt index 6b1bfa99..5d3d1e05 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -9,5 +9,5 @@ ninja>=1.10.2 cibuildwheel>=3.1.2 ; python_version < '4.0' and python_version >= '3.11' # Python 3.11+ cibuildwheel>=3.1.2 ; python_version < '3.11' and python_version >= '3.10' # Python 3.10 -cibuildwheel>=3.1.2 ; python_version < '3.10' and python_version >= '3.9' # Python 3.9 -cibuildwheel>=3.1.2 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8 +cibuildwheel>=2.19.2 ; python_version < '3.10' and python_version >= '3.9' # Python 3.9 +cibuildwheel>=2.19.2 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8 diff --git a/requirements/runtime.txt b/requirements/runtime.txt index fe0df55e..85d1f5c2 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1 +1,2 @@ tomli; python_version < '3.11' +typing_extensions diff --git a/tests/complex_example.py b/tests/complex_example.py index 698924b1..af66d399 100644 --- a/tests/complex_example.py +++ b/tests/complex_example.py @@ -47,6 +47,7 @@ PROFILE_TYPE=custom 
python complex_example.py """ +from __future__ import annotations import os # The test will define how we expect the profile decorator to exist diff --git a/tests/cython_example/setup.py b/tests/cython_example/setup.py index 3d6c83da..34dbe5bc 100644 --- a/tests/cython_example/setup.py +++ b/tests/cython_example/setup.py @@ -1,4 +1,8 @@ from setuptools import setup from Cython.Build import cythonize -setup(ext_modules=cythonize('cython_example.pyx')) +setup( + ext_modules=cythonize('cython_example.pyx'), + package_data={'': ['*.pyx']}, + include_package_data=True, +) diff --git a/tests/test_autoprofile.py b/tests/test_autoprofile.py index 77a319d0..c6f8656b 100644 --- a/tests/test_autoprofile.py +++ b/tests/test_autoprofile.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import re import subprocess diff --git a/tests/test_cli.py b/tests/test_cli.py index a347bf0b..c36aa899 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,3 +1,4 @@ +from __future__ import annotations import re from argparse import ArgumentParser, HelpFormatter from contextlib import nullcontext diff --git a/tests/test_complex_case.py b/tests/test_complex_case.py index ace7502b..ffc5e88b 100644 --- a/tests/test_complex_case.py +++ b/tests/test_complex_case.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import sys import tempfile diff --git a/tests/test_cython.py b/tests/test_cython.py index d54e997d..11b402d5 100644 --- a/tests/test_cython.py +++ b/tests/test_cython.py @@ -1,6 +1,7 @@ """ Tests for profiling Cython code. 
""" +from __future__ import annotations import math import os import subprocess diff --git a/tests/test_duplicate_functions.py b/tests/test_duplicate_functions.py index 8c470a8c..6cdc77bc 100644 --- a/tests/test_duplicate_functions.py +++ b/tests/test_duplicate_functions.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + def test_duplicate_function(): """ Test from https://github.com/pyutils/line_profiler/issues/232 diff --git a/tests/test_eager_preimports.py b/tests/test_eager_preimports.py index ba5d344d..79ba6035 100644 --- a/tests/test_eager_preimports.py +++ b/tests/test_eager_preimports.py @@ -5,6 +5,7 @@ ----- Most of the features are already covered by the doctests. """ +from __future__ import annotations import subprocess import sys from contextlib import ExitStack diff --git a/tests/test_explicit_profile.py b/tests/test_explicit_profile.py index 511509b1..73d8a79e 100644 --- a/tests/test_explicit_profile.py +++ b/tests/test_explicit_profile.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import re import sys diff --git a/tests/test_import.py b/tests/test_import.py index ab9a8f29..4c2184e7 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + def test_import(): import line_profiler assert hasattr(line_profiler, 'LineProfiler') diff --git a/tests/test_ipython.py b/tests/test_ipython.py index 4066a500..29ba4ddd 100644 --- a/tests/test_ipython.py +++ b/tests/test_ipython.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import re import shlex diff --git a/tests/test_kernprof.py b/tests/test_kernprof.py index f8ac4678..3d6b7e97 100644 --- a/tests/test_kernprof.py +++ b/tests/test_kernprof.py @@ -1,3 +1,4 @@ +from __future__ import annotations import contextlib import os import re diff --git a/tests/test_line_profiler.py b/tests/test_line_profiler.py index d39e0040..067cb9bf 100644 --- a/tests/test_line_profiler.py +++ b/tests/test_line_profiler.py @@ -1,3 
+1,4 @@ +from __future__ import annotations import asyncio import contextlib import functools diff --git a/tests/test_sys_monitoring.py b/tests/test_sys_monitoring.py index af6ef0ab..98007607 100644 --- a/tests/test_sys_monitoring.py +++ b/tests/test_sys_monitoring.py @@ -1,3 +1,4 @@ +from __future__ import annotations import gc import inspect import sys diff --git a/tests/test_sys_trace.py b/tests/test_sys_trace.py index 6210fbd0..23432892 100644 --- a/tests/test_sys_trace.py +++ b/tests/test_sys_trace.py @@ -10,6 +10,7 @@ - However, there effects are isolated since each test is run in a separate Python subprocess. """ +from __future__ import annotations import concurrent.futures import functools import inspect diff --git a/tests/test_toml_config.py b/tests/test_toml_config.py index 7ce7174f..3b1b1602 100644 --- a/tests/test_toml_config.py +++ b/tests/test_toml_config.py @@ -1,6 +1,7 @@ """ Test the handling of TOML configs. """ +from __future__ import annotations import os import re import sys