From d1b4c23eef727212aee429a7d80ae4b6a83091e8 Mon Sep 17 00:00:00 2001 From: joncrall Date: Wed, 21 Jan 2026 20:21:09 -0500 Subject: [PATCH 01/22] Remove stubs and add inline typing Fix ty type checking issues --- line_profiler/__main__.py | 2 + line_profiler/__main__.pyi | 1 - line_profiler/_diagnostics.py | 6 +- line_profiler/_line_profiler.pyi | 25 ++ .../autoprofile/ast_profile_transformer.py | 46 ++- .../autoprofile/ast_profile_transformer.pyi | 36 --- .../autoprofile/ast_tree_profiler.py | 28 +- .../autoprofile/ast_tree_profiler.pyi | 23 -- line_profiler/autoprofile/autoprofile.py | 20 +- line_profiler/autoprofile/autoprofile.pyi | 11 - line_profiler/autoprofile/eager_preimports.py | 51 ++-- .../autoprofile/eager_preimports.pyi | 67 ----- .../autoprofile/line_profiler_utils.py | 34 ++- .../autoprofile/line_profiler_utils.pyi | 29 -- .../autoprofile/profmod_extractor.py | 26 +- .../autoprofile/profmod_extractor.pyi | 13 - line_profiler/autoprofile/run_module.py | 24 +- line_profiler/autoprofile/run_module.pyi | 21 -- line_profiler/autoprofile/util_static.py | 41 ++- line_profiler/autoprofile/util_static.pyi | 41 --- line_profiler/cli_utils.py | 75 +++-- line_profiler/cli_utils.pyi | 60 ---- line_profiler/explicit_profiler.py | 6 +- line_profiler/ipython_extension.py | 131 ++++++--- line_profiler/ipython_extension.pyi | 12 - line_profiler/line_profiler.py | 232 +++++++++++---- line_profiler/line_profiler.pyi | 43 +-- line_profiler/line_profiler_utils.py | 11 +- line_profiler/line_profiler_utils.pyi | 26 -- line_profiler/profiler_mixin.py | 210 ++++++++++++-- line_profiler/profiler_mixin.pyi | 271 ------------------ line_profiler/scoping_policy.py | 156 ++++++---- line_profiler/scoping_policy.pyi | 50 ---- line_profiler/toml_config.py | 70 +++-- line_profiler/toml_config.pyi | 54 ---- requirements/runtime.txt | 1 + 36 files changed, 898 insertions(+), 1055 deletions(-) delete mode 100644 line_profiler/__main__.pyi create mode 100644 
line_profiler/_line_profiler.pyi delete mode 100644 line_profiler/autoprofile/ast_profile_transformer.pyi delete mode 100644 line_profiler/autoprofile/ast_tree_profiler.pyi delete mode 100644 line_profiler/autoprofile/autoprofile.pyi delete mode 100644 line_profiler/autoprofile/eager_preimports.pyi delete mode 100644 line_profiler/autoprofile/line_profiler_utils.pyi delete mode 100644 line_profiler/autoprofile/profmod_extractor.pyi delete mode 100644 line_profiler/autoprofile/run_module.pyi delete mode 100644 line_profiler/autoprofile/util_static.pyi delete mode 100644 line_profiler/cli_utils.pyi delete mode 100644 line_profiler/ipython_extension.pyi delete mode 100644 line_profiler/line_profiler_utils.pyi delete mode 100644 line_profiler/profiler_mixin.pyi delete mode 100644 line_profiler/scoping_policy.pyi delete mode 100644 line_profiler/toml_config.pyi diff --git a/line_profiler/__main__.py b/line_profiler/__main__.py index c626c205..33831ea3 100644 --- a/line_profiler/__main__.py +++ b/line_profiler/__main__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from .line_profiler import main if __name__ == '__main__': diff --git a/line_profiler/__main__.pyi b/line_profiler/__main__.pyi deleted file mode 100644 index 8b137891..00000000 --- a/line_profiler/__main__.pyi +++ /dev/null @@ -1 +0,0 @@ - diff --git a/line_profiler/_diagnostics.py b/line_profiler/_diagnostics.py index cbf3d24b..a2639746 100644 --- a/line_profiler/_diagnostics.py +++ b/line_profiler/_diagnostics.py @@ -71,10 +71,10 @@ def _boolean_environ( """ # (TODO: migrate to `line_profiler.cli_utils.boolean()` after # merging #335) - try: - value = os.environ.get(envvar).casefold() - except AttributeError: # None + value = os.environ.get(envvar) + if value is None: return default + value = value.casefold() non_default_values = falsy if default else truey if value in {v.casefold() for v in non_default_values}: return not default diff --git a/line_profiler/_line_profiler.pyi 
b/line_profiler/_line_profiler.pyi new file mode 100644 index 00000000..a51cfffd --- /dev/null +++ b/line_profiler/_line_profiler.pyi @@ -0,0 +1,25 @@ +from __future__ import annotations + +from typing import Any, Mapping + + +class LineStats: + timings: Mapping[tuple[str, int, str], list[tuple[int, int, int]]] + unit: float + + def __init__( + self, + timings: Mapping[tuple[str, int, str], list[tuple[int, int, int]]], + unit: float, + ) -> None: ... + + +class LineProfiler: + def enable_by_count(self) -> None: ... + def disable_by_count(self) -> None: ... + def add_function(self, func: Any) -> None: ... + def get_stats(self) -> LineStats: ... + def dump_stats(self, filename: str) -> None: ... + + +def label(code: Any) -> Any: ... diff --git a/line_profiler/autoprofile/ast_profile_transformer.py b/line_profiler/autoprofile/ast_profile_transformer.py index 1f4655d8..fdadd22f 100644 --- a/line_profiler/autoprofile/ast_profile_transformer.py +++ b/line_profiler/autoprofile/ast_profile_transformer.py @@ -1,7 +1,12 @@ +from __future__ import annotations + import ast +from typing import cast -def ast_create_profile_node(modname, profiler_name='profile', attr='add_imported_function_or_module'): +def ast_create_profile_node( + modname: str, profiler_name: str = 'profile', + attr: str = 'add_imported_function_or_module') -> ast.Expr: """Create an abstract syntax tree node that adds an object to the profiler to be profiled. An abstract syntax tree node is created which calls the attr method from profile and @@ -45,7 +50,9 @@ class AstProfileTransformer(ast.NodeTransformer): immediately after the import. """ - def __init__(self, profile_imports=False, profiled_imports=None, profiler_name='profile'): + def __init__(self, profile_imports: bool = False, + profiled_imports: list[str] | None = None, + profiler_name: str = 'profile') -> None: """Initializes the AST transformer with the profiler name. 
Args: @@ -63,7 +70,9 @@ def __init__(self, profile_imports=False, profiled_imports=None, profiler_name=' self._profiled_imports = profiled_imports if profiled_imports is not None else [] self._profiler_name = profiler_name - def _visit_func_def(self, node): + def _visit_func_def( + self, node: ast.FunctionDef | ast.AsyncFunctionDef + ) -> ast.FunctionDef | ast.AsyncFunctionDef: """Decorate functions/methods with profiler. Checks if the function/method already has a profile_name decorator, if not, it will append @@ -81,17 +90,19 @@ def _visit_func_def(self, node): """ decor_ids = set() for decor in node.decorator_list: - try: + if isinstance(decor, ast.Name): decor_ids.add(decor.id) - except AttributeError: - ... if self._profiler_name not in decor_ids: node.decorator_list.append(ast.Name(id=self._profiler_name, ctx=ast.Load())) - return self.generic_visit(node) + self.generic_visit(node) + return node visit_FunctionDef = visit_AsyncFunctionDef = _visit_func_def - def _visit_import(self, node): + def _visit_import( + self, node: ast.Import | ast.ImportFrom + ) -> (ast.Import | ast.ImportFrom + | list[ast.Import | ast.ImportFrom | ast.Expr]): """Add a node that profiles an import If profile_imports is True and the import is not in profiled_imports, @@ -110,8 +121,9 @@ def _visit_import(self, node): returns list containing the import node and the profiling node """ if not self._profile_imports: - return self.generic_visit(node) - visited = [self.generic_visit(node)] + self.generic_visit(node) + return node + visited = [cast(ast.Import | ast.ImportFrom, self.generic_visit(node))] for names in node.names: node_name = names.name if names.asname is None else names.asname if node_name in self._profiled_imports: @@ -121,7 +133,9 @@ def _visit_import(self, node): visited.append(expr) return visited - def visit_Import(self, node): + def visit_Import( + self, node: ast.Import + ) -> ast.Import | list[ast.Import | ast.Expr]: """Add a node that profiles an object imported using 
the "import foo" sytanx Args: @@ -135,9 +149,12 @@ def visit_Import(self, node): if profile_imports is True: returns list containing the import node and the profiling node """ - return self._visit_import(node) + return cast(ast.Import | list[ast.Import | ast.Expr], + self._visit_import(node)) - def visit_ImportFrom(self, node): + def visit_ImportFrom( + self, node: ast.ImportFrom + ) -> ast.ImportFrom | list[ast.ImportFrom | ast.Expr]: """Add a node that profiles an object imported using the "from foo import bar" syntax Args: @@ -151,4 +168,5 @@ def visit_ImportFrom(self, node): if profile_imports is True: returns list containing the import node and the profiling node """ - return self._visit_import(node) + return cast(ast.ImportFrom | list[ast.ImportFrom | ast.Expr], + self._visit_import(node)) diff --git a/line_profiler/autoprofile/ast_profile_transformer.pyi b/line_profiler/autoprofile/ast_profile_transformer.pyi deleted file mode 100644 index 9d64182c..00000000 --- a/line_profiler/autoprofile/ast_profile_transformer.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from typing import List -import _ast -import ast -from typing import Union - - -def ast_create_profile_node(modname, - profiler_name: str = ..., - attr: str = ...) -> (_ast.Expr): - ... - - -class AstProfileTransformer(ast.NodeTransformer): - - def __init__(self, - profile_imports: bool = False, - profiled_imports: List[str] | None = None, - profiler_name: str = 'profile') -> None: - ... - - def visit_FunctionDef(self, node: _ast.FunctionDef) -> (_ast.FunctionDef): - ... - - def visit_AsyncFunctionDef( - self, node: _ast.AsyncFunctionDef) -> (_ast.AsyncFunctionDef): - ... - - def visit_Import( - self, node: _ast.Import - ) -> (Union[_ast.Import, List[Union[_ast.Import, _ast.Expr]]]): - ... - - def visit_ImportFrom( - self, node: _ast.ImportFrom - ) -> (Union[_ast.ImportFrom, List[Union[_ast.ImportFrom, _ast.Expr]]]): - ... 
diff --git a/line_profiler/autoprofile/ast_tree_profiler.py b/line_profiler/autoprofile/ast_tree_profiler.py index 892ebfbc..994074f6 100644 --- a/line_profiler/autoprofile/ast_tree_profiler.py +++ b/line_profiler/autoprofile/ast_tree_profiler.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import ast import os +from typing import Type from .ast_profile_transformer import (AstProfileTransformer, ast_create_profile_node) @@ -20,11 +23,11 @@ class AstTreeProfiler: """ def __init__(self, - script_file, - prof_mod, - profile_imports, - ast_transformer_class_handler=AstProfileTransformer, - profmod_extractor_class_handler=ProfmodExtractor): + script_file: str, + prof_mod: list[str], + profile_imports: bool, + ast_transformer_class_handler: Type = AstProfileTransformer, + profmod_extractor_class_handler: Type = ProfmodExtractor) -> None: """Initializes the AST tree profiler instance with the script file path Args: @@ -52,7 +55,8 @@ def __init__(self, self._profmod_extractor_class_handler = profmod_extractor_class_handler @staticmethod - def _check_profile_full_script(script_file, prof_mod): + def _check_profile_full_script( + script_file: str, prof_mod: list[str]) -> bool: """Check whether whole script should be profiled. Checks whether path to script has been passed to prof_mod indicating that @@ -76,7 +80,7 @@ def _check_profile_full_script(script_file, prof_mod): return profile_full_script @staticmethod - def _get_script_ast_tree(script_file): + def _get_script_ast_tree(script_file: str) -> ast.Module: """Generate an abstract syntax from a script file. Args: @@ -93,10 +97,10 @@ def _get_script_ast_tree(script_file): return tree def _profile_ast_tree(self, - tree, - tree_imports_to_profile_dict, - profile_full_script=False, - profile_imports=False): + tree: ast.Module, + tree_imports_to_profile_dict: dict[int, str], + profile_full_script: bool = False, + profile_imports: bool = False) -> ast.Module: """Add profiling to an abstract syntax tree. 
Adds nodes to the AST that adds the specified objects to the profiler. @@ -139,7 +143,7 @@ def _profile_ast_tree(self, ast.fix_missing_locations(tree) return tree - def profile(self): + def profile(self) -> ast.Module: """Create an abstract syntax tree of a script and add profiling to it. Reads a script file and generates an abstract syntax tree. diff --git a/line_profiler/autoprofile/ast_tree_profiler.pyi b/line_profiler/autoprofile/ast_tree_profiler.pyi deleted file mode 100644 index fc533e86..00000000 --- a/line_profiler/autoprofile/ast_tree_profiler.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from typing import List -from typing import Type -import _ast - -from .ast_profile_transformer import AstProfileTransformer -from .profmod_extractor import ProfmodExtractor - -__docstubs__: str - - -class AstTreeProfiler: - - def __init__( - self, - script_file: str, - prof_mod: List[str], - profile_imports: bool, - ast_transformer_class_handler: Type = AstProfileTransformer, - profmod_extractor_class_handler: Type = ProfmodExtractor) -> None: - ... - - def profile(self) -> (_ast.Module): - ... 
diff --git a/line_profiler/autoprofile/autoprofile.py b/line_profiler/autoprofile/autoprofile.py index 5985a84b..019b6154 100644 --- a/line_profiler/autoprofile/autoprofile.py +++ b/line_profiler/autoprofile/autoprofile.py @@ -44,12 +44,15 @@ def main(): python -m kernprof -p demo.py -l demo.py python -m line_profiler -rmt demo.py.lprof """ +from __future__ import annotations import contextlib import functools import importlib.util import operator import sys import types +from collections.abc import MutableMapping +from typing import Any, cast from .ast_tree_profiler import AstTreeProfiler from .run_module import AstTreeModuleProfiler from .line_profiler_utils import add_imported_function_or_module @@ -58,7 +61,7 @@ def main(): PROFILER_LOCALS_NAME = 'prof' -def _extend_line_profiler_for_profiling_imports(prof): +def _extend_line_profiler_for_profiling_imports(prof: Any) -> None: """Allow profiler to handle functions/methods, classes & modules with a single call. Add a method to LineProfiler that can identify whether the object is a @@ -73,7 +76,9 @@ def _extend_line_profiler_for_profiling_imports(prof): prof.add_imported_function_or_module = types.MethodType(add_imported_function_or_module, prof) -def run(script_file, ns, prof_mod, profile_imports=False, as_module=False): +def run(script_file: str, ns: MutableMapping[str, Any], + prof_mod: list[str], profile_imports: bool = False, + as_module: bool = False) -> None: """Automatically profile a script and run it. 
Profile functions, classes & modules specified in prof_mod without needing to add @@ -98,19 +103,20 @@ def run(script_file, ns, prof_mod, profile_imports=False, as_module=False): Whether we're running script_file as a module """ class restore_dict: - def __init__(self, d, target=None): + def __init__(self, d: MutableMapping[str, Any], target=None): self.d = d self.target = target - self.copy = None + self.copy: dict[str, Any] | None = None def __enter__(self): assert self.copy is None - self.copy = self.d.copy() + self.copy = dict(self.d) return self.target def __exit__(self, *_, **__): self.d.clear() - self.d.update(self.copy) + if self.copy is not None: + self.d.update(self.copy) self.copy = None if as_module: @@ -144,4 +150,4 @@ def __exit__(self, *_, **__): code_obj = compile(tree_profiled, script_file, 'exec') with ctx as callback: callback() - exec(code_obj, namespace, namespace) + exec(code_obj, cast(dict[str, Any], namespace), namespace) diff --git a/line_profiler/autoprofile/autoprofile.pyi b/line_profiler/autoprofile/autoprofile.pyi deleted file mode 100644 index 65ddbf2b..00000000 --- a/line_profiler/autoprofile/autoprofile.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from typing import List - -PROFILER_LOCALS_NAME: str - - -def run(script_file: str, - ns: dict, - prof_mod: List[str], - profile_imports: bool = False, - as_module: bool = False) -> None: - ... diff --git a/line_profiler/autoprofile/eager_preimports.py b/line_profiler/autoprofile/eager_preimports.py index 1e8444df..4c527a97 100644 --- a/line_profiler/autoprofile/eager_preimports.py +++ b/line_profiler/autoprofile/eager_preimports.py @@ -2,16 +2,18 @@ Tools for eagerly pre-importing everything as specified in ``line_profiler.autoprof.run(prof_mod=...)``. 
""" +from __future__ import annotations + import ast import functools import itertools -from collections import namedtuple from collections.abc import Collection from keyword import iskeyword from importlib.util import find_spec from pkgutil import walk_packages from textwrap import dedent, indent as indent_ from warnings import warn +from typing import Any, Generator, NamedTuple, TextIO from .util_static import ( modname_to_modpath, modpath_to_modname, package_modpaths) @@ -20,7 +22,7 @@ 'resolve_profiling_targets', 'write_eager_import_module') -def is_dotted_path(obj): +def is_dotted_path(obj: Any) -> bool: """ Example: >>> assert not is_dotted_path(object()) @@ -37,7 +39,7 @@ def is_dotted_path(obj): return True -def get_expression(obj): +def get_expression(obj: Any) -> ast.Expression | None: """ Example: >>> assert not get_expression(object()) @@ -55,7 +57,8 @@ def get_expression(obj): return None -def split_dotted_path(dotted_path, static=True): +def split_dotted_path( + dotted_path: str, static: bool = True) -> tuple[str, str | None]: """ Arguments: dotted_path (str): @@ -133,7 +136,7 @@ def split_dotted_path(dotted_path, static=True): f'module: {checked_locs!r}') -def strip(s): +def strip(s: str) -> str: return dedent(s).strip('\n') @@ -163,18 +166,20 @@ class LoadedNameFinder(ast.NodeVisitor): >>> names = LoadedNameFinder.find(ast.parse(module)) >>> assert names == {'bar', 'foobar', 'a', 'str'}, names """ - def __init__(self): - self.names = set() - self.contexts = [] + def __init__(self) -> None: + self.names: set[str] = set() + self.contexts: list[set[str]] = [] - def visit_Name(self, node): + def visit_Name(self, node: ast.Name) -> None: if not isinstance(node.ctx, ast.Load): return name = node.id if not any(name in ctx for ctx in self.contexts): self.names.add(node.id) - def _visit_func_def(self, node): + def _visit_func_def( + self, node: ast.FunctionDef | ast.AsyncFunctionDef | ast.Lambda + ) -> None: args = node.args arg_names = { arg.arg @@ -191,13 
+196,13 @@ def _visit_func_def(self, node): visit_FunctionDef = visit_AsyncFunctionDef = visit_Lambda = _visit_func_def @classmethod - def find(cls, node): + def find(cls, node: ast.AST) -> set[str]: finder = cls() finder.visit(node) return finder.names -def propose_names(prefixes): +def propose_names(prefixes: Collection[str]) -> Generator[str, None, None]: """ Generate names based on prefixes. @@ -235,7 +240,9 @@ def propose_names(prefixes): yield pattern(prefix, i) -def resolve_profiling_targets(dotted_paths, static=True, recurse=False): +def resolve_profiling_targets( + dotted_paths: Collection[str], static: bool = True, + recurse: Collection[str] | bool = False) -> ResolvedResult: """ Arguments: dotted_paths (Collection[str]): @@ -327,11 +334,12 @@ def walk_packages_import_sys(pkg): return ResolvedResult(all_targets, indirect_submods, unknown_locs) -def write_eager_import_module(dotted_paths, stream=None, *, - static=True, - recurse=False, - adder='profile.add_imported_function_or_module', - indent=' '): +def write_eager_import_module( + dotted_paths: Collection[str], stream: TextIO | None = None, *, + static: bool = True, + recurse: Collection[str] | bool = False, + adder: str = 'profile.add_imported_function_or_module', + indent: str = ' ') -> None: r""" Write a module which autoprofiles all its imports. 
@@ -472,6 +480,7 @@ def write_eager_import_module(dotted_paths, stream=None, *, # Get the names loaded by `adder`; # these names are not allowed in the namespace + assert expr is not None forbidden_names = LoadedNameFinder.find(expr) # We need three free names: # - One for `adder` @@ -564,5 +573,7 @@ def write_eager_import_module(dotted_paths, stream=None, *, """)) -ResolvedResult = namedtuple('ResolvedResult', - ('targets', 'indirect', 'unresolved')) +class ResolvedResult(NamedTuple): + targets: dict[str, set[str | None]] + indirect: set[str] + unresolved: list[str] diff --git a/line_profiler/autoprofile/eager_preimports.pyi b/line_profiler/autoprofile/eager_preimports.pyi deleted file mode 100644 index 756a6b7b..00000000 --- a/line_profiler/autoprofile/eager_preimports.pyi +++ /dev/null @@ -1,67 +0,0 @@ -import ast -from typing import ( - Any, Union, - Collection, Dict, Generator, List, NamedTuple, Set, Tuple, - TextIO) - - -def is_dotted_path(obj: Any) -> bool: - ... - - -def get_expression(obj: Any) -> Union[ast.Expression, None]: - ... - - -def split_dotted_path( - dotted_path: str, static: bool = True) -> Tuple[str, Union[str, None]]: - ... - - -def strip(s: str) -> str: - ... - - -class LoadedNameFinder(ast.NodeVisitor): - names: Set[str] - contexts: List[Set[str]] - - def visit_Name(self, node: ast.Name) -> None: - ... - - def visit_FunctionDef(self, - node: Union[ast.FunctionDef, ast.AsyncFunctionDef, - ast.Lambda]) -> None: - ... - - visit_AsyncFunctionDef = visit_Lambda = visit_FunctionDef - - @classmethod - def find(cls, node: ast.AST) -> Set[str]: - ... - - -def propose_names(prefixes: Collection[str]) -> Generator[str, None, None]: - ... - - -def resolve_profiling_targets( - dotted_paths: Collection[str], - static: bool = True, - recurse: Union[Collection[str], bool] = False) -> 'ResolvedResult': - ... 
- - -def write_eager_import_module( - dotted_paths: Collection[str], stream: Union[TextIO, None] = None, *, - static: bool = True, - recurse: Union[Collection[str], bool] = False, - adder: str = 'profile.add_imported_function_or_module', - indent: str = ' ') -> None: - ... - - -class ResolvedResult(NamedTuple): - targets: Dict[str, Set[Union[str, None]]] - indirect: Set[str] - unresolved: List[str] diff --git a/line_profiler/autoprofile/line_profiler_utils.py b/line_profiler/autoprofile/line_profiler_utils.py index c4e736d1..fff7d43e 100644 --- a/line_profiler/autoprofile/line_profiler_utils.py +++ b/line_profiler/autoprofile/line_profiler_utils.py @@ -1,8 +1,38 @@ +from __future__ import annotations + import inspect +from functools import cached_property, partial, partialmethod +from types import FunctionType, MethodType, ModuleType +from typing import TYPE_CHECKING, Any, Literal, overload + +if TYPE_CHECKING: # pragma: no cover + from ..profiler_mixin import CLevelCallable, CythonCallable + from ..scoping_policy import ScopingPolicy, ScopingPolicyDict + + +@overload +def add_imported_function_or_module( + self, item: CLevelCallable | Any, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0]: + ... + + +@overload +def add_imported_function_or_module( + self, + item: (FunctionType | CythonCallable | type | partial | property + | cached_property | MethodType | staticmethod | classmethod + | partialmethod | ModuleType), + *, scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0, 1]: + ... -def add_imported_function_or_module(self, item, *, - scoping_policy=None, wrap=False): +def add_imported_function_or_module( + self, item: object, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> Literal[0, 1]: """ Method to add an object to :py:class:`~.line_profiler.LineProfiler` to be profiled. 
diff --git a/line_profiler/autoprofile/line_profiler_utils.pyi b/line_profiler/autoprofile/line_profiler_utils.pyi deleted file mode 100644 index 2d114b34..00000000 --- a/line_profiler/autoprofile/line_profiler_utils.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from functools import partial, partialmethod, cached_property -from types import FunctionType, MethodType, ModuleType -from typing import overload, Any, Literal, TypeVar, TYPE_CHECKING - -if TYPE_CHECKING: # Stub-only annotations - from ..profiler_mixin import CLevelCallable, CythonCallable - from ..scoping_policy import ScopingPolicy, ScopingPolicyDict - - - - -@overload -def add_imported_function_or_module( - self, item: CLevelCallable | Any, - scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, - wrap: bool = False) -> Literal[0]: - ... - - -@overload -def add_imported_function_or_module( - self, - item: (FunctionType | CythonCallable - | type | partial | property | cached_property - | MethodType | staticmethod | classmethod | partialmethod - | ModuleType), - scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, - wrap: bool = False) -> Literal[0, 1]: - ... diff --git a/line_profiler/autoprofile/profmod_extractor.py b/line_profiler/autoprofile/profmod_extractor.py index 929a1e9e..7268ea76 100644 --- a/line_profiler/autoprofile/profmod_extractor.py +++ b/line_profiler/autoprofile/profmod_extractor.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ast import os import sys @@ -13,7 +15,8 @@ class ProfmodExtractor: abstract syntax tree. """ - def __init__(self, tree, script_file, prof_mod): + def __init__(self, tree: ast.Module, script_file: str, + prof_mod: list[str]) -> None: """Initializes the AST tree profiler instance with the AST, script file path and prof_mod Args: @@ -33,7 +36,7 @@ def __init__(self, tree, script_file, prof_mod): self._prof_mod = prof_mod @staticmethod - def _is_path(text): + def _is_path(text: str) -> bool: """Check whether a string is a path. 
Checks if a string contains a slash or ends with .py indicating it is a path. @@ -50,7 +53,8 @@ def _is_path(text): return ret @classmethod - def _get_modnames_to_profile_from_prof_mod(cls, script_file, prof_mod): + def _get_modnames_to_profile_from_prof_mod( + cls, script_file: str, prof_mod: list[str]) -> list[str]: """Grab the valid paths and all dotted paths in prof_mod and their subpackages and submodules, in the form of dotted paths. @@ -127,7 +131,8 @@ def _get_modnames_to_profile_from_prof_mod(cls, script_file, prof_mod): return modnames_to_profile @staticmethod - def _ast_get_imports_from_tree(tree): + def _ast_get_imports_from_tree( + tree: ast.Module) -> list[dict[str, str | int | None]]: """Get all imports in an abstract syntax tree. Args: @@ -160,8 +165,10 @@ def _ast_get_imports_from_tree(tree): module_dict_list.append(module_dict) modname_list.append(modname) elif isinstance(node, ast.ImportFrom): + if node.module is None: + continue for name in node.names: - modname = node.module + '.' + name.name + modname = f'{node.module}.{name.name}' if modname not in modname_list: alias = name.asname or name.name module_dict = { @@ -174,7 +181,10 @@ def _ast_get_imports_from_tree(tree): return module_dict_list @staticmethod - def _find_modnames_in_tree_imports(modnames_to_profile, module_dict_list): + def _find_modnames_in_tree_imports( + modnames_to_profile: list[str], + module_dict_list: list[dict[str, str | int | None]] + ) -> dict[int, str]: """Map modnames to imports from an abstract sytax tree. 
Find imports in modue_dict_list, created from an abstract syntax tree, that match @@ -204,6 +214,8 @@ def _find_modnames_in_tree_imports(modnames_to_profile, module_dict_list): modname_added_list = [] for i, module_dict in enumerate(module_dict_list): modname = module_dict['name'] + if not isinstance(modname, str): + continue if modname in modname_added_list: continue """check if either the parent module or submodule are in modnames_to_profile""" @@ -214,7 +226,7 @@ def _find_modnames_in_tree_imports(modnames_to_profile, module_dict_list): modnames_found_in_tree[module_dict['tree_index']] = name return modnames_found_in_tree - def run(self): + def run(self) -> dict[int, str]: """Map prof_mod to imports in an abstract syntax tree. Takes the paths and dotted paths in prod_mod and finds their respective imports in an diff --git a/line_profiler/autoprofile/profmod_extractor.pyi b/line_profiler/autoprofile/profmod_extractor.pyi deleted file mode 100644 index ebaf7526..00000000 --- a/line_profiler/autoprofile/profmod_extractor.pyi +++ /dev/null @@ -1,13 +0,0 @@ -import _ast -from typing import List -from typing import Dict - - -class ProfmodExtractor: - - def __init__(self, tree: _ast.Module, script_file: str, - prof_mod: List[str]) -> None: - ... - - def run(self) -> (Dict[int, str]): - ... diff --git a/line_profiler/autoprofile/run_module.py b/line_profiler/autoprofile/run_module.py index f4461409..6e545126 100644 --- a/line_profiler/autoprofile/run_module.py +++ b/line_profiler/autoprofile/run_module.py @@ -1,11 +1,14 @@ +from __future__ import annotations + import ast import os +from typing import cast from .ast_tree_profiler import AstTreeProfiler from .util_static import modname_to_modpath, modpath_to_modname -def get_module_from_importfrom(node, module): +def get_module_from_importfrom(node: ast.ImportFrom, module: str) -> str: r"""Resolve the full path of a relative import. 
Args: @@ -44,7 +47,7 @@ def get_module_from_importfrom(node, module): """ level = node.level if not level: - return node.module + return node.module or '' chunks = module.split('.')[:-level] if node.module: chunks.append(node.module) @@ -53,16 +56,18 @@ def get_module_from_importfrom(node, module): class ImportFromTransformer(ast.NodeTransformer): """Turn all the relative imports into absolute imports.""" - def __init__(self, module): + def __init__(self, module: str) -> None: self.module = module - def visit_ImportFrom(self, node): + def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.ImportFrom: level = node.level if not level: - return self.generic_visit(node) + self.generic_visit(node) + return node module = get_module_from_importfrom(node, self.module) new_node = ast.ImportFrom(module=module, names=node.names, level=0) - return self.generic_visit(ast.copy_location(new_node, node)) + visited = self.generic_visit(ast.copy_location(new_node, node)) + return cast(ast.ImportFrom, visited) class AstTreeModuleProfiler(AstTreeProfiler): @@ -74,7 +79,7 @@ class AstTreeModuleProfiler(AstTreeProfiler): classes & modules in prof_mod to the profiler to be profiled. 
""" @classmethod - def _get_script_ast_tree(cls, script_file): + def _get_script_ast_tree(cls, script_file: str) -> ast.Module: tree = super()._get_script_ast_tree(script_file) # Note: don't drop the `.__init__` or `.__main__` suffix, lest # the relative imports fail @@ -83,11 +88,12 @@ def _get_script_ast_tree(cls, script_file): return ImportFromTransformer(module).visit(tree) @staticmethod - def _is_main(fname): + def _is_main(fname: str) -> bool: return os.path.basename(fname) == '__main__.py' @classmethod - def _check_profile_full_script(cls, script_file, prof_mod): + def _check_profile_full_script( + cls, script_file: str, prof_mod: list[str]) -> bool: rp = os.path.realpath paths_to_check = {rp(script_file)} if cls._is_main(script_file): diff --git a/line_profiler/autoprofile/run_module.pyi b/line_profiler/autoprofile/run_module.pyi deleted file mode 100644 index 6d63a6d5..00000000 --- a/line_profiler/autoprofile/run_module.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import ast - -from .ast_tree_profiler import AstTreeProfiler - - -def get_module_from_importfrom(node: ast.ImportFrom, module: str) -> str: - ... - - -class ImportFromTransformer(ast.NodeTransformer): - def __init__(self, module: str) -> None: - ... - - def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.ImportFrom: - ... - - module: str - - -class AstTreeModuleProfiler(AstTreeProfiler): - ... diff --git a/line_profiler/autoprofile/util_static.py b/line_profiler/autoprofile/util_static.py index 117eef4e..dfa32ad1 100644 --- a/line_profiler/autoprofile/util_static.py +++ b/line_profiler/autoprofile/util_static.py @@ -3,6 +3,7 @@ :py:mod:`xdoctest` via dev/maintain/port_utilities.py in the line_profiler repo. 
""" +from __future__ import annotations from os.path import abspath from os.path import dirname @@ -18,19 +19,22 @@ from os.path import isfile from os.path import realpath import sys +from os import PathLike +from collections.abc import Generator +from typing import Any # from xdoctest import utils def package_modpaths( - pkgpath, - with_pkg=False, - with_mod=True, - followlinks=True, - recursive=True, - with_libs=False, - check=True, -): + pkgpath: str, + with_pkg: bool = False, + with_mod: bool = True, + followlinks: bool = True, + recursive: bool = True, + with_libs: bool = False, + check: bool = True, +) -> Generator[Any, None, None]: r""" Finds sub-packages and sub-modules belonging to a package. @@ -419,7 +423,11 @@ def check_dpath(dpath): return found_modpath -def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None): +def modname_to_modpath( + modname: str, + hide_init: bool = True, + hide_main: bool = False, + sys_path: list[str | PathLike] | None = None) -> str | None: """ Finds the path to a python module from its name. @@ -470,7 +478,7 @@ def modname_to_modpath(modname, hide_init=True, hide_main=False, sys_path=None): return modpath -def split_modpath(modpath, check=True): +def split_modpath(modpath: str, check: bool = True) -> tuple[str, str]: """ Splits the modpath into the dir that must be in PYTHONPATH for the module to be imported and the modulepath relative to this directory. @@ -514,7 +522,9 @@ def split_modpath(modpath, check=True): return (dpath, rel_modpath) -def normalize_modpath(modpath, hide_init=True, hide_main=False): +def normalize_modpath( + modpath: str | PathLike, hide_init: bool = True, + hide_main: bool = False) -> str: """ Normalizes __init__ and __main__ paths. 
@@ -550,6 +560,7 @@ def normalize_modpath(modpath, hide_init=True, hide_main=False): >>> assert not res2.endswith('.py') >>> assert not res3.endswith('.py') """ + modpath = os.fspath(modpath) if hide_init: if basename(modpath) == "__init__.py": modpath = dirname(modpath) @@ -567,8 +578,12 @@ def normalize_modpath(modpath, hide_init=True, hide_main=False): def modpath_to_modname( - modpath, hide_init=True, hide_main=False, check=True, relativeto=None -): + modpath: str, + hide_init: bool = True, + hide_main: bool = False, + check: bool = True, + relativeto: str | None = None, +) -> str: """ Determines importable name from file path diff --git a/line_profiler/autoprofile/util_static.pyi b/line_profiler/autoprofile/util_static.pyi deleted file mode 100644 index 42ccd84e..00000000 --- a/line_profiler/autoprofile/util_static.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from typing import List -from os import PathLike -from typing import Tuple -from collections.abc import Generator -from typing import Any - - -def package_modpaths(pkgpath, - with_pkg: bool = ..., - with_mod: bool = ..., - followlinks: bool = ..., - recursive: bool = ..., - with_libs: bool = ..., - check: bool = ...) -> Generator[Any, None, None]: - ... - - -def modname_to_modpath( - modname: str, - hide_init: bool = True, - hide_main: bool = False, - sys_path: None | List[str | PathLike] = None) -> str | None: - ... - - -def split_modpath(modpath: str, check: bool = True) -> Tuple[str, str]: - ... - - -def normalize_modpath(modpath: str | PathLike, - hide_init: bool = True, - hide_main: bool = False) -> str | PathLike: - ... - - -def modpath_to_modname(modpath: str, - hide_init: bool = True, - hide_main: bool = False, - check: bool = True, - relativeto: str | None = None) -> str: - ... 
diff --git a/line_profiler/cli_utils.py b/line_profiler/cli_utils.py index 6344b6fa..815521b2 100644 --- a/line_profiler/cli_utils.py +++ b/line_profiler/cli_utils.py @@ -2,12 +2,16 @@ Shared utilities between the :command:`python -m line_profiler` and :command:`kernprof` CLI tools. """ +from __future__ import annotations + import argparse import functools import os import pathlib import shutil import sys +from os import PathLike +from typing import Protocol, Sequence, TypeVar, cast from .toml_config import ConfigSource @@ -16,9 +20,32 @@ **{k.casefold(): True for k in ('1', 'on', 'True', 'T', 'yes', 'Y')}} +P_con = TypeVar('P_con', bound='ParserLike', contravariant=True) +A_co = TypeVar('A_co', bound='ActionLike', covariant=True) + + +class ActionLike(Protocol[P_con]): + def __call__(self, parser: P_con, namespace: argparse.Namespace, + values: str | Sequence[object] | None, + option_string: str | None = None) -> None: + ... + + def format_usage(self) -> str: + ... + + +class ParserLike(Protocol[A_co]): + def add_argument(self, arg: str, /, *args: str, **kwargs: object) -> A_co: + ... + + @property + def prefix_chars(self) -> str: + ... 
+ -def add_argument(parser_like, arg, /, *args, - hide_complementary_options=True, **kwargs): +def add_argument(parser_like: ParserLike[A_co], arg: str, /, *args: str, + hide_complementary_options: bool = True, + **kwargs: object) -> A_co: """ Override the ``'store_true'`` and ``'store_false'`` actions so that they are turned into options which: @@ -70,7 +97,7 @@ def negated(*args, **kwargs): return negated # Make sure there's at least one positional argument - args = [arg, *args] + args = (arg, *args) if kwargs.get('action') not in ('store_true', 'store_false'): return parser_like.add_argument(*args, **kwargs) @@ -111,7 +138,9 @@ def negated(*args, **kwargs): 'form' if len(short_flags) == 1 else 'forms', ', '.join(short_flags)) if long_kwargs.get('help'): - help_text = long_kwargs['help'].strip() + raw_help = long_kwargs['help'] + help_text = raw_help if isinstance(raw_help, str) else str(raw_help) + help_text = help_text.strip() if help_text.endswith((')', ']')): # Interpolate into existing parenthetical help_text = '{}; {}{}{}'.format( @@ -126,7 +155,8 @@ def negated(*args, **kwargs): long_kwargs['help'] = f'({additional_msg})' short_kwargs['help'] = argparse.SUPPRESS - long_action = short_action = None + long_action: A_co | None = None + short_action: A_co | None = None if long_flags: long_action = parser_like.add_argument(*long_flags, **long_kwargs) short_kwargs['dest'] = long_action.dest @@ -158,7 +188,10 @@ def negated(*args, **kwargs): return action -def get_cli_config(subtable, /, *args, **kwargs): +def get_cli_config( + subtable: str, /, + config: str | PathLike[str] | bool | None = None, + *, read_env: bool = True) -> ConfigSource: """ Get the ``tool.line_profiler.`` configs and normalize its keys (``some-key`` -> ``some_key``). 
@@ -175,28 +208,32 @@ def get_cli_config(subtable, /, *args, **kwargs): New :py:class:`~.line_profiler.toml_config.ConfigSource` instance """ - config = ConfigSource.from_config(*args, **kwargs).get_subconfig(subtable) - config.conf_dict = {key.replace('-', '_'): value - for key, value in config.conf_dict.items()} - return config + config_source = ConfigSource.from_config( + config, read_env=read_env).get_subconfig(subtable) + config_source.conf_dict = { + key.replace('-', '_'): value + for key, value in config_source.conf_dict.items()} + return config_source -def get_python_executable(): +def get_python_executable() -> str: """ Returns: str: command Command or path thereto corresponding to :py:data:`sys.executable`. """ - if os.path.samefile(shutil.which('python'), sys.executable): + python_path = shutil.which('python') + python3_path = shutil.which('python3') + if python_path and os.path.samefile(python_path, sys.executable): return 'python' - elif os.path.samefile(shutil.which('python3'), sys.executable): + elif python3_path and os.path.samefile(python3_path, sys.executable): return 'python3' else: return short_string_path(sys.executable) -def positive_float(value): +def positive_float(value: str) -> float: """ Arguments: value (str) @@ -214,7 +251,8 @@ def positive_float(value): return val -def boolean(value, *, fallback=None, invert=False): +def boolean(value: str, *, fallback: bool | None = None, + invert: bool = False) -> bool: """ Arguments: value (str) @@ -275,7 +313,7 @@ def boolean(value, *, fallback=None, invert=False): return fallback -def short_string_path(path): +def short_string_path(path: str | PathLike[str]) -> str: """ Arguments: path (str | os.PathLike[str]): @@ -288,11 +326,12 @@ def short_string_path(path): current directory. 
""" path = pathlib.Path(path) - paths = {str(path)} + paths: set[str] = {str(path)} abspath = path.absolute() paths.add(str(abspath)) try: paths.add(str(abspath.relative_to(path.cwd().absolute()))) except ValueError: # Not relative to the curdir pass - return min(paths, key=len) + paths_list = list(paths) + return cast(str, min(paths_list, key=len)) diff --git a/line_profiler/cli_utils.pyi b/line_profiler/cli_utils.pyi deleted file mode 100644 index 182efe98..00000000 --- a/line_profiler/cli_utils.pyi +++ /dev/null @@ -1,60 +0,0 @@ -""" -Shared utilities between the :command:`python -m line_profiler` and -:command:`kernprof` CLI tools. -""" -import argparse -import pathlib -from os import PathLike -from typing import Protocol, Sequence, Tuple, TypeVar - -from line_profiler.toml_config import ConfigSource - - -P_con = TypeVar('P_con', bound='ParserLike', contravariant=True) -A_co = TypeVar('A_co', bound='ActionLike', covariant=True) - - -class ActionLike(Protocol[P_con]): - def __call__(self, parser: P_con, - namespace: argparse.Namespace, - values: str | Sequence | None, - option_string: str | None = None) -> None: - ... - - def format_usage(self) -> str: - ... - - -class ParserLike(Protocol[A_co]): - def add_argument(self, arg: str, /, *args: str, **kwargs) -> A_co: - ... - - @property - def prefix_chars(self) -> str: - ... - - -def add_argument(parser_like: ParserLike[A_co], arg: str, /, *args: str, - hide_complementary_options: bool = True, **kwargs) -> A_co: - ... - - -def get_cli_config(subtable: str, /, *args, **kwargs) -> ConfigSource: - ... - - -def get_python_executable() -> str: - ... - - -def positive_float(value: str) -> float: - ... - - -def boolean(value: str, *, - fallback: bool | None = None, invert: bool = False) -> bool: - ... - - -def short_string_path(path: str | PathLike[str]) -> str: - ... 
diff --git a/line_profiler/explicit_profiler.py b/line_profiler/explicit_profiler.py index 787c4ac2..c288ac35 100644 --- a/line_profiler/explicit_profiler.py +++ b/line_profiler/explicit_profiler.py @@ -170,7 +170,7 @@ def func4(): import pathlib import sys import typing -from typing import Any, Callable +from typing import Any, Callable, TypeVar if typing.TYPE_CHECKING: ConfigArg = str | pathlib.PurePath | bool | None @@ -181,6 +181,9 @@ def func4(): from .line_profiler import LineProfiler from .toml_config import ConfigSource +F = TypeVar('F', bound=Callable[..., Any]) +ConfigArg = str | pathlib.PurePath | bool | None + # The first process that enables profiling records its PID here. Child processes # created via multiprocessing (spawn/forkserver) inherit this environment value, # which helps prevent helper processes from claiming ownership and clobbering @@ -291,6 +294,7 @@ def __init__(self, config: ConfigArg = None) -> None: self._profile = None self._owner_pid = None self.enabled = None + # Configs: # - How to toggle the profiler self.setup_config = config_source.conf_dict['setup'] diff --git a/line_profiler/ipython_extension.py b/line_profiler/ipython_extension.py index 17a58867..7541c615 100644 --- a/line_profiler/ipython_extension.py +++ b/line_profiler/ipython_extension.py @@ -32,6 +32,7 @@ .. |lprun_all| replace:: :py:data:`%%lprun_all ` .. 
|builtins| replace:: :py:mod:`__builtins__ ` """ +from __future__ import annotations import ast import builtins @@ -44,22 +45,56 @@ from contextlib import ExitStack from dataclasses import dataclass from pathlib import Path -from typing import TYPE_CHECKING, Union -if TYPE_CHECKING: # pragma: no cover - from typing import (Callable, ParamSpec, # noqa: F401 - Any, ClassVar, TypeVar) +from typing import TYPE_CHECKING, Any, Callable, ClassVar, TypeVar +from typing_extensions import ParamSpec +if TYPE_CHECKING: # pragma: no cover PS = ParamSpec('PS') PD = TypeVar('PD', bound='_PatchDict') DefNode = TypeVar('DefNode', ast.FunctionDef, ast.AsyncFunctionDef) from io import StringIO -from IPython.core.getipython import get_ipython -from IPython.core.magic import Magics, magics_class, line_magic, cell_magic -from IPython.core.page import page -from IPython.utils.ipstruct import Struct -from IPython.core.error import UsageError +try: # pragma: no cover - optional dependency + import importlib + + get_ipython = importlib.import_module( + 'IPython.core.getipython').get_ipython + magic_module = importlib.import_module('IPython.core.magic') + Magics = magic_module.Magics + magics_class = magic_module.magics_class + line_magic = magic_module.line_magic + cell_magic = magic_module.cell_magic + page = importlib.import_module('IPython.core.page').page + Struct = importlib.import_module('IPython.utils.ipstruct').Struct + UsageError = importlib.import_module('IPython.core.error').UsageError +except ImportError: # pragma: no cover - IPython is optional + if TYPE_CHECKING: + raise + + def get_ipython(): + return None + + class Magics: + pass + + def magics_class(cls): + return cls + + def line_magic(func): + return func + + def cell_magic(func): + return func + + def page(*_args: object, **_kwargs: object) -> None: + return None + + class Struct(dict): + pass + + class UsageError(Exception): + pass from line_profiler import line_profiler, LineProfiler, LineStats from 
line_profiler.autoprofile.ast_tree_profiler import AstTreeProfiler @@ -104,26 +139,26 @@ class _ParseParamResult: opts: Struct arg_str: str - def __getattr__(self, attr): # type: (str) -> Any + def __getattr__(self, attr: str) -> Any: """ Defers to :py:attr:`_ParseParamResult.opts`.""" return getattr(self.opts, attr) @functools.cached_property - def dump_raw_dest(self): # type: () -> Path | None + def dump_raw_dest(self) -> Path | None: path = self.opts.D[0] if path: return Path(path) return None @functools.cached_property - def dump_text_dest(self): # type: () -> Path | None + def dump_text_dest(self) -> Path | None: path = self.opts.T[0] if path: return Path(path) return None @functools.cached_property - def output_unit(self): # type: () -> float | None + def output_unit(self) -> float | None: if self.opts.u is None: return None try: @@ -132,11 +167,11 @@ def output_unit(self): # type: () -> float | None raise TypeError("Timer unit setting must be a float.") @functools.cached_property - def strip_zero(self): # type: () -> bool + def strip_zero(self) -> bool: return "z" in self.opts @functools.cached_property - def return_profiler(self): # type: () -> bool + def return_profiler(self) -> bool: return "r" in self.opts @@ -147,9 +182,9 @@ class _RunAndProfileResult: """ stats: LineStats parse_result: _ParseParamResult - message: Union[str, None] = None - time_elapsed: Union[float, None] = None - tempfile: Union[str, 'os.PathLike[str]', None] = None + message: str | None = None + time_elapsed: float | None = None + tempfile: str | os.PathLike[str] | None = None def __post_init__(self): if self.tempfile is not None: @@ -185,7 +220,7 @@ def show_func_wrapper( line_profiler, get_code_block=get_code_block_wrapper): return call() - def get_code_block_wrapper(filename, lineno): + def get_code_block_wrapper(filename: str, lineno: int) -> list[str]: """ Return the entire content of :py:attr:`~.tempfile`.""" with tmp.open(mode='r') as fobj: return 
fobj.read().splitlines(keepends=True) @@ -193,7 +228,7 @@ def get_code_block_wrapper(filename, lineno): return show_func_wrapper @functools.cached_property - def output(self): # type: () -> str + def output(self) -> str: with ExitStack() as stack: cap = stack.enter_context(StringIO()) # Trap text output patch_show_func = _PatchDict.from_module( @@ -229,14 +264,13 @@ class _PatchProfilerIntoBuiltins: skip this doctest if :py:mod:`IPython` (and hence this module) can't be imported. """ - def __init__(self, prof=None): - # type: (LineProfiler | None) -> None + def __init__(self, prof: LineProfiler | None = None) -> None: if prof is None: prof = LineProfiler() self.prof = prof self._ctx = _PatchDict.from_module(builtins, profile=self.prof) - def __enter__(self): # type: () -> LineProfiler + def __enter__(self) -> LineProfiler: self._ctx.__enter__() return self.prof @@ -245,14 +279,14 @@ def __exit__(self, *a, **k): class _PatchDict: - def __init__(self, namespace, /, **kwargs): - # type: (dict[str, Any], Any) -> None + def __init__(self, namespace: dict[str, Any], /, + **kwargs: Any) -> None: self.namespace = namespace self.replacements = kwargs - self._stack = [] # type: list[dict[str, Any]] + self._stack: list[dict[str, Any]] = [] self._absent = object() - def __enter__(self): # type: (PD) -> PD + def __enter__(self: PD) -> PD: self._push() return self @@ -278,15 +312,16 @@ def _pop(self): namespace[key] = value @classmethod - def from_module(cls, module, /, **kwargs): - # type: (type[PD], types.ModuleType, Any) -> PD + def from_module(cls: type[PD], module: types.ModuleType, /, + **kwargs: Any) -> PD: return cls(vars(module), **kwargs) @magics_class class LineProfilerMagics(Magics): - def _parse_parameters(self, parameter_s, getopt_spec, opts_def): - # type: (str, str, Struct) -> _ParseParamResult + def _parse_parameters( + self, parameter_s: str, getopt_spec: str, + opts_def: Struct) -> _ParseParamResult: # FIXME: There is a chance that this handling will need to 
be # updated to handle single-quoted characters better (#382) parameter_s = parameter_s.replace('"', r"\"").replace("'", r"\"") @@ -297,13 +332,13 @@ def _parse_parameters(self, parameter_s, getopt_spec, opts_def): return _ParseParamResult(opts, arg_str) @staticmethod - def _run_and_profile(prof, # type: LineProfiler - parse_result, # type: _ParseParamResult - tempfile, # type: str | None - method, # type: Callable[PS, Any] - *args, # type: PS.args - **kwargs, # type: PS.kwargs - ): # type: (...) -> _RunAndProfileResult + def _run_and_profile( + prof: LineProfiler, + parse_result: _ParseParamResult, + tempfile: str | None, + method: Callable[PS, Any], + *args: PS.args, + **kwargs: PS.kwargs) -> _RunAndProfileResult: # Use the time module because it's easier than parsing the # output from `show_text()`. # `perf_counter()` is a monotonically increasing alternative to @@ -323,8 +358,8 @@ def _run_and_profile(prof, # type: LineProfiler message=message, time_elapsed=total_time, tempfile=tempfile) @classmethod - def _lprun_all_get_rewritten_profiled_code(cls, tmpfile): - # type: (str) -> types.CodeType + def _lprun_all_get_rewritten_profiled_code( + cls, tmpfile: str) -> types.CodeType: """ Transform and compile the AST of the profiled code. 
This is similar to :py:meth:`.LineProfiler.runctx`, """ @@ -334,15 +369,16 @@ def _lprun_all_get_rewritten_profiled_code(cls, tmpfile): return compile(tree, tmpfile, "exec") @classmethod - def _lprun_get_top_level_profiled_code(cls, tmpfile): - # type: (str) -> types.CodeType + def _lprun_get_top_level_profiled_code( + cls, tmpfile: str) -> types.CodeType: """ Compile the profiled code.""" with open(tmpfile, mode='r') as fobj: return compile(fobj.read(), tmpfile, "exec") @staticmethod - def _handle_end(prof, run_result): - # type: (LineProfiler, _RunAndProfileResult) -> LineProfiler | None + def _handle_end( + prof: LineProfiler, + run_result: _RunAndProfileResult) -> LineProfiler | None: page(run_result.output) dump_file = run_result.parse_result.dump_raw_dest @@ -363,7 +399,7 @@ def _handle_end(prof, run_result): return prof if run_result.parse_result.return_profiler else None @line_magic - def lprun(self, parameter_s=""): + def lprun(self, parameter_s: str = "") -> LineProfiler | None: """Execute a statement under the line-by-line profiler from the :py:mod:`line_profiler` module. @@ -449,7 +485,8 @@ def lprun(self, parameter_s=""): return self._handle_end(profile, run) @cell_magic - def lprun_all(self, parameter_s="", cell=""): + def lprun_all(self, parameter_s: str = "", + cell: str = "") -> LineProfiler | None: """Execute the whole notebook cell under the line-by-line profiler from the :py:mod:`line_profiler` module. 
@@ -530,7 +567,7 @@ def lprun_all(self, parameter_s="", cell=""): # - `prof.add_function()` might have replaced the code # object, so retrieve it back from the dummy function mock_func = types.SimpleNamespace(__code__=code) - prof.add_function(mock_func) # type: ignore[arg-type] + prof.add_function(mock_func) code = mock_func.__code__ # Notes: # - We don't define `ip.user_global_ns` and `ip.user_ns` diff --git a/line_profiler/ipython_extension.pyi b/line_profiler/ipython_extension.pyi deleted file mode 100644 index 8bba105b..00000000 --- a/line_profiler/ipython_extension.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from IPython.core.magic import Magics -from . import LineProfiler - - -class LineProfilerMagics(Magics): - def lprun(self, parameter_s: str = ...) -> LineProfiler | None: - ... - - def lprun_all(self, - parameter_s: str = "", - cell: str = "") -> LineProfiler | None: - ... diff --git a/line_profiler/line_profiler.py b/line_profiler/line_profiler.py index 7bada629..c8a915af 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -4,7 +4,10 @@ inspect its output. This depends on the :py:mod:`line_profiler._line_profiler` Cython backend. 
""" +from __future__ import annotations + import functools +import io import inspect import linecache import operator @@ -16,6 +19,11 @@ import tokenize from argparse import ArgumentParser from datetime import datetime +from os import PathLike +from typing import (TYPE_CHECKING, IO, Callable, Literal, Mapping, Protocol, + Sequence, TypeVar, cast, overload) +from typing_extensions import ParamSpec, Self +from functools import cached_property, partial, partialmethod try: from ._line_profiler import (LineProfiler as CLineProfiler, @@ -29,16 +37,30 @@ from .cli_utils import ( add_argument, get_cli_config, positive_float, short_string_path) from .profiler_mixin import ByCountProfilerMixin, is_c_level_callable -from .scoping_policy import ScopingPolicy +from .scoping_policy import ScopingPolicy, ScopingPolicyDict from .toml_config import ConfigSource +if TYPE_CHECKING: # pragma: no cover + from .profiler_mixin import CLevelCallable, UnparametrizedCallableLike + + class _IPythonLike(Protocol): + def register_magics(self, magics: type) -> None: + ... 
+ # NOTE: This needs to be in sync with ../kernprof.py and __init__.py __version__ = '5.0.2' +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +PS = ParamSpec('PS') +_TimingsMap = Mapping[tuple[str, int, str], list[tuple[int, int, int]]] + @functools.lru_cache() -def get_column_widths(config=False): +def get_column_widths( + config: bool | str | PathLike[str] | None = False +) -> Mapping[Literal['line', 'hits', 'time', 'perhit', 'percent'], int]: """ Arguments config (bool | str | pathlib.PurePath | None) @@ -50,17 +72,21 @@ def get_column_widths(config=False): """ subconf = (ConfigSource.from_config(config) .get_subconfig('show', 'column_widths')) - return types.MappingProxyType(subconf.conf_dict) + return types.MappingProxyType( + cast(Mapping[Literal['line', 'hits', 'time', 'perhit', 'percent'], int], + subconf.conf_dict)) -def load_ipython_extension(ip): +def load_ipython_extension(ip: object) -> None: """ API for IPython to recognize this module as an IPython extension. """ from .ipython_extension import LineProfilerMagics + if TYPE_CHECKING: + ip = cast(_IPythonLike, ip) ip.register_magics(LineProfilerMagics) -def get_code_block(filename, lineno): +def get_code_block(filename: os.PathLike[str] | str, lineno: int) -> list[str]: """ Get the lines in the code block in a file starting from required line number; understands Cython code. @@ -147,7 +173,7 @@ def get_code_block(filename, lineno): namespace = inspect.getblock.__globals__ namespace['BlockFinder'] = _CythonBlockFinder try: - return inspect.getblock(linecache.getlines(filename)[lineno - 1:]) + return inspect.getblock(linecache.getlines(os.fspath(filename))[lineno - 1:]) finally: namespace['BlockFinder'] = BlockFinder @@ -163,14 +189,17 @@ class _CythonBlockFinder(inspect.BlockFinder): is public but undocumented API. See similar caveat in :py:func:`~.get_code_block`. 
""" - def tokeneater(self, type, token, *args, **kwargs): + def tokeneater( + self, type: int, token: str, + srowcol: tuple[int, int], erowcol: tuple[int, int], + line: str) -> None: if ( not self.started and type == tokenize.NAME and token in ('cdef', 'cpdef', 'property')): # Fudge the token to get the desired 'scoping' behavior token = 'def' - return super().tokeneater(type, token, *args, **kwargs) + return super().tokeneater(type, token, srowcol, erowcol, line) class _WrapperInfo: @@ -183,17 +212,28 @@ class _WrapperInfo: profiler_id (int) ID of the `LineProfiler`. """ - def __init__(self, func, profiler_id): + def __init__(self, func: types.FunctionType, profiler_id: int) -> None: self.func = func self.profiler_id = profiler_id +class _StatsLike(Protocol): + timings: _TimingsMap + unit: float + + class LineStats(CLineStats): - def __repr__(self): + timings: _TimingsMap + unit: float + + def __init__(self, timings: _TimingsMap, unit: float) -> None: + super().__init__(timings, unit) + + def __repr__(self) -> str: return '{}({}, {:.2G})'.format( type(self).__name__, self.timings, self.unit) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: """ Example: >>> from copy import deepcopy @@ -222,7 +262,7 @@ def __eq__(self, other): return NotImplemented return True - def __add__(self, other): + def __add__(self, other: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -246,7 +286,7 @@ def __add__(self, other): timings, unit = self._get_aggregated_timings([self, other]) return type(self)(timings, unit) - def __iadd__(self, other): + def __iadd__(self, other: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -273,17 +313,26 @@ def __iadd__(self, other): self.timings, self.unit = self._get_aggregated_timings([self, other]) return self - def print(self, stream=None, **kwargs): - show_text(self.timings, self.unit, stream=stream, **kwargs) - - def to_file(self, filename): + def print( + self, stream: io.TextIOBase | None = None, + 
output_unit: float | None = None, + stripzeros: bool = False, details: bool = True, + summarize: bool = False, sort: bool = False, rich: bool = False, + *, config: str | PathLike[str] | bool | None = None) -> None: + show_text(self.timings, self.unit, output_unit=output_unit, + stream=stream, stripzeros=stripzeros, details=details, + summarize=summarize, sort=sort, rich=rich, config=config) + + def to_file(self, filename: PathLike[str] | str) -> None: """ Pickle the instance to the given filename. """ with open(filename, 'wb') as f: pickle.dump(self, f, pickle.HIGHEST_PROTOCOL) @classmethod - def from_files(cls, file, /, *files): + def from_files( + cls, file: PathLike[str] | str, /, + *files: PathLike[str] | str) -> Self: """ Utility function to load an instance from the given filenames. """ @@ -294,7 +343,9 @@ def from_files(cls, file, /, *files): return cls.from_stats_objects(*stats_objs) @classmethod - def from_stats_objects(cls, stats, /, *more_stats): + def from_stats_objects( + cls, stats: _StatsLike, /, + *more_stats: _StatsLike) -> Self: """ Example: >>> stats1 = LineStats( @@ -368,7 +419,45 @@ class LineProfiler(CLineProfiler, ByCountProfilerMixin): >>> func() >>> profile.print_stats() """ - def __call__(self, func): + @overload + def __call__(self, func: CLevelCallable) -> CLevelCallable: + ... + + @overload + def __call__(self, func: UnparametrizedCallableLike) -> UnparametrizedCallableLike: + ... + + @overload + def __call__(self, func: type[T]) -> type[T]: + ... + + @overload + def __call__(self, func: partial[T]) -> partial[T]: + ... + + @overload + def __call__(self, func: partialmethod[T]) -> partialmethod[T]: + ... + + @overload + def __call__(self, func: cached_property[T_co]) -> cached_property[T_co]: + ... + + @overload + def __call__(self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: + ... + + @overload + def __call__( + self, func: classmethod[type[T], PS, T_co], + ) -> classmethod[type[T], PS, T_co]: + ... 
+ + @overload + def __call__(self, func: Callable) -> types.FunctionType: + ... + + def __call__(self, func: object): """ Decorate a function, method, :py:class:`property`, :py:func:`~functools.partial` object etc. to start the profiler @@ -384,12 +473,15 @@ def __call__(self, func): self.add_callable(func) return self.wrap_callable(func) - def wrap_callable(self, func): + def wrap_callable(self, func: object): if is_c_level_callable(func): # Non-profilable return func return super().wrap_callable(func) - def add_callable(self, func, guard=None, name=None): + def add_callable( + self, func: object, + guard: Callable[[types.FunctionType], bool] | None = None, + name: str | None = None) -> Literal[0, 1]: """ Register a function, method, :py:class:`property`, :py:func:`~functools.partial` object, etc. with the underlying @@ -461,18 +553,21 @@ def _debug(self, msg): msg = f'{self_repr}: {msg}' logger.debug(msg) - def get_stats(self): + def get_stats(self) -> LineStats: return LineStats.from_stats_objects(super().get_stats()) - def dump_stats(self, filename): + def dump_stats(self, filename: os.PathLike[str] | str) -> None: """ Dump a representation of the data to a file as a pickled :py:class:`~.LineStats` object from :py:meth:`~.get_stats()`. """ self.get_stats().to_file(filename) - def print_stats(self, stream=None, output_unit=None, stripzeros=False, - details=True, summarize=False, sort=False, rich=False, *, - config=None): + def print_stats( + self, stream: io.TextIOBase | None = None, + output_unit: float | None = None, stripzeros: bool = False, + details: bool = True, summarize: bool = False, + sort: bool = False, rich: bool = False, *, + config: str | PathLike[str] | bool | None = None) -> None: """ Show the gathered statistics. 
""" self.get_stats().print( @@ -481,13 +576,16 @@ def print_stats(self, stream=None, output_unit=None, stripzeros=False, sort=sort, rich=rich, config=config) def _add_namespace( - self, namespace, *, - seen=None, - func_scoping_policy=ScopingPolicy.NONE, - class_scoping_policy=ScopingPolicy.NONE, - module_scoping_policy=ScopingPolicy.NONE, - wrap=False, - name=None): + self, namespace: type | types.ModuleType, *, + seen: set[int] | None = None, + func_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.NONE), + class_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.NONE), + module_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.NONE), + wrap: bool = False, + name: str | None = None) -> int: def func_guard(func): return self._already_a_wrapper(func) or not func_check(func) @@ -546,7 +644,10 @@ def func_guard(func): self._repr_for_log(namespace, name))) return count - def add_class(self, cls, *, scoping_policy=None, wrap=False): + def add_class( + self, cls: type, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> int: """ Add the members (callables (wrappers), methods, classes, ...) in a class' local namespace and profile them. @@ -591,7 +692,10 @@ def add_class(self, cls, *, scoping_policy=None, wrap=False): module_scoping_policy=policies['module'], wrap=wrap) - def add_module(self, mod, *, scoping_policy=None, wrap=False): + def add_module( + self, mod: types.ModuleType, *, + scoping_policy: ScopingPolicy | str | ScopingPolicyDict | None = None, + wrap: bool = False) -> int: """ Add the members (callables (wrappers), methods, classes, ...) in a module's local namespace and profile them. @@ -658,7 +762,7 @@ def _mark_wrapper(self, wrapper): # This could be in the ipython_extension submodule, # but it doesn't depend on the IPython module so it's easier to just let it stay here. 
-def is_generated_code(filename): +def is_generated_code(filename: str) -> bool: """ Return True if a filename corresponds to generated code, such as a Jupyter Notebook cell. """ @@ -672,10 +776,13 @@ def is_generated_code(filename): ) -def show_func(filename, start_lineno, func_name, timings, unit, - output_unit=None, stream=None, stripzeros=False, rich=False, +def show_func(filename: str, start_lineno: int, func_name: str, + timings: Sequence[tuple[int, int, int | float]], unit: float, + output_unit: float | None = None, + stream: io.TextIOBase | None = None, + stripzeros: bool = False, rich: bool = False, *, - config=None): + config: str | PathLike[str] | bool | None = None) -> None: """ Show results for a single function. @@ -740,7 +847,7 @@ def show_func(filename, start_lineno, func_name, timings, unit, ... output_unit, stream, stripzeros, rich) """ if stream is None: - stream = sys.stdout + stream = cast(io.TextIOBase, sys.stdout) total_hits = sum(t[1] for t in timings) total_time = sum(t[2] for t in timings) @@ -752,13 +859,16 @@ def show_func(filename, start_lineno, func_name, timings, unit, # References: # https://github.com/Textualize/rich/discussions/3076 try: - from rich.syntax import Syntax - from rich.highlighter import ReprHighlighter - from rich.text import Text - from rich.console import Console - from rich.table import Table + import importlib + + Syntax = importlib.import_module('rich.syntax').Syntax + ReprHighlighter = importlib.import_module( + 'rich.highlighter').ReprHighlighter + Text = importlib.import_module('rich.text').Text + Console = importlib.import_module('rich.console').Console + Table = importlib.import_module('rich.table').Table except ImportError: - rich = 0 + rich = False if output_unit is None: output_unit = unit @@ -874,7 +984,10 @@ def show_func(filename, start_lineno, func_name, timings, unit, # Use a Console to render to the stream # Not sure if we should force-terminal or just specify the color system # write_console = 
Console(file=stream, force_terminal=True, soft_wrap=True) - write_console = Console(file=stream, soft_wrap=True, color_system='standard') + write_console = Console( + file=cast(IO[str], stream), + soft_wrap=True, + color_system='standard') write_console.print(table) stream.write('\n') else: @@ -895,9 +1008,12 @@ def show_func(filename, start_lineno, func_name, timings, unit, stream.write('\n') -def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, - details=True, summarize=False, sort=False, rich=False, *, - config=None): +def show_text(stats: _TimingsMap, unit: float, + output_unit: float | None = None, + stream: io.TextIOBase | None = None, + stripzeros: bool = False, details: bool = True, + summarize: bool = False, sort: bool = False, rich: bool = False, + *, config: str | PathLike[str] | bool | None = None) -> None: """ Show text for the given timings. @@ -912,7 +1028,7 @@ def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, python -m line_profiler -mtz "uuid.lprof" """ if stream is None: - stream = sys.stdout + stream = cast(io.TextIOBase, sys.stdout) if output_unit is not None: stream.write('Timer unit: %g s\n\n' % output_unit) @@ -940,13 +1056,15 @@ def show_text(stats, unit, output_unit=None, stream=None, stripzeros=False, # Summarize the total time for each function if rich: try: - from rich.console import Console - from rich.markup import escape + import importlib + + Console = importlib.import_module('rich.console').Console + escape = importlib.import_module('rich.markup').escape except ImportError: - rich = 0 + rich = False line_template = '%6.2f seconds - %s:%s - %s' if rich: - write_console = Console(file=stream, soft_wrap=True, + write_console = Console(file=cast(IO[str], stream), soft_wrap=True, color_system='standard') for (fn, lineno, name), timings in stats_order: total_time = sum(t[2] for t in timings) * unit @@ -967,7 +1085,7 @@ def show_text(stats, unit, output_unit=None, stream=None, 
stripzeros=False, load_stats = LineStats.from_files -def main(): +def main() -> None: """ The line profiler CLI to view output from :command:`kernprof -l`. """ diff --git a/line_profiler/line_profiler.pyi b/line_profiler/line_profiler.pyi index 0a9bdbb0..6f535718 100644 --- a/line_profiler/line_profiler.pyi +++ b/line_profiler/line_profiler.pyi @@ -4,14 +4,9 @@ from os import PathLike from types import FunctionType, ModuleType from typing import (TYPE_CHECKING, overload, - Callable, Mapping, + Callable, Mapping, Sequence, Literal, Self, - Protocol, TypeVar) -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - ParamSpec) -except ImportError: - from typing_extensions import ParamSpec # noqa: F401 + Protocol, TypeVar, ParamSpec) from _typeshed import Incomplete from ._line_profiler import (LineProfiler as CLineProfiler, LineStats as CLineStats) @@ -25,6 +20,7 @@ if TYPE_CHECKING: T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) PS = ParamSpec('PS') +_TimingsMap = Mapping[tuple[str, int, str], list[tuple[int, int, int]]] def get_column_widths( @@ -38,16 +34,23 @@ def load_ipython_extension(ip) -> None: class _StatsLike(Protocol): - timings: Mapping[tuple[str, int, str], # funcname, lineno, filename - list[tuple[int, int, int]]] # lineno, nhits, time + timings: _TimingsMap unit: float class LineStats(CLineStats): + def __init__(self, timings: _TimingsMap, unit: float) -> None: + ... + def to_file(self, filename: PathLike[str] | str) -> None: ... - def print(self, stream: Incomplete | None = None, **kwargs) -> None: + def print( + self, stream: io.TextIOBase | None = None, + output_unit: float | None = None, + stripzeros: bool = False, details: bool = True, + summarize: bool = False, sort: bool = False, rich: bool = False, + *, config: str | PathLike[str] | bool | None = None) -> None: ... 
@classmethod @@ -75,23 +78,23 @@ class LineStats(CLineStats): class LineProfiler(CLineProfiler, ByCountProfilerMixin): @overload - def __call__(self, # type: ignore[overload-overlap] + def __call__(self, func: CLevelCallable) -> CLevelCallable: ... @overload - def __call__( # type: ignore[overload-overlap] + def __call__( self, func: UnparametrizedCallableLike, ) -> UnparametrizedCallableLike: ... @overload - def __call__(self, # type: ignore[overload-overlap] + def __call__(self, func: type[T]) -> type[T]: ... @overload - def __call__(self, # type: ignore[overload-overlap] + def __call__(self, func: partial[T]) -> partial[T]: ... @@ -104,7 +107,7 @@ class LineProfiler(CLineProfiler, ByCountProfilerMixin): ... @overload - def __call__(self, # type: ignore[overload-overlap] + def __call__(self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: ... @@ -166,7 +169,7 @@ def is_generated_code(filename): def show_func(filename: str, start_lineno: int, func_name: str, - timings: list[tuple[int, int, float]], + timings: Sequence[tuple[int, int, int | float]], unit: float, output_unit: float | None = None, stream: io.TextIOBase | None = None, @@ -177,10 +180,10 @@ def show_func(filename: str, ... -def show_text(stats, - unit, - output_unit: Incomplete | None = ..., - stream: Incomplete | None = ..., +def show_text(stats: _TimingsMap, + unit: float, + output_unit: float | None = ..., + stream: io.TextIOBase | None = ..., stripzeros: bool = ..., details: bool = ..., summarize: bool = ..., diff --git a/line_profiler/line_profiler_utils.py b/line_profiler/line_profiler_utils.py index 0d903888..fcc6c2cf 100644 --- a/line_profiler/line_profiler_utils.py +++ b/line_profiler/line_profiler_utils.py @@ -1,7 +1,10 @@ """ Miscellaneous utilities that :py:mod:`line_profiler` uses. 
""" +from __future__ import annotations + import enum +from typing_extensions import Self class _StrEnumBase(str, enum.Enum): @@ -28,13 +31,13 @@ class _StrEnumBase(str, enum.Enum): ValueError: 'baz' is not a valid MyEnum """ @staticmethod - def _generate_next_value_(name, *_, **__): + def _generate_next_value_(name: str, *_, **__) -> str: return name.lower() - def __eq__(self, other): + def __eq__(self, other: object) -> bool: return self.value == other - def __str__(self): + def __str__(self) -> str: return self.value @@ -65,7 +68,7 @@ class StringEnum(getattr(enum, 'StrEnum', _StrEnumBase)): 'bar' """ @classmethod - def _missing_(cls, value): + def _missing_(cls, value: object) -> Self | None: if not isinstance(value, str): return None members = {name.casefold(): instance diff --git a/line_profiler/line_profiler_utils.pyi b/line_profiler/line_profiler_utils.pyi deleted file mode 100644 index a510cf94..00000000 --- a/line_profiler/line_profiler_utils.pyi +++ /dev/null @@ -1,26 +0,0 @@ -import enum -try: - from typing import Self # type: ignore[attr-defined] # noqa: F401 -except ImportError: # Python < 3.11 - from typing_extensions import Self # noqa: F401 - - -# Note: `mypy` tries to read this class as a free-standing enum -# (instead of an `enum.Enum` subclass that string enums are to inherit -# from), and complains that it has no members -- so silence that - - -class StringEnum(str, enum.Enum): # type: ignore[misc] - @staticmethod - def _generate_next_value_(name: str, *_, **__) -> str: - ... - - def __eq__(self, other) -> bool: - ... - - def __str__(self) -> str: - ... - - @classmethod - def _missing_(cls, value) -> Self | None: - ... 
diff --git a/line_profiler/profiler_mixin.py b/line_profiler/profiler_mixin.py index 3bc98c1d..121bf18b 100644 --- a/line_profiler/profiler_mixin.py +++ b/line_profiler/profiler_mixin.py @@ -1,8 +1,15 @@ +from __future__ import annotations + import functools import inspect import types +from functools import cached_property, partial, partialmethod from sys import version_info +from typing import (TYPE_CHECKING, Any, Callable, Mapping, Protocol, TypeVar, + overload, cast) +from typing_extensions import ParamSpec, TypeIs from warnings import warn +from ._line_profiler import label from .scoping_policy import ScopingPolicy @@ -26,8 +33,108 @@ # https://cython.readthedocs.io/en/latest/src/tutorial/profiling_tutorial.html _CANNOT_LINE_TRACE_CYTHON = (3, 12) <= version_info < (3, 13, 0, 'beta', 1) +UnparametrizedCallableLike = TypeVar( + 'UnparametrizedCallableLike', + types.FunctionType, property, types.MethodType) +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +PS = ParamSpec('PS') + +if TYPE_CHECKING: + class CythonCallable(Protocol[PS, T_co]): + def __call__(self, *args: PS.args, **kwargs: PS.kwargs) -> T_co: + ... + + @property + def __code__(self) -> types.CodeType: + ... + + @property + def func_code(self) -> types.CodeType: + ... + + @property + def __name__(self) -> str: + ... + + @property + def func_name(self) -> str: + ... + + @property + def __qualname__(self) -> str: + ... + + @property + def __doc__(self) -> str | None: + ... + + @__doc__.setter + def __doc__(self, doc: str | None) -> None: + ... + + @property + def func_doc(self) -> str | None: + ... + + @property + def __globals__(self) -> dict[str, Any]: + ... + + @property + def func_globals(self) -> dict[str, Any]: + ... + + @property + def __dict__(self) -> dict[str, Any]: + ... + + @__dict__.setter + def __dict__(self, dict: dict[str, Any]) -> None: + ... + + @property + def func_dict(self) -> dict[str, Any]: + ... + + @property + def __annotations__(self) -> dict[str, Any]: + ... 
+ + @__annotations__.setter + def __annotations__(self, annotations: dict[str, Any]) -> None: + ... -def is_c_level_callable(func): + @property + def __defaults__(self): + ... + + @property + def func_defaults(self): + ... + + @property + def __kwdefaults__(self): + ... + + @property + def __closure__(self): + ... + + @property + def func_closure(self): + ... +else: + CythonCallable = type(label) + +CLevelCallable = TypeVar( + 'CLevelCallable', + types.BuiltinFunctionType, types.BuiltinMethodType, + types.ClassMethodDescriptorType, types.MethodDescriptorType, + types.MethodWrapperType, types.WrapperDescriptorType) + + +def is_c_level_callable(func: Any) -> TypeIs[CLevelCallable]: """ Returns: func_is_c_level (bool): @@ -37,7 +144,7 @@ def is_c_level_callable(func): return isinstance(func, C_LEVEL_CALLABLE_TYPES) -def is_cython_callable(func): +def is_cython_callable(func: Any) -> TypeIs[CythonCallable]: if not callable(func): return False # Note: don't directly check against a Cython function type, since @@ -48,31 +155,31 @@ def is_cython_callable(func): in ('cython_function_or_method', 'fused_cython_function')) -def is_classmethod(f): +def is_classmethod(f: Any) -> TypeIs[classmethod]: return isinstance(f, classmethod) -def is_staticmethod(f): +def is_staticmethod(f: Any) -> TypeIs[staticmethod]: return isinstance(f, staticmethod) -def is_boundmethod(f): +def is_boundmethod(f: Any) -> TypeIs[types.MethodType]: return isinstance(f, types.MethodType) -def is_partialmethod(f): +def is_partialmethod(f: Any) -> TypeIs[partialmethod]: return isinstance(f, functools.partialmethod) -def is_partial(f): +def is_partial(f: Any) -> TypeIs[partial]: return isinstance(f, functools.partial) -def is_property(f): +def is_property(f: Any) -> TypeIs[property]: return isinstance(f, property) -def is_cached_property(f): +def is_cached_property(f: Any) -> TypeIs[cached_property]: return isinstance(f, functools.cached_property) @@ -86,7 +193,43 @@ class ByCountProfilerMixin: Used by 
:py:class:`line_profiler.line_profiler.LineProfiler` and :py:class:`kernprof.ContextualProfile`. """ - def wrap_callable(self, func): + @overload + def wrap_callable(self, func: CLevelCallable) -> CLevelCallable: + ... + + @overload + def wrap_callable( + self, func: UnparametrizedCallableLike, + ) -> UnparametrizedCallableLike: + ... + + @overload + def wrap_callable(self, func: type[T]) -> type[T]: + ... + + @overload + def wrap_callable(self, func: partial[T]) -> partial[T]: + ... + + @overload + def wrap_callable(self, func: partialmethod[T]) -> partialmethod[T]: + ... + + @overload + def wrap_callable(self, func: cached_property[T_co]) -> cached_property[T_co]: + ... + + @overload + def wrap_callable(self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: + ... + + def enable_by_count(self) -> None: # pragma: no cover - implemented in C + raise NotImplementedError + + def disable_by_count(self) -> None: # pragma: no cover - implemented in C + raise NotImplementedError + + def wrap_callable(self, func: object): """ Decorate a function to start the profiler on function entry and stop it on function exit. @@ -119,7 +262,8 @@ def wrap_callable(self, func): 'callable wrapper') @classmethod - def get_underlying_functions(cls, func): + def get_underlying_functions( + cls, func: object) -> list[types.FunctionType]: """ Get the underlying function objects of a callable or an adjacent object. 
@@ -127,27 +271,35 @@ def get_underlying_functions(cls, func): Returns: funcs (list[Callable]) """ - return cls._get_underlying_functions(func) + return [impl for impl in cls._get_underlying_functions(func) + if isinstance(impl, types.FunctionType)] @classmethod - def _get_underlying_functions(cls, func, seen=None, stop_at_classes=False): + def _get_underlying_functions( + cls, func: object, seen: set[int] | None = None, + stop_at_classes: bool = False + ) -> list[types.FunctionType | type | CythonCallable]: if seen is None: seen = set() - kwargs = {'seen': seen, 'stop_at_classes': stop_at_classes} # Extract inner functions - if any(check(func) - for check in (is_boundmethod, is_classmethod, is_staticmethod)): - return cls._get_underlying_functions(func.__func__, **kwargs) - if any(check(func) - for check in (is_partial, is_partialmethod, is_cached_property)): - return cls._get_underlying_functions(func.func, **kwargs) + if is_boundmethod(func): + return cls._get_underlying_functions( + func.__func__, seen=seen, stop_at_classes=stop_at_classes) + if is_classmethod(func) or is_staticmethod(func): + return cls._get_underlying_functions( + func.__func__, seen=seen, stop_at_classes=stop_at_classes) + if is_partial(func) or is_partialmethod(func) or is_cached_property(func): + return cls._get_underlying_functions( + func.func, seen=seen, stop_at_classes=stop_at_classes) # Dispatch to specific handlers if is_property(func): - return cls._get_underlying_functions_from_property(func, **kwargs) + return cls._get_underlying_functions_from_property( + func, seen, stop_at_classes) if isinstance(func, type): if stop_at_classes: return [func] - return cls._get_underlying_functions_from_type(func, **kwargs) + return cls._get_underlying_functions_from_type( + func, seen, stop_at_classes) # Otherwise, the object should either be a function... 
if not callable(func): raise TypeError(f'func = {func!r}: ' @@ -165,11 +317,13 @@ def _get_underlying_functions(cls, func, seen=None, stop_at_classes=False): func = type(func).__call__ if is_c_level_callable(func): # Can happen with builtin types return [] - return [func] + return [cast(types.FunctionType, func)] @classmethod def _get_underlying_functions_from_property( - cls, prop, seen, stop_at_classes): + cls, prop: property, seen: set[int], + stop_at_classes: bool + ) -> list[types.FunctionType | type | CythonCallable]: result = [] for impl in prop.fget, prop.fset, prop.fdel: if impl is not None: @@ -178,7 +332,10 @@ def _get_underlying_functions_from_property( return result @classmethod - def _get_underlying_functions_from_type(cls, kls, seen, stop_at_classes): + def _get_underlying_functions_from_type( + cls, kls: type, seen: set[int], + stop_at_classes: bool + ) -> list[types.FunctionType | type | CythonCallable]: result = [] get_filter = cls._class_scoping_policy.get_filter func_check = get_filter(kls, 'func') @@ -515,4 +672,5 @@ def __exit__(self, *_, **__): self.disable_by_count() _profiler_wrapped_marker = '__line_profiler_id__' - _class_scoping_policy = ScopingPolicy.CHILDREN + _class_scoping_policy: ScopingPolicy = cast( + ScopingPolicy, ScopingPolicy.CHILDREN) diff --git a/line_profiler/profiler_mixin.pyi b/line_profiler/profiler_mixin.pyi deleted file mode 100644 index ba7a9d3a..00000000 --- a/line_profiler/profiler_mixin.pyi +++ /dev/null @@ -1,271 +0,0 @@ -from functools import cached_property, partial, partialmethod -from types import (CodeType, FunctionType, MethodType, - BuiltinFunctionType, BuiltinMethodType, - ClassMethodDescriptorType, MethodDescriptorType, - MethodWrapperType, WrapperDescriptorType) -from typing import (TYPE_CHECKING, overload, - Any, Callable, Mapping, Protocol, TypeVar) -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - ParamSpec) -except ImportError: # Python < 3.10 - from typing_extensions import 
ParamSpec # noqa: F401 -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - Self) -except ImportError: # Python < 3.11 - from typing_extensions import Self # noqa: F401 -try: - from typing import ( # type: ignore[attr-defined] # noqa: F401 - TypeIs) -except ImportError: # Python < 3.13 - from typing_extensions import TypeIs # noqa: F401 -from ._line_profiler import label - - -UnparametrizedCallableLike = TypeVar('UnparametrizedCallableLike', - FunctionType, property, MethodType) -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -PS = ParamSpec('PS') - -if TYPE_CHECKING: - class CythonCallable(Protocol[PS, T_co]): - def __call__(self, *args: PS.args, **kwargs: PS.kwargs) -> T_co: - ... - - @property - def __code__(self) -> CodeType: - ... - - @property - def func_code(self) -> CodeType: - ... - - @property - def __name__(self) -> str: - ... - - @property - def func_name(self) -> str: - ... - - @property - def __qualname__(self) -> str: - ... - - @property - def __doc__(self) -> str | None: - ... - - @__doc__.setter - def __doc__(self, doc: str | None) -> None: - ... - - @property - def func_doc(self) -> str | None: - ... - - @property - def __globals__(self) -> dict[str, Any]: - ... - - @property - def func_globals(self) -> dict[str, Any]: - ... - - @property - def __dict__(self) -> dict[str, Any]: - ... - - @__dict__.setter - def __dict__(self, dict: dict[str, Any]) -> None: - ... - - @property - def func_dict(self) -> dict[str, Any]: - ... - - @property - def __annotations__(self) -> dict[str, Any]: - ... - - @__annotations__.setter - def __annotations__(self, annotations: dict[str, Any]) -> None: - ... - - @property - def __defaults__(self): - ... - - @property - def func_defaults(self): - ... - - @property - def __kwdefaults__(self): - ... - - @property - def __closure__(self): - ... - - @property - def func_closure(self): - ... 
- - -else: - CythonCallable = type(label) - -CLevelCallable = TypeVar('CLevelCallable', - BuiltinFunctionType, BuiltinMethodType, - ClassMethodDescriptorType, MethodDescriptorType, - MethodWrapperType, WrapperDescriptorType) - - -def is_c_level_callable(func: Any) -> TypeIs[CLevelCallable]: - ... - - -def is_cython_callable(func: Any) -> TypeIs[CythonCallable]: - ... - - -def is_classmethod(f: Any) -> TypeIs[classmethod]: - ... - - -def is_staticmethod(f: Any) -> TypeIs[staticmethod]: - ... - - -def is_boundmethod(f: Any) -> TypeIs[MethodType]: - ... - - -def is_partialmethod(f: Any) -> TypeIs[partialmethod]: - ... - - -def is_partial(f: Any) -> TypeIs[partial]: - ... - - -def is_property(f: Any) -> TypeIs[property]: - ... - - -def is_cached_property(f: Any) -> TypeIs[cached_property]: - ... - - -class ByCountProfilerMixin: - def get_underlying_functions(self, func) -> list[FunctionType]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: CLevelCallable) -> CLevelCallable: - ... - - @overload - def wrap_callable( # type: ignore[overload-overlap] - self, func: UnparametrizedCallableLike, - ) -> UnparametrizedCallableLike: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: type[T]) -> type[T]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: partial[T]) -> partial[T]: - ... - - @overload - def wrap_callable(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - @overload - def wrap_callable(self, - func: cached_property[T_co]) -> cached_property[T_co]: - ... - - @overload - def wrap_callable(self, # type: ignore[overload-overlap] - func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - @overload - def wrap_callable( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... 
- - # Fallback: just return a wrapper function around a generic callable - - @overload - def wrap_callable(self, func: Callable) -> FunctionType: - ... - - def wrap_classmethod( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... - - def wrap_staticmethod( - self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - def wrap_boundmethod(self, func: MethodType) -> MethodType: - ... - - def wrap_partialmethod(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - def wrap_partial(self, func: partial[T]) -> partial[T]: - ... - - def wrap_property(self, func: property) -> property: - ... - - def wrap_cached_property( - self, func: cached_property[T_co]) -> cached_property[T_co]: - ... - - def wrap_async_generator(self, func: FunctionType) -> FunctionType: - ... - - def wrap_coroutine(self, func: FunctionType) -> FunctionType: - ... - - def wrap_generator(self, func: FunctionType) -> FunctionType: - ... - - def wrap_function(self, func: Callable) -> FunctionType: - ... - - def wrap_class(self, func: type[T]) -> type[T]: - ... - - def run(self, cmd: str) -> Self: - ... - - def runctx(self, - cmd: str, - globals: dict[str, Any] | None, - locals: Mapping[str, Any] | None) -> Self: - ... - - def runcall(self, func: Callable[PS, T], /, - *args: PS.args, **kw: PS.kwargs) -> T: - ... - - def __enter__(self) -> Self: - ... - - def __exit__(self, *_, **__) -> None: - ... 
diff --git a/line_profiler/scoping_policy.py b/line_profiler/scoping_policy.py index cedf51e9..3f7dc339 100644 --- a/line_profiler/scoping_policy.py +++ b/line_profiler/scoping_policy.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from enum import auto -from types import MappingProxyType, ModuleType -from typing import Union, TypedDict +from types import FunctionType, MappingProxyType, ModuleType +from typing import Callable, Literal, TypedDict, cast, overload from .line_profiler_utils import StringEnum @@ -97,7 +99,7 @@ class ScopingPolicy(StringEnum): # Verification - def __init_subclass__(cls, *args, **kwargs): + def __init_subclass__(cls, *args: object, **kwargs: object) -> None: """ Call :py:meth:`_check_class`. """ @@ -105,7 +107,7 @@ def __init_subclass__(cls, *args, **kwargs): cls._check_class() @classmethod - def _check_class(cls): + def _check_class(cls) -> None: """ Verify that :py:meth:`.get_filter` return a callable for all policy values and object types. @@ -122,7 +124,25 @@ class MockClass: # Filtering - def get_filter(self, namespace, obj_type): + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['func']) -> Callable[[FunctionType], bool]: + ... + + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['class']) -> Callable[[type], bool]: + ... + + @overload + def get_filter( + self, namespace: type | ModuleType, + obj_type: Literal['module']) -> Callable[[ModuleType], bool]: + ... 
+ + def get_filter(self, namespace: type | ModuleType, obj_type: str): """ Args: namespace (Union[type, types.ModuleType]): @@ -149,15 +169,18 @@ def get_filter(self, namespace, obj_type): if obj_type == 'module': if is_class: return self._return_const(False) - return self._get_module_filter_in_module(namespace) + return self._get_module_filter_in_module(cast(ModuleType, namespace)) if is_class: - method = self._get_callable_filter_in_class - else: - method = self._get_callable_filter_in_module - return method(namespace, is_class=(obj_type == 'class')) + return self._get_callable_filter_in_class( + cast(type, namespace), is_class=(obj_type == 'class')) + return self._get_callable_filter_in_module( + cast(ModuleType, namespace), is_class=(obj_type == 'class')) @classmethod - def to_policies(cls, policies=None): + def to_policies( + cls, + policies: str | ScopingPolicy | ScopingPolicyDict | None = None + ) -> _ScopingPolicyDict: """ Normalize ``policies`` into a dictionary of policies for various object types. 
@@ -222,90 +245,107 @@ def to_policies(cls, policies=None): 'module': cls(policies['module'])}) @staticmethod - def _return_const(value): + def _return_const(value: bool) -> Callable[[object], bool]: def return_const(*_, **__): return value return return_const @staticmethod - def _match_prefix(s, prefix, sep='.'): + def _match_prefix(s: str, prefix: str, sep: str = '.') -> bool: return s == prefix or s.startswith(prefix + sep) - def _get_callable_filter_in_class(self, cls, is_class): - def func_is_child(other): + def _get_callable_filter_in_class( + self, cls: type, is_class: bool + ) -> Callable[[FunctionType | type], bool]: + def func_is_child(other: FunctionType | type): if not modules_are_equal(other): return False return other.__qualname__ == f'{cls.__qualname__}.{other.__name__}' - def modules_are_equal(other): # = sibling check + def modules_are_equal(other: FunctionType | type): # = sibling check return cls.__module__ == other.__module__ - def func_is_descdendant(other): + def func_is_descdendant(other: FunctionType | type): if not modules_are_equal(other): return False return other.__qualname__.startswith(cls.__qualname__ + '.') - return {'exact': (self._return_const(False) - if is_class else - func_is_child), - 'children': func_is_child, - 'descendants': func_is_descdendant, - 'siblings': modules_are_equal, - 'none': self._return_const(True)}[self.value] - - def _get_callable_filter_in_module(self, mod, is_class): - def func_is_child(other): + policies: dict[str, Callable[[FunctionType | type], bool]] = { + 'exact': (self._return_const(False) + if is_class else + func_is_child), + 'children': func_is_child, + 'descendants': func_is_descdendant, + 'siblings': modules_are_equal, + 'none': self._return_const(True), + } + return policies[self.value] + + def _get_callable_filter_in_module( + self, mod: ModuleType, is_class: bool + ) -> Callable[[FunctionType | type], bool]: + def func_is_child(other: FunctionType | type): return other.__module__ == 
mod.__name__ - def func_is_descdendant(other): + def func_is_descdendant(other: FunctionType | type): return self._match_prefix(other.__module__, mod.__name__) - def func_is_cousin(other): + def func_is_cousin(other: FunctionType | type): if func_is_descdendant(other): return True return self._match_prefix(other.__module__, parent) parent, _, basename = mod.__name__.rpartition('.') - return {'exact': (self._return_const(False) - if is_class else - func_is_child), - 'children': func_is_child, - 'descendants': func_is_descdendant, - 'siblings': (func_is_cousin # Only if a pkg - if basename else - func_is_descdendant), - 'none': self._return_const(True)}[self.value] - - def _get_module_filter_in_module(self, mod): - def module_is_descendant(other): + policies: dict[str, Callable[[FunctionType | type], bool]] = { + 'exact': (self._return_const(False) + if is_class else + func_is_child), + 'children': func_is_child, + 'descendants': func_is_descdendant, + 'siblings': (func_is_cousin # Only if a pkg + if basename else + func_is_descdendant), + 'none': self._return_const(True), + } + return policies[self.value] + + def _get_module_filter_in_module( + self, mod: ModuleType + ) -> Callable[[ModuleType], bool]: + def module_is_descendant(other: ModuleType): return other.__name__.startswith(mod.__name__ + '.') - def module_is_child(other): + def module_is_child(other: ModuleType): return other.__name__.rpartition('.')[0] == mod.__name__ - def module_is_sibling(other): + def module_is_sibling(other: ModuleType): return other.__name__.startswith(parent + '.') parent, _, basename = mod.__name__.rpartition('.') - return {'exact': self._return_const(False), - 'children': module_is_child, - 'descendants': module_is_descendant, - 'siblings': (module_is_sibling # Only if a pkg - if basename else - self._return_const(False)), - 'none': self._return_const(True)}[self.value] + policies: dict[str, Callable[[ModuleType], bool]] = { + 'exact': self._return_const(False), + 'children': 
module_is_child, + 'descendants': module_is_descendant, + 'siblings': (module_is_sibling # Only if a pkg + if basename else + self._return_const(False)), + 'none': self._return_const(True), + } + return policies[self.value] # Sanity check in case we extended `ScopingPolicy` and forgot to update # the corresponding methods ScopingPolicy._check_class() -ScopingPolicyDict = TypedDict('ScopingPolicyDict', - {'func': Union[str, ScopingPolicy], - 'class': Union[str, ScopingPolicy], - 'module': Union[str, ScopingPolicy]}) -_ScopingPolicyDict = TypedDict('_ScopingPolicyDict', - {'func': ScopingPolicy, - 'class': ScopingPolicy, - 'module': ScopingPolicy}) +ScopingPolicyDict = TypedDict( + 'ScopingPolicyDict', + {'func': str | ScopingPolicy, + 'class': str | ScopingPolicy, + 'module': str | ScopingPolicy}) +_ScopingPolicyDict = TypedDict( + '_ScopingPolicyDict', + {'func': ScopingPolicy, + 'class': ScopingPolicy, + 'module': ScopingPolicy}) diff --git a/line_profiler/scoping_policy.pyi b/line_profiler/scoping_policy.pyi deleted file mode 100644 index e6987289..00000000 --- a/line_profiler/scoping_policy.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from enum import auto -from types import FunctionType, ModuleType -from typing import overload, Literal, Callable, TypedDict -from .line_profiler_utils import StringEnum - - -class ScopingPolicy(StringEnum): - EXACT = auto() - CHILDREN = auto() - DESCENDANTS = auto() - SIBLINGS = auto() - NONE = auto() - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['func']) -> Callable[[FunctionType], bool]: - ... - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['class']) -> Callable[[type], bool]: - ... - - @overload - def get_filter( - self, - namespace: type | ModuleType, - obj_type: Literal['module']) -> Callable[[ModuleType], bool]: - ... 
- - @classmethod - def to_policies( - cls, - policies: (str | 'ScopingPolicy' | 'ScopingPolicyDict' - | None) = None) -> '_ScopingPolicyDict': - ... - - -ScopingPolicyDict = TypedDict('ScopingPolicyDict', - {'func': str | ScopingPolicy, - 'class': str | ScopingPolicy, - 'module': str | ScopingPolicy}) -_ScopingPolicyDict = TypedDict('_ScopingPolicyDict', - {'func': str | ScopingPolicy, - 'class': str | ScopingPolicy, - 'module': str | ScopingPolicy}) diff --git a/line_profiler/toml_config.py b/line_profiler/toml_config.py index 06d0870c..5795fef9 100644 --- a/line_profiler/toml_config.py +++ b/line_profiler/toml_config.py @@ -2,6 +2,8 @@ Read and resolve user-supplied TOML files and combine them with the default to generate configurations. """ +from __future__ import annotations + import copy import dataclasses import functools @@ -14,7 +16,9 @@ except ImportError: # Python < 3.11 import tomli as tomllib # type: ignore[no-redef] # noqa: F811 from collections.abc import Mapping -from typing import Dict, List, Any +from os import PathLike +from typing import Any, Mapping as TypingMapping, Sequence, TypeVar, cast +from typing_extensions import Self __all__ = ['ConfigSource'] @@ -23,7 +27,7 @@ TARGETS = 'line_profiler.toml', 'pyproject.toml' ENV_VAR = 'LINE_PROFILER_RC' -_DEFAULTS = None +_DEFAULTS: ConfigSource | None = None @dataclasses.dataclass @@ -45,11 +49,11 @@ class ConfigSource: :py:attr:`~.ConfigSource.path` :py:attr:`~.ConfigSource.conf_dict` can be found. """ - conf_dict: Dict[str, Any] + conf_dict: dict[str, Any] path: pathlib.Path - subtable: List[str] + subtable: list[str] - def copy(self): + def copy(self) -> Self: """ Returns: Copy of the object. 
@@ -57,7 +61,8 @@ def copy(self): return type(self)( copy.deepcopy(self.conf_dict), self.path, self.subtable.copy()) - def get_subconfig(self, *headers, allow_absence=False, copy=False): + def get_subconfig(self, *headers: str, allow_absence: bool = False, + copy: bool = False) -> Self: """ Arguments: headers (str): @@ -87,13 +92,14 @@ def get_subconfig(self, *headers, allow_absence=False, copy=False): >>> assert (display_widths.conf_dict ... is default.conf_dict['show']['column_widths']) """ - new_dict = get_subtable( - self.conf_dict, headers, allow_absence=allow_absence) + new_dict = cast( + dict[str, Any], + get_subtable(self.conf_dict, headers, allow_absence=allow_absence)) new_subtable = [*self.subtable, *headers] return type(self)(new_dict, self.path, new_subtable) @classmethod - def from_default(cls, *, copy=True): + def from_default(cls, *, copy: bool = True) -> Self: """ Get the default TOML configuration that ships with the package. @@ -120,17 +126,27 @@ def find_file(anc, *chunks): global _DEFAULTS if _DEFAULTS is None: - package = __spec__.name.rpartition('.')[0] + if __spec__ is None: + package = __name__.rpartition('.')[0] + else: + package = __spec__.name.rpartition('.')[0] with find_file(package + '.rc', 'line_profiler.toml') as path: - conf_dict, source = find_and_read_config_file(config=path) - conf_dict = get_subtable(conf_dict, NAMESPACE, allow_absence=False) + result = find_and_read_config_file(config=path) + if result is None: + raise FileNotFoundError( + 'Default configuration file could not be read') + conf_dict, source = result + conf_dict = cast( + dict[str, Any], + get_subtable(conf_dict, NAMESPACE, allow_absence=False)) _DEFAULTS = cls(conf_dict, source, list(NAMESPACE)) if not copy: - return _DEFAULTS - return _DEFAULTS.copy() + return cast(Self, _DEFAULTS) + return cast(Self, _DEFAULTS.copy()) @classmethod - def from_config(cls, config=None, *, read_env=True): + def from_config(cls, config: str | PathLike | bool | None = None, *, + 
read_env: bool = True) -> Self: """ Create an instance by loading from a config file. @@ -187,7 +203,7 @@ def from_config(cls, config=None, *, read_env=True): configuration (see :py:meth:`~.ConfigSource.from_default`). """ - def merge(template, supplied): + def merge(template: dict[str, Any], supplied: dict[str, Any]): if not (isinstance(template, dict) and isinstance(supplied, dict)): return supplied result = {} @@ -213,9 +229,8 @@ def merge(template, supplied): else: # Shield the lookup from the environment get_conf = functools.partial(find_and_read_config_file, config=config, env_var=None) - try: - content, source = get_conf() - except TypeError: # Got `None` + result = get_conf() + if result is None: if config: if os.path.exists(config): Error = ValueError @@ -224,6 +239,7 @@ def merge(template, supplied): raise Error( f'Cannot load configurations from {config!r}') from None return default_instance + content, source = result conf = {} try: for header in get_headers(default_instance.conf_dict): @@ -257,8 +273,16 @@ def merge(template, supplied): merge(default_instance.conf_dict, conf), source, list(NAMESPACE)) +Config = tuple[dict[str, dict[str, Any]], pathlib.Path] +K = TypeVar('K') +V = TypeVar('V') +NestedTable = TypingMapping[K, 'NestedTable[K, V]' | V] + + def find_and_read_config_file( - *, config=None, env_var=ENV_VAR, targets=TARGETS): + *, config: str | PathLike | None = None, + env_var: str | None = ENV_VAR, + targets: Sequence[str | PathLike] = TARGETS) -> Config | None: """ Arguments: config (str | os.PathLike[str] | None): @@ -308,7 +332,8 @@ def iter_configs(dir_path): return None -def get_subtable(table, keys, *, allow_absence=True): +def get_subtable(table: NestedTable[K, V], keys: Sequence[K], *, + allow_absence: bool = True) -> NestedTable[K, V]: """ Arguments: table (Mapping): @@ -354,7 +379,8 @@ def get_subtable(table, keys, *, allow_absence=True): return subtable -def get_headers(table, *, include_implied=False): +def get_headers(table: 
NestedTable[K, Any], *, + include_implied: bool = False) -> set[tuple[K, ...]]: """ Arguments: table (Mapping): diff --git a/line_profiler/toml_config.pyi b/line_profiler/toml_config.pyi deleted file mode 100644 index 93409341..00000000 --- a/line_profiler/toml_config.pyi +++ /dev/null @@ -1,54 +0,0 @@ -from dataclasses import dataclass -from os import PathLike -from pathlib import Path -from typing import Mapping, Sequence, Any, Self, TypeVar - - -TARGETS = 'line_profiler.toml', 'pyproject.toml' -ENV_VAR = 'LINE_PROFILER_RC' - -K = TypeVar('K') -V = TypeVar('V') -Config = tuple[dict[str, dict[str, Any]], Path] -NestedTable = Mapping[K, 'NestedTable[K, V]' | V] - - -@dataclass -class ConfigSource: - conf_dict: dict[str, Any] - path: Path - subtable: list[str] - - def copy(self) -> Self: - ... - - def get_subconfig(self, *headers: str, - allow_absence: bool = False, copy: bool = False) -> Self: - ... - - @classmethod - def from_default(cls, *, copy: bool = True) -> Self: - ... - - @classmethod - def from_config(cls, config: str | PathLike | bool | None = None, *, - read_env: bool = True) -> Self: - ... - - -def find_and_read_config_file( - *, - config: str | PathLike | None = None, - env_var: str | None = ENV_VAR, - targets: Sequence[str | PathLike] = TARGETS) -> Config: - ... - - -def get_subtable(table: NestedTable[K, V], keys: Sequence[K], *, - allow_absence: bool = True) -> NestedTable[K, V]: - ... - - -def get_headers(table: NestedTable[K, Any], *, - include_implied: bool = False) -> set[tuple[K, ...]]: - ... 
diff --git a/requirements/runtime.txt b/requirements/runtime.txt index fe0df55e..85d1f5c2 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1 +1,2 @@ tomli; python_version < '3.11' +typing_extensions From cdec523feefe2542e30d54076ddec73d241e0b36 Mon Sep 17 00:00:00 2001 From: joncrall Date: Sun, 8 Feb 2026 12:53:30 -0500 Subject: [PATCH 02/22] Port latest util_static code --- line_profiler/autoprofile/util_static.py | 144 +++++++++++------------ 1 file changed, 70 insertions(+), 74 deletions(-) diff --git a/line_profiler/autoprofile/util_static.py b/line_profiler/autoprofile/util_static.py index dfa32ad1..c9f525d2 100644 --- a/line_profiler/autoprofile/util_static.py +++ b/line_profiler/autoprofile/util_static.py @@ -3,7 +3,6 @@ :py:mod:`xdoctest` via dev/maintain/port_utilities.py in the line_profiler repo. """ -from __future__ import annotations from os.path import abspath from os.path import dirname @@ -16,25 +15,23 @@ from os.path import join import os from os.path import split +import typing from os.path import isfile from os.path import realpath import sys -from os import PathLike -from collections.abc import Generator -from typing import Any # from xdoctest import utils def package_modpaths( - pkgpath: str, - with_pkg: bool = False, - with_mod: bool = True, - followlinks: bool = True, - recursive: bool = True, - with_libs: bool = False, - check: bool = True, -) -> Generator[Any, None, None]: + pkgpath, + with_pkg=False, + with_mod=True, + followlinks=True, + recursive=True, + with_libs=False, + check=True, +): r""" Finds sub-packages and sub-modules belonging to a package. 
@@ -97,52 +94,51 @@ def package_modpaths( break -IS_PY_GE_308 = sys.version_info[0:2] >= (3, 8) +IS_PY_LT_314: bool = sys.version_info[0:2] < (3, 14) + + +IS_PY_GE_308: bool = sys.version_info[0:2] >= (3, 8) def _parse_static_node_value(node): """ Extract a constant value from a node if possible - - Args: - node (ast.AST): input node - - Returns: - Any: parsed value """ import ast - from collections import OrderedDict import numbers + from collections import OrderedDict - if ( - (isinstance(node, ast.Constant) and isinstance(node.value, numbers.Number)) - if IS_PY_GE_308 - else isinstance(node, ast.Num) - ): - value = node.value if IS_PY_GE_308 else node.n - elif ( - (isinstance(node, ast.Constant) and isinstance(node.value, str)) - if IS_PY_GE_308 - else isinstance(node, ast.Str) - ): - value = node.value if IS_PY_GE_308 else node.s - elif isinstance(node, ast.List): - value = list(map(_parse_static_node_value, node.elts)) - elif isinstance(node, ast.Tuple): - value = tuple(map(_parse_static_node_value, node.elts)) - elif isinstance(node, ast.Dict): + if IS_PY_GE_308: + if isinstance(node, ast.Constant) and isinstance(node.value, numbers.Number): + return node.value + if isinstance(node, ast.Constant) and isinstance(node.value, str): + return node.value + else: + num_type = getattr(ast, "Num", None) + str_type = getattr(ast, "Str", None) + if (num_type is not None) and isinstance(node, num_type): + return node.n + if (str_type is not None) and isinstance(node, str_type): + return node.s + if isinstance(node, ast.List): + return list(map(_parse_static_node_value, node.elts)) + if isinstance(node, ast.Tuple): + return tuple(map(_parse_static_node_value, node.elts)) + if isinstance(node, ast.Dict): keys = map(_parse_static_node_value, node.keys) values = map(_parse_static_node_value, node.values) - value = OrderedDict(zip(keys, values)) - elif isinstance(node, ast.Constant): - value = node.value - else: - raise TypeError( - """Cannot parse a static value from 
non-static node of type: {!r}""".format( - type(node) - ) + return OrderedDict(zip(keys, values)) + if IS_PY_LT_314: + nameconst_type = getattr(ast, "NameConstant", None) + if (nameconst_type is not None) and isinstance(node, nameconst_type): + return node.value + if isinstance(node, ast.Constant): + return node.value + raise TypeError( + "Cannot parse a static value from non-static node of type: {!r}".format( + type(node) ) - return value + ) def _extension_module_tags(): @@ -188,8 +184,6 @@ def _static_parse(varname, fpath): >>> assert _static_parse('a', fpath) == ("3", 5, 6) >>> fpath.write_text('b = 10' + chr(10) + 'a = None') >>> assert _static_parse('a', fpath) is None - >>> fpath.write_text('a = None') - >>> assert _static_parse('a', fpath) is None >>> import pytest >>> with pytest.raises(TypeError): >>> fpath.write_text('a = list(range(10))') @@ -254,7 +248,7 @@ def _platform_pylib_exts(): return tuple(valid_exts) -def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None): +def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None) -> str | None: """ syspath version of modname_to_modpath @@ -270,7 +264,7 @@ def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None): Defaults to None. Returns: - str: path to the module. + str | None: path to the module or None if it does not exist. Note: This is much slower than the pkgutil mechanisms. @@ -287,12 +281,11 @@ def _syspath_modname_to_modpath(modname, sys_path=None, exclude=None): ...static_analysis.py >>> print(_syspath_modname_to_modpath('xdoctest')) ...xdoctest - >>> # xdoctest: +REQUIRES(CPython) - >>> print(_syspath_modname_to_modpath('_ctypes')) - ..._ctypes... 
+ >>> print(_syspath_modname_to_modpath('json')) + ...json >>> assert _syspath_modname_to_modpath('xdoctest', sys_path=[]) is None >>> assert _syspath_modname_to_modpath('xdoctest.static_analysis', sys_path=[]) is None - >>> assert _syspath_modname_to_modpath('_ctypes', sys_path=[]) is None + >>> assert _syspath_modname_to_modpath('json', sys_path=[]) is None >>> assert _syspath_modname_to_modpath('this', sys_path=[]) is None Example: @@ -332,14 +325,15 @@ def _isvalid(modpath, base): if sys_path is None: sys_path = sys.path candidate_dpaths = [("." if (p == "") else p) for p in sys_path] - if exclude: - def normalize(p): - if sys.platform.startswith("win32"): - return realpath(p).lower() - else: - return realpath(p) + def normalize(p): + if sys.platform.startswith("win32"): + return realpath(p).lower() + else: + return realpath(p) + real_exclude = set() + if exclude: real_exclude = {normalize(p) for p in exclude} candidate_dpaths = [ p for p in candidate_dpaths if (normalize(p) not in real_exclude) @@ -420,14 +414,17 @@ def check_dpath(dpath): if modpath: found_modpath = modpath break + if typing.TYPE_CHECKING: + found_modpath = typing.cast((str | None), found_modpath) return found_modpath def modname_to_modpath( - modname: str, - hide_init: bool = True, - hide_main: bool = False, - sys_path: list[str | PathLike] | None = None) -> str | None: + modname: str, + hide_init: bool = True, + hide_main: bool = False, + sys_path: list[(str | os.PathLike)] | None = None, +) -> str | None: """ Finds the path to a python module from its name. 
@@ -464,9 +461,8 @@ def modname_to_modpath( >>> modname = 'xdoctest' >>> modpath = modname_to_modpath(modname, hide_init=False) >>> assert modpath.endswith('__init__.py') - >>> # xdoctest: +REQUIRES(CPython) - >>> modpath = basename(modname_to_modpath('_ctypes')) - >>> assert 'ctypes' in modpath + >>> modpath = modname_to_modpath('json') + >>> assert 'json' in modpath """ if hide_main or sys_path: modpath = _syspath_modname_to_modpath(modname, sys_path) @@ -475,10 +471,12 @@ def modname_to_modpath( if modpath is None: return None modpath = normalize_modpath(modpath, hide_init=hide_init, hide_main=hide_main) + if typing.TYPE_CHECKING: + modpath = typing.cast(str, modpath) return modpath -def split_modpath(modpath: str, check: bool = True) -> tuple[str, str]: +def split_modpath(modpath: str | os.PathLike, check: bool = True) -> tuple[(str, str)]: """ Splits the modpath into the dir that must be in PYTHONPATH for the module to be imported and the modulepath relative to this directory. @@ -523,8 +521,8 @@ def split_modpath(modpath: str, check: bool = True) -> tuple[str, str]: def normalize_modpath( - modpath: str | PathLike, hide_init: bool = True, - hide_main: bool = False) -> str: + modpath: str | os.PathLike, hide_init: bool = True, hide_main: bool = False +) -> str | os.PathLike: """ Normalizes __init__ and __main__ paths. 
@@ -560,7 +558,6 @@ def normalize_modpath( >>> assert not res2.endswith('.py') >>> assert not res3.endswith('.py') """ - modpath = os.fspath(modpath) if hide_init: if basename(modpath) == "__init__.py": modpath = dirname(modpath) @@ -635,10 +632,9 @@ def modpath_to_modname( >>> assert modpath_to_modname(dirname(xdoctest.__file__.replace('.pyc', '.py'))) == 'xdoctest' Example: - >>> # xdoctest: +REQUIRES(CPython) - >>> modpath = modname_to_modpath('_ctypes') + >>> modpath = modname_to_modpath('json') >>> modname = modpath_to_modname(modpath) - >>> assert modname == '_ctypes' + >>> assert modname == 'json' Example: >>> modpath = '/foo/libfoobar.linux-x86_64-3.6.so' From 9b1ce003efb643380315f0bfd04bfc8c48d954c2 Mon Sep 17 00:00:00 2001 From: joncrall Date: Sun, 8 Feb 2026 12:54:06 -0500 Subject: [PATCH 03/22] Remove line_profiler.pyi --- line_profiler/line_profiler.pyi | 201 -------------------------------- 1 file changed, 201 deletions(-) delete mode 100644 line_profiler/line_profiler.pyi diff --git a/line_profiler/line_profiler.pyi b/line_profiler/line_profiler.pyi deleted file mode 100644 index 6f535718..00000000 --- a/line_profiler/line_profiler.pyi +++ /dev/null @@ -1,201 +0,0 @@ -import io -from functools import cached_property, partial, partialmethod -from os import PathLike -from types import FunctionType, ModuleType -from typing import (TYPE_CHECKING, - overload, - Callable, Mapping, Sequence, - Literal, Self, - Protocol, TypeVar, ParamSpec) -from _typeshed import Incomplete -from ._line_profiler import (LineProfiler as CLineProfiler, - LineStats as CLineStats) -from .profiler_mixin import ByCountProfilerMixin, CLevelCallable -from .scoping_policy import ScopingPolicy, ScopingPolicyDict - -if TYPE_CHECKING: - from .profiler_mixin import UnparametrizedCallableLike - - -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -PS = ParamSpec('PS') -_TimingsMap = Mapping[tuple[str, int, str], list[tuple[int, int, int]]] - - -def get_column_widths( - config: bool 
| str | PathLike[str] | None = False) -> Mapping[ - Literal['line', 'hits', 'time', 'perhit', 'percent'], int]: - ... - - -def load_ipython_extension(ip) -> None: - ... - - -class _StatsLike(Protocol): - timings: _TimingsMap - unit: float - - -class LineStats(CLineStats): - def __init__(self, timings: _TimingsMap, unit: float) -> None: - ... - - def to_file(self, filename: PathLike[str] | str) -> None: - ... - - def print( - self, stream: io.TextIOBase | None = None, - output_unit: float | None = None, - stripzeros: bool = False, details: bool = True, - summarize: bool = False, sort: bool = False, rich: bool = False, - *, config: str | PathLike[str] | bool | None = None) -> None: - ... - - @classmethod - def from_files(cls, file: PathLike[str] | str, /, - *files: PathLike[str] | str) -> Self: - ... - - @classmethod - def from_stats_objects(cls, stats: _StatsLike, /, - *more_stats: _StatsLike) -> Self: - ... - - def __repr__(self) -> str: - ... - - def __eq__(self, other) -> bool: - ... - - def __add__(self, other: _StatsLike) -> Self: - ... - - def __iadd__(self, other: _StatsLike) -> Self: - ... - - -class LineProfiler(CLineProfiler, ByCountProfilerMixin): - @overload - def __call__(self, - func: CLevelCallable) -> CLevelCallable: - ... - - @overload - def __call__( - self, func: UnparametrizedCallableLike, - ) -> UnparametrizedCallableLike: - ... - - @overload - def __call__(self, - func: type[T]) -> type[T]: - ... - - @overload - def __call__(self, - func: partial[T]) -> partial[T]: - ... - - @overload - def __call__(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - @overload - def __call__(self, func: cached_property[T_co]) -> cached_property[T_co]: - ... - - @overload - def __call__(self, - func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - @overload - def __call__( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... 
- - # Fallback: just wrap the `.__call__()` of a generic callable - - @overload - def __call__(self, func: Callable) -> Callable: - ... - - def add_callable( - self, func, - guard: Callable[[FunctionType], bool] | None = None, - name: str | None = None) -> Literal[0, 1]: - ... - - def get_stats(self) -> LineStats: - ... - - def dump_stats(self, filename) -> None: - ... - - def print_stats(self, - stream: Incomplete | None = ..., - output_unit: Incomplete | None = ..., - stripzeros: bool = ..., - details: bool = ..., - summarize: bool = ..., - sort: bool = ..., - rich: bool = ..., - *, - config: str | PathLike[str] | bool | None = None) -> None: - ... - - def add_module( - self, mod: ModuleType, *, - scoping_policy: ( - ScopingPolicy | str | ScopingPolicyDict | None) = None, - wrap: bool = False) -> int: - ... - - def add_class( - self, cls: type, *, - scoping_policy: ( - ScopingPolicy | str | ScopingPolicyDict | None) = None, - wrap: bool = False) -> int: - ... - - -def is_generated_code(filename): - ... - - -def show_func(filename: str, - start_lineno: int, - func_name: str, - timings: Sequence[tuple[int, int, int | float]], - unit: float, - output_unit: float | None = None, - stream: io.TextIOBase | None = None, - stripzeros: bool = False, - rich: bool = False, - *, - config: str | PathLike[str] | bool | None = None) -> None: - ... - - -def show_text(stats: _TimingsMap, - unit: float, - output_unit: float | None = ..., - stream: io.TextIOBase | None = ..., - stripzeros: bool = ..., - details: bool = ..., - summarize: bool = ..., - sort: bool = ..., - rich: bool = ..., - *, - config: str | PathLike[str] | bool | None = None) -> None: - ... - - -load_stats = LineStats.from_files - - -def main(): - ... 
From 44575e886077237fe257174275476e8f66aa633b Mon Sep 17 00:00:00 2001 From: joncrall Date: Sun, 8 Feb 2026 12:56:48 -0500 Subject: [PATCH 04/22] Minor type fixes --- line_profiler/line_profiler.py | 1 - line_profiler/toml_config.py | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/line_profiler/line_profiler.py b/line_profiler/line_profiler.py index c8a915af..9d345f3e 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -381,7 +381,6 @@ def _get_aggregated_timings(stats_objs): # rounding errors stats_objs = sorted(stats_objs, key=operator.attrgetter('unit')) unit = stats_objs[-1].unit - # type: dict[tuple[str, int, int], dict[int, tuple[int, float]] timing_dict = {} for stats in stats_objs: factor = stats.unit / unit diff --git a/line_profiler/toml_config.py b/line_profiler/toml_config.py index 5795fef9..5a3099fb 100644 --- a/line_profiler/toml_config.py +++ b/line_profiler/toml_config.py @@ -220,6 +220,7 @@ def merge(template: dict[str, Any], supplied: dict[str, Any]): config = None else: return default_instance + assert not isinstance(config, bool) if config is not None: # Promote to `Path` (and catch type errors) early config = pathlib.Path(config) From 37ff2c19755f9be57b355dd2a8047f0d5692da74 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 18:31:45 +0000 Subject: [PATCH 05/22] Fix Cython test environment issues in dev container - Set PIP_NO_BINARY=Cython to build Cython from source, avoiding mmap issues - Fall back to non-editable install if editable install fails due to .so mapping errors - These changes fix compatibility with container environments that have memory mapping restrictions This allows the tests to run properly and fail on actual test logic rather than environment issues. 
--- tests/test_cython.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/tests/test_cython.py b/tests/test_cython.py index d54e997d..c8c0f7c7 100644 --- a/tests/test_cython.py +++ b/tests/test_cython.py @@ -61,13 +61,20 @@ def _install_cython_example( pip_install += ['--editable', str(tmp_path)] else: pip_install.append(str(tmp_path)) + + # Set environment variables to avoid isolated build environment issues + # with Cython's compiled extensions + env = os.environ.copy() + env['PIP_NO_BUILD_ISOLATION'] = '0' + env['PIP_NO_BINARY'] = 'Cython' + try: - subprocess.run(pip_install).check_returncode() - subprocess.run(pip + ['list']).check_returncode() + subprocess.run(pip_install, env=env).check_returncode() + subprocess.run(pip + ['list'], env=env).check_returncode() yield cython_source, module finally: pip_uninstall = pip + ['uninstall', '--verbose', '--yes', module] - subprocess.run(pip_uninstall).check_returncode() + subprocess.run(pip_uninstall, env=env).check_returncode() @pytest.fixture(scope='module') @@ -79,10 +86,21 @@ def cython_example( source file and the corresponding module, uninstall it at teardown. """ # With editable installs, we need to refresh `sys.meta_path` before - # the installed module is available - for path, mod_name in _install_cython_example(tmp_path_factory, True): - reload(import_module('site')) - yield (path, import_module(mod_name)) + # the installed module is available. However, in some container environments + # with memory mapping restrictions, editable installs fail. Try editable first, + # then fall back to regular install. 
+ for editable in [True, False]: + try: + for path, mod_name in _install_cython_example(tmp_path_factory, editable): + reload(import_module('site')) + yield (path, import_module(mod_name)) + return + except ImportError: + if editable: + # Try non-editable install as fallback + continue + # If non-editable also failed, re-raise + raise def test_recover_cython_source(cython_example: Tuple[Path, ModuleType]) -> None: From fd357d8c6b6c3cda767a904a8b98f9d701aadc20 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 14:12:32 -0500 Subject: [PATCH 06/22] Fix cython regression --- line_profiler/profiler_mixin.py | 10 +++++++--- tests/cython_example/setup.py | 6 +++++- tests/test_cython.py | 32 +++++++++++++++++++++----------- 3 files changed, 33 insertions(+), 15 deletions(-) diff --git a/line_profiler/profiler_mixin.py b/line_profiler/profiler_mixin.py index 121bf18b..bc597e0a 100644 --- a/line_profiler/profiler_mixin.py +++ b/line_profiler/profiler_mixin.py @@ -263,7 +263,7 @@ def wrap_callable(self, func: object): @classmethod def get_underlying_functions( - cls, func: object) -> list[types.FunctionType]: + cls, func: object) -> list[types.FunctionType | CythonCallable]: """ Get the underlying function objects of a callable or an adjacent object. 
@@ -271,8 +271,12 @@ def get_underlying_functions( Returns: funcs (list[Callable]) """ - return [impl for impl in cls._get_underlying_functions(func) - if isinstance(impl, types.FunctionType)] + result = [] + for impl in cls._get_underlying_functions(func): + # Include FunctionType and CythonCallable, but not type objects + if isinstance(impl, types.FunctionType) or is_cython_callable(impl): + result.append(impl) + return result @classmethod def _get_underlying_functions( diff --git a/tests/cython_example/setup.py b/tests/cython_example/setup.py index 3d6c83da..34dbe5bc 100644 --- a/tests/cython_example/setup.py +++ b/tests/cython_example/setup.py @@ -1,4 +1,8 @@ from setuptools import setup from Cython.Build import cythonize -setup(ext_modules=cythonize('cython_example.pyx')) +setup( + ext_modules=cythonize('cython_example.pyx'), + package_data={'': ['*.pyx']}, + include_package_data=True, +) diff --git a/tests/test_cython.py b/tests/test_cython.py index c8c0f7c7..1411969e 100644 --- a/tests/test_cython.py +++ b/tests/test_cython.py @@ -3,6 +3,7 @@ """ import math import os +import shutil import subprocess import sys from importlib import reload, import_module @@ -56,7 +57,7 @@ def _install_cython_example( file_out.write_text(replace(file_in.read_text())) # There should only be one Cython source file cython_source, = tmp_path.glob('*.pyx') - pip_install = pip + ['install', '--verbose'] + pip_install = pip + ['install', '--verbose', '--no-build-isolation'] if editable: pip_install += ['--editable', str(tmp_path)] else: @@ -65,13 +66,24 @@ def _install_cython_example( # Set environment variables to avoid isolated build environment issues # with Cython's compiled extensions env = os.environ.copy() - env['PIP_NO_BUILD_ISOLATION'] = '0' env['PIP_NO_BINARY'] = 'Cython' try: subprocess.run(pip_install, env=env).check_returncode() subprocess.run(pip + ['list'], env=env).check_returncode() - yield cython_source, module + + # For non-editable installs, we need to manually 
copy the .pyx file + # to where find_cython_source_file() can find it, and yield that location + actual_source = cython_source + if not editable: + spec = find_spec(module) + if spec and spec.origin: + installed_so = Path(spec.origin) + installed_pyx = installed_so.parent / cython_source.name + shutil.copy2(cython_source, installed_pyx) + actual_source = installed_pyx + + yield actual_source, module finally: pip_uninstall = pip + ['uninstall', '--verbose', '--yes', module] subprocess.run(pip_uninstall, env=env).check_returncode() @@ -85,21 +97,19 @@ def cython_example( Install the example Cython module, yield the path to the Cython source file and the corresponding module, uninstall it at teardown. """ - # With editable installs, we need to refresh `sys.meta_path` before - # the installed module is available. However, in some container environments - # with memory mapping restrictions, editable installs fail. Try editable first, - # then fall back to regular install. + # Try editable install first (preferred because it allows source location recovery) + # Fall back to regular install if editable fails due to environment issues for editable in [True, False]: try: for path, mod_name in _install_cython_example(tmp_path_factory, editable): reload(import_module('site')) yield (path, import_module(mod_name)) return - except ImportError: - if editable: - # Try non-editable install as fallback + except ImportError as e: + if editable and 'failed to map segment from shared object' in str(e): + # Container mmap limitation - try non-editable install instead continue - # If non-editable also failed, re-raise + # Other import errors should be re-raised raise From 8551eab71015854be9306be4cb37fec467d73a6b Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 14:14:48 -0500 Subject: [PATCH 07/22] Fix type errors --- line_profiler/line_profiler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/line_profiler/line_profiler.py 
b/line_profiler/line_profiler.py index 9d345f3e..e519439e 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -479,7 +479,7 @@ def wrap_callable(self, func: object): def add_callable( self, func: object, - guard: Callable[[types.FunctionType], bool] | None = None, + guard: Callable[[Callable], bool] | None = None, name: str | None = None) -> Literal[0, 1]: """ Register a function, method, :py:class:`property`, @@ -489,7 +489,7 @@ def add_callable( Args: func (...): Function, class/static/bound method, property, etc. - guard (Optional[Callable[[types.FunctionType], bool]]) + guard (Optional[Callable[[Callable], bool]]) Optional checker callable, which takes a function object and returns true(-y) if it *should not* be passed to :py:meth:`.add_function()`. Defaults to checking From d5f7ba48caab671ba27a6d636c0701953b7c8d22 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 14:30:04 -0500 Subject: [PATCH 08/22] Fix type annotations on 3.8 --- .../autoprofile/ast_profile_transformer.py | 8 ++-- line_profiler/autoprofile/util_static.py | 2 +- line_profiler/explicit_profiler.py | 1 - line_profiler/scoping_policy.py | 8 ++-- line_profiler/toml_config.py | 45 +++++++++---------- tests/test_eager_preimports.py | 1 + 6 files changed, 31 insertions(+), 34 deletions(-) diff --git a/line_profiler/autoprofile/ast_profile_transformer.py b/line_profiler/autoprofile/ast_profile_transformer.py index fdadd22f..60564c36 100644 --- a/line_profiler/autoprofile/ast_profile_transformer.py +++ b/line_profiler/autoprofile/ast_profile_transformer.py @@ -1,7 +1,7 @@ from __future__ import annotations import ast -from typing import cast +from typing import cast, Union def ast_create_profile_node( @@ -123,7 +123,7 @@ def _visit_import( if not self._profile_imports: self.generic_visit(node) return node - visited = [cast(ast.Import | ast.ImportFrom, self.generic_visit(node))] + visited = [cast(Union[ast.Import, ast.ImportFrom], self.generic_visit(node))] 
for names in node.names: node_name = names.name if names.asname is None else names.asname if node_name in self._profiled_imports: @@ -149,7 +149,7 @@ def visit_Import( if profile_imports is True: returns list containing the import node and the profiling node """ - return cast(ast.Import | list[ast.Import | ast.Expr], + return cast(Union[ast.Import, list[Union[ast.Import, ast.Expr]]], self._visit_import(node)) def visit_ImportFrom( @@ -168,5 +168,5 @@ def visit_ImportFrom( if profile_imports is True: returns list containing the import node and the profiling node """ - return cast(ast.ImportFrom | list[ast.ImportFrom | ast.Expr], + return cast(Union[ast.ImportFrom, list[Union[ast.ImportFrom, ast.Expr]]], self._visit_import(node)) diff --git a/line_profiler/autoprofile/util_static.py b/line_profiler/autoprofile/util_static.py index c9f525d2..555d972f 100644 --- a/line_profiler/autoprofile/util_static.py +++ b/line_profiler/autoprofile/util_static.py @@ -3,7 +3,7 @@ :py:mod:`xdoctest` via dev/maintain/port_utilities.py in the line_profiler repo. """ - +from __future__ import annotations from os.path import abspath from os.path import dirname from os.path import exists diff --git a/line_profiler/explicit_profiler.py b/line_profiler/explicit_profiler.py index c288ac35..0d88ca42 100644 --- a/line_profiler/explicit_profiler.py +++ b/line_profiler/explicit_profiler.py @@ -182,7 +182,6 @@ def func4(): from .toml_config import ConfigSource F = TypeVar('F', bound=Callable[..., Any]) -ConfigArg = str | pathlib.PurePath | bool | None # The first process that enables profiling records its PID here. 
Child processes # created via multiprocessing (spawn/forkserver) inherit this environment value, diff --git a/line_profiler/scoping_policy.py b/line_profiler/scoping_policy.py index 3f7dc339..f317a379 100644 --- a/line_profiler/scoping_policy.py +++ b/line_profiler/scoping_policy.py @@ -2,7 +2,7 @@ from enum import auto from types import FunctionType, MappingProxyType, ModuleType -from typing import Callable, Literal, TypedDict, cast, overload +from typing import Callable, Literal, TypedDict, cast, overload, Union from .line_profiler_utils import StringEnum @@ -341,9 +341,9 @@ def module_is_sibling(other: ModuleType): ScopingPolicyDict = TypedDict( 'ScopingPolicyDict', - {'func': str | ScopingPolicy, - 'class': str | ScopingPolicy, - 'module': str | ScopingPolicy}) + {'func': Union[str, ScopingPolicy], + 'class': Union[str, ScopingPolicy], + 'module': Union[str, ScopingPolicy]}) _ScopingPolicyDict = TypedDict( '_ScopingPolicyDict', {'func': ScopingPolicy, diff --git a/line_profiler/toml_config.py b/line_profiler/toml_config.py index 5a3099fb..03ea6fdc 100644 --- a/line_profiler/toml_config.py +++ b/line_profiler/toml_config.py @@ -6,7 +6,6 @@ import copy import dataclasses -import functools import importlib.resources import itertools import os @@ -17,19 +16,24 @@ import tomli as tomllib # type: ignore[no-redef] # noqa: F811 from collections.abc import Mapping from os import PathLike -from typing import Any, Mapping as TypingMapping, Sequence, TypeVar, cast +from typing import Any, Sequence, TypeVar, cast, Tuple, Dict from typing_extensions import Self __all__ = ['ConfigSource'] NAMESPACE = 'tool', 'line_profiler' -TARGETS = 'line_profiler.toml', 'pyproject.toml' +TARGETS = ['line_profiler.toml', 'pyproject.toml'] ENV_VAR = 'LINE_PROFILER_RC' _DEFAULTS: ConfigSource | None = None +Config = Tuple[Dict[str, Dict[str, Any]], pathlib.Path] +K = TypeVar('K') +V = TypeVar('V') + + @dataclasses.dataclass class ConfigSource: """ @@ -37,7 +41,7 @@ class ConfigSource: read 
from. Attributes: - conf_dict (dict[str, Any]) + conf_dict (Mapping[str, Any]) The combination of the ``tool.line_profiler`` tables of the provided/looked-up config file (if any) and the default as a dictionary. @@ -49,7 +53,7 @@ class ConfigSource: :py:attr:`~.ConfigSource.path` :py:attr:`~.ConfigSource.conf_dict` can be found. """ - conf_dict: dict[str, Any] + conf_dict: Mapping[str, Any] path: pathlib.Path subtable: list[str] @@ -203,8 +207,8 @@ def from_config(cls, config: str | PathLike | bool | None = None, *, configuration (see :py:meth:`~.ConfigSource.from_default`). """ - def merge(template: dict[str, Any], supplied: dict[str, Any]): - if not (isinstance(template, dict) and isinstance(supplied, dict)): + def merge(template: Mapping[str, Any], supplied: Mapping[str, Any]): + if not (isinstance(template, Mapping) and isinstance(supplied, Mapping)): return supplied result = {} for key, default in template.items(): @@ -225,13 +229,11 @@ def merge(template: dict[str, Any], supplied: dict[str, Any]): # Promote to `Path` (and catch type errors) early config = pathlib.Path(config) if read_env: - get_conf = functools.partial(find_and_read_config_file, - config=config) - else: # Shield the lookup from the environment - get_conf = functools.partial(find_and_read_config_file, - config=config, env_var=None) - result = get_conf() - if result is None: + _result = find_and_read_config_file(config=config) + else: + # Shield the lookup from the environment + _result = find_and_read_config_file(config=config, env_var=None) + if _result is None: if config: if os.path.exists(config): Error = ValueError @@ -240,7 +242,8 @@ def merge(template: dict[str, Any], supplied: dict[str, Any]): raise Error( f'Cannot load configurations from {config!r}') from None return default_instance - content, source = result + else: + content, source = _result conf = {} try: for header in get_headers(default_instance.conf_dict): @@ -274,12 +277,6 @@ def merge(template: dict[str, Any], supplied: 
dict[str, Any]): merge(default_instance.conf_dict, conf), source, list(NAMESPACE)) -Config = tuple[dict[str, dict[str, Any]], pathlib.Path] -K = TypeVar('K') -V = TypeVar('V') -NestedTable = TypingMapping[K, 'NestedTable[K, V]' | V] - - def find_and_read_config_file( *, config: str | PathLike | None = None, env_var: str | None = ENV_VAR, @@ -333,8 +330,8 @@ def iter_configs(dir_path): return None -def get_subtable(table: NestedTable[K, V], keys: Sequence[K], *, - allow_absence: bool = True) -> NestedTable[K, V]: +def get_subtable(table: Mapping[K, V], keys: Sequence[K], *, + allow_absence: bool = True) -> Mapping[K, V]: """ Arguments: table (Mapping): @@ -380,7 +377,7 @@ def get_subtable(table: NestedTable[K, V], keys: Sequence[K], *, return subtable -def get_headers(table: NestedTable[K, Any], *, +def get_headers(table: Mapping[K, Any], *, include_implied: bool = False) -> set[tuple[K, ...]]: """ Arguments: diff --git a/tests/test_eager_preimports.py b/tests/test_eager_preimports.py index ba5d344d..79ba6035 100644 --- a/tests/test_eager_preimports.py +++ b/tests/test_eager_preimports.py @@ -5,6 +5,7 @@ ----- Most of the features are already covered by the doctests. 
""" +from __future__ import annotations import subprocess import sys from contextlib import ExitStack From 1083c38b37ad24415070a7baa7d3a84671b980d4 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 14:37:38 -0500 Subject: [PATCH 09/22] Update reqs --- requirements/build.txt | 4 ++-- requirements/optional.txt | 1 + requirements/runtime.txt | 1 - 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/build.txt b/requirements/build.txt index 6b1bfa99..5d3d1e05 100644 --- a/requirements/build.txt +++ b/requirements/build.txt @@ -9,5 +9,5 @@ ninja>=1.10.2 cibuildwheel>=3.1.2 ; python_version < '4.0' and python_version >= '3.11' # Python 3.11+ cibuildwheel>=3.1.2 ; python_version < '3.11' and python_version >= '3.10' # Python 3.10 -cibuildwheel>=3.1.2 ; python_version < '3.10' and python_version >= '3.9' # Python 3.9 -cibuildwheel>=3.1.2 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8 +cibuildwheel>=2.19.2 ; python_version < '3.10' and python_version >= '3.9' # Python 3.9 +cibuildwheel>=2.19.2 ; python_version < '3.9' and python_version >= '3.8' # Python 3.8 diff --git a/requirements/optional.txt b/requirements/optional.txt index 24b6ba08..466c73d9 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -2,3 +2,4 @@ # xdev availpkg rich rich>=12.3.0 -r ipython.txt +typing_extensions diff --git a/requirements/runtime.txt b/requirements/runtime.txt index 85d1f5c2..fe0df55e 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1,2 +1 @@ tomli; python_version < '3.11' -typing_extensions From d2f263c6f1f8fb38e996fcbbab6c75ae766c49b0 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 14:58:54 -0500 Subject: [PATCH 10/22] Fix some types --- line_profiler/ipython_extension.py | 4 ++-- line_profiler/line_profiler.py | 14 ++++++------- line_profiler/line_profiler_utils.py | 5 ++++- line_profiler/profiler_mixin.py | 16 +++++++-------- line_profiler/toml_config.py | 30 
++++++++++++++-------------- requirements/optional.txt | 1 - requirements/runtime.txt | 1 + 7 files changed, 37 insertions(+), 34 deletions(-) diff --git a/line_profiler/ipython_extension.py b/line_profiler/ipython_extension.py index 7541c615..3d1e9526 100644 --- a/line_profiler/ipython_extension.py +++ b/line_profiler/ipython_extension.py @@ -42,18 +42,18 @@ import textwrap import time import types +from io import StringIO from contextlib import ExitStack from dataclasses import dataclass from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, ClassVar, TypeVar -from typing_extensions import ParamSpec if TYPE_CHECKING: # pragma: no cover + from typing_extensions import ParamSpec PS = ParamSpec('PS') PD = TypeVar('PD', bound='_PatchDict') DefNode = TypeVar('DefNode', ast.FunctionDef, ast.AsyncFunctionDef) -from io import StringIO try: # pragma: no cover - optional dependency import importlib diff --git a/line_profiler/line_profiler.py b/line_profiler/line_profiler.py index e519439e..cf5c7bc9 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -21,8 +21,7 @@ from datetime import datetime from os import PathLike from typing import (TYPE_CHECKING, IO, Callable, Literal, Mapping, Protocol, - Sequence, TypeVar, cast, overload) -from typing_extensions import ParamSpec, Self + Sequence, TypeVar, cast, overload, Tuple) from functools import cached_property, partial, partialmethod try: @@ -41,21 +40,22 @@ from .toml_config import ConfigSource if TYPE_CHECKING: # pragma: no cover + from typing_extensions import ParamSpec, Self from .profiler_mixin import CLevelCallable, UnparametrizedCallableLike class _IPythonLike(Protocol): def register_magics(self, magics: type) -> None: ... 
+ PS = ParamSpec('PS') + _TimingsMap = Mapping[Tuple[str, int, str], list[Tuple[int, int, int]]] + T = TypeVar('T') + T_co = TypeVar('T_co', covariant=True) + # NOTE: This needs to be in sync with ../kernprof.py and __init__.py __version__ = '5.0.2' -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -PS = ParamSpec('PS') -_TimingsMap = Mapping[tuple[str, int, str], list[tuple[int, int, int]]] - @functools.lru_cache() def get_column_widths( diff --git a/line_profiler/line_profiler_utils.py b/line_profiler/line_profiler_utils.py index fcc6c2cf..f62b8f32 100644 --- a/line_profiler/line_profiler_utils.py +++ b/line_profiler/line_profiler_utils.py @@ -4,7 +4,10 @@ from __future__ import annotations import enum -from typing_extensions import Self +import typing + +if typing.TYPE_CHECKING: + from typing_extensions import Self class _StrEnumBase(str, enum.Enum): diff --git a/line_profiler/profiler_mixin.py b/line_profiler/profiler_mixin.py index bc597e0a..956d242b 100644 --- a/line_profiler/profiler_mixin.py +++ b/line_profiler/profiler_mixin.py @@ -7,7 +7,6 @@ from sys import version_info from typing import (TYPE_CHECKING, Any, Callable, Mapping, Protocol, TypeVar, overload, cast) -from typing_extensions import ParamSpec, TypeIs from warnings import warn from ._line_profiler import label from .scoping_policy import ScopingPolicy @@ -33,14 +32,15 @@ # https://cython.readthedocs.io/en/latest/src/tutorial/profiling_tutorial.html _CANNOT_LINE_TRACE_CYTHON = (3, 12) <= version_info < (3, 13, 0, 'beta', 1) -UnparametrizedCallableLike = TypeVar( - 'UnparametrizedCallableLike', - types.FunctionType, property, types.MethodType) -T = TypeVar('T') -T_co = TypeVar('T_co', covariant=True) -PS = ParamSpec('PS') - if TYPE_CHECKING: + from typing_extensions import ParamSpec, TypeIs + UnparametrizedCallableLike = TypeVar( + 'UnparametrizedCallableLike', + types.FunctionType, property, types.MethodType) + T = TypeVar('T') + T_co = TypeVar('T_co', covariant=True) + PS = 
ParamSpec('PS') + class CythonCallable(Protocol[PS, T_co]): def __call__(self, *args: PS.args, **kwargs: PS.kwargs) -> T_co: ... diff --git a/line_profiler/toml_config.py b/line_profiler/toml_config.py index 03ea6fdc..cf7ee632 100644 --- a/line_profiler/toml_config.py +++ b/line_profiler/toml_config.py @@ -16,8 +16,13 @@ import tomli as tomllib # type: ignore[no-redef] # noqa: F811 from collections.abc import Mapping from os import PathLike +import typing from typing import Any, Sequence, TypeVar, cast, Tuple, Dict -from typing_extensions import Self + +if typing.TYPE_CHECKING: + Config = Tuple[Dict[str, Dict[str, Any]], pathlib.Path] + K = TypeVar('K') + V = TypeVar('V') __all__ = ['ConfigSource'] @@ -29,11 +34,6 @@ _DEFAULTS: ConfigSource | None = None -Config = Tuple[Dict[str, Dict[str, Any]], pathlib.Path] -K = TypeVar('K') -V = TypeVar('V') - - @dataclasses.dataclass class ConfigSource: """ @@ -57,7 +57,7 @@ class ConfigSource: path: pathlib.Path subtable: list[str] - def copy(self) -> Self: + def copy(self) -> ConfigSource: """ Returns: Copy of the object. @@ -66,7 +66,7 @@ def copy(self) -> Self: copy.deepcopy(self.conf_dict), self.path, self.subtable.copy()) def get_subconfig(self, *headers: str, allow_absence: bool = False, - copy: bool = False) -> Self: + copy: bool = False) -> ConfigSource: """ Arguments: headers (str): @@ -97,13 +97,13 @@ def get_subconfig(self, *headers: str, allow_absence: bool = False, ... is default.conf_dict['show']['column_widths']) """ new_dict = cast( - dict[str, Any], + Dict[str, Any], get_subtable(self.conf_dict, headers, allow_absence=allow_absence)) new_subtable = [*self.subtable, *headers] return type(self)(new_dict, self.path, new_subtable) @classmethod - def from_default(cls, *, copy: bool = True) -> Self: + def from_default(cls, *, copy: bool = True) -> ConfigSource: """ Get the default TOML configuration that ships with the package. 
@@ -141,16 +141,16 @@ def find_file(anc, *chunks): 'Default configuration file could not be read') conf_dict, source = result conf_dict = cast( - dict[str, Any], + Dict[str, Any], get_subtable(conf_dict, NAMESPACE, allow_absence=False)) _DEFAULTS = cls(conf_dict, source, list(NAMESPACE)) if not copy: - return cast(Self, _DEFAULTS) - return cast(Self, _DEFAULTS.copy()) + return _DEFAULTS + return _DEFAULTS.copy() @classmethod def from_config(cls, config: str | PathLike | bool | None = None, *, - read_env: bool = True) -> Self: + read_env: bool = True) -> ConfigSource: """ Create an instance by loading from a config file. @@ -331,7 +331,7 @@ def iter_configs(dir_path): def get_subtable(table: Mapping[K, V], keys: Sequence[K], *, - allow_absence: bool = True) -> Mapping[K, V]: + allow_absence: bool = True) -> Mapping: """ Arguments: table (Mapping): diff --git a/requirements/optional.txt b/requirements/optional.txt index 466c73d9..24b6ba08 100644 --- a/requirements/optional.txt +++ b/requirements/optional.txt @@ -2,4 +2,3 @@ # xdev availpkg rich rich>=12.3.0 -r ipython.txt -typing_extensions diff --git a/requirements/runtime.txt b/requirements/runtime.txt index fe0df55e..85d1f5c2 100644 --- a/requirements/runtime.txt +++ b/requirements/runtime.txt @@ -1 +1,2 @@ tomli; python_version < '3.11' +typing_extensions From 60a931e1f58932ae94ca6ad8f90edea6b1b512b1 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 15:03:07 -0500 Subject: [PATCH 11/22] Add future annots to tests --- tests/complex_example.py | 1 + tests/test_autoprofile.py | 1 + tests/test_cli.py | 1 + tests/test_complex_case.py | 1 + tests/test_cython.py | 9 +++++---- tests/test_duplicate_functions.py | 3 +++ tests/test_explicit_profile.py | 1 + tests/test_import.py | 3 +++ tests/test_ipython.py | 1 + tests/test_kernprof.py | 1 + tests/test_line_profiler.py | 1 + tests/test_sys_monitoring.py | 1 + tests/test_sys_trace.py | 1 + tests/test_toml_config.py | 1 + 14 files changed, 22 insertions(+), 4 
deletions(-) diff --git a/tests/complex_example.py b/tests/complex_example.py index 698924b1..af66d399 100644 --- a/tests/complex_example.py +++ b/tests/complex_example.py @@ -47,6 +47,7 @@ PROFILE_TYPE=custom python complex_example.py """ +from __future__ import annotations import os # The test will define how we expect the profile decorator to exist diff --git a/tests/test_autoprofile.py b/tests/test_autoprofile.py index 77a319d0..c6f8656b 100644 --- a/tests/test_autoprofile.py +++ b/tests/test_autoprofile.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import re import subprocess diff --git a/tests/test_cli.py b/tests/test_cli.py index a347bf0b..c36aa899 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,3 +1,4 @@ +from __future__ import annotations import re from argparse import ArgumentParser, HelpFormatter from contextlib import nullcontext diff --git a/tests/test_complex_case.py b/tests/test_complex_case.py index ace7502b..ffc5e88b 100644 --- a/tests/test_complex_case.py +++ b/tests/test_complex_case.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import sys import tempfile diff --git a/tests/test_cython.py b/tests/test_cython.py index 1411969e..dec1aae5 100644 --- a/tests/test_cython.py +++ b/tests/test_cython.py @@ -1,6 +1,7 @@ """ Tests for profiling Cython code. 
""" +from __future__ import annotations import math import os import shutil @@ -62,16 +63,16 @@ def _install_cython_example( pip_install += ['--editable', str(tmp_path)] else: pip_install.append(str(tmp_path)) - + # Set environment variables to avoid isolated build environment issues # with Cython's compiled extensions env = os.environ.copy() env['PIP_NO_BINARY'] = 'Cython' - + try: subprocess.run(pip_install, env=env).check_returncode() subprocess.run(pip + ['list'], env=env).check_returncode() - + # For non-editable installs, we need to manually copy the .pyx file # to where find_cython_source_file() can find it, and yield that location actual_source = cython_source @@ -82,7 +83,7 @@ def _install_cython_example( installed_pyx = installed_so.parent / cython_source.name shutil.copy2(cython_source, installed_pyx) actual_source = installed_pyx - + yield actual_source, module finally: pip_uninstall = pip + ['uninstall', '--verbose', '--yes', module] diff --git a/tests/test_duplicate_functions.py b/tests/test_duplicate_functions.py index 8c470a8c..6cdc77bc 100644 --- a/tests/test_duplicate_functions.py +++ b/tests/test_duplicate_functions.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + def test_duplicate_function(): """ Test from https://github.com/pyutils/line_profiler/issues/232 diff --git a/tests/test_explicit_profile.py b/tests/test_explicit_profile.py index 511509b1..73d8a79e 100644 --- a/tests/test_explicit_profile.py +++ b/tests/test_explicit_profile.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import re import sys diff --git a/tests/test_import.py b/tests/test_import.py index ab9a8f29..4c2184e7 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + def test_import(): import line_profiler assert hasattr(line_profiler, 'LineProfiler') diff --git a/tests/test_ipython.py b/tests/test_ipython.py index 4066a500..29ba4ddd 100644 --- a/tests/test_ipython.py +++ 
b/tests/test_ipython.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import re import shlex diff --git a/tests/test_kernprof.py b/tests/test_kernprof.py index f8ac4678..3d6b7e97 100644 --- a/tests/test_kernprof.py +++ b/tests/test_kernprof.py @@ -1,3 +1,4 @@ +from __future__ import annotations import contextlib import os import re diff --git a/tests/test_line_profiler.py b/tests/test_line_profiler.py index d39e0040..067cb9bf 100644 --- a/tests/test_line_profiler.py +++ b/tests/test_line_profiler.py @@ -1,3 +1,4 @@ +from __future__ import annotations import asyncio import contextlib import functools diff --git a/tests/test_sys_monitoring.py b/tests/test_sys_monitoring.py index af6ef0ab..98007607 100644 --- a/tests/test_sys_monitoring.py +++ b/tests/test_sys_monitoring.py @@ -1,3 +1,4 @@ +from __future__ import annotations import gc import inspect import sys diff --git a/tests/test_sys_trace.py b/tests/test_sys_trace.py index 6210fbd0..23432892 100644 --- a/tests/test_sys_trace.py +++ b/tests/test_sys_trace.py @@ -10,6 +10,7 @@ - However, there effects are isolated since each test is run in a separate Python subprocess. """ +from __future__ import annotations import concurrent.futures import functools import inspect diff --git a/tests/test_toml_config.py b/tests/test_toml_config.py index 7ce7174f..3b1b1602 100644 --- a/tests/test_toml_config.py +++ b/tests/test_toml_config.py @@ -1,6 +1,7 @@ """ Test the handling of TOML configs. 
""" +from __future__ import annotations import os import re import sys From 2b28dd7cb6be3143852a69678498a44f766fc969 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 15:16:15 -0500 Subject: [PATCH 12/22] Fix type error in 38 --- line_profiler/autoprofile/autoprofile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/line_profiler/autoprofile/autoprofile.py b/line_profiler/autoprofile/autoprofile.py index 019b6154..725b28e4 100644 --- a/line_profiler/autoprofile/autoprofile.py +++ b/line_profiler/autoprofile/autoprofile.py @@ -52,7 +52,7 @@ def main(): import sys import types from collections.abc import MutableMapping -from typing import Any, cast +from typing import Any, cast, Dict from .ast_tree_profiler import AstTreeProfiler from .run_module import AstTreeModuleProfiler from .line_profiler_utils import add_imported_function_or_module @@ -150,4 +150,4 @@ def __exit__(self, *_, **__): code_obj = compile(tree_profiled, script_file, 'exec') with ctx as callback: callback() - exec(code_obj, cast(dict[str, Any], namespace), namespace) + exec(code_obj, cast(Dict[str, Any], namespace), namespace) From 9f4f36faf29fff78b1bf6b20d30a89e76c97a894 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 15:18:15 -0500 Subject: [PATCH 13/22] Fix another 38 type error --- line_profiler/autoprofile/ast_profile_transformer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/line_profiler/autoprofile/ast_profile_transformer.py b/line_profiler/autoprofile/ast_profile_transformer.py index 60564c36..84907f38 100644 --- a/line_profiler/autoprofile/ast_profile_transformer.py +++ b/line_profiler/autoprofile/ast_profile_transformer.py @@ -1,7 +1,7 @@ from __future__ import annotations import ast -from typing import cast, Union +from typing import cast, Union, List def ast_create_profile_node( @@ -149,7 +149,7 @@ def visit_Import( if profile_imports is True: returns list containing the import node and the profiling node """ 
- return cast(Union[ast.Import, list[Union[ast.Import, ast.Expr]]], + return cast(Union[ast.Import, List[Union[ast.Import, ast.Expr]]], self._visit_import(node)) def visit_ImportFrom( @@ -168,5 +168,5 @@ def visit_ImportFrom( if profile_imports is True: returns list containing the import node and the profiling node """ - return cast(Union[ast.ImportFrom, list[Union[ast.ImportFrom, ast.Expr]]], + return cast(Union[ast.ImportFrom, List[Union[ast.ImportFrom, ast.Expr]]], self._visit_import(node)) From cd6f675b39f47a363ff0fc15d7faa8493dd7c244 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 15:38:05 -0500 Subject: [PATCH 14/22] Revert test cython changes --- tests/test_cython.py | 48 +++++++++----------------------------------- 1 file changed, 10 insertions(+), 38 deletions(-) diff --git a/tests/test_cython.py b/tests/test_cython.py index dec1aae5..11b402d5 100644 --- a/tests/test_cython.py +++ b/tests/test_cython.py @@ -4,7 +4,6 @@ from __future__ import annotations import math import os -import shutil import subprocess import sys from importlib import reload, import_module @@ -58,36 +57,18 @@ def _install_cython_example( file_out.write_text(replace(file_in.read_text())) # There should only be one Cython source file cython_source, = tmp_path.glob('*.pyx') - pip_install = pip + ['install', '--verbose', '--no-build-isolation'] + pip_install = pip + ['install', '--verbose'] if editable: pip_install += ['--editable', str(tmp_path)] else: pip_install.append(str(tmp_path)) - - # Set environment variables to avoid isolated build environment issues - # with Cython's compiled extensions - env = os.environ.copy() - env['PIP_NO_BINARY'] = 'Cython' - try: - subprocess.run(pip_install, env=env).check_returncode() - subprocess.run(pip + ['list'], env=env).check_returncode() - - # For non-editable installs, we need to manually copy the .pyx file - # to where find_cython_source_file() can find it, and yield that location - actual_source = cython_source - if not 
editable: - spec = find_spec(module) - if spec and spec.origin: - installed_so = Path(spec.origin) - installed_pyx = installed_so.parent / cython_source.name - shutil.copy2(cython_source, installed_pyx) - actual_source = installed_pyx - - yield actual_source, module + subprocess.run(pip_install).check_returncode() + subprocess.run(pip + ['list']).check_returncode() + yield cython_source, module finally: pip_uninstall = pip + ['uninstall', '--verbose', '--yes', module] - subprocess.run(pip_uninstall, env=env).check_returncode() + subprocess.run(pip_uninstall).check_returncode() @pytest.fixture(scope='module') @@ -98,20 +79,11 @@ def cython_example( Install the example Cython module, yield the path to the Cython source file and the corresponding module, uninstall it at teardown. """ - # Try editable install first (preferred because it allows source location recovery) - # Fall back to regular install if editable fails due to environment issues - for editable in [True, False]: - try: - for path, mod_name in _install_cython_example(tmp_path_factory, editable): - reload(import_module('site')) - yield (path, import_module(mod_name)) - return - except ImportError as e: - if editable and 'failed to map segment from shared object' in str(e): - # Container mmap limitation - try non-editable install instead - continue - # Other import errors should be re-raised - raise + # With editable installs, we need to refresh `sys.meta_path` before + # the installed module is available + for path, mod_name in _install_cython_example(tmp_path_factory, True): + reload(import_module('site')) + yield (path, import_module(mod_name)) def test_recover_cython_source(cython_example: Tuple[Path, ModuleType]) -> None: From b582678fe6a9a5c95b376a00861ce40713207a13 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 16:12:50 -0500 Subject: [PATCH 15/22] Fix mypy errors --- .../autoprofile/ast_profile_transformer.py | 5 ++-- line_profiler/scoping_policy.py | 24 ++++++++++++------- 
line_profiler/toml_config.py | 17 ++++++------- 3 files changed, 28 insertions(+), 18 deletions(-) diff --git a/line_profiler/autoprofile/ast_profile_transformer.py b/line_profiler/autoprofile/ast_profile_transformer.py index 84907f38..95827aeb 100644 --- a/line_profiler/autoprofile/ast_profile_transformer.py +++ b/line_profiler/autoprofile/ast_profile_transformer.py @@ -34,7 +34,7 @@ def ast_create_profile_node( """ func = ast.Attribute(value=ast.Name(id=profiler_name, ctx=ast.Load()), attr=attr, ctx=ast.Load()) names = modname.split('.') - value = ast.Name(id=names[0], ctx=ast.Load()) + value: ast.expr = ast.Name(id=names[0], ctx=ast.Load()) for name in names[1:]: value = ast.Attribute(attr=name, ctx=ast.Load(), value=value) expr = ast.Expr(value=ast.Call(func=func, args=[value], keywords=[])) @@ -123,7 +123,8 @@ def _visit_import( if not self._profile_imports: self.generic_visit(node) return node - visited = [cast(Union[ast.Import, ast.ImportFrom], self.generic_visit(node))] + this_visit = cast(Union[ast.Import, ast.ImportFrom], self.generic_visit(node)) + visited: list[ast.Import | ast.ImportFrom | ast.Expr] = [this_visit] for names in node.names: node_name = names.name if names.asname is None else names.asname if node_name in self._profiled_imports: diff --git a/line_profiler/scoping_policy.py b/line_profiler/scoping_policy.py index f317a379..ede61777 100644 --- a/line_profiler/scoping_policy.py +++ b/line_profiler/scoping_policy.py @@ -13,8 +13,13 @@ #: * Descend ingo sibling and descendant classes #: (:py:attr:`ScopingPolicy.SIBLINGS`) #: * Don't descend into modules (:py:attr:`ScopingPolicy.EXACT`) -DEFAULT_SCOPING_POLICIES = MappingProxyType( - {'func': 'siblings', 'class': 'siblings', 'module': 'exact'}) +DEFAULT_SCOPING_POLICIES: ScopingPolicyDict = { + 'func': 'siblings', + 'class': 'siblings', + 'module': 'exact' +} + + class ScopingPolicy(StringEnum): @@ -186,8 +191,7 @@ def to_policies( object types. 
Args: - policies (Union[str, ScopingPolicy, \ -ScopingPolicyDict, None]): + policies (Union[str, ScopingPolicy, ScopingPolicyDict, None]): :py:class:`ScopingPolicy`, string convertible thereto (case-insensitive), or a mapping containing such values and the keys as outlined in the return value; @@ -195,8 +199,7 @@ def to_policies( :py:data:`DEFAULT_SCOPING_POLICIES`. Returns: - normalized_policies (dict[Literal['func', 'class', \ -'module'], ScopingPolicy]): + normalized_policies (dict[Literal['func', 'class', 'module'], ScopingPolicy]): Dictionary with the following key-value pairs: ``'func'`` @@ -238,8 +241,11 @@ def to_policies( policies = DEFAULT_SCOPING_POLICIES if isinstance(policies, str): policy = cls(policies) - return _ScopingPolicyDict( - dict.fromkeys(['func', 'class', 'module'], policy)) + return _ScopingPolicyDict({ + 'func': policy, + 'class': policy, + 'module': policy, + }) return _ScopingPolicyDict({'func': cls(policies['func']), 'class': cls(policies['class']), 'module': cls(policies['module'])}) @@ -339,11 +345,13 @@ def module_is_sibling(other: ModuleType): # the corresponding methods ScopingPolicy._check_class() + ScopingPolicyDict = TypedDict( 'ScopingPolicyDict', {'func': Union[str, ScopingPolicy], 'class': Union[str, ScopingPolicy], 'module': Union[str, ScopingPolicy]}) + _ScopingPolicyDict = TypedDict( '_ScopingPolicyDict', {'func': ScopingPolicy, diff --git a/line_profiler/toml_config.py b/line_profiler/toml_config.py index cf7ee632..61557276 100644 --- a/line_profiler/toml_config.py +++ b/line_profiler/toml_config.py @@ -236,15 +236,16 @@ def merge(template: Mapping[str, Any], supplied: Mapping[str, Any]): if _result is None: if config: if os.path.exists(config): - Error = ValueError - else: - Error = FileNotFoundError - raise Error( - f'Cannot load configurations from {config!r}') from None + raise ValueError( + f'Cannot load configurations from {config!r}' + ) from None + raise FileNotFoundError( + f'Cannot load configurations from 
{config!r}' + ) from None return default_instance else: content, source = _result - conf = {} + conf: dict[str, Mapping[str, Any]] = {} try: for header in get_headers(default_instance.conf_dict): # Get the top-level subtable @@ -330,7 +331,7 @@ def iter_configs(dir_path): return None -def get_subtable(table: Mapping[K, V], keys: Sequence[K], *, +def get_subtable(table: Mapping[K, Mapping], keys: Sequence[K], *, allow_absence: bool = True) -> Mapping: """ Arguments: @@ -405,7 +406,7 @@ def get_headers(table: Mapping[K, Any], *, >>> assert get_headers({}) == set() >>> assert get_headers({'a': 1, 'b': 2}) == set() """ - results = set() + results: set[tuple[K, ...]] = set() for key, value in table.items(): if not isinstance(value, Mapping): continue From 6215acdd09518dc443a98504953ade692736594a Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 16:29:30 -0500 Subject: [PATCH 16/22] Fix mypy issues --- line_profiler/cli_utils.py | 37 +++------------ line_profiler/line_profiler.py | 83 +++++++++------------------------ line_profiler/profiler_mixin.py | 33 +------------ 3 files changed, 32 insertions(+), 121 deletions(-) diff --git a/line_profiler/cli_utils.py b/line_profiler/cli_utils.py index 815521b2..0238c602 100644 --- a/line_profiler/cli_utils.py +++ b/line_profiler/cli_utils.py @@ -20,32 +20,10 @@ **{k.casefold(): True for k in ('1', 'on', 'True', 'T', 'yes', 'Y')}} -P_con = TypeVar('P_con', bound='ParserLike', contravariant=True) -A_co = TypeVar('A_co', bound='ActionLike', covariant=True) - -class ActionLike(Protocol[P_con]): - def __call__(self, parser: P_con, namespace: argparse.Namespace, - values: str | Sequence[object] | None, - option_string: str | None = None) -> None: - ... - - def format_usage(self) -> str: - ... - - -class ParserLike(Protocol[A_co]): - def add_argument(self, arg: str, /, *args: str, **kwargs: object) -> A_co: - ... - - @property - def prefix_chars(self) -> str: - ... 
- - -def add_argument(parser_like: ParserLike[A_co], arg: str, /, *args: str, +def add_argument(parser_like, arg: str, /, *args: str, hide_complementary_options: bool = True, - **kwargs: object) -> A_co: + **kwargs: object) -> argparse.Action: """ Override the ``'store_true'`` and ``'store_false'`` actions so that they are turned into options which: @@ -123,10 +101,9 @@ def negated(*args, **kwargs): kwargs.setdefault(key, value) long_kwargs = kwargs.copy() short_kwargs = {**kwargs, 'action': 'store_const'} - for key, value in dict( - nargs='?', - type=functools.partial(boolean, invert=not const)).items(): - long_kwargs.setdefault(key, value) + + long_kwargs.setdefault('nargs', '?') + long_kwargs.setdefault('type', functools.partial(boolean, invert=not const)) # Mention the short options in the long options' documentation, and # suppress the short options in the help @@ -155,8 +132,8 @@ def negated(*args, **kwargs): long_kwargs['help'] = f'({additional_msg})' short_kwargs['help'] = argparse.SUPPRESS - long_action: A_co | None = None - short_action: A_co | None = None + long_action = None + short_action = None if long_flags: long_action = parser_like.add_argument(*long_flags, **long_kwargs) short_kwargs['dest'] = long_action.dest diff --git a/line_profiler/line_profiler.py b/line_profiler/line_profiler.py index cf5c7bc9..2428da2f 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -21,7 +21,7 @@ from datetime import datetime from os import PathLike from typing import (TYPE_CHECKING, IO, Callable, Literal, Mapping, Protocol, - Sequence, TypeVar, cast, overload, Tuple) + Sequence, TypeVar, cast, Tuple) from functools import cached_property, partial, partialmethod try: @@ -52,6 +52,8 @@ def register_magics(self, magics: type) -> None: T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) + ColumnLiterals = Literal['line', 'hits', 'time', 'perhit', 'percent'] + # NOTE: This needs to be in sync with ../kernprof.py and __init__.py 
__version__ = '5.0.2' @@ -59,11 +61,11 @@ def register_magics(self, magics: type) -> None: @functools.lru_cache() def get_column_widths( - config: bool | str | PathLike[str] | None = False -) -> Mapping[Literal['line', 'hits', 'time', 'perhit', 'percent'], int]: + config: bool | str | None = False +) -> Mapping[ColumnLiterals, int]: """ Arguments - config (bool | str | pathlib.PurePath | None) + config (bool | str | None) Passed to :py:meth:`.ConfigSource.from_config`. Note: * Results are cached. @@ -73,8 +75,7 @@ def get_column_widths( subconf = (ConfigSource.from_config(config) .get_subconfig('show', 'column_widths')) return types.MappingProxyType( - cast(Mapping[Literal['line', 'hits', 'time', 'perhit', 'percent'], int], - subconf.conf_dict)) + cast(Mapping[ColumnLiterals, int], subconf.conf_dict)) def load_ipython_extension(ip: object) -> None: @@ -107,8 +108,7 @@ def get_code_block(filename: os.PathLike[str] | str, lineno: int) -> list[str]: this repo since 2008 (`fb60664`_), so we will continue using it until we can't. - .. _fb60664: https://github.com/pyutils/line_profiler/commit/\ -fb60664135296ba6061cfaa2bb66d4ba77964c53 + .. _fb60664: https://github.com/pyutils/line_profiler/commit/fb60664135296ba6061cfaa2bb66d4ba77964c53 Example: @@ -237,8 +237,6 @@ def __eq__(self, other: object) -> bool: """ Example: >>> from copy import deepcopy - >>> - >>> >>> stats1 = LineStats( ... {('foo', 1, 'spam.py'): [(2, 10, 300)], ... ('bar', 10, 'spam.py'): @@ -418,45 +416,8 @@ class LineProfiler(CLineProfiler, ByCountProfilerMixin): >>> func() >>> profile.print_stats() """ - @overload - def __call__(self, func: CLevelCallable) -> CLevelCallable: - ... - - @overload - def __call__(self, func: UnparametrizedCallableLike) -> UnparametrizedCallableLike: - ... - - @overload - def __call__(self, func: type[T]) -> type[T]: - ... - - @overload - def __call__(self, func: partial[T]) -> partial[T]: - ... 
- - @overload - def __call__(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - @overload - def __call__(self, func: cached_property[T_co]) -> cached_property[T_co]: - ... - - @overload - def __call__(self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... - - @overload - def __call__( - self, func: classmethod[type[T], PS, T_co], - ) -> classmethod[type[T], PS, T_co]: - ... - - @overload - def __call__(self, func: Callable) -> types.FunctionType: - ... - def __call__(self, func: object): + def __call__(self, func: Callable) -> Callable: """ Decorate a function, method, :py:class:`property`, :py:func:`~functools.partial` object etc. to start the profiler @@ -472,7 +433,7 @@ def __call__(self, func: object): self.add_callable(func) return self.wrap_callable(func) - def wrap_callable(self, func: object): + def wrap_callable(self, func: Callable) -> Callable: if is_c_level_callable(func): # Non-profilable return func return super().wrap_callable(func) @@ -894,6 +855,8 @@ def show_func(filename: str, start_lineno: int, func_name: str, sublines = [''] * nlines # Define minimum column sizes so text fits and usually looks consistent + if isinstance(config, os.PathLike): + config = os.fspath(config) conf_column_sizes = get_column_widths(config) default_column_sizes = { col: max(width, conf_column_sizes.get(col, width)) @@ -933,18 +896,18 @@ def show_func(filename: str, start_lineno: int, func_name: str, column_sizes['time'] = max(column_sizes['time'], max_timelen) column_sizes['perhit'] = max(column_sizes['perhit'], max_perhitlen) - col_order = ['line', 'hits', 'time', 'perhit', 'percent'] + col_order: list[ColumnLiterals] = ['line', 'hits', 'time', 'perhit', 'percent'] lhs_template = ' '.join(['%' + str(column_sizes[k]) + 's' for k in col_order]) template = lhs_template + ' %-s' - linenos = range(start_lineno, start_lineno + len(sublines)) + linenos = list(range(start_lineno, start_lineno + len(sublines))) empty = ('', '', '', '') header = ('Line 
#', 'Hits', 'Time', 'Per Hit', '% Time', 'Line Contents') - header = template % header + header_line = template % header stream.write('\n') - stream.write(header) + stream.write(header_line) stream.write('\n') - stream.write('=' * len(header)) + stream.write('=' * len(header_line)) stream.write('\n') if rich: @@ -952,8 +915,8 @@ def show_func(filename: str, start_lineno: int, func_name: str, lhs_lines = [] rhs_lines = [] for lineno, line in zip(linenos, sublines): - nhits, time, per_hit, percent = display.get(lineno, empty) - txt = lhs_template % (lineno, nhits, time, per_hit, percent) + nhits_s, time_s, per_hit_s, percent_s = display.get(lineno, empty) + txt = lhs_template % (lineno, nhits_s, time_s, per_hit_s, percent_s) rhs_lines.append(line.rstrip('\n').rstrip('\r')) lhs_lines.append(txt) @@ -991,16 +954,16 @@ def show_func(filename: str, start_lineno: int, func_name: str, stream.write('\n') else: for lineno, line in zip(linenos, sublines): - nhits, time, per_hit, percent = display.get(lineno, empty) + nhits_s, time_s, per_hit_s, percent_s = display.get(lineno, empty) line_ = line.rstrip('\n').rstrip('\r') - txt = template % (lineno, nhits, time, per_hit, percent, line_) + txt = template % (lineno, nhits_s, time_s, per_hit_s, percent_s, line_) try: stream.write(txt) except UnicodeEncodeError: # todo: better handling of windows encoding issue # for now just work around it line_ = 'UnicodeEncodeError - help wanted for a fix' - txt = template % (lineno, nhits, time, per_hit, percent, line_) + txt = template % (lineno, nhits_s, time_s, per_hit_s, percent_s, line_) stream.write(txt) stream.write('\n') @@ -1039,7 +1002,7 @@ def show_text(stats: _TimingsMap, unit: float, stats_order = sorted(stats.items(), key=lambda kv: sum(t[2] for t in kv[1])) else: # Default ordering - stats_order = stats.items() + stats_order = list(stats.items()) # Pre-lookup the appropriate config file config = ConfigSource.from_config(config).path diff --git a/line_profiler/profiler_mixin.py 
b/line_profiler/profiler_mixin.py index 956d242b..3b39eac8 100644 --- a/line_profiler/profiler_mixin.py +++ b/line_profiler/profiler_mixin.py @@ -6,7 +6,7 @@ from functools import cached_property, partial, partialmethod from sys import version_info from typing import (TYPE_CHECKING, Any, Callable, Mapping, Protocol, TypeVar, - overload, cast) + cast) from warnings import warn from ._line_profiler import label from .scoping_policy import ScopingPolicy @@ -193,35 +193,6 @@ class ByCountProfilerMixin: Used by :py:class:`line_profiler.line_profiler.LineProfiler` and :py:class:`kernprof.ContextualProfile`. """ - @overload - def wrap_callable(self, func: CLevelCallable) -> CLevelCallable: - ... - - @overload - def wrap_callable( - self, func: UnparametrizedCallableLike, - ) -> UnparametrizedCallableLike: - ... - - @overload - def wrap_callable(self, func: type[T]) -> type[T]: - ... - - @overload - def wrap_callable(self, func: partial[T]) -> partial[T]: - ... - - @overload - def wrap_callable(self, func: partialmethod[T]) -> partialmethod[T]: - ... - - @overload - def wrap_callable(self, func: cached_property[T_co]) -> cached_property[T_co]: - ... - - @overload - def wrap_callable(self, func: staticmethod[PS, T_co]) -> staticmethod[PS, T_co]: - ... def enable_by_count(self) -> None: # pragma: no cover - implemented in C raise NotImplementedError @@ -229,7 +200,7 @@ def enable_by_count(self) -> None: # pragma: no cover - implemented in C def disable_by_count(self) -> None: # pragma: no cover - implemented in C raise NotImplementedError - def wrap_callable(self, func: object): + def wrap_callable(self, func: Callable) -> Callable: """ Decorate a function to start the profiler on function entry and stop it on function exit. 
From cf2ad1a150bd0b842924664866671c6da5e5cbf4 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 16:34:33 -0500 Subject: [PATCH 17/22] Revert ipython types --- line_profiler/ipython_extension.py | 130 +++++++++++------------------ 1 file changed, 47 insertions(+), 83 deletions(-) diff --git a/line_profiler/ipython_extension.py b/line_profiler/ipython_extension.py index 3d1e9526..4e99213f 100644 --- a/line_profiler/ipython_extension.py +++ b/line_profiler/ipython_extension.py @@ -42,59 +42,25 @@ import textwrap import time import types -from io import StringIO from contextlib import ExitStack from dataclasses import dataclass from pathlib import Path -from typing import TYPE_CHECKING, Any, Callable, ClassVar, TypeVar - +from typing import TYPE_CHECKING, Union if TYPE_CHECKING: # pragma: no cover - from typing_extensions import ParamSpec + from typing import (Callable, ParamSpec, # noqa: F401 + Any, ClassVar, TypeVar) + PS = ParamSpec('PS') PD = TypeVar('PD', bound='_PatchDict') DefNode = TypeVar('DefNode', ast.FunctionDef, ast.AsyncFunctionDef) +from io import StringIO -try: # pragma: no cover - optional dependency - import importlib - - get_ipython = importlib.import_module( - 'IPython.core.getipython').get_ipython - magic_module = importlib.import_module('IPython.core.magic') - Magics = magic_module.Magics - magics_class = magic_module.magics_class - line_magic = magic_module.line_magic - cell_magic = magic_module.cell_magic - page = importlib.import_module('IPython.core.page').page - Struct = importlib.import_module('IPython.utils.ipstruct').Struct - UsageError = importlib.import_module('IPython.core.error').UsageError -except ImportError: # pragma: no cover - IPython is optional - if TYPE_CHECKING: - raise - - def get_ipython(): - return None - - class Magics: - pass - - def magics_class(cls): - return cls - - def line_magic(func): - return func - - def cell_magic(func): - return func - - def page(*_args: object, **_kwargs: object) -> None: - return 
None - - class Struct(dict): - pass - - class UsageError(Exception): - pass +from IPython.core.getipython import get_ipython +from IPython.core.magic import Magics, magics_class, line_magic, cell_magic +from IPython.core.page import page +from IPython.utils.ipstruct import Struct +from IPython.core.error import UsageError from line_profiler import line_profiler, LineProfiler, LineStats from line_profiler.autoprofile.ast_tree_profiler import AstTreeProfiler @@ -139,26 +105,26 @@ class _ParseParamResult: opts: Struct arg_str: str - def __getattr__(self, attr: str) -> Any: + def __getattr__(self, attr): # type: (str) -> Any """ Defers to :py:attr:`_ParseParamResult.opts`.""" return getattr(self.opts, attr) @functools.cached_property - def dump_raw_dest(self) -> Path | None: + def dump_raw_dest(self): # type: () -> Path | None path = self.opts.D[0] if path: return Path(path) return None @functools.cached_property - def dump_text_dest(self) -> Path | None: + def dump_text_dest(self): # type: () -> Path | None path = self.opts.T[0] if path: return Path(path) return None @functools.cached_property - def output_unit(self) -> float | None: + def output_unit(self): # type: () -> float | None if self.opts.u is None: return None try: @@ -167,11 +133,11 @@ def output_unit(self) -> float | None: raise TypeError("Timer unit setting must be a float.") @functools.cached_property - def strip_zero(self) -> bool: + def strip_zero(self): # type: () -> bool return "z" in self.opts @functools.cached_property - def return_profiler(self) -> bool: + def return_profiler(self): # type: () -> bool return "r" in self.opts @@ -182,9 +148,9 @@ class _RunAndProfileResult: """ stats: LineStats parse_result: _ParseParamResult - message: str | None = None - time_elapsed: float | None = None - tempfile: str | os.PathLike[str] | None = None + message: Union[str, None] = None + time_elapsed: Union[float, None] = None + tempfile: Union[str, 'os.PathLike[str]', None] = None def __post_init__(self): if 
self.tempfile is not None: @@ -220,7 +186,7 @@ def show_func_wrapper( line_profiler, get_code_block=get_code_block_wrapper): return call() - def get_code_block_wrapper(filename: str, lineno: int) -> list[str]: + def get_code_block_wrapper(filename, lineno): """ Return the entire content of :py:attr:`~.tempfile`.""" with tmp.open(mode='r') as fobj: return fobj.read().splitlines(keepends=True) @@ -228,7 +194,7 @@ def get_code_block_wrapper(filename: str, lineno: int) -> list[str]: return show_func_wrapper @functools.cached_property - def output(self) -> str: + def output(self): # type: () -> str with ExitStack() as stack: cap = stack.enter_context(StringIO()) # Trap text output patch_show_func = _PatchDict.from_module( @@ -264,13 +230,14 @@ class _PatchProfilerIntoBuiltins: skip this doctest if :py:mod:`IPython` (and hence this module) can't be imported. """ - def __init__(self, prof: LineProfiler | None = None) -> None: + def __init__(self, prof=None): + # type: (LineProfiler | None) -> None if prof is None: prof = LineProfiler() self.prof = prof self._ctx = _PatchDict.from_module(builtins, profile=self.prof) - def __enter__(self) -> LineProfiler: + def __enter__(self): # type: () -> LineProfiler self._ctx.__enter__() return self.prof @@ -279,14 +246,14 @@ def __exit__(self, *a, **k): class _PatchDict: - def __init__(self, namespace: dict[str, Any], /, - **kwargs: Any) -> None: + def __init__(self, namespace, /, **kwargs): + # type: (dict[str, Any], Any) -> None self.namespace = namespace self.replacements = kwargs - self._stack: list[dict[str, Any]] = [] + self._stack = [] # type: list[dict[str, Any]] self._absent = object() - def __enter__(self: PD) -> PD: + def __enter__(self): # type: (PD) -> PD self._push() return self @@ -312,16 +279,15 @@ def _pop(self): namespace[key] = value @classmethod - def from_module(cls: type[PD], module: types.ModuleType, /, - **kwargs: Any) -> PD: + def from_module(cls, module, /, **kwargs): + # type: (type[PD], types.ModuleType, 
Any) -> PD return cls(vars(module), **kwargs) @magics_class class LineProfilerMagics(Magics): - def _parse_parameters( - self, parameter_s: str, getopt_spec: str, - opts_def: Struct) -> _ParseParamResult: + def _parse_parameters(self, parameter_s, getopt_spec, opts_def): + # type: (str, str, Struct) -> _ParseParamResult # FIXME: There is a chance that this handling will need to be # updated to handle single-quoted characters better (#382) parameter_s = parameter_s.replace('"', r"\"").replace("'", r"\"") @@ -332,13 +298,13 @@ def _parse_parameters( return _ParseParamResult(opts, arg_str) @staticmethod - def _run_and_profile( - prof: LineProfiler, - parse_result: _ParseParamResult, - tempfile: str | None, - method: Callable[PS, Any], - *args: PS.args, - **kwargs: PS.kwargs) -> _RunAndProfileResult: + def _run_and_profile(prof, # type: LineProfiler + parse_result, # type: _ParseParamResult + tempfile, # type: str | None + method, # type: Callable[PS, Any] + *args, # type: PS.args + **kwargs, # type: PS.kwargs + ): # type: (...) -> _RunAndProfileResult # Use the time module because it's easier than parsing the # output from `show_text()`. # `perf_counter()` is a monotonically increasing alternative to @@ -358,8 +324,8 @@ def _run_and_profile( message=message, time_elapsed=total_time, tempfile=tempfile) @classmethod - def _lprun_all_get_rewritten_profiled_code( - cls, tmpfile: str) -> types.CodeType: + def _lprun_all_get_rewritten_profiled_code(cls, tmpfile): + # type: (str) -> types.CodeType """ Transform and compile the AST of the profiled code. 
This is similar to :py:meth:`.LineProfiler.runctx`, """ @@ -369,16 +335,15 @@ def _lprun_all_get_rewritten_profiled_code( return compile(tree, tmpfile, "exec") @classmethod - def _lprun_get_top_level_profiled_code( - cls, tmpfile: str) -> types.CodeType: + def _lprun_get_top_level_profiled_code(cls, tmpfile): + # type: (str) -> types.CodeType """ Compile the profiled code.""" with open(tmpfile, mode='r') as fobj: return compile(fobj.read(), tmpfile, "exec") @staticmethod - def _handle_end( - prof: LineProfiler, - run_result: _RunAndProfileResult) -> LineProfiler | None: + def _handle_end(prof, run_result): + # type: (LineProfiler, _RunAndProfileResult) -> LineProfiler | None page(run_result.output) dump_file = run_result.parse_result.dump_raw_dest @@ -399,7 +364,7 @@ def _handle_end( return prof if run_result.parse_result.return_profiler else None @line_magic - def lprun(self, parameter_s: str = "") -> LineProfiler | None: + def lprun(self, parameter_s=""): """Execute a statement under the line-by-line profiler from the :py:mod:`line_profiler` module. @@ -485,8 +450,7 @@ def lprun(self, parameter_s: str = "") -> LineProfiler | None: return self._handle_end(profile, run) @cell_magic - def lprun_all(self, parameter_s: str = "", - cell: str = "") -> LineProfiler | None: + def lprun_all(self, parameter_s="", cell=""): """Execute the whole notebook cell under the line-by-line profiler from the :py:mod:`line_profiler` module. 
From f60a1929b62f4081f231bde321c1384b00cfc9ac Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 17:00:37 -0500 Subject: [PATCH 18/22] Final fixes for mypy and ty checks --- line_profiler/autoprofile/autoprofile.py | 9 +++++++-- line_profiler/autoprofile/eager_preimports.py | 6 ++++-- line_profiler/autoprofile/line_profiler_utils.py | 3 +-- line_profiler/autoprofile/profmod_extractor.py | 13 +++++++++---- line_profiler/autoprofile/util_static.py | 14 +++++++------- line_profiler/line_profiler_utils.py | 2 +- line_profiler/profiler_mixin.py | 12 ++++++------ line_profiler/scoping_policy.py | 2 +- pyproject.toml | 7 +++++++ 9 files changed, 43 insertions(+), 25 deletions(-) diff --git a/line_profiler/autoprofile/autoprofile.py b/line_profiler/autoprofile/autoprofile.py index 725b28e4..57b118bf 100644 --- a/line_profiler/autoprofile/autoprofile.py +++ b/line_profiler/autoprofile/autoprofile.py @@ -52,7 +52,8 @@ def main(): import sys import types from collections.abc import MutableMapping -from typing import Any, cast, Dict +from typing import Any, cast, Dict, Mapping +from typing import ContextManager from .ast_tree_profiler import AstTreeProfiler from .run_module import AstTreeModuleProfiler from .line_profiler_utils import add_imported_function_or_module @@ -106,7 +107,7 @@ class restore_dict: def __init__(self, d: MutableMapping[str, Any], target=None): self.d = d self.target = target - self.copy: dict[str, Any] | None = None + self.copy: Mapping[str, Any] | None = None def __enter__(self): assert self.copy is None @@ -119,6 +120,10 @@ def __exit__(self, *_, **__): self.d.update(self.copy) self.copy = None + Profiler: type[AstTreeModuleProfiler] | type[AstTreeProfiler] + namespace: MutableMapping[str, Any] + ctx: ContextManager + if as_module: Profiler = AstTreeModuleProfiler module_name = modpath_to_modname(script_file) diff --git a/line_profiler/autoprofile/eager_preimports.py b/line_profiler/autoprofile/eager_preimports.py index 4c527a97..d03f617b 
100644 --- a/line_profiler/autoprofile/eager_preimports.py +++ b/line_profiler/autoprofile/eager_preimports.py @@ -315,7 +315,7 @@ def walk_packages_import_sys(pkg): dotted_paths |= recurse indirect_submods = set() - all_targets = {} + all_targets: dict[str, set[str | None]] = {} unknown_locs = [] split_path = functools.partial(split_dotted_path, static=static) walk = walk_packages_static if static else walk_packages_import_sys @@ -548,7 +548,9 @@ def write_eager_import_module( chunks = [] if profile_whole_module: chunks.append(f'{adder_name}({module_name})') - for target in sorted(targets): + + targets_ = sorted(t for t in targets if t is not None) + for target in sorted(targets_): path = f'{module}.{target}' chunks.append(strip(f""" try: diff --git a/line_profiler/autoprofile/line_profiler_utils.py b/line_profiler/autoprofile/line_profiler_utils.py index fff7d43e..a84811ec 100644 --- a/line_profiler/autoprofile/line_profiler_utils.py +++ b/line_profiler/autoprofile/line_profiler_utils.py @@ -45,8 +45,7 @@ def add_imported_function_or_module( Args: item (Union[Callable, Type, ModuleType]): Object to be profiled. - scoping_policy (Union[ScopingPolicy, str, ScopingPolicyDict, \ -None]): + scoping_policy (Union[ScopingPolicy, str, ScopingPolicyDict, None]): Whether (and how) to match the scope of members and decide on whether to add them: diff --git a/line_profiler/autoprofile/profmod_extractor.py b/line_profiler/autoprofile/profmod_extractor.py index 7268ea76..5665edea 100644 --- a/line_profiler/autoprofile/profmod_extractor.py +++ b/line_profiler/autoprofile/profmod_extractor.py @@ -3,7 +3,7 @@ import ast import os import sys - +from typing import List, cast, Any from .util_static import (modname_to_modpath, modpath_to_modname, package_modpaths) @@ -99,7 +99,7 @@ def _get_modnames_to_profile_from_prof_mod( if it fails, the item may point to an installed module rather than local script so we check if the item is path and whether that path exists, else skip the item. 
""" - modpath = modname_to_modpath(mod, sys_path=new_sys_path) + modpath = modname_to_modpath(mod, sys_path=cast(List[str | os.PathLike[Any]], new_sys_path)) if modpath is None: """if cannot convert to modpath, check if already path and if invalid""" if not os.path.exists(mod): @@ -210,7 +210,7 @@ def _find_modnames_in_tree_imports( value (str): alias (or name if no alias used) of import """ - modnames_found_in_tree = {} + modnames_found_in_tree: dict[int, str] = {} modname_added_list = [] for i, module_dict in enumerate(module_dict_list): modname = module_dict['name'] @@ -222,8 +222,13 @@ def _find_modnames_in_tree_imports( if modname not in modnames_to_profile and modname.rsplit('.', 1)[0] not in modnames_to_profile: continue name = module_dict['alias'] or modname + if not isinstance(name, str): + raise TypeError('should have gotten a str') modname_added_list.append(modname) - modnames_found_in_tree[module_dict['tree_index']] = name + tree_index = module_dict['tree_index'] + if not isinstance(tree_index, int): + raise TypeError('should have gotten an int') + modnames_found_in_tree[tree_index] = name return modnames_found_in_tree def run(self) -> dict[int, str]: diff --git a/line_profiler/autoprofile/util_static.py b/line_profiler/autoprofile/util_static.py index 555d972f..4eaf224b 100644 --- a/line_profiler/autoprofile/util_static.py +++ b/line_profiler/autoprofile/util_static.py @@ -390,8 +390,8 @@ def check_dpath(dpath): import pathlib for editable_pth in new_editable_pth_paths: - editable_pth = pathlib.Path(editable_pth) - target = editable_pth.read_text().strip().split("\n")[(-1)] + editable_pth_ = pathlib.Path(editable_pth) + target = editable_pth_.read_text().strip().split("\n")[(-1)] if (not exclude) or (normalize(target) not in real_exclude): modpath = check_dpath(target) if modpath: @@ -470,10 +470,10 @@ def modname_to_modpath( modpath = _syspath_modname_to_modpath(modname, sys_path) if modpath is None: return None - modpath = normalize_modpath(modpath, 
hide_init=hide_init, hide_main=hide_main) + modpath_ = normalize_modpath(modpath, hide_init=hide_init, hide_main=hide_main) if typing.TYPE_CHECKING: - modpath = typing.cast(str, modpath) - return modpath + modpath_ = typing.cast(str, modpath_) + return modpath_ def split_modpath(modpath: str | os.PathLike, check: bool = True) -> tuple[(str, str)]: @@ -644,8 +644,8 @@ def modpath_to_modname( if check and (relativeto is None): if not exists(modpath): raise ValueError("modpath={} does not exist".format(modpath)) - modpath_ = abspath(expanduser(modpath)) - modpath_ = normalize_modpath(modpath_, hide_init=hide_init, hide_main=hide_main) + modpath__ = abspath(expanduser(modpath)) + modpath_ = normalize_modpath(modpath__, hide_init=hide_init, hide_main=hide_main) if relativeto: dpath = dirname(abspath(expanduser(relativeto))) rel_modpath = relpath(modpath_, dpath) diff --git a/line_profiler/line_profiler_utils.py b/line_profiler/line_profiler_utils.py index f62b8f32..8d15f060 100644 --- a/line_profiler/line_profiler_utils.py +++ b/line_profiler/line_profiler_utils.py @@ -44,7 +44,7 @@ def __str__(self) -> str: return self.value -class StringEnum(getattr(enum, 'StrEnum', _StrEnumBase)): +class StringEnum(getattr(enum, 'StrEnum', _StrEnumBase)): # type: ignore[misc] """ Convenience wrapper around :py:class:`enum.StrEnum`. 
diff --git a/line_profiler/profiler_mixin.py b/line_profiler/profiler_mixin.py index 3b39eac8..1f66773d 100644 --- a/line_profiler/profiler_mixin.py +++ b/line_profiler/profiler_mixin.py @@ -6,7 +6,7 @@ from functools import cached_property, partial, partialmethod from sys import version_info from typing import (TYPE_CHECKING, Any, Callable, Mapping, Protocol, TypeVar, - cast) + cast, Sequence) from warnings import warn from ._line_profiler import label from .scoping_policy import ScopingPolicy @@ -253,7 +253,7 @@ def get_underlying_functions( def _get_underlying_functions( cls, func: object, seen: set[int] | None = None, stop_at_classes: bool = False - ) -> list[types.FunctionType | type | CythonCallable]: + ) -> Sequence[Callable]: if seen is None: seen = set() # Extract inner functions @@ -298,8 +298,8 @@ def _get_underlying_functions( def _get_underlying_functions_from_property( cls, prop: property, seen: set[int], stop_at_classes: bool - ) -> list[types.FunctionType | type | CythonCallable]: - result = [] + ) -> Sequence[Callable]: + result: list[Callable] = [] for impl in prop.fget, prop.fset, prop.fdel: if impl is not None: result.extend( @@ -310,8 +310,8 @@ def _get_underlying_functions_from_property( def _get_underlying_functions_from_type( cls, kls: type, seen: set[int], stop_at_classes: bool - ) -> list[types.FunctionType | type | CythonCallable]: - result = [] + ) -> Sequence[Callable]: + result: list[Callable] = [] get_filter = cls._class_scoping_policy.get_filter func_check = get_filter(kls, 'func') cls_check = get_filter(kls, 'class') diff --git a/line_profiler/scoping_policy.py b/line_profiler/scoping_policy.py index ede61777..b9c9a579 100644 --- a/line_profiler/scoping_policy.py +++ b/line_profiler/scoping_policy.py @@ -132,7 +132,7 @@ class MockClass: @overload def get_filter( self, namespace: type | ModuleType, - obj_type: Literal['func']) -> Callable[[FunctionType], bool]: + obj_type: Literal['func']) -> Callable[[Callable], bool]: ... 
@overload diff --git a/pyproject.toml b/pyproject.toml index 74ffac68..1b60089e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,3 +81,10 @@ norecursedirs = ".git ignore build __pycache__ dev _skbuild docs agentx" filterwarnings = [ "default", ] + +[[tool.ty.overrides]] +# Apply the ignore unresolved rules to these files +include = [ "line_profiler/line_profiler_utils.py", ] +rules = { unused-type-ignore-comment = "ignore" } From 2b21d7eceac7c58d0de96d973207983c00a51f10 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 17:03:38 -0500 Subject: [PATCH 19/22] wip --- CHANGELOG.rst | 6 ++++++ line_profiler/autoprofile/profmod_extractor.py | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5969c53a..a47b8a36 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,12 @@ Changes ======= + +5.1.0 +~~~~~ +* ENH: improved type annotations and moved them inline + + 5.0.1 ~~~~~ * FIX: Prevented duplicate or inconsistent profiler output under Python 3.14 when multiprocessing is used. diff --git a/line_profiler/autoprofile/profmod_extractor.py b/line_profiler/autoprofile/profmod_extractor.py index 5665edea..10615550 100644 --- a/line_profiler/autoprofile/profmod_extractor.py +++ b/line_profiler/autoprofile/profmod_extractor.py @@ -3,7 +3,7 @@ import ast import os import sys -from typing import List, cast, Any +from typing import List, cast, Any, Union from .util_static import (modname_to_modpath, modpath_to_modname, package_modpaths) @@ -99,7 +99,7 @@ def _get_modnames_to_profile_from_prof_mod( if it fails, the item may point to an installed module rather than local script so we check if the item is path and whether that path exists, else skip the item. 
""" - modpath = modname_to_modpath(mod, sys_path=cast(List[str | os.PathLike[Any]], new_sys_path)) + modpath = modname_to_modpath(mod, sys_path=cast(List[Union[str, os.PathLike[Any]]], new_sys_path)) if modpath is None: """if cannot convert to modpath, check if already path and if invalid""" if not os.path.exists(mod): From b3cb100e7d3ebbd2305583668bb76022304ffd4f Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 17:05:04 -0500 Subject: [PATCH 20/22] Add typing ignore lines to coverage --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 1b60089e..a63e487d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,10 @@ exclude_lines =[ "^ *pass *$", "if _debug:", "if __name__ == .__main__.:", + ".*if typing.TYPE_CHECKING:", + ".*if TYPE_CHECKING:", + "@typing.overload", + "@overload", ] omit =[ From 5a39a7099e1924cc46ade279e88e89d9309f2c80 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 17:12:01 -0500 Subject: [PATCH 21/22] fix --- line_profiler/autoprofile/profmod_extractor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/line_profiler/autoprofile/profmod_extractor.py b/line_profiler/autoprofile/profmod_extractor.py index 10615550..f19319dc 100644 --- a/line_profiler/autoprofile/profmod_extractor.py +++ b/line_profiler/autoprofile/profmod_extractor.py @@ -99,7 +99,7 @@ def _get_modnames_to_profile_from_prof_mod( if it fails, the item may point to an installed module rather than local script so we check if the item is path and whether that path exists, else skip the item. 
""" - modpath = modname_to_modpath(mod, sys_path=cast(List[Union[str, os.PathLike[Any]]], new_sys_path)) + modpath = modname_to_modpath(mod, sys_path=cast(List[Union[str, os.PathLike]], new_sys_path)) if modpath is None: """if cannot convert to modpath, check if already path and if invalid""" if not os.path.exists(mod): From 8e90623fb0ec03a1e8cc2c2e7262d865fd238734 Mon Sep 17 00:00:00 2001 From: Test User Date: Sun, 8 Feb 2026 17:14:58 -0500 Subject: [PATCH 22/22] Fix --- line_profiler/line_profiler.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/line_profiler/line_profiler.py b/line_profiler/line_profiler.py index 2428da2f..399ebfd2 100755 --- a/line_profiler/line_profiler.py +++ b/line_profiler/line_profiler.py @@ -22,7 +22,6 @@ from os import PathLike from typing import (TYPE_CHECKING, IO, Callable, Literal, Mapping, Protocol, Sequence, TypeVar, cast, Tuple) -from functools import cached_property, partial, partialmethod try: from ._line_profiler import (LineProfiler as CLineProfiler, @@ -41,7 +40,6 @@ if TYPE_CHECKING: # pragma: no cover from typing_extensions import ParamSpec, Self - from .profiler_mixin import CLevelCallable, UnparametrizedCallableLike class _IPythonLike(Protocol): def register_magics(self, magics: type) -> None: @@ -52,7 +50,7 @@ def register_magics(self, magics: type) -> None: T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) - ColumnLiterals = Literal['line', 'hits', 'time', 'perhit', 'percent'] +ColumnLiterals = Literal['line', 'hits', 'time', 'perhit', 'percent'] # NOTE: This needs to be in sync with ../kernprof.py and __init__.py