From 89d0272a804c77f461b420d55acfea287b886d9f Mon Sep 17 00:00:00 2001
From: Mergen Nachin
Date: Fri, 3 Jan 2025 14:56:50 -0500
Subject: [PATCH] Enable mypy lintrunner, Part 5 (test/*)

---
 .lintrunner.toml                           |  2 +-
 .mypy.ini                                  | 10 ++++++++++
 test/end2end/exported_module.py            | 20 ++++++--------------
 test/end2end/test_end2end.py               |  6 +-----
 test/models/export_delegated_program.py    |  6 ++----
 .../generate_linear_out_bundled_program.py |  4 +++-
 6 files changed, 23 insertions(+), 25 deletions(-)

diff --git a/.lintrunner.toml b/.lintrunner.toml
index 00275ff7f9b..35117b714a6 100644
--- a/.lintrunner.toml
+++ b/.lintrunner.toml
@@ -302,7 +302,7 @@ include_patterns = [
     'profiler/**/*.py',
     'runtime/**/*.py',
     'scripts/**/*.py',
-    # 'test/**/*.py',
+    'test/**/*.py',
     'util/**/*.py',
     '*.py',
 ]
diff --git a/.mypy.ini b/.mypy.ini
index 922b912cb33..43d75e64de4 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -21,10 +21,14 @@ files =
     profiler,
     runtime,
     scripts,
+    test,
     util
 
 mypy_path = executorch
 
+[mypy-executorch.backends.*]
+follow_untyped_imports = True
+
 [mypy-executorch.codegen.*]
 follow_untyped_imports = True
 
@@ -46,6 +50,12 @@ follow_untyped_imports = True
 [mypy-executorch.runtime.*]
 follow_untyped_imports = True
 
+[mypy-executorch.test.*]
+follow_untyped_imports = True
+
+[mypy-functorch.*]
+follow_untyped_imports = True
+
 [mypy-requests.*]
 follow_untyped_imports = True
 
diff --git a/test/end2end/exported_module.py b/test/end2end/exported_module.py
index 81d7ff9f6cb..67a03b59a82 100644
--- a/test/end2end/exported_module.py
+++ b/test/end2end/exported_module.py
@@ -126,9 +126,7 @@ def return_wrapper():
         trace_inputs_method = "get_upper_bound_inputs"
         get_trace_inputs = get_inputs_adapter(
             (
-                # pyre-fixme[6]: For 1st argument expected `(...) -> Any` but got
-                #  `Union[Module, Tensor]`.
-                getattr(eager_module, trace_inputs_method)
+                getattr(eager_module, trace_inputs_method)  # type: ignore[arg-type]
                 if hasattr(eager_module, trace_inputs_method)
                 else eager_module.get_random_inputs
             ),
@@ -144,18 +142,14 @@ def return_wrapper():
         if hasattr(eager_module, "get_dynamic_shapes"):
             assert capture_config is not None
             assert capture_config.enable_aot is True
-            # pyre-fixme[29]: `Union[nn.modules.module.Module,
-            #  torch._tensor.Tensor]` is not a function.
-            trace_dynamic_shapes = eager_module.get_dynamic_shapes()
+            trace_dynamic_shapes = eager_module.get_dynamic_shapes()  # type: ignore[operator]
             method_name_to_dynamic_shapes = {}
             for method in methods:
                 method_name_to_dynamic_shapes[method] = trace_dynamic_shapes
 
         memory_planning_pass = MemoryPlanningPass()
         if hasattr(eager_module, "get_memory_planning_pass"):
-            # pyre-fixme[29]: `Union[nn.modules.module.Module,
-            #  torch._tensor.Tensor]` is not a function.
-            memory_planning_pass = eager_module.get_memory_planning_pass()
+            memory_planning_pass = eager_module.get_memory_planning_pass()  # type: ignore[operator]
 
         class WrapperModule(nn.Module):
             def __init__(self, method):
@@ -172,7 +166,7 @@ def __init__(self, method):
             assert method_name == "forward"
             ep = _export(
                 eager_module,
-                method_input,
+                method_input,  # type: ignore[arg-type]
                 dynamic_shapes=(
                     method_name_to_dynamic_shapes[method_name]
                     if method_name_to_dynamic_shapes
@@ -184,7 +178,7 @@ def __init__(self, method):
         else:
             exported_methods[method_name] = export(
                 eager_module,
-                method_input,
+                method_input,  # type: ignore[arg-type]
                 dynamic_shapes=(
                     method_name_to_dynamic_shapes[method_name]
                     if method_name_to_dynamic_shapes
@@ -220,9 +214,7 @@ def __init__(self, method):
 
         # Get a function that creates random inputs appropriate for testing.
         get_random_inputs_fn = get_inputs_adapter(
-            # pyre-fixme[6]: For 1st argument expected `(...) -> Any` but got
-            #  `Union[Module, Tensor]`.
-            eager_module.get_random_inputs,
+            eager_module.get_random_inputs,  # type: ignore[arg-type]
             # all exported methods must have the same signature so just pick the first one.
             methods[0],
         )
diff --git a/test/end2end/test_end2end.py b/test/end2end/test_end2end.py
index 0f374720b41..a3bc1e64e39 100644
--- a/test/end2end/test_end2end.py
+++ b/test/end2end/test_end2end.py
@@ -52,9 +52,7 @@
 kernel_mode = None  # either aten mode or lean mode
 try:
     from executorch.extension.pybindings.portable_lib import (
-        _load_bundled_program_from_buffer,
         _load_for_executorch_from_buffer,
-        _load_for_executorch_from_bundled_program,
     )
 
     kernel_mode = "lean"
@@ -63,10 +61,8 @@
     pass
 
 try:
-    from executorch.extension.pybindings.aten_lib import (
-        _load_bundled_program_from_buffer,
+    from executorch.extension.pybindings.aten_lib import (  # type: ignore[import-not-found]
         _load_for_executorch_from_buffer,
-        _load_for_executorch_from_bundled_program,
     )
 
     assert kernel_mode is None
diff --git a/test/models/export_delegated_program.py b/test/models/export_delegated_program.py
index a85dab6753f..4f4429aca88 100644
--- a/test/models/export_delegated_program.py
+++ b/test/models/export_delegated_program.py
@@ -118,9 +118,7 @@ def export_module_to_program(
     eager_module = module_class().eval()
     inputs = ()
     if hasattr(eager_module, "get_random_inputs"):
-        # pyre-fixme[29]: `Union[nn.modules.module.Module, torch._tensor.Tensor]` is
-        #  not a function.
-        inputs = eager_module.get_random_inputs()
+        inputs = eager_module.get_random_inputs()  # type: ignore[operator]
 
     class WrapperModule(torch.nn.Module):
         def __init__(self, fn):
@@ -153,7 +151,7 @@ def forward(self, *args, **kwargs):
         ).to_executorch(config=et_config)
     else:
         edge: exir.EdgeProgramManager = to_edge(exported_program)
-        lowered_module = to_backend(
+        lowered_module = to_backend(  # type: ignore[call-arg]
             backend_id, edge.exported_program(), compile_specs=[]
         )
 
diff --git a/test/models/generate_linear_out_bundled_program.py b/test/models/generate_linear_out_bundled_program.py
index c98ea7ed683..8ab75d87fb7 100644
--- a/test/models/generate_linear_out_bundled_program.py
+++ b/test/models/generate_linear_out_bundled_program.py
@@ -27,7 +27,9 @@
 from executorch.exir.passes import MemoryPlanningPass, ToOutVarPass
 from executorch.exir.print_program import pretty_print
 
-from executorch.test.models.linear_model import LinearModel
+from executorch.test.models.linear_model import (  # type: ignore[import-not-found]
+    LinearModel,
+)
 from torch.export import export
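
Note on the suppressions above: the removed pyre-fixme comments and the new `# type: ignore[...]` comments address the same underlying issue. When a variable is typed only as `nn.Module`, attribute access for helper methods such as `get_random_inputs` resolves through `nn.Module.__getattr__`, which the torch stubs annotate as returning `Union[Tensor, Module]`; mypy therefore reports the result as not callable ([operator]) and rejects it where a callable argument is expected ([arg-type]). A minimal sketch of the pattern, using a hypothetical module and helper that are not part of this patch:

    import torch
    from torch import nn

    class ModuleWithHelpers(nn.Module):  # hypothetical test module, not from this patch
        def get_random_inputs(self) -> tuple:
            return (torch.randn(2, 2),)

    def trace(eager_module: nn.Module) -> tuple:
        # mypy sees `eager_module.get_random_inputs` as Union[Tensor, Module] via
        # nn.Module.__getattr__, so the call is an [operator] error without the ignore;
        # at runtime the bound method is found normally and the call succeeds.
        return eager_module.get_random_inputs()  # type: ignore[operator]

    print(trace(ModuleWithHelpers()))

The `follow_untyped_imports = True` sections added to .mypy.ini handle a related case: mypy follows and checks imports from those packages even though they ship no py.typed marker or stubs, instead of erroring at each import site.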