diff --git a/mypy/build.py b/mypy/build.py
index 355ba861385e..71575de9d877 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -194,7 +194,7 @@ def default_flush_errors(
         result.errors = messages
         return result
     except CompileError as e:
-        # CompileErrors raised from an errors object carry all of the
+        # CompileErrors raised from an errors object carry all the
         # messages that have not been reported out by error streaming.
         # Patch it up to contain either none or all none of the messages,
         # depending on whether we are flushing errors.
@@ -802,11 +802,11 @@ def correct_rel_imp(imp: ImportFrom | ImportAll) -> str:
                         res.append((pri, sub_id, imp.line))
                     else:
                         all_are_submodules = False
-                # Add cur_id as a dependency, even if all of the
+                # Add cur_id as a dependency, even if all the
                 # imports are submodules. Processing import from will try
                 # to look through cur_id, so we should depend on it.
-                # As a workaround for for some bugs in cycle handling (#4498),
-                # if all of the imports are submodules, do the import at a lower
+                # As a workaround for some bugs in cycle handling (#4498),
+                # if all the imports are submodules, do the import at a lower
                 # priority.
                 pri = import_priority(imp, PRI_HIGH if not all_are_submodules else PRI_LOW)
                 res.append((pri, cur_id, imp.line))
@@ -929,7 +929,7 @@ def write_deps_cache(
 ) -> None:
     """Write cache files for fine-grained dependencies.
 
-    Serialize fine-grained dependencies map for fine grained mode.
+    Serialize fine-grained dependencies map for fine-grained mode.
 
     Dependencies on some module 'm' is stored in the dependency cache
     file m.deps.json. This entails some spooky action at a distance:
@@ -943,7 +943,7 @@ def write_deps_cache(
     fine-grained dependencies in a global cache file:
     * We take a snapshot of current sources to later check consistency
       between the fine-grained dependency cache and module cache metadata
-    * We store the mtime of all of the dependency files to verify they
+    * We store the mtime of all the dependency files to verify they
      haven't changed
    """
    metastore = manager.metastore
@@ -1111,7 +1111,7 @@ def read_deps_cache(manager: BuildManager, graph: Graph) -> dict[str, FgDepMeta]
     if deps_meta is None:
         return None
     meta_snapshot = deps_meta["snapshot"]
-    # Take a snapshot of the source hashes from all of the metas we found.
+    # Take a snapshot of the source hashes from all the metas we found.
     # (Including the ones we rejected because they were out of date.)
     # We use this to verify that they match up with the proto_deps.
     current_meta_snapshot = {
diff --git a/mypyc/build.py b/mypyc/build.py
index 8ddbf4d22a27..b7d3e1b25366 100644
--- a/mypyc/build.py
+++ b/mypyc/build.py
@@ -270,12 +270,12 @@ def build_using_shared_lib(
 ) -> list[Extension]:
     """Produce the list of extension modules when a shared library is needed.
 
-    This creates one shared library extension module that all of the
-    others import and then one shim extension module for each
-    module in the build, that simply calls an initialization function
+    This creates one shared library extension module that all the
+    others import, and one shim extension module for each
+    module in the build. Each shim simply calls an initialization function
     in the shared library.
 
-    The shared library (which lib_name is the name of) is a python
+    The shared library (which lib_name is the name of) is a Python
     extension module that exports the real initialization functions
     in Capsules stored in module attributes.
""" @@ -511,7 +511,7 @@ def mypycify( separate: Should compiled modules be placed in separate extension modules. If False, all modules are placed in a single shared library. If True, every module is placed in its own library. - Otherwise separate should be a list of + Otherwise, separate should be a list of (file name list, optional shared library name) pairs specifying groups of files that should be placed in the same shared library (while all other modules will be placed in its own library). diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 7037409ff40b..13a3727cd188 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -1,7 +1,7 @@ """Generate C code for a Python C extension module from Python source code.""" # FIXME: Basically nothing in this file operates on the level of a -# single module and it should be renamed. +# single module and it should be renamed. from __future__ import annotations @@ -71,7 +71,7 @@ from mypyc.transform.spill import insert_spills from mypyc.transform.uninit import insert_uninit_checks -# All of the modules being compiled are divided into "groups". A group +# All the modules being compiled are divided into "groups". A group # is a set of modules that are placed into the same shared library. # Two common configurations are that every module is placed in a group # by itself (fully separate compilation) and that every module is @@ -164,7 +164,7 @@ def report_config_data(self, ctx: ReportConfigContext) -> tuple[str | None, list if hash_digest(meta_json) != ir_data["meta_hash"]: return None - # Check that all of the source files are present and as + # Check that all the source files are present and as # expected. The main situation where this would come up is the # user deleting the build directory without deleting # .mypy_cache, which we should handle gracefully. @@ -215,8 +215,8 @@ def compile_scc_to_ir( ) -> ModuleIRs: """Compile an SCC into ModuleIRs. - Any modules that this SCC depends on must have either compiled or - loaded from a cache into mapper. + Any modules that this SCC depends on must have either been compiled, + type checked, or loaded from a cache into mapper. Arguments: scc: The list of MypyFiles to compile @@ -244,11 +244,11 @@ def compile_scc_to_ir( for module in modules.values(): for fn in module.functions: - # Insert uninit checks. + # Insert checks for uninitialized values. insert_uninit_checks(fn) # Insert exception handling. insert_exception_handling(fn) - # Insert refcount handling. + # Insert reference count handling. insert_ref_count_opcodes(fn) if fn in env_user_functions: @@ -369,7 +369,7 @@ def write_cache( cache are in sync and refer to the same version of the code. This is particularly important if mypyc crashes/errors/is stopped after mypy has written its cache but before mypyc has. - * The hashes of all of the source file outputs for the group + * The hashes of all the source file outputs for the group the module is in. This is so that the module will be recompiled if the source outputs are missing. """ @@ -429,7 +429,7 @@ def compile_modules_to_c( Each shared library module provides, for each module in its group, a PyCapsule containing an initialization function. Additionally, it provides a capsule containing an export table of - pointers to all of the group's functions and static variables. + pointers to all the group's functions and static variables. 
 
     Arguments:
         result: The BuildResult from the mypy front-end
@@ -504,7 +504,7 @@ def __init__(
 
         The code for a compilation group contains an internal and an
         external .h file, and then one .c if not in multi_file mode or
-        one .c file per module if in multi_file mode.)
+        one .c file per module if in multi_file mode.
 
         Arguments:
             modules: (name, ir) pairs for each module in the group
@@ -512,8 +512,7 @@ def __init__(
             group_name: The name of the group (or None if this is single-module compilation)
             group_map: A map of modules to their group names
             names: The name generator for the compilation
-            multi_file: Whether to put each module in its own source file regardless
-                of group structure.
+            compiler_options: Mypyc specific options, including multi_file mode
         """
         self.modules = modules
         self.source_paths = source_paths
@@ -642,7 +641,7 @@ def generate_c_for_modules(self) -> list[tuple[str, str]]:
             decls = ext_declarations if declaration.is_type else declarations
             if not declaration.is_type:
                 decls.emit_lines(f"extern {declaration.decl[0]}", *declaration.decl[1:])
-                # If there is a definition, emit it. Otherwise repeat the declaration
+                # If there is a definition, emit it. Otherwise, repeat the declaration
                 # (without an extern).
                 if declaration.defn:
                     emitter.emit_lines(*declaration.defn)
@@ -770,13 +769,13 @@ def generate_export_table(self, decl_emitter: Emitter, code_emitter: Emitter) ->
     def generate_shared_lib_init(self, emitter: Emitter) -> None:
         """Generate the init function for a shared library.
 
-        A shared library contains all of the actual code for a
+        A shared library contains all the actual code for a
         compilation group.
 
         The init function is responsible for creating Capsules that
         wrap pointers to the initialization function of all the real
         init functions for modules in this shared library as well as
-        the export table containing all of the exported functions and
+        the export table containing all the exported functions and
         values from all the modules.
 
         These capsules are stored in attributes of the shared library.
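
The docstrings touched above describe mypyc's shared-library layout: one extension module per group holds every real init function (exposed through capsules stored as module attributes), and a thin shim module per compiled module just fetches its own init function from that shared module and runs it. Below is a rough, pure-Python sketch of that indirection only; all names in it are invented for illustration, and the real generated code is C that wraps function pointers in PyCapsules rather than storing plain callables as attributes.

# Illustrative sketch only -- not code that mypyc generates or ships.
from collections.abc import Callable
import types


def make_shared_lib(real_inits: dict[str, Callable[[], dict]]) -> types.ModuleType:
    # One "shared library" module exporting every real init function as an
    # attribute (mypyc exports a PyCapsule-wrapped C function pointer instead).
    lib = types.ModuleType("libgroup")
    for mod_name, init in real_inits.items():
        setattr(lib, "init_" + mod_name.replace(".", "_"), init)
    return lib


def make_shim(lib: types.ModuleType, mod_name: str) -> types.ModuleType:
    # A shim module that just looks up its init function in the shared
    # library and calls it, copying the result into its own namespace.
    shim = types.ModuleType(mod_name)
    init = getattr(lib, "init_" + mod_name.replace(".", "_"))
    shim.__dict__.update(init())
    return shim


if __name__ == "__main__":
    # Two "compiled" modules in one group, each represented by an init
    # function that returns the module's namespace.
    lib = make_shared_lib({
        "pkg.a": lambda: {"answer": 42},
        "pkg.b": lambda: {"greet": lambda: "hello"},
    })
    a = make_shim(lib, "pkg.a")
    b = make_shim(lib, "pkg.b")
    print(a.answer, b.greet())  # 42 hello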