
Various minor docstring and comment updates #19519


Merged (1 commit) on Jul 28, 2025
14 changes: 7 additions & 7 deletions mypy/build.py
@@ -194,7 +194,7 @@ def default_flush_errors(
result.errors = messages
return result
except CompileError as e:
- # CompileErrors raised from an errors object carry all of the
+ # CompileErrors raised from an errors object carry all the
# messages that have not been reported out by error streaming.
# Patch it up to contain either none or all none of the messages,
# depending on whether we are flushing errors.
@@ -802,11 +802,11 @@ def correct_rel_imp(imp: ImportFrom | ImportAll) -> str:
res.append((pri, sub_id, imp.line))
else:
all_are_submodules = False
- # Add cur_id as a dependency, even if all of the
+ # Add cur_id as a dependency, even if all the
# imports are submodules. Processing import from will try
# to look through cur_id, so we should depend on it.
- # As a workaround for for some bugs in cycle handling (#4498),
- # if all of the imports are submodules, do the import at a lower
+ # As a workaround for some bugs in cycle handling (#4498),
+ # if all the imports are submodules, do the import at a lower
# priority.
pri = import_priority(imp, PRI_HIGH if not all_are_submodules else PRI_LOW)
res.append((pri, cur_id, imp.line))
@@ -929,7 +929,7 @@ def write_deps_cache(
) -> None:
"""Write cache files for fine-grained dependencies.

- Serialize fine-grained dependencies map for fine grained mode.
+ Serialize fine-grained dependencies map for fine-grained mode.

Dependencies on some module 'm' is stored in the dependency cache
file m.deps.json. This entails some spooky action at a distance:
@@ -943,7 +943,7 @@ def write_deps_cache(
fine-grained dependencies in a global cache file:
* We take a snapshot of current sources to later check consistency
between the fine-grained dependency cache and module cache metadata
- * We store the mtime of all of the dependency files to verify they
+ * We store the mtime of all the dependency files to verify they
haven't changed
"""
metastore = manager.metastore
@@ -1111,7 +1111,7 @@ def read_deps_cache(manager: BuildManager, graph: Graph) -> dict[str, FgDepMeta]
if deps_meta is None:
return None
meta_snapshot = deps_meta["snapshot"]
- # Take a snapshot of the source hashes from all of the metas we found.
+ # Take a snapshot of the source hashes from all the metas we found.
# (Including the ones we rejected because they were out of date.)
# We use this to verify that they match up with the proto_deps.
current_meta_snapshot = {
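
As a loose illustration of the snapshot idea described in the write_deps_cache and read_deps_cache docstrings above, the sketch below stores a per-module dependency map next to a hash snapshot of the sources and rejects the cache when the snapshot no longer matches. The file name, key names, and helper functions are hypothetical, not mypy's actual cache format:

from __future__ import annotations

import hashlib
import json
import os


def source_snapshot(paths: list[str]) -> dict[str, str]:
    # Hash every source file so a later run can detect edits.
    snapshot = {}
    for path in paths:
        with open(path, "rb") as f:
            snapshot[path] = hashlib.md5(f.read()).hexdigest()
    return snapshot


def write_deps(cache_dir: str, paths: list[str], deps: dict[str, list[str]]) -> None:
    # Store the dependency map together with the snapshot it was computed from.
    os.makedirs(cache_dir, exist_ok=True)
    with open(os.path.join(cache_dir, "deps_meta.json"), "w") as f:
        json.dump({"snapshot": source_snapshot(paths), "deps": deps}, f)


def read_deps(cache_dir: str, paths: list[str]) -> dict[str, list[str]] | None:
    # Reject the cache if any source changed since the deps were written.
    try:
        with open(os.path.join(cache_dir, "deps_meta.json")) as f:
            meta = json.load(f)
    except OSError:
        return None
    if meta["snapshot"] != source_snapshot(paths):
        return None
    return meta["deps"]
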
10 changes: 5 additions & 5 deletions mypyc/build.py
@@ -270,12 +270,12 @@ def build_using_shared_lib(
) -> list[Extension]:
"""Produce the list of extension modules when a shared library is needed.

- This creates one shared library extension module that all of the
- others import and then one shim extension module for each
- module in the build, that simply calls an initialization function
+ This creates one shared library extension module that all the
+ others import, and one shim extension module for each
+ module in the build. Each shim simply calls an initialization function
in the shared library.

- The shared library (which lib_name is the name of) is a python
+ The shared library (which lib_name is the name of) is a Python
extension module that exports the real initialization functions in
Capsules stored in module attributes.
"""
@@ -511,7 +511,7 @@ def mypycify(
separate: Should compiled modules be placed in separate extension modules.
If False, all modules are placed in a single shared library.
If True, every module is placed in its own library.
- Otherwise separate should be a list of
+ Otherwise, separate should be a list of
(file name list, optional shared library name) pairs specifying
groups of files that should be placed in the same shared library
(while all other modules will be placed in its own library).
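
As a usage sketch of the separate option from a setup.py (project and module names below are invented; the option itself is described by the docstring above):

from setuptools import setup
from mypyc.build import mypycify

setup(
    name="myproject",
    ext_modules=mypycify(
        ["myproject/engine.py", "myproject/rules.py", "myproject/cli.py"],
        # Keep engine.py and rules.py together in one shared library named
        # "engine"; cli.py is not listed, so it goes into its own library.
        separate=[(["myproject/engine.py", "myproject/rules.py"], "engine")],
    ),
)
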
29 changes: 14 additions & 15 deletions mypyc/codegen/emitmodule.py
@@ -1,7 +1,7 @@
"""Generate C code for a Python C extension module from Python source code."""

# FIXME: Basically nothing in this file operates on the level of a
- # single module and it should be renamed.
+ # single module and it should be renamed.

from __future__ import annotations

@@ -71,7 +71,7 @@
from mypyc.transform.spill import insert_spills
from mypyc.transform.uninit import insert_uninit_checks

- # All of the modules being compiled are divided into "groups". A group
+ # All the modules being compiled are divided into "groups". A group
# is a set of modules that are placed into the same shared library.
# Two common configurations are that every module is placed in a group
# by itself (fully separate compilation) and that every module is
@@ -164,7 +164,7 @@ def report_config_data(self, ctx: ReportConfigContext) -> tuple[str | None, list
if hash_digest(meta_json) != ir_data["meta_hash"]:
return None

- # Check that all of the source files are present and as
+ # Check that all the source files are present and as
# expected. The main situation where this would come up is the
# user deleting the build directory without deleting
# .mypy_cache, which we should handle gracefully.
@@ -215,8 +215,8 @@ def compile_scc_to_ir(
) -> ModuleIRs:
"""Compile an SCC into ModuleIRs.

- Any modules that this SCC depends on must have either compiled or
- loaded from a cache into mapper.
+ Any modules that this SCC depends on must have either been compiled,
+ type checked, or loaded from a cache into mapper.

Arguments:
scc: The list of MypyFiles to compile
@@ -244,11 +244,11 @@ def compile_scc_to_ir(

for module in modules.values():
for fn in module.functions:
- # Insert uninit checks.
+ # Insert checks for uninitialized values.
insert_uninit_checks(fn)
# Insert exception handling.
insert_exception_handling(fn)
- # Insert refcount handling.
+ # Insert reference count handling.
insert_ref_count_opcodes(fn)

if fn in env_user_functions:
@@ -369,7 +369,7 @@ def write_cache(
cache are in sync and refer to the same version of the code.
This is particularly important if mypyc crashes/errors/is
stopped after mypy has written its cache but before mypyc has.
- * The hashes of all of the source file outputs for the group
+ * The hashes of all the source file outputs for the group
the module is in. This is so that the module will be
recompiled if the source outputs are missing.
"""
@@ -429,7 +429,7 @@ def compile_modules_to_c(
Each shared library module provides, for each module in its group,
a PyCapsule containing an initialization function.
Additionally, it provides a capsule containing an export table of
- pointers to all of the group's functions and static variables.
+ pointers to all the group's functions and static variables.

Arguments:
result: The BuildResult from the mypy front-end
@@ -504,16 +504,15 @@ def __init__(

The code for a compilation group contains an internal and an
external .h file, and then one .c if not in multi_file mode or
- one .c file per module if in multi_file mode.)
+ one .c file per module if in multi_file mode.

Arguments:
modules: (name, ir) pairs for each module in the group
source_paths: Map from module names to source file paths
group_name: The name of the group (or None if this is single-module compilation)
group_map: A map of modules to their group names
names: The name generator for the compilation
- multi_file: Whether to put each module in its own source file regardless
- of group structure.
+ compiler_options: Mypyc specific options, including multi_file mode
"""
self.modules = modules
self.source_paths = source_paths
@@ -642,7 +641,7 @@ def generate_c_for_modules(self) -> list[tuple[str, str]]:
decls = ext_declarations if declaration.is_type else declarations
if not declaration.is_type:
decls.emit_lines(f"extern {declaration.decl[0]}", *declaration.decl[1:])
- # If there is a definition, emit it. Otherwise repeat the declaration
+ # If there is a definition, emit it. Otherwise, repeat the declaration
# (without an extern).
if declaration.defn:
emitter.emit_lines(*declaration.defn)
@@ -770,13 +769,13 @@ def generate_export_table(self, decl_emitter: Emitter, code_emitter: Emitter) ->
def generate_shared_lib_init(self, emitter: Emitter) -> None:
"""Generate the init function for a shared library.

- A shared library contains all of the actual code for a
+ A shared library contains all the actual code for a
compilation group.

The init function is responsible for creating Capsules that
wrap pointers to the initialization function of all the real
init functions for modules in this shared library as well as
- the export table containing all of the exported functions and
+ the export table containing all the exported functions and
values from all the modules.

These capsules are stored in attributes of the shared library.
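
Purely as an illustration of the capsule mechanism (the module and capsule names below are invented, and real shims are generated C, not Python), a pointer stored this way could in principle be pulled back out with ctypes:

import ctypes
import importlib

# Hypothetical: import the group's shared library module and grab one of the
# capsule attributes that its init function created.
shared = importlib.import_module("myproj__mypyc")
capsule = shared.init_myproj___foo  # capsule wrapping a C init function pointer

PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
PyCapsule_GetPointer.restype = ctypes.c_void_p
PyCapsule_GetPointer.argtypes = [ctypes.py_object, ctypes.c_char_p]

# The name passed here must match the name the capsule was created with.
init_ptr = PyCapsule_GetPointer(capsule, b"myproj__mypyc.init_myproj___foo")
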