From 1d1e93edcdf617c651c3eb1d1cbadd29d99172b2 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sun, 11 Feb 2024 17:19:53 -0600 Subject: [PATCH] Misc grammar/typo fixes (#20518) Fix various typos and grammar issues (like repeated words) found by the PyCharm grammar/spelling inspections. Most of these are in docs or code comments. A few isolated instances change some variable names, but they seem local enough that refactoring them shouldn't cause issues. I will highlight them in comments. --- build-support/bin/terraform_tool_versions.py | 2 +- .../integrating-new-tools-without-plugins.mdx | 2 +- .../debugging-and-benchmarking.mdx | 2 +- .../development/internal-architecture.mdx | 6 ++--- .../contributions/development/style-guide.mdx | 2 +- docs/docs/docker/tagging-docker-images.mdx | 4 ++-- .../getting-started/incremental-adoption.mdx | 2 +- docs/docs/go/index.mdx | 2 +- docs/docs/helm/deployments.mdx | 8 +++---- docs/docs/helm/index.mdx | 4 ++-- .../docs/introduction/how-does-pants-work.mdx | 2 +- docs/docs/java-and-scala/index.mdx | 2 +- docs/docs/python/goals/check.mdx | 2 +- docs/docs/python/goals/package.mdx | 4 ++-- docs/docs/python/goals/publish.mdx | 2 +- docs/docs/python/goals/run.mdx | 4 ++-- docs/docs/python/integrations/aws-lambda.mdx | 2 +- docs/docs/shell/run-shell-commands.mdx | 2 +- .../docs/using-pants/key-concepts/options.mdx | 4 ++-- .../plugin-upgrade-guide.mdx | 6 ++--- docs/docs/writing-plugins/overview.mdx | 2 +- .../backend/codegen/protobuf/java/rules.py | 2 +- .../codegen/thrift/apache/python/subsystem.py | 2 +- .../pants/backend/cue/goals/fix_test.py | 2 +- .../pants/backend/debian/target_types.py | 2 +- src/python/pants/backend/docker/registries.py | 2 +- .../docker/subsystems/docker_options.py | 2 +- .../subsystems/dockerfile_wrapper_script.py | 2 +- .../pants/backend/docker/target_types_test.py | 4 ++-- .../backend/docker/util_rules/dependencies.py | 6 ++--- .../go/go_sources/analyze_package/read.go | 2 +- .../pants/backend/go/util_rules/assembly.py | 2 +- .../pants/backend/go/util_rules/build_pkg.py | 2 +- .../backend/go/util_rules/coverage_profile.py | 2 +- .../backend/go/util_rules/coverage_test.py | 2 +- .../backend/go/util_rules/pkg_pattern.py | 4 ++-- .../helm/dependency_inference/deployment.py | 2 +- .../pants/backend/helm/resolve/artifacts.py | 6 +++-- .../pants/backend/helm/subsystems/helm.py | 2 +- .../helm/subsystems/k8s_parser_main.py | 2 +- .../pants/backend/helm/util_rules/renderer.py | 4 ++-- .../pants/backend/helm/util_rules/tool.py | 2 +- src/python/pants/backend/project_info/peek.py | 2 +- .../pants/backend/python/goals/export.py | 2 +- .../pants/backend/python/goals/lockfile.py | 2 +- .../pants/backend/python/goals/publish.py | 2 +- .../pants/backend/python/lint/black/rules.py | 2 +- .../python/macros/pipenv_requirements_test.py | 2 +- .../python/packaging/pyoxidizer/config.py | 2 +- .../backend/python/target_types_rules.py | 2 +- .../backend/python/typecheck/mypy/rules.py | 2 +- .../util_rules/interpreter_constraints.py | 2 +- .../python/util_rules/local_dists_pep660.py | 2 +- .../python/util_rules/package_dists.py | 8 +++---- .../python/util_rules/pex_requirements.py | 2 +- .../backend/scala/compile/scalac_plugins.py | 2 +- .../dependency_inference/scala_parser_test.py | 2 +- .../shell/util_rules/shell_command_test.py | 2 +- .../pants/backend/terraform/goals/tailor.py | 2 +- .../pants/backend/terraform/hcl2_parser.py | 2 +- src/python/pants/base/build_root.py | 6 ++--- src/python/pants/base/exception_sink.py | 4 ++-- 
src/python/pants/base/specs.py | 2 +- .../pants/base/specs_integration_test.py | 2 +- src/python/pants/bin/local_pants_runner.py | 2 +- src/python/pants/bin/pants_loader.py | 2 +- src/python/pants/bin/remote_pants_runner.py | 2 +- src/python/pants/bsp/protocol.py | 6 ++--- src/python/pants/bsp/spec/base.py | 2 +- src/python/pants/bsp/spec/lifecycle.py | 4 ++-- src/python/pants/conftest.py | 4 ++-- src/python/pants/core/goals/fix.py | 2 +- src/python/pants/core/goals/fix_test.py | 2 +- .../pants/core/goals/generate_lockfiles.py | 4 ++-- .../core/goals/generate_lockfiles_test.py | 2 +- src/python/pants/core/goals/lint.py | 2 +- src/python/pants/core/goals/publish.py | 4 ++-- src/python/pants/core/goals/run.py | 8 +++---- src/python/pants/core/goals/tailor.py | 4 ++-- src/python/pants/core/goals/test.py | 2 +- src/python/pants/core/target_types.py | 4 ++-- .../core/util_rules/adhoc_process_support.py | 2 +- src/python/pants/core/util_rules/archive.py | 2 +- .../pants/core/util_rules/archive_test.py | 2 +- src/python/pants/core/util_rules/asdf.py | 2 +- .../pants/core/util_rules/system_binaries.py | 6 ++--- src/python/pants/engine/console.py | 2 +- src/python/pants/engine/engine_aware.py | 6 ++--- src/python/pants/engine/environment_test.py | 2 +- src/python/pants/engine/fs.py | 2 +- src/python/pants/engine/fs_test.py | 2 +- src/python/pants/engine/goal.py | 2 +- .../engine/internals/build_files_test.py | 6 ++--- .../pants/engine/internals/scheduler.py | 4 ++-- .../pants/engine/internals/selectors.py | 10 ++++---- src/python/pants/engine/process.py | 2 +- src/python/pants/engine/rules_test.py | 4 ++-- src/python/pants/engine/target.py | 24 +++++++++---------- src/python/pants/engine/target_test.py | 4 ++-- src/python/pants/init/options_initializer.py | 2 +- .../artifact_mapper_test.py | 2 +- src/python/pants/jvm/jdk_rules_test.py | 4 ++-- src/python/pants/jvm/resources_test.py | 2 +- .../jvm/shading/rules_integration_test.py | 4 ++-- src/python/pants/notes/2.13.x.md | 2 +- src/python/pants/notes/2.14.x.md | 6 ++--- src/python/pants/notes/2.15.x.md | 2 +- src/python/pants/notes/2.16.x.md | 4 ++-- src/python/pants/notes/2.17.x.md | 2 +- src/python/pants/option/config.py | 2 +- src/python/pants/option/config_test.py | 2 +- src/python/pants/option/global_options.py | 4 ++-- src/python/pants/option/options.py | 2 +- .../pants/option/options_bootstrapper.py | 2 +- .../pants/option/options_integration_test.py | 2 +- src/python/pants/option/subsystem.py | 4 ++-- src/python/pants/pantsd/pants_daemon_core.py | 2 +- .../pants/pantsd/service/pants_service.py | 2 +- src/python/pants/util/resources.py | 2 +- src/python/pants/util/strutil.py | 2 +- src/python/pants/version.py | 2 +- .../engine/fs/store/src/immutable_inputs.rs | 8 +++---- src/rust/engine/graph/README.md | 2 +- .../engine/process_execution/src/bounded.rs | 2 +- src/rust/engine/rule_graph/README.md | 4 ++-- src/rust/engine/workunit_store/src/lib.rs | 2 +- .../pantsd/pantsd_integration_test.py | 4 ++-- 127 files changed, 200 insertions(+), 198 deletions(-) diff --git a/build-support/bin/terraform_tool_versions.py b/build-support/bin/terraform_tool_versions.py index 2774ff04e2d..426e1166c24 100644 --- a/build-support/bin/terraform_tool_versions.py +++ b/build-support/bin/terraform_tool_versions.py @@ -189,7 +189,7 @@ def fetch_platforms_for_version( ) -> Optional[List[ExternalToolVersion]]: """Fetch platform binary information for a particular Terraform version.""" logging.info( - f"processiong version {version_slug} with 
{len(version_links.binary_links)} binaries" + f"processing version {version_slug} with {len(version_links.binary_links)} binaries" ) if is_prerelease(version_slug): diff --git a/docs/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx b/docs/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx index 730a397c5dc..66abfd9026b 100644 --- a/docs/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx +++ b/docs/docs/ad-hoc-tools/integrating-new-tools-without-plugins.mdx @@ -11,7 +11,7 @@ The `adhoc_tool` target allows you to execute "runnable" targets inside the Pant `adhoc_tool` provides you with the building blocks needed to put together a custom build process without needing to develop and maintain a plugin. The level of initial effort involved in using `adhoc_tool` is significantly lower than that of [writing a plugin](../writing-plugins/overview.mdx), so it's well-suited to consuming one-off scripts, or for rapidly prototyping a process before actually writing a plugin. The tradeoff is that there is more manual work involved in defining build processes that reflect your codebase's structure, and that the targets that define the tools you consume are less easy to reuse. -The `antlr` demo in the [`example-adhoc` respository](https://github.com/pantsbuild/example-adhoc) shows an example of running a JVM-based tool to transparently generate Python code that can be used in another language: +The `antlr` demo in the [`example-adhoc` repository](https://github.com/pantsbuild/example-adhoc) shows an example of running a JVM-based tool to transparently generate Python code that can be used in another language: ``` adhoc_tool( diff --git a/docs/docs/contributions/development/debugging-and-benchmarking.mdx b/docs/docs/contributions/development/debugging-and-benchmarking.mdx index 5d54f6de274..989fe503cbf 100644 --- a/docs/docs/contributions/development/debugging-and-benchmarking.mdx +++ b/docs/docs/contributions/development/debugging-and-benchmarking.mdx @@ -121,6 +121,6 @@ Dumping thread stacks: - Run: `gdb /path/to/python/binary PROCESS_ID` 3. Enable logging to write the thread dump to `gdb.txt`: `set logging on` 4. Dump all thread backtraces: `thread apply all bt` -5. If you use pyenv to mange your Python install, a gdb script will exist in the same directory as the Python binary. Source it into gdb: +5. If you use pyenv to manage your Python install, a gdb script will exist in the same directory as the Python binary. Source it into gdb: - `source ~/.pyenv/versions/3.8.5/bin/python3.8-gdb.py` (if using version 3.8.5) 6. Dump all Python stacks: `thread apply all py-bt` diff --git a/docs/docs/contributions/development/internal-architecture.mdx b/docs/docs/contributions/development/internal-architecture.mdx index 45c89184be5..c5012fe5df4 100644 --- a/docs/docs/contributions/development/internal-architecture.mdx +++ b/docs/docs/contributions/development/internal-architecture.mdx @@ -66,9 +66,9 @@ But both of the goals are important because together they allow for an API that There are a few constraints that decide which `Rule`s are able to provide dependencies for one another: - `param_consumption` - When a `Rule` directly uses a `Param` as a positional argument, that `Param` is removed from scope for any of that `Rule`'s dependencies. 
- - For example, for a `Rule` `y` with a positional argument `A` and a `Get(B, C)`: if there is a `Param` `A` in scope at `y` and it is used to satisfy the positional argument, it cannot also be used to (transitively) to satisfy the `Get(B, C)` (i.e., a hyptothetical rule that consumes both `A` and `C` would not be eligible in that position). + - For example, for a `Rule` `y` with a positional argument `A` and a `Get(B, C)`: if there is a `Param` `A` in scope at `y` and it is used to satisfy the positional argument, it cannot also be used (transitively) to satisfy the `Get(B, C)` (i.e., a hypothetical rule that consumes both `A` and `C` would not be eligible in that position). - On the other hand, for a `Rule` `w` with `Get(B, C)` and `Get(D, E)`, if there is a `Param` `A` in scope at `w`, two dependency `Rule`s that consume `A` (transitively) _can_ be used to satisfy those `Get`s. Only consuming a `Param` as a positional argument removes it from scope. -- `provided_params` - When deciding whether one `Rule` can use another `Rule` to provide the output type of a `Get`, a constraint is applied that the candidate depedency must (transitively) consume the `Param` that is provided by the `Get`. +- `provided_params` - When deciding whether one `Rule` can use another `Rule` to provide the output type of a `Get`, a constraint is applied that the candidate dependency must (transitively) consume the `Param` that is provided by the `Get`. - For example: if a `Rule` `z` has a `Get(A, B)`, only `Rule`s that compute an `A` and (transitively) consume a `B` are eligible to be used. This also means that a `Param` `A` which is already in scope for `Rule` `z` is not eligible to be used, because it would trivially not consume `B`. ### Implementation @@ -83,7 +83,7 @@ The construction algorithm is broken up into phases: - If we were to stop `RuleGraph` construction at this phase, it would be necessary to do a form of [dynamic dispatch](https://en.wikipedia.org/wiki/Dynamic_dispatch) at runtime to decide which source of a dependency to use based on the `Param`s that were currently in scope. And the sets of `Param`s used in the memoization key for each `Rule` would still be overly large, causing excess invalidation. 3. [monomorphize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L325-L353) - "Monomorphize" the polymorphic graph by using the out-set of available `Param`s (initialized during `initial_polymorphic`) and the in-set of consumed `Param`s (computed during `live_param_labeled`) to partition nodes (and their dependents) for each valid combination of their dependencies. Combinations of dependencies that would be invalid (see the Constraints section) are not generated, which causes some pruning of the graph to happen during this phase. - Continuing the example from above: the goal of monomorphize is to create one copy of `Rule` `x` per legal combination of its `DependencyKey`. Assuming that both of `x`'s dependencies remain legal (i.e. that all of `{A,B,C}` are still in scope in the dependents of `x`, etc), then two copies of `x` will be created: one that uses the first dependency and has an in-set of `{A,B}`, and another that uses the second dependency and has an in-set of `{B,C}`. -4.
[prune_edges](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L836-L845) - Once the monomorphic graph has [converged](https://en.wikipedia.org/wiki/Data-flow_analysis#Convergence), each node in the graph will ideally have exactly one source of each `DependencyKey` (with the exception of `Query`s, which are not monomorphized). This phase validates that, and chooses the smallest input `Param` set to use for each `Query`. In cases where a node has more that one dependency per `DependencyKey`, it is because given a particular set of input `Params` there was more than one valid way to compute a dependency. This can happen either because there were too many `Param`s in scope, or because there were multiple `Rule`s with the same `Param` requirements. +4. [prune_edges](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L836-L845) - Once the monomorphic graph has [converged](https://en.wikipedia.org/wiki/Data-flow_analysis#Convergence), each node in the graph will ideally have exactly one source of each `DependencyKey` (except for `Query`s, which are not monomorphized). This phase validates that, and chooses the smallest input `Param` set to use for each `Query`. In cases where a node has more than one dependency per `DependencyKey`, it is because given a particular set of input `Params` there was more than one valid way to compute a dependency. This can happen either because there were too many `Param`s in scope, or because there were multiple `Rule`s with the same `Param` requirements. - This phase is the only phase that renders errors: all of the other phases mark nodes and edges "deleted" for particular reasons, and this phase consumes that record. A node that has been deleted indicates that that node is unsatisfiable for some reason, while an edge that has been deleted indicates that the source node was not able to consume the target node for some reason. - If a node has too many sources of a `DependencyKey`, this phase will recurse to attempt to locate the node in the `Rule` graph where the ambiguity was introduced. Likewise, if a node has no source of a `DependencyKey`, this phase will recurse on deleted nodes (which are preserved by the other phases) to attempt to locate the bottom-most `Rule` that was missing a `DependencyKey`. 5. [finalize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L1064-L1068) - After `prune_edges` the graph is known to be valid, and this phase generates the final static `RuleGraph` for all `Rule`s reachable from `Query`s. diff --git a/docs/docs/contributions/development/style-guide.mdx b/docs/docs/contributions/development/style-guide.mdx index 86ee3c38fc3..de840d13200 100644 --- a/docs/docs/contributions/development/style-guide.mdx +++ b/docs/docs/contributions/development/style-guide.mdx @@ -97,7 +97,7 @@ class OrderedSet: ### TODOs -When creating a TODO, first [create an issue](https://github.com/pantsbuild/pants/issues/new) in GitHub. Then, link to the issue # in parantheses and add a brief description. +When creating a TODO, first [create an issue](https://github.com/pantsbuild/pants/issues/new) in GitHub. Then, link to the issue # in parentheses and add a brief description. 
For example: diff --git a/docs/docs/docker/tagging-docker-images.mdx b/docs/docs/docker/tagging-docker-images.mdx index 27ed23350c6..a91460e4508 100644 --- a/docs/docs/docker/tagging-docker-images.mdx +++ b/docs/docs/docker/tagging-docker-images.mdx @@ -148,7 +148,7 @@ See [String interpolation using placeholder values](./tagging-docker-images.mdx# When Docker builds images, it can tag them with a set of tags. Pants will apply the tags listed in the `image_tags` field of `docker_image`, and any additional tags if defined from the registry -configuration (see [Configuring registries](./tagging-docker-images.mdx#configuring-registries). +configuration (see [Configuring registries](./tagging-docker-images.mdx#configuring-registries)). (Note that the field is named `image_tags` and not just `tags`, because Pants has [its own tags concept](doc:reference-target#tags), which is unrelated.) @@ -307,7 +307,7 @@ See [Setting a repository name](./tagging-docker-images.mdx#setting-a-repository The calculated hash value _may_ change between stable versions of Pants for the otherwise same input sources. ::: -## Retrieving the tags of an packaged image +## Retrieving the tags of a packaged image When a docker image is packaged, metadata about the resulting image is output to a JSON file artefact. This includes the image ID, as well as the full names that the image was tagged with. This file is written in the same manner as outputs of other packageable targets and available for later steps (for example, a test with `runtime_package_dependencies` including the docker image target) or in `dist/` after `pants package`. By default, this is available at `path.to.target/target_name.docker-info.json`. diff --git a/docs/docs/getting-started/incremental-adoption.mdx b/docs/docs/getting-started/incremental-adoption.mdx index 322efd72399..c214490d7ee 100644 --- a/docs/docs/getting-started/incremental-adoption.mdx +++ b/docs/docs/getting-started/incremental-adoption.mdx @@ -11,7 +11,7 @@ How to incrementally add Pants to an existing repository. If you have an existing repository, we recommend incrementally adopting to reduce the surface area of change, which reduces risk. -Incremental adoption also allows you to immediately start benefitting from Pants, then deepen adoption at your own pace, instead of postponing benefit until you are ready to make dramatic change all at once. +Incremental adoption also allows you to immediately start benefiting from Pants, then deepen adoption at your own pace, instead of postponing benefit until you are ready to make dramatic change all at once. :::note Joining Slack We would love to help you with adopting Pants. Please reach out through [Slack](/community/getting-help). diff --git a/docs/docs/go/index.mdx b/docs/docs/go/index.mdx index fb553689086..315fd90a818 100644 --- a/docs/docs/go/index.mdx +++ b/docs/docs/go/index.mdx @@ -175,7 +175,7 @@ You can pass through arguments with `--`, e.g. `pants test pkg/deploy: -- -v -ru ### Loose files in tests (`testdata`) -To open files in your tests, use [`file` / `files` targets](../using-pants/assets-and-archives.mdx) targets and add them as `dependencies` to your `go_package`. +To open files in your tests, use [`file` / `files`](../using-pants/assets-and-archives.mdx) targets and add them as `dependencies` to your `go_package`. 
```python title="pkg/runner/BUILD" go_package(dependencies=[":testdata"]) diff --git a/docs/docs/helm/deployments.mdx b/docs/docs/helm/deployments.mdx index 7776710936d..797dd8b85bf 100644 --- a/docs/docs/helm/deployments.mdx +++ b/docs/docs/helm/deployments.mdx @@ -15,13 +15,13 @@ Please share feedback for what you need to use Pants with your Helm deployments Helm's ultimate purpose is to simplify the deployment of Kubernetes resources and help in making these reproducible. However it is quite common to deploy the same software application into different kind of environments using slightly different configuration overrides. -This hinders reproducibility since operators end up having a set of configuration files and additional shell scripts that ensure that the Helm command line usued to deploy a piece of software into a given environment is always the same. +This hinders reproducibility since operators end up having a set of configuration files and additional shell scripts that ensure that the Helm command line used to deploy a piece of software into a given environment is always the same. Pants solves this problem by providing with the ability to manage the configuration files and the different parameters of a deployment as single unit such that a simple command line as `pants experimental-deploy ::` will always have the same effect on each of the deployments previously defined. ## Defining Helm deployments -Helm deployments are defined using the `helm_deployment` target which has a series of fields that can be used to guarantee the reproducibility of the given deployment. `helm_deployment` targets need to be added by hand as there is no deterministic way of instrospecting your repository to find sources that are specific to Helm: +Helm deployments are defined using the `helm_deployment` target which has a series of fields that can be used to guarantee the reproducibility of the given deployment. `helm_deployment` targets need to be added by hand as there is no deterministic way of introspecting your repository to find sources that are specific to Helm: ```python tab={"label":"src/chart/BUILD"} helm_chart() @@ -85,7 +85,7 @@ There are quite a few things to notice in the previous example: - One of those value files (`common-values.yaml`) provides with default values that are common to all deployments. - Each deployment uses an additional `xxx-override.yaml` file with values that are specific to the given deployment. -The `helm_deployment` target has many additional fields including the target kubernetes namespace, adding inline override values (similar to using helm's `--set` arg) and many others. Please run `pants help helm_deployment` to see all the posibilities. +The `helm_deployment` target has many additional fields including the target kubernetes namespace, adding inline override values (similar to using helm's `--set` arg) and many others. Please run `pants help helm_deployment` to see all the possibilities. ## Dependencies with `docker_image` targets @@ -253,7 +253,7 @@ helm_deployment( As shown above, now the `release` and `namespace` fields are calculated at deploy-time by Pants and, as in the previous example, they will be forwarded to the Helm chart accordingly. :::caution Ensuring repeatable deployments -You should always favor using static values (or value files) VS dynamic values in your deployments. 
Using interpolated environment variables in your deployments can render your deployments non-repetable anymore if those values can affect the behaviour of the system deployed, or what gets deployed (i.e. Docker image addresses). +You should always favor using static values (or value files) VS dynamic values in your deployments. Using interpolated environment variables in your deployments can render your deployments non-repeatable anymore if those values can affect the behaviour of the system deployed, or what gets deployed (i.e. Docker image addresses). Be careful when chossing the values that are going to be calculated dynamically. ::: diff --git a/docs/docs/helm/index.mdx b/docs/docs/helm/index.mdx index 25e92c78927..a2dc11852b9 100644 --- a/docs/docs/helm/index.mdx +++ b/docs/docs/helm/index.mdx @@ -111,7 +111,7 @@ pants package :: Built Helm chart artifact: testprojects.src.helm.example/example/example-0.2.0.tgz ``` -The final output folder can customised using the `output_path` field in the `helm_chart` target. Run `pants help helm_chart` for more information. +The final output folder can be customised using the `output_path` field in the `helm_chart` target. Run `pants help helm_chart` for more information. #### Helm chart version @@ -356,7 +356,7 @@ Use the option `pants test --no-timeouts` to temporarily disable timeouts, e.g. Pants only supports publishing Helm charts to OCI registries, a feature that was made generally available in Helm 3.8. -The publishing is done with Pants' `publish` goal but first you will need to tell Pants what are the possible destination registries where to upload your charts. +The publishing is done with Pants' `publish` goal, but first you will need to tell Pants what are the possible destination registries where to upload your charts. ### Configuring OCI registries diff --git a/docs/docs/introduction/how-does-pants-work.mdx b/docs/docs/introduction/how-does-pants-work.mdx index 0f76c8c9fb3..ed73c64f300 100644 --- a/docs/docs/introduction/how-does-pants-work.mdx +++ b/docs/docs/introduction/how-does-pants-work.mdx @@ -27,7 +27,7 @@ This means, for example, that you can run all of your linters at the same time, ### Caching -The engine caches processes precisely based on their inputs, and sandboxes execution to minimize side-effects and to make builds consistent and repeatable. +The engine caches processes precisely based on their inputs, and sandboxes execution to minimize side effects and to make builds consistent and repeatable. We run both tests, then add a syntax error to one test and rerun; the diff --git a/docs/docs/java-and-scala/index.mdx b/docs/docs/java-and-scala/index.mdx index 275adb70fad..6efa42ae40d 100644 --- a/docs/docs/java-and-scala/index.mdx +++ b/docs/docs/java-and-scala/index.mdx @@ -321,7 +321,7 @@ Pants supports loading Java and Scala projects in IntelliJ via the [BSP protocol After Setup (see below), and after IntelliJ has finished indexing your code, you should be able to: - Use goto definition and other symbol-index-using operations. -- Run test classes, which will first compile them will Pants (and render compile failures if not), and then run them in the foreground with IntelliJ's test runner. +- Run test classes, which will first compile them with Pants (and render compile failures if not), and then run them in the foreground with IntelliJ's test runner. 
### Setup diff --git a/docs/docs/python/goals/check.mdx b/docs/docs/python/goals/check.mdx index 1e7ae5db8aa..e8b3de04ace 100644 --- a/docs/docs/python/goals/check.mdx +++ b/docs/docs/python/goals/check.mdx @@ -94,7 +94,7 @@ You can use [`.pyi` files](https://mypy.readthedocs.io/en/stable/stubs.html) for Pants's dependency inference knows to infer a dependency both on the implementation and the type stub. You can verify this by running `pants dependencies path/to/file.py`. -When writing stubs for third-party libraries, you may need the set up the `[source].root_patterns` option so that [source roots](../../using-pants/key-concepts/source-roots.mdx) are properly stripped. For example: +When writing stubs for third-party libraries, you may need to set up the `[source].root_patterns` option so that [source roots](../../using-pants/key-concepts/source-roots.mdx) are properly stripped. For example: ```toml tab={"label":"pants.toml"} [source] diff --git a/docs/docs/python/goals/package.mdx b/docs/docs/python/goals/package.mdx index 706f6dbce59..7b83734163f 100644 --- a/docs/docs/python/goals/package.mdx +++ b/docs/docs/python/goals/package.mdx @@ -25,8 +25,8 @@ This allows you to test your packaging pipeline by simply running `pants test :: See [test](./test.mdx) for more information. ::: -:::tip Streamling Docker builds -Check out our blog [Streamling Docker Builds](https://blog.pantsbuild.org/pants-pex-and-docker/) to read about how you can combine these `package` formats with Pants's Docker support. Also see our [Docker docs](../../docker/index.mdx) +:::tip Streamline Docker builds +Check out our blog [Streamline Docker Builds](https://blog.pantsbuild.org/pants-pex-and-docker/) to read about how you can combine these `package` formats with Pants's Docker support. Also see our [Docker docs](../../docker/index.mdx) ::: ## Creating a PEX file from a `pex_binary` target diff --git a/docs/docs/python/goals/publish.mdx b/docs/docs/python/goals/publish.mdx index 88408dda46a..c40ad76112f 100644 --- a/docs/docs/python/goals/publish.mdx +++ b/docs/docs/python/goals/publish.mdx @@ -57,7 +57,7 @@ It is better to provide the required secrets using environment variables when ru ## Environment variables -Pants will pass certain configuration [environment variables](https://twine.readthedocs.io/en/latest/#environment-variables), through to Twine. If multiple repositories are involved in a single `publish` goal, you can distinguish them by adding an undersore and the repository name (upper-cased, and with hyphens replaced with underscores) as a suffix on the environment variable names: +Pants will pass certain configuration [environment variables](https://twine.readthedocs.io/en/latest/#environment-variables), through to Twine. 
If multiple repositories are involved in a single `publish` goal, you can distinguish them by adding an underscore and the repository name (upper-cased, and with hyphens replaced with underscores) as a suffix on the environment variable names: - `TWINE_USERNAME` - `TWINE_USERNAME_` diff --git a/docs/docs/python/goals/run.mdx b/docs/docs/python/goals/run.mdx index 8e05dd3a98e..f1467545bba 100644 --- a/docs/docs/python/goals/run.mdx +++ b/docs/docs/python/goals/run.mdx @@ -54,9 +54,9 @@ Running a `python_source` with the `run_goal_use_sandbox` field set to `False` i ## Watching the filesystem -If the app that you are running is long lived and safe to restart (including web apps like Django and Flask or other types of servers/services), you can set `restartable=True` on your `pex_binary` target to indicate this to Pants. The `run` goal will then automatically restart the app when its input files change! +If the app that you are running is long-lived and safe to restart (including web apps like Django and Flask or other types of servers/services), you can set `restartable=True` on your `pex_binary` target to indicate this to Pants. The `run` goal will then automatically restart the app when its input files change! -On the other hand, if your app is short lived (like a script) and you'd like to re-run it when files change but never interrupt an ongoing run, consider using `pants --loop run` instead. See [Goals](../../using-pants/key-concepts/goals.mdx#running-goals) for more information on `--loop`. +On the other hand, if your app is short-lived (like a script) and you'd like to re-run it when files change but never interrupt an ongoing run, consider using `pants --loop run` instead. See [Goals](../../using-pants/key-concepts/goals.mdx#running-goals) for more information on `--loop`. ## Debugging diff --git a/docs/docs/python/integrations/aws-lambda.mdx b/docs/docs/python/integrations/aws-lambda.mdx index a9079734405..93c2e2ca87b 100644 --- a/docs/docs/python/integrations/aws-lambda.mdx +++ b/docs/docs/python/integrations/aws-lambda.mdx @@ -137,7 +137,7 @@ For example, one use of layers is splitting the deployment package for a Lambda 1. a function artifact with only the code in your repository (first-party sources) 2. a layer artifact with the third-party requirements that the function imports -This split means making a change to first-party sources only requires rebuilding and re-deploying the function artifact. Since this artifact doesn't need to include all of the third-party requirements, rebuilding is likely to much faster and the resulting package will be smaller. The layer will only need to be rebuilt and redeployed if the third-party dependencies change, like a version upgrade or an additional `import`. +This split means making a change to first-party sources only requires rebuilding and re-deploying the function artifact. Since this artifact doesn't need to include all of the third-party requirements, rebuilding is likely to be much faster and the resulting package will be smaller. The layer will only need to be rebuilt and redeployed if the third-party dependencies change, like a version upgrade or an additional `import`. 
```python tab={"label":"project/BUILD"} python_sources(name="lib") diff --git a/docs/docs/shell/run-shell-commands.mdx b/docs/docs/shell/run-shell-commands.mdx index 33f28a164ad..442d9b00d78 100644 --- a/docs/docs/shell/run-shell-commands.mdx +++ b/docs/docs/shell/run-shell-commands.mdx @@ -7,7 +7,7 @@ How to execute arbitrary scripts and programs --- -The [`shell_command`](../../reference/targets/shell_command.mdx) target allows you to run any command during a Pants execution, for the purpose of modifying or creating files to be used by other targets, or its (idempotent: see below) side-effects when accessing services over the network. +The [`shell_command`](../../reference/targets/shell_command.mdx) target allows you to run any command during a Pants execution, for the purpose of modifying or creating files to be used by other targets, or its (idempotent: see below) side effects when accessing services over the network. ```python tab={"label":"BUILD"} shell_command( diff --git a/docs/docs/using-pants/key-concepts/options.mdx b/docs/docs/using-pants/key-concepts/options.mdx index 9f2c05bf843..95f2e7fd0d3 100644 --- a/docs/docs/using-pants/key-concepts/options.mdx +++ b/docs/docs/using-pants/key-concepts/options.mdx @@ -11,7 +11,7 @@ A deep dive into how options may be configured. Options are partitioned into named _scopes_. -Some systemwide options belong in the _global scope_. For example, the `--level` option, which controls the logging level, is in the global scope. +Some system-wide options belong in the _global scope_. For example, the `--level` option, which controls the logging level, is in the global scope. Other options belong to a _subsystem scope_. A _subsystem_ is simply a collection of related options, in a scope. For example, the `pytest` subsystem contains options related to [Python's test framework pytest](../../../reference/subsystems/pytest.mdx). @@ -304,7 +304,7 @@ dictopt = """{ #### Add/replace semantics - A value can be preceded by `+`, which will _update_ the value obtained from lower-precedence sources with the entries. -- Otherwise, the value _replaces_ the one obtained from lower-precendence sources. +- Otherwise, the value _replaces_ the one obtained from lower-precedence sources. For example, if the value of `--dictopt` in `scope` is set to `{'foo', 1, 'bar': 2}` in a config file, then diff --git a/docs/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx b/docs/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx index eb23a1f80eb..74323e404e9 100644 --- a/docs/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx +++ b/docs/docs/writing-plugins/common-plugin-tasks/plugin-upgrade-guide.mdx @@ -32,7 +32,7 @@ async def my_rule_lazy() -> MyOutput: The lazy API is useful, for example, when you only want to `Get` that output type inside an `if` branch. -We added syntax in 2.17 to now use `Get(OutputType)`, whereas before you had to do `Get(OutputType, OutputTypeRequest)` or (as of 2.15) `Get(OutputType, {})`. So, these `OutputTypeRequest` types are now redudent and deprecated in favor of simply using `Get(OutputType)`. +We added syntax in 2.17 to now use `Get(OutputType)`, whereas before you had to do `Get(OutputType, OutputTypeRequest)` or (as of 2.15) `Get(OutputType, {})`. So, these `OutputTypeRequest` types are now redundant and deprecated in favor of simply using `Get(OutputType)`. 
### `EnvironmentBehavior.UNMIGRATED` is no longer available @@ -57,7 +57,7 @@ Additionally, these types now by-default register the implementations for the ru ### `RunFieldSet` can be used to run targets in the sandbox as part of a build rule -With the new `experimental_run_in_sandbox` target type, targets that implement `RunFieldSet` can be run as a build rule for their side-effects. +With the new `experimental_run_in_sandbox` target type, targets that implement `RunFieldSet` can be run as a build rule for their side effects. Many rules that create `RunRequest`s can be used verbatim, but others may make assumptions that they will not be run hermetically. You will need set `run_in_sandbox_behavior` to one of the following values to generate a rule that allows your targets to be run in the sandbox: @@ -99,7 +99,7 @@ Instead of the `name` class attribute, `LintRequest` and `FmtTarg #### 2. Your tool subsystem should have a `skip` option. -Although not explictly not required by the engine to function correctly, `mypy` will complain if the subsystem type provided to `tool_subsystem` doesn't have a `skip: SkipOption` option registered. +Although not explicitly required by the engine to function correctly, `mypy` will complain if the subsystem type provided to `tool_subsystem` doesn't have a `skip: SkipOption` option registered. Otherwise, you can `# type: ignore[assignment]` on your `tool_subsystem` declaration. diff --git a/docs/docs/writing-plugins/overview.mdx b/docs/docs/writing-plugins/overview.mdx index 1eea54f7e09..390e0342c53 100644 --- a/docs/docs/writing-plugins/overview.mdx +++ b/docs/docs/writing-plugins/overview.mdx @@ -87,7 +87,7 @@ Or, although less recommended, you can add them to the `plugins` option: plugins = ["ansicolors==1.18.0"] ``` -However, be careful adding third-party dependencies that perform side-effects like reading from the filesystem or making network requests, as they will not work properly with the engine's caching model. +However, be careful adding third-party dependencies that perform side effects like reading from the filesystem or making network requests, as they will not work properly with the engine's caching model. ## Enabling Plugins with `register.py` diff --git a/src/python/pants/backend/codegen/protobuf/java/rules.py b/src/python/pants/backend/codegen/protobuf/java/rules.py index 76d97fd60c0..ec8d589baa9 100644 --- a/src/python/pants/backend/codegen/protobuf/java/rules.py +++ b/src/python/pants/backend/codegen/protobuf/java/rules.py @@ -219,7 +219,7 @@ def rules(): ProtobufSourcesGeneratorTarget.register_plugin_field(PrefixedJvmJdkField), ProtobufSourceTarget.register_plugin_field(PrefixedJvmResolveField), ProtobufSourcesGeneratorTarget.register_plugin_field(PrefixedJvmResolveField), - # Bring in the Java backend (since this backend compiles Jave code) to avoid rule graph errors. + # Bring in the Java backend (since this backend compiles Java code) to avoid rule graph errors. # TODO: Figure out whether a subset of rules can be brought in to still avoid rule graph errors.
*java_backend_rules(), ] diff --git a/src/python/pants/backend/codegen/thrift/apache/python/subsystem.py b/src/python/pants/backend/codegen/thrift/apache/python/subsystem.py index 92e3c296b0e..af839c25f90 100644 --- a/src/python/pants/backend/codegen/thrift/apache/python/subsystem.py +++ b/src/python/pants/backend/codegen/thrift/apache/python/subsystem.py @@ -18,7 +18,7 @@ class ThriftPythonSubsystem(Subsystem): help=softwrap( """ Code generation options specific to the Python code generator to pass to the - Apache `thift` binary via the `-gen py` argument. + Apache `thrift` binary via the `-gen py` argument. See `thrift -help` for supported values. """ ), diff --git a/src/python/pants/backend/cue/goals/fix_test.py b/src/python/pants/backend/cue/goals/fix_test.py index b4389bc9c91..b0f11a95f8d 100644 --- a/src/python/pants/backend/cue/goals/fix_test.py +++ b/src/python/pants/backend/cue/goals/fix_test.py @@ -128,7 +128,7 @@ def test_simple_cue_fmt_issue(rule_runner: RuleRunner) -> None: assert_results( rule_runner, run_cue(rule_runner, ["src/example.cue"]), - # `cue fmt` does not output anything.. so we have only the formatted files to go on. :/ + # `cue fmt` does not output anything. so we have only the formatted files to go on. :/ ExpectedResult( files=[ ( diff --git a/src/python/pants/backend/debian/target_types.py b/src/python/pants/backend/debian/target_types.py index 7169fae6403..df585936817 100644 --- a/src/python/pants/backend/debian/target_types.py +++ b/src/python/pants/backend/debian/target_types.py @@ -97,7 +97,7 @@ class DebianPackageDependencies(SpecialCasedDependencies): It will include the results in your Debian package using the same name they would normally have, but without the `--distdir` prefix (e.g. `dist/`). - You can include anything that can be uilt by `{bin_name()} package`, e.g. a `pex_binary`, + You can include anything that can be built by `{bin_name()} package`, e.g. a `pex_binary`, a `python_distribution`, or an `archive`. """ ) diff --git a/src/python/pants/backend/docker/registries.py b/src/python/pants/backend/docker/registries.py index 512594a1971..129793e6ad1 100644 --- a/src/python/pants/backend/docker/registries.py +++ b/src/python/pants/backend/docker/registries.py @@ -99,5 +99,5 @@ def get(self, *aliases_or_addresses: str) -> Iterator[DockerRegistryOptions]: elif alias_or_address == ALL_DEFAULT_REGISTRIES: yield from self.default else: - # Assume a explicit address from the BUILD file. + # Assume an explicit address from the BUILD file. yield DockerRegistryOptions(address=alias_or_address) diff --git a/src/python/pants/backend/docker/subsystems/docker_options.py b/src/python/pants/backend/docker/subsystems/docker_options.py index 9c1bafde496..2123751494c 100644 --- a/src/python/pants/backend/docker/subsystems/docker_options.py +++ b/src/python/pants/backend/docker/subsystems/docker_options.py @@ -106,7 +106,7 @@ def env_vars(self) -> tuple[str, ...]: Configure the default repository name used in the Docker image tag. 
The value is formatted and may reference these variables (in addition to the normal - placeheolders derived from the Dockerfile and build args etc): + placeholders derived from the Dockerfile and build args etc): {bullet_list(["name", "directory", "parent_directory", "target_repository"])} diff --git a/src/python/pants/backend/docker/subsystems/dockerfile_wrapper_script.py b/src/python/pants/backend/docker/subsystems/dockerfile_wrapper_script.py index 9b2af331043..1aa076b9d19 100644 --- a/src/python/pants/backend/docker/subsystems/dockerfile_wrapper_script.py +++ b/src/python/pants/backend/docker/subsystems/dockerfile_wrapper_script.py @@ -11,7 +11,7 @@ from typing import Iterator # -# Note: This file is used as an pex entry point in the execution sandbox. +# Note: This file is used as a pex entry point in the execution sandbox. # diff --git a/src/python/pants/backend/docker/target_types_test.py b/src/python/pants/backend/docker/target_types_test.py index d492195ae16..cffc054859f 100644 --- a/src/python/pants/backend/docker/target_types_test.py +++ b/src/python/pants/backend/docker/target_types_test.py @@ -13,8 +13,8 @@ "src, expected", [ ( - "/aboslute/path", - "/aboslute/path", + "/absolute/path", + "/absolute/path", ), ( "./relative/path", diff --git a/src/python/pants/backend/docker/util_rules/dependencies.py b/src/python/pants/backend/docker/util_rules/dependencies.py index fa95a9ca0eb..d5be3fd94e8 100644 --- a/src/python/pants/backend/docker/util_rules/dependencies.py +++ b/src/python/pants/backend/docker/util_rules/dependencies.py @@ -24,7 +24,7 @@ class DockerInferenceFieldSet(FieldSet): required_fields = (DockerImageDependenciesField,) - depenendencies: DockerImageDependenciesField + dependencies: DockerImageDependenciesField class InferDockerDependencies(InferDependenciesRequest): @@ -35,7 +35,7 @@ class InferDockerDependencies(InferDependenciesRequest): async def infer_docker_dependencies( request: InferDockerDependencies, all_packageable_targets: AllPackageableTargets ) -> InferredDependencies: - """Inspects the Dockerfile for references to known packagable targets.""" + """Inspects the Dockerfile for references to known packageable targets.""" dockerfile_info = await Get(DockerfileInfo, DockerfileInfoRequest(request.field_set.address)) targets = await Get(Targets, Addresses([request.field_set.address])) build_args = await Get(DockerBuildArgs, DockerBuildArgsRequest(targets.expect_single())) @@ -66,7 +66,7 @@ async def infer_docker_dependencies( # NB: There's no easy way of knowing the output path's default file ending as there could # be none or it could be dynamic. Instead of forcing clients to tell us, we just use all the # possible ones from the Dockerfile. In rare cases we over-infer, but it is relatively harmless. - # NB: The suffix gets an `or None` `pathlib` includes the ".", but `OutputPathField` doesnt + # NB: The suffix gets an `or None` `pathlib` includes the ".", but `OutputPathField` doesn't # expect it (if you give it "", it'll leave a trailing "."). 
possible_file_endings = {PurePath(path).suffix[1:] or None for path in maybe_output_paths} inferred_addresses = [] diff --git a/src/python/pants/backend/go/go_sources/analyze_package/read.go b/src/python/pants/backend/go/go_sources/analyze_package/read.go index 18e018c5e2c..070296cdbca 100644 --- a/src/python/pants/backend/go/go_sources/analyze_package/read.go +++ b/src/python/pants/backend/go/go_sources/analyze_package/read.go @@ -36,7 +36,7 @@ import ( "unicode/utf8" ) -// PANTS NOTE: These types were adaptd from https://github.com/bazelbuild/rules_go/blob/master/go/tools/builders/filter.go +// PANTS NOTE: These types were adapted from https://github.com/bazelbuild/rules_go/blob/master/go/tools/builders/filter.go type fileInfo struct { filename string diff --git a/src/python/pants/backend/go/util_rules/assembly.py b/src/python/pants/backend/go/util_rules/assembly.py index ccc0060afad..122ba926bb8 100644 --- a/src/python/pants/backend/go/util_rules/assembly.py +++ b/src/python/pants/backend/go/util_rules/assembly.py @@ -17,7 +17,7 @@ @dataclass(frozen=True) class GenerateAssemblySymabisRequest: - """Generate a `symabis` file with metadata about the assemnbly files for consumption by Go + """Generate a `symabis` file with metadata about the assembly files for consumption by Go compiler. See https://github.com/bazelbuild/rules_go/issues/1893. diff --git a/src/python/pants/backend/go/util_rules/build_pkg.py b/src/python/pants/backend/go/util_rules/build_pkg.py index c0f34277e73..ed76338088a 100644 --- a/src/python/pants/backend/go/util_rules/build_pkg.py +++ b/src/python/pants/backend/go/util_rules/build_pkg.py @@ -963,7 +963,7 @@ async def compute_compile_action_id( h = hashlib.sha256() - # All Go action IDs have the full version (as returned by `runtime.Version()` in the key. + # All Go action IDs have the full version (as returned by `runtime.Version()`) in the key. # See https://github.com/golang/go/blob/master/src/cmd/go/internal/cache/hash.go#L32-L46 h.update(goroot.full_version.encode()) diff --git a/src/python/pants/backend/go/util_rules/coverage_profile.py b/src/python/pants/backend/go/util_rules/coverage_profile.py index fa0b6820e6d..fd89ce01535 100644 --- a/src/python/pants/backend/go/util_rules/coverage_profile.py +++ b/src/python/pants/backend/go/util_rules/coverage_profile.py @@ -45,7 +45,7 @@ class GoCoverageBoundary: class GoCoverageProfile: """Parsed representation of a raw Go coverage profile for a single file. - A coverage outpyt file may report on multiple files which will be split into different instances + A coverage output file may report on multiple files which will be split into different instances of this dataclass. """ diff --git a/src/python/pants/backend/go/util_rules/coverage_test.py b/src/python/pants/backend/go/util_rules/coverage_test.py index 30faa2c1661..05bd9f3fce3 100644 --- a/src/python/pants/backend/go/util_rules/coverage_test.py +++ b/src/python/pants/backend/go/util_rules/coverage_test.py @@ -199,7 +199,7 @@ def run_test(tgt: Target) -> str: return raw_go_report - # Test that the `foo/adder` package is missing when it is **not** configured to be covered via + # Test that the `foo/adder` package is missing when it is **not** configured to be covered # via the `--go-test-coverage-include-patterns` option. 
tgt = rule_runner.get_target(Address("foo")) cover_report = run_test(tgt) diff --git a/src/python/pants/backend/go/util_rules/pkg_pattern.py b/src/python/pants/backend/go/util_rules/pkg_pattern.py index 2deaba5b268..980ccef5bef 100644 --- a/src/python/pants/backend/go/util_rules/pkg_pattern.py +++ b/src/python/pants/backend/go/util_rules/pkg_pattern.py @@ -47,8 +47,8 @@ def _match_pattern_internal(pattern: str, vendor_exclude: bool) -> Callable[[str # The strategy for the vendor exclusion is to change the unmatchable # vendor strings to a disallowed code point (vendorChar) and to use # "(anything but that codepoint)*" as the implementation of the ... wildcard. - # This is a bit complicated but the obvious alternative, - # namely a hand-written search like in most shell glob matchers, + # This is a bit complicated, but the obvious alternative, + # namely a handwritten search like in most shell glob matchers, # is too easy to make accidentally exponential. # Using package regexp guarantees linear-time matching. diff --git a/src/python/pants/backend/helm/dependency_inference/deployment.py b/src/python/pants/backend/helm/dependency_inference/deployment.py index 6b389ac9865..3979f52e317 100644 --- a/src/python/pants/backend/helm/dependency_inference/deployment.py +++ b/src/python/pants/backend/helm/dependency_inference/deployment.py @@ -86,7 +86,7 @@ async def analyse_deployment(request: AnalyseHelmDeploymentRequest) -> HelmDeplo if isinstance(entry, FileEntry) ) - # Build YAML index of Docker image refs for future processing during depedendecy inference or post-rendering. + # Build YAML index of Docker image refs for future processing during dependency inference or post-rendering. image_refs_index: MutableYamlIndex[str] = MutableYamlIndex() for manifest in parsed_manifests: for entry in manifest.found_image_refs: diff --git a/src/python/pants/backend/helm/resolve/artifacts.py b/src/python/pants/backend/helm/resolve/artifacts.py index 3a0a37f1fea..42110e6236a 100644 --- a/src/python/pants/backend/helm/resolve/artifacts.py +++ b/src/python/pants/backend/helm/resolve/artifacts.py @@ -148,8 +148,10 @@ def metadata(self) -> dict[str, Any] | None: @rule -def resolved_helm_artifact(artifact: HelmArtifact, subsytem: HelmSubsystem) -> ResolvedHelmArtifact: - remotes = subsytem.remotes() +def resolved_helm_artifact( + artifact: HelmArtifact, subsystem: HelmSubsystem +) -> ResolvedHelmArtifact: + remotes = subsystem.remotes() candidate_remotes = list(remotes.get(artifact.requirement.location.spec)) if candidate_remotes: diff --git a/src/python/pants/backend/helm/subsystems/helm.py b/src/python/pants/backend/helm/subsystems/helm.py index 96a48481bcd..0fec6956114 100644 --- a/src/python/pants/backend/helm/subsystems/helm.py +++ b/src/python/pants/backend/helm/subsystems/helm.py @@ -81,7 +81,7 @@ def __init__(self, args: Iterable[str], *, extra_help: str = "") -> None: A configured registry is marked as default either by setting `default = true` or with an alias of `"default"`. - Registries also participate in resolving third party Helm charts uplodaded to those registries. + Registries also participate in resolving third party Helm charts uploaded to those registries. 
""" ) diff --git a/src/python/pants/backend/helm/subsystems/k8s_parser_main.py b/src/python/pants/backend/helm/subsystems/k8s_parser_main.py index b6744c03a48..08d67db90a6 100644 --- a/src/python/pants/backend/helm/subsystems/k8s_parser_main.py +++ b/src/python/pants/backend/helm/subsystems/k8s_parser_main.py @@ -20,7 +20,7 @@ def main(args: list[str]): # Hikaru fails with a `RuntimeError` when it finds a K8S manifest for an # API version and kind that doesn't understand. # - # We exit the process early without giving any ouput. + # We exit the process early without giving any output. sys.exit(0) for idx, doc in enumerate(parsed_docs): diff --git a/src/python/pants/backend/helm/util_rules/renderer.py b/src/python/pants/backend/helm/util_rules/renderer.py index 6a912a92129..de6a1e8fcd2 100644 --- a/src/python/pants/backend/helm/util_rules/renderer.py +++ b/src/python/pants/backend/helm/util_rules/renderer.py @@ -97,7 +97,7 @@ class _HelmDeploymentProcessWrapper(EngineAwareParameter, EngineAwareReturnType) """Intermediate representation of a `HelmProcess` that will produce a fully rendered set of manifests from a given chart. - The encapsulated `process` will be side-effecting dependening on the `cmd` that was originally requested. + The encapsulated `process` will be side-effecting depending on the `cmd` that was originally requested. This is meant to only be used internally by this module. """ @@ -318,7 +318,7 @@ def maybe_escape_string_value(value: str) -> str: # If using a post-renderer we are only going to keep the process result cached in # memory to prevent storing in disk, either locally or remotely, secrets or other - # sensitive values that may been added in by the post-renderer. + # sensitive values that may have been added in by the post-renderer. process_cache = ( ProcessCacheScope.PER_RESTART_SUCCESSFUL if request.post_renderer diff --git a/src/python/pants/backend/helm/util_rules/tool.py b/src/python/pants/backend/helm/util_rules/tool.py index f40a1e14cc3..84a0551debe 100644 --- a/src/python/pants/backend/helm/util_rules/tool.py +++ b/src/python/pants/backend/helm/util_rules/tool.py @@ -84,7 +84,7 @@ class ExternalHelmPlugin(HelmPluginSubsystem, TemplatedExternalTool, metaclass=A """Represents the subsystem for a Helm plugin that needs to be downloaded from an external source. - For declaring an External Helm plugin, extend this class provinding a value of the + For declaring an External Helm plugin, extend this class providing a value of the `plugin_name` class attribute and implement the rest of it like you would do for any other `TemplatedExternalTool`. 
diff --git a/src/python/pants/backend/project_info/peek.py b/src/python/pants/backend/project_info/peek.py index 67249a7bc21..656c632746e 100644 --- a/src/python/pants/backend/project_info/peek.py +++ b/src/python/pants/backend/project_info/peek.py @@ -124,7 +124,7 @@ def render_json( class _PeekJsonEncoder(json.JSONEncoder): - """Allow us to serialize some commmonly found types in BUILD files.""" + """Allow us to serialize some commonly found types in BUILD files.""" def default(self, o): """Return a serializable object for o.""" diff --git a/src/python/pants/backend/python/goals/export.py b/src/python/pants/backend/python/goals/export.py index 5ace0a9c1f0..28daf5f916a 100644 --- a/src/python/pants/backend/python/goals/export.py +++ b/src/python/pants/backend/python/goals/export.py @@ -226,7 +226,7 @@ async def do_export( PostProcessingCommand(["rm", "-rf", tmpdir_under_digest_root]), ] - # Insert editable wheel post processing commands if needed. + # Insert editable wheel post-processing commands if needed. if req.editable_local_dists_digest is not None: # We need the snapshot to get the wheel file names which are something like: # - pkg_name-1.2.3-0.editable-py3-none-any.whl diff --git a/src/python/pants/backend/python/goals/lockfile.py b/src/python/pants/backend/python/goals/lockfile.py index d5a60142a33..db612f2a1f5 100644 --- a/src/python/pants/backend/python/goals/lockfile.py +++ b/src/python/pants/backend/python/goals/lockfile.py @@ -123,7 +123,7 @@ async def generate_lockfile( "--resolver-version", "pip-2020-resolver", # PEX files currently only run on Linux and Mac machines; so we hard code this - # limit on lock universaility to avoid issues locking due to irrelevant + # limit on lock universality to avoid issues locking due to irrelevant # Windows-only dependency issues. See this Pex issue that originated from a # Pants user issue presented in Slack: # https://github.com/pex-tool/pex/issues/1821 diff --git a/src/python/pants/backend/python/goals/publish.py b/src/python/pants/backend/python/goals/publish.py index 2a940c808d1..dce4a8c83ab 100644 --- a/src/python/pants/backend/python/goals/publish.py +++ b/src/python/pants/backend/python/goals/publish.py @@ -152,7 +152,7 @@ async def twine_upload( skip = f"(by `{request.field_set.skip_twine.alias}` on {request.field_set.address})" elif not request.field_set.repositories.value: # I'd rather have used the opt_out mechanism on the field set, but that gives no hint as to - # why the target was not applicable.. + # why the target was not applicable. skip = f"(no `{request.field_set.repositories.alias}` specified for {request.field_set.address})" if skip: diff --git a/src/python/pants/backend/python/lint/black/rules.py b/src/python/pants/backend/python/lint/black/rules.py index 1f05d0aa0d2..61cb652dbc7 100644 --- a/src/python/pants/backend/python/lint/black/rules.py +++ b/src/python/pants/backend/python/lint/black/rules.py @@ -74,7 +74,7 @@ async def partition_black( # Black requires 3.6+ but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6, and 3.7. # However, typed-ast does not understand 3.8+, so instead we must run Black with Python 3.8+ - # when relevant. We only do this if if <3.8 can't be used, as we don't want a loose requirement + # when relevant. We only do this if <3.8 can't be used, as we don't want a loose requirement # like `>=3.6` to result in requiring Python 3.8, which would error if 3.8 is not installed on # the machine. 
tool_interpreter_constraints = black.interpreter_constraints diff --git a/src/python/pants/backend/python/macros/pipenv_requirements_test.py b/src/python/pants/backend/python/macros/pipenv_requirements_test.py index de1065ac011..15cbf37c6c2 100644 --- a/src/python/pants/backend/python/macros/pipenv_requirements_test.py +++ b/src/python/pants/backend/python/macros/pipenv_requirements_test.py @@ -94,7 +94,7 @@ def test_pipfile_lock(rule_runner: RuleRunner) -> None: def test_pipfile_lockfile_dependency(rule_runner: RuleRunner) -> None: - """This tests that we adds a dependency on the lockfile for the resolve for each generated + """This tests that we add a dependency on the lockfile for the resolve for each generated python_requirement.""" rule_runner.set_options(["--python-enable-resolves"]) file_addr = Address("", target_name="reqs", relative_file_path="Pipfile.lock") diff --git a/src/python/pants/backend/python/packaging/pyoxidizer/config.py b/src/python/pants/backend/python/packaging/pyoxidizer/config.py index 9f2f3cee32b..e2219d0dd86 100644 --- a/src/python/pants/backend/python/packaging/pyoxidizer/config.py +++ b/src/python/pants/backend/python/packaging/pyoxidizer/config.py @@ -13,7 +13,7 @@ def make_exe(): policy = dist.make_python_packaging_policy() policy.extension_module_filter = "no-copyleft" - # Note: Adding this for pydanic and libs that have the "unable to load from memory" error + # Note: Adding this for pydantic and libs that have the "unable to load from memory" error # https://github.com/indygreg/PyOxidizer/issues/438 policy.resources_location_fallback = "filesystem-relative:lib" diff --git a/src/python/pants/backend/python/target_types_rules.py b/src/python/pants/backend/python/target_types_rules.py index a81c8f73e0e..12e3be025c3 100644 --- a/src/python/pants/backend/python/target_types_rules.py +++ b/src/python/pants/backend/python/target_types_rules.py @@ -399,7 +399,7 @@ async def resolve_python_distribution_entry_points( entry_point_str for is_target, _, _, entry_point_str in classified_entry_points if is_target ] - # Intermediate step, as Get(Targets) returns a deduplicated set.. which breaks in case of + # Intermediate step, as Get(Targets) returns a deduplicated set which breaks in case of # multiple input refs that maps to the same target. target_addresses = await Get( Addresses, diff --git a/src/python/pants/backend/python/typecheck/mypy/rules.py b/src/python/pants/backend/python/typecheck/mypy/rules.py index 753f2a6e5ce..e989476eb0f 100644 --- a/src/python/pants/backend/python/typecheck/mypy/rules.py +++ b/src/python/pants/backend/python/typecheck/mypy/rules.py @@ -98,7 +98,7 @@ async def _generate_argv( mypy_info = mypy_pex_info.find("mypy") assert mypy_info is not None if mypy_info.version > packaging.version.Version("0.700") and python_version is not None: - # Skip mtime checks because we don't propogate mtime when materialzing the sandbox, so the + # Skip mtime checks because we don't propagate mtime when materializing the sandbox, so the # mtime checks will always fail otherwise. 
args.append("--skip-cache-mtime-check") # See "__run_wrapper.sh" below for explanation diff --git a/src/python/pants/backend/python/util_rules/interpreter_constraints.py b/src/python/pants/backend/python/util_rules/interpreter_constraints.py index 99051fbb04e..0bde29692d5 100644 --- a/src/python/pants/backend/python/util_rules/interpreter_constraints.py +++ b/src/python/pants/backend/python/util_rules/interpreter_constraints.py @@ -184,7 +184,7 @@ def create_from_targets( NB: Because Python targets validate that they have ICs which are a subset of their dependencies, merging constraints like this is only necessary when you are _mixing_ code - which might not have any inter-dependencies, such as when you're merging un-related roots. + which might not have any interdependencies, such as when you're merging unrelated roots. """ fields = [ tgt[InterpreterConstraintsField] diff --git a/src/python/pants/backend/python/util_rules/local_dists_pep660.py b/src/python/pants/backend/python/util_rules/local_dists_pep660.py index 079787d100b..0b9cfb964a0 100644 --- a/src/python/pants/backend/python/util_rules/local_dists_pep660.py +++ b/src/python/pants/backend/python/util_rules/local_dists_pep660.py @@ -98,7 +98,7 @@ async def run_pep660_build( ) -> PEP660BuildResult: """Run our PEP 517 / PEP 660 wrapper script to generate an editable wheel. - The PEP 517 / PEP 660 wraper script is responsible for building the editable wheel. + The PEP 517 / PEP 660 wrapper script is responsible for building the editable wheel. The backend wrapper script, along with the commands that install the editable wheel, need to conform to the following specs so that Pants is a PEP 660 compliant frontend, a PEP 660 compliant backend, and that it builds a compliant wheel and install. diff --git a/src/python/pants/backend/python/util_rules/package_dists.py b/src/python/pants/backend/python/util_rules/package_dists.py index 392a485414a..800bc9301f0 100644 --- a/src/python/pants/backend/python/util_rules/package_dists.py +++ b/src/python/pants/backend/python/util_rules/package_dists.py @@ -151,7 +151,7 @@ class DependencyOwner: We need this type to prevent rule ambiguities when computing the list of targets owned by an ExportedTarget (which involves going from ExportedTarget -> dep -> owner (which is itself an - ExportedTarget) and checking if owner is the original ExportedTarget. + ExportedTarget) and checking if owner is the original ExportedTarget). """ exported_target: ExportedTarget @@ -324,7 +324,7 @@ def validate_commands(commands: tuple[str, ...]): ) ) # We don't allow publishing via setup.py, as we don't want the setup.py running rule, - # which is not a @goal_rule, to side-effect (plus, we'd need to ensure that publishing + # which is not a @goal_rule, to side effect (plus, we'd need to ensure that publishing # happens in dependency order). Note that `upload` and `register` were removed in # setuptools 42.0.0, in favor of Twine, but we still check for them in case the user modified # the default version used by our Setuptools subsystem. @@ -634,7 +634,7 @@ async def determine_finalized_setup_kwargs(request: GenerateSetupPyRequest) -> F ) # NB: We are careful to not overwrite these values, but we also don't expect them to have been - # set. The user must have have gone out of their way to use a `SetupKwargs` plugin, and to have + # set. The user must have gone out of their way to use a `SetupKwargs` plugin, and to have # specified `SetupKwargs(_allow_banned_keys=True)`. 
setup_kwargs.update( { @@ -974,7 +974,7 @@ def is_ownable_target(tgt: Target, union_membership: UnionMembership) -> bool: or tgt.get(SourcesField).can_generate(ResourceSourceField, union_membership) # We also check for generating sources so that dependencies on `python_sources(sources=[])` # is included. Those won't generate any `python_source` targets, but still can be - # dependended upon. + # depended upon. or tgt.has_field(PythonGeneratingSourcesBase) ) diff --git a/src/python/pants/backend/python/util_rules/pex_requirements.py b/src/python/pants/backend/python/util_rules/pex_requirements.py index f63f27f644e..2534c1095b2 100644 --- a/src/python/pants/backend/python/util_rules/pex_requirements.py +++ b/src/python/pants/backend/python/util_rules/pex_requirements.py @@ -264,7 +264,7 @@ class EntireLockfile: """A request to resolve the entire contents of a lockfile. This resolution mode is used in a few cases: - 1. for poetry or hand-written lockfiles (which do not support being natively subsetted the + 1. for poetry or handwritten lockfiles (which do not support being natively subsetted the way that a PEX lockfile can be), in order to build a repository-PEX to subset separately. 2. for tool lockfiles, which (regardless of format), need to resolve the entire lockfile content anyway. diff --git a/src/python/pants/backend/scala/compile/scalac_plugins.py b/src/python/pants/backend/scala/compile/scalac_plugins.py index 0b114354c11..2d098edaac4 100644 --- a/src/python/pants/backend/scala/compile/scalac_plugins.py +++ b/src/python/pants/backend/scala/compile/scalac_plugins.py @@ -120,7 +120,7 @@ async def _resolve_scalac_plugin_artifact( local_environment_name: ChosenLocalEnvironmentName, field_defaults: FieldDefaults, ) -> WrappedTarget: - """Helps resolving the actual artifact for a scalac plugin even in the scenario in which the + """Helps resolve the actual artifact for a scalac plugin even in the scenario in which the artifact has been declared as a scala_artifact and it has been parametrized (i.e. 
across multiple resolves for cross building).""" diff --git a/src/python/pants/backend/scala/dependency_inference/scala_parser_test.py b/src/python/pants/backend/scala/dependency_inference/scala_parser_test.py index b35f50bb1e3..144cb6c162d 100644 --- a/src/python/pants/backend/scala/dependency_inference/scala_parser_test.py +++ b/src/python/pants/backend/scala/dependency_inference/scala_parser_test.py @@ -687,7 +687,7 @@ def by_name(symbol: ScalaProvidedSymbol) -> str: ) -def test_type_constaint(rule_runner: RuleRunner) -> None: +def test_type_constraint(rule_runner: RuleRunner) -> None: analysis = _analyze( rule_runner, textwrap.dedent( diff --git a/src/python/pants/backend/shell/util_rules/shell_command_test.py b/src/python/pants/backend/shell/util_rules/shell_command_test.py index 2380ac8b175..87a9d5ecf73 100644 --- a/src/python/pants/backend/shell/util_rules/shell_command_test.py +++ b/src/python/pants/backend/shell/util_rules/shell_command_test.py @@ -469,7 +469,7 @@ def test_execution_dependencies(caplog, rule_runner: RuleRunner) -> None: workdir="/", ) - # Succeeds becuase `a1` and `a2` are requested directly and `output_dependencies` + # Succeeds because `a1` and `a2` are requested directly and `output_dependencies` # are made available at runtime shell_command( name="expect_success_2", diff --git a/src/python/pants/backend/terraform/goals/tailor.py b/src/python/pants/backend/terraform/goals/tailor.py index 683a1e2c7fb..b89175ed899 100644 --- a/src/python/pants/backend/terraform/goals/tailor.py +++ b/src/python/pants/backend/terraform/goals/tailor.py @@ -33,7 +33,7 @@ class PutativeTerraformTargetsRequest(PutativeTargetsRequest): @rule(level=LogLevel.DEBUG, desc="Determine candidate Terraform targets to create") -async def find_putative_terrform_module_targets( +async def find_putative_terraform_module_targets( request: PutativeTerraformTargetsRequest, terraform: TerraformTool, all_owned_sources: AllOwnedSources, diff --git a/src/python/pants/backend/terraform/hcl2_parser.py b/src/python/pants/backend/terraform/hcl2_parser.py index b4ead0a042c..88d203881de 100644 --- a/src/python/pants/backend/terraform/hcl2_parser.py +++ b/src/python/pants/backend/terraform/hcl2_parser.py @@ -6,7 +6,7 @@ from typing import Set # -# Note: This file is used as an pex entry point in the execution sandbox. +# Note: This file is used as a pex entry point in the execution sandbox. # diff --git a/src/python/pants/base/build_root.py b/src/python/pants/base/build_root.py index 3d4f7740a33..86c206a2cee 100644 --- a/src/python/pants/base/build_root.py +++ b/src/python/pants/base/build_root.py @@ -12,9 +12,9 @@ class BuildRoot(metaclass=SingletonMetaclass): """Represents the global workspace build root. - By default a Pants workspace is defined by a root directory where one of multiple sentinel files - reside, such as `pants.toml` or `BUILD_ROOT`. This path can also be manipulated through this - interface for re-location of the build root in tests. + By default, a Pants workspace is defined by a root directory where one of multiple sentinel + files resides, such as `pants.toml` or `BUILD_ROOT`. This path can also be manipulated through + this interface for re-location of the build root in tests. 
""" sentinel_files = ["pants.toml", "pants", "BUILDROOT", "BUILD_ROOT"] diff --git a/src/python/pants/base/exception_sink.py b/src/python/pants/base/exception_sink.py index e2a8861d4aa..665cdc4f475 100644 --- a/src/python/pants/base/exception_sink.py +++ b/src/python/pants/base/exception_sink.py @@ -30,7 +30,7 @@ class SignalHandler: pantsd is enabled. The default behavior is to exit "gracefully" by leaving a detailed log of which signal was received, then exiting with failure. - Note that the terminal will convert a ctrl-c from the user into a SIGINT. + Note that the terminal converts a ctrl-c from the user into a SIGINT. """ @property @@ -323,7 +323,7 @@ def ignoring_sigint(cls) -> Iterator[None]: We currently only use this to implement disabling catching SIGINT while an InteractiveProcess is running (where we want that process to catch it), and only when pantsd - is not enabled (if pantsd is enabled, the client will actually catch SIGINT and forward it + is not enabled. If pantsd is enabled, the client will actually catch SIGINT and forward it to the server, so we don't want the server process to ignore it. """ diff --git a/src/python/pants/base/specs.py b/src/python/pants/base/specs.py index db9450f2b69..e6bd7c5b74f 100644 --- a/src/python/pants/base/specs.py +++ b/src/python/pants/base/specs.py @@ -487,7 +487,7 @@ class Specs: The `ignores` will filter out all relevant `includes`. If your rule does not need to consider includes vs. ignores, e.g. to find all targets in a - directory, you can directly use `RawSpecs`. + directory, you can directly use `RawSpecs`. """ includes: RawSpecs diff --git a/src/python/pants/base/specs_integration_test.py b/src/python/pants/base/specs_integration_test.py index 2b208b0e84a..961061caca8 100644 --- a/src/python/pants/base/specs_integration_test.py +++ b/src/python/pants/base/specs_integration_test.py @@ -123,7 +123,7 @@ def test_sibling_addresses() -> None: @skip_unless_python39_present def test_descendent_addresses() -> None: - """Semantics are the same as sibling addreses, only recursive.""" + """Semantics are the same as sibling addresses, only recursive.""" with setup_tmpdir(SOURCES) as tmpdir: assert run(["list", f"{tmpdir}/py::"]).stdout.splitlines() == [ f"{tmpdir}/py:bin", diff --git a/src/python/pants/bin/local_pants_runner.py b/src/python/pants/bin/local_pants_runner.py index e8a8d901eb8..c7e5550d464 100644 --- a/src/python/pants/bin/local_pants_runner.py +++ b/src/python/pants/bin/local_pants_runner.py @@ -44,7 +44,7 @@ class LocalPantsRunner: """Handles a single pants invocation running in the process-local context. LocalPantsRunner is used both for single runs of Pants without `pantsd` (where a Scheduler is - created at the beginning of the run and destroyed at the end, and also for runs of Pants in + created at the beginning of the run and destroyed at the end), and also for runs of Pants in `pantsd` (where a Scheduler is borrowed from `pantsd` creation time, and left running at the end). """ diff --git a/src/python/pants/bin/pants_loader.py b/src/python/pants/bin/pants_loader.py index 25834e1ebe2..ebec2289225 100644 --- a/src/python/pants/bin/pants_loader.py +++ b/src/python/pants/bin/pants_loader.py @@ -22,7 +22,7 @@ class PantsLoader: """Initial entrypoint for pants. - Executes a pants_runner by default, or executs a pantsd-specific entrypoint. + Executes a pants_runner by default, or executes a pantsd-specific entrypoint. 
""" @staticmethod diff --git a/src/python/pants/bin/remote_pants_runner.py b/src/python/pants/bin/remote_pants_runner.py index 69ae17d6d87..4c9ba41d227 100644 --- a/src/python/pants/bin/remote_pants_runner.py +++ b/src/python/pants/bin/remote_pants_runner.py @@ -193,7 +193,7 @@ def strict_utf8(s: str) -> str: logger.warning(f"Pantsd was unresponsive on port {port}, retrying.") time.sleep(1) - # One possible cause of the daemon being non-responsive during an attempt might be if a + # One possible cause of the daemon being non-responsive during an attempt might be if # another lifecycle operation is happening concurrently (incl teardown). To account for # this, we won't begin attempting restarts until at least 1 attempt has passed. if attempt > 1: diff --git a/src/python/pants/bsp/protocol.py b/src/python/pants/bsp/protocol.py index 0c6464fba01..a7b1a1f9627 100644 --- a/src/python/pants/bsp/protocol.py +++ b/src/python/pants/bsp/protocol.py @@ -66,7 +66,7 @@ def _make_error_future(exc: Exception) -> Future: class BSPConnection: _INITIALIZE_METHOD_NAME = "build/initialize" _SHUTDOWN_METHOD_NAME = "build/shutdown" - _EXIT_NOTIFCATION_NAME = "build/exit" + _EXIT_NOTIFICATION_NAME = "build/exit" def __init__( self, @@ -103,7 +103,7 @@ def _send_outbound_message(self, msg): _logger.info(f"_send_outbound_message: msg={msg}") self._outbound.write(msg) - # TODO: Figure out how to run this on the `Endpoint`'s thread pool by returing a callable. For now, we + # TODO: Figure out how to run this on the `Endpoint`'s thread pool by returning a callable. For now, we # need to return errors as futures given that `Endpoint` only handles exceptions returned that way versus using a try ... except block. def _handle_inbound_message(self, *, method_name: str, params: Any): # If the connection is not yet initialized and this is not the initialization request, BSP requires @@ -129,7 +129,7 @@ def _handle_inbound_message(self, *, method_name: str, params: Any): # Return no-op success for the `build/shutdown` method. This doesn't actually cause the server to # exit. That will occur once the client sends the `build/exit` notification. return None - elif method_name == self._EXIT_NOTIFCATION_NAME: + elif method_name == self._EXIT_NOTIFICATION_NAME: # The `build/exit` notification directs the BSP server to immediately exit. # The read-dispatch loop will exit once it notices that the inbound handle is closed. So close the # inbound handle (and outbound handle for completeness) and then return to the dispatch loop diff --git a/src/python/pants/bsp/spec/base.py b/src/python/pants/bsp/spec/base.py index c78a955f08e..9ffc0b70727 100644 --- a/src/python/pants/bsp/spec/base.py +++ b/src/python/pants/bsp/spec/base.py @@ -82,7 +82,7 @@ class BuildTarget: # The target’s unique identifier id: BuildTargetIdentifier - # A human readable name for this target. + # A human-readable name for this target. # May be presented in the user interface. # Should be unique if possible. # The id.uri is used if None. diff --git a/src/python/pants/bsp/spec/lifecycle.py b/src/python/pants/bsp/spec/lifecycle.py index 52ed6e1beea..bc26e120dd7 100644 --- a/src/python/pants/bsp/spec/lifecycle.py +++ b/src/python/pants/bsp/spec/lifecycle.py @@ -12,7 +12,7 @@ @dataclass(frozen=True) class BuildClientCapabilities: # The languages that this client supports. - # The ID strings for each language is defined in the LSP. + # The ID strings for each language are defined in the LSP. 
# The server must never respond with build targets for other # languages than those that appear in this list. language_ids: tuple[str, ...] @@ -149,7 +149,7 @@ class BuildServerCapabilities: # via method buildTarget/dependencySources dependency_sources_provider: bool | None - # The server cam provide a list of dependency modules (libraries with meta information) + # The server can provide a list of dependency modules (libraries with meta information) # via method buildTarget/dependencyModules dependency_modules_provider: bool | None diff --git a/src/python/pants/conftest.py b/src/python/pants/conftest.py index 4068093b75a..e2b13035f33 100644 --- a/src/python/pants/conftest.py +++ b/src/python/pants/conftest.py @@ -10,7 +10,7 @@ def dedicated_target_fields(): """Ensures we follow our convention of dedicated source and dependencies field per-target. - This help ensure that plugin authors can do dependency inference on _specific_ field types, and + This helps ensure that plugin authors can do dependency inference on _specific_ field types, and not have to filter targets using generic field types. Note that this can't help us if a target type should use an _even more specialized_ dependencies @@ -63,7 +63,7 @@ def new__init__(self, *args, **kwargs): assert expected == actual else: for attrname in self.__slots__: - # Only way to validate it was initialized is to trigger the descriptor + # The only way to validate it was initialized is to trigger the descriptor. getattr(self, attrname) setattr(dataclass_cls, "__init__", new__init__) diff --git a/src/python/pants/core/goals/fix.py b/src/python/pants/core/goals/fix.py index 740b2f56934..292d922ea08 100644 --- a/src/python/pants/core/goals/fix.py +++ b/src/python/pants/core/goals/fix.py @@ -106,7 +106,7 @@ def message(self) -> str | None: snapshot_diff.changed_files, snapshot_diff.their_unique_files, # added files snapshot_diff.our_unique_files, # removed files - # NB: there is no rename detection, so a renames will list + # NB: there is no rename detection, so a rename will list # both the old filename (removed) and the new filename (added). ) ) diff --git a/src/python/pants/core/goals/fix_test.py b/src/python/pants/core/goals/fix_test.py index 5e521a2db20..7f7cba3b333 100644 --- a/src/python/pants/core/goals/fix_test.py +++ b/src/python/pants/core/goals/fix_test.py @@ -396,7 +396,7 @@ def test_fixers_first() -> None: stderr = run_fix(rule_runner, target_specs=["::"]) - # NB Since both rules have the same body, if the fixer runs first, it'll make changes. Then the + # NB: Since both rules have the same body, if the fixer runs first, it'll make changes. Then the # formatter will have nothing to change. assert stderr == dedent( """\ diff --git a/src/python/pants/core/goals/generate_lockfiles.py b/src/python/pants/core/goals/generate_lockfiles.py index a7cd490e74b..2c0ca5a90ad 100644 --- a/src/python/pants/core/goals/generate_lockfiles.py +++ b/src/python/pants/core/goals/generate_lockfiles.py @@ -131,7 +131,7 @@ class RequestedUserResolveNames(Collection[str]): class PackageVersion(Protocol): - """Protocol for backend specific implementations, to support language ecosystem specific version + """Protocol for backend specific implementations, to support language-ecosystem-specific version formats and sort rules. May support the `int` properties `major`, `minor` and `micro` to color diff based on semantic @@ -546,7 +546,7 @@ async def generate_lockfiles_goal( ) # Execute the actual lockfile generation in each request's environment. 
- # Currently, since resolves specify a single filename for output, we pick a resonable + # Currently, since resolves specify a single filename for output, we pick a reasonable # environment to execute the request in. Currently we warn if multiple environments are # specified. all_requests: Iterator[GenerateLockfile] = itertools.chain( diff --git a/src/python/pants/core/goals/generate_lockfiles_test.py b/src/python/pants/core/goals/generate_lockfiles_test.py index 0cb67cd8198..a80c31a8250 100644 --- a/src/python/pants/core/goals/generate_lockfiles_test.py +++ b/src/python/pants/core/goals/generate_lockfiles_test.py @@ -116,7 +116,7 @@ class AmbiguousTool(GenerateToolLockfileSentinel): set(), ) == ([Lang1Requested(["ambiguous"])], []) - # Error if same resolve name used for multiple user lockfiles. + # Error if the same resolve name is used for multiple user lockfiles. with pytest.raises(AmbiguousResolveNamesError): determine_resolves_to_generate( [ diff --git a/src/python/pants/core/goals/lint.py b/src/python/pants/core/goals/lint.py index 9545d41c558..9173d6e2d00 100644 --- a/src/python/pants/core/goals/lint.py +++ b/src/python/pants/core/goals/lint.py @@ -324,7 +324,7 @@ async def _get_partitions_by_request_type( file_partitioners: Iterable[type[_FilePartitioner]], subsystem: _MultiToolGoalSubsystem, specs: Specs, - # NB: Because the rule parser code will collect `Get`s from caller's scope, these allows the + # NB: Because the rule parser code will collect `Get`s from caller's scope, these allow the # caller to customize the specific `Get`. make_targets_partition_request_get: Callable[[_TargetPartitioner], Get[Partitions]], make_files_partition_request_get: Callable[[_FilePartitioner], Get[Partitions]], diff --git a/src/python/pants/core/goals/publish.py b/src/python/pants/core/goals/publish.py index d6692de9f5f..8bb009d913a 100644 --- a/src/python/pants/core/goals/publish.py +++ b/src/python/pants/core/goals/publish.py @@ -6,7 +6,7 @@ publish the artifacts. The publish protocol consists of defining two union members and one rule, returning the processes to -run. See the doc for the corresponding classses in this module for details on the classes to define. +run. See the doc for the corresponding classes in this module for details on the classes to define. Example rule: @@ -124,7 +124,7 @@ class PublishPackages: The `names` should list all artifacts being published by the `process` command. The `process` may be `None`, indicating that it will not be published. This will be logged as - `skipped`. If the process returns a non zero exit code, it will be logged as `failed`. + `skipped`. If the process returns a non-zero exit code, it will be logged as `failed`. The `description` may be a reason explaining why the publish was skipped, or identifying which repository the artifacts are published to. diff --git a/src/python/pants/core/goals/run.py b/src/python/pants/core/goals/run.py index 20940091c37..7e5a4f64c6a 100644 --- a/src/python/pants/core/goals/run.py +++ b/src/python/pants/core/goals/run.py @@ -44,8 +44,8 @@ class RunInSandboxBehavior(Enum): - """Defines the behavhior of rules that act on a `RunFieldSet` subclass with regards to use in - the sandbox. + """Defines the behavior of rules that act on a `RunFieldSet` subclass with regards to use in the + sandbox. This is used to automatically generate rules used to fulfill `experimental_run_in_sandbox` targets. 
@@ -53,7 +53,7 @@ class RunInSandboxBehavior(Enum): The behaviors are as follows: * `RUN_REQUEST_HERMETIC`: Use the existing `RunRequest`-generating rule, and enable cacheing. - Use this if you are confident the behaviour of the rule relies only on state that is + Use this if you are confident the behavior of the rule relies only on state that is captured by pants (e.g. binary paths are found using `EnvironmentVarsRequest`), and that the rule only refers to files in the sandbox. * `RUN_REQUEST_NOT_HERMETIC`: Use the existing `RunRequest`-generating rule, and do not @@ -231,7 +231,7 @@ async def run( run_subsystem: RunSubsystem, debug_adapter: DebugAdapterSubsystem, global_options: GlobalOptions, - workspace: Workspace, # Needed to enable sideeffecting. + workspace: Workspace, # Needed to enable side-effecting. complete_env: CompleteEnvironmentVars, ) -> Run: field_set, target = await _find_what_to_run("the `run` goal") diff --git a/src/python/pants/core/goals/tailor.py b/src/python/pants/core/goals/tailor.py index 45fc2fb4105..16da16fb1cb 100644 --- a/src/python/pants/core/goals/tailor.py +++ b/src/python/pants/core/goals/tailor.py @@ -106,7 +106,7 @@ class PutativeTarget: name: str type_alias: str - # The sources that triggered creating of this putative target. + # The sources that triggered creating this putative target. # The putative target will own these sources, but may also glob over other sources. # If the putative target does not have a `sources` field, then this value must be the # empty tuple. @@ -208,7 +208,7 @@ def address(self) -> Address: def realias(self, new_alias: str | None) -> PutativeTarget: """A copy of this object with the alias replaced to the given alias. - Returns this object if the alias is None or is identical to this objects existing alias. + Returns this object if the alias is None or is identical to this object's existing alias. """ return ( self diff --git a/src/python/pants/core/goals/test.py b/src/python/pants/core/goals/test.py index d5091fe85fc..3904e1af02c 100644 --- a/src/python/pants/core/goals/test.py +++ b/src/python/pants/core/goals/test.py @@ -83,7 +83,7 @@ class TestResult(EngineAwareReturnType): addresses: tuple[Address, ...] output_setting: ShowOutput # A None result_metadata indicates a backend that performs its own test discovery/selection - # and either discovered no tests, or encounted an error, such as a compilation error, in + # and either discovered no tests, or encountered an error, such as a compilation error, in # the attempt. result_metadata: ProcessResultMetadata | None # TODO: Merge elapsed MS of all subproceses partition_description: str | None = None diff --git a/src/python/pants/core/target_types.py b/src/python/pants/core/target_types.py index ebb557635c1..c12f404bb2f 100644 --- a/src/python/pants/core/target_types.py +++ b/src/python/pants/core/target_types.py @@ -94,7 +94,7 @@ def compute_value( # type: ignore[override] address: Address, ) -> Optional[Union[str, per_platform[str]]]: if isinstance(raw_value, per_platform): - # NOTE: Ensure the values are homogenous + # NOTE: Ensure the values are homogeneous raw_value.check_types(str) return raw_value @@ -111,7 +111,7 @@ async def my_rule(..., platform: Platform) -> ...: ... 
``` - NOTE: Support for this object should be heavily weighed, as it would be innaproriate to use in + NOTE: Support for this object should be heavily weighed, as it would be inappropriate to use in certain contexts (such as the `source` field in a `foo_source` target, where the intent is to support differing source files based on platform. The result would be that dependency inference (and therefore the dependencies field) wouldn't be knowable on the host, which is not something diff --git a/src/python/pants/core/util_rules/adhoc_process_support.py b/src/python/pants/core/util_rules/adhoc_process_support.py index 58d2e2f0a67..1df0d2d2b0f 100644 --- a/src/python/pants/core/util_rules/adhoc_process_support.py +++ b/src/python/pants/core/util_rules/adhoc_process_support.py @@ -169,7 +169,7 @@ async def merge_extra_sandbox_contents(request: MergeExtraSandboxContents) -> Ex @rule -async def add_extra_contents_to_prcess(request: AddExtraSandboxContentsToProcess) -> Process: +async def add_extra_contents_to_process(request: AddExtraSandboxContentsToProcess) -> Process: proc = request.process extras = request.contents new_digest = await Get( diff --git a/src/python/pants/core/util_rules/archive.py b/src/python/pants/core/util_rules/archive.py index 3485583a3bf..781d004a3f9 100644 --- a/src/python/pants/core/util_rules/archive.py +++ b/src/python/pants/core/util_rules/archive.py @@ -86,7 +86,7 @@ async def create_archive( # `tar` expects to find a couple binaries like `gzip` and `xz` by looking on the PATH. env = {"PATH": os.pathsep.join(system_binaries_environment.system_binary_paths)} - # `tar` requires that the output filename's parent directory exists,so if the caller + # `tar` requires that the output filename's parent directory exists, so if the caller # wants the output in a directory we explicitly create it here. # We have to guard this path as the Rust code will crash if we give it empty paths. output_dir = os.path.dirname(request.output_filename) diff --git a/src/python/pants/core/util_rules/archive_test.py b/src/python/pants/core/util_rules/archive_test.py index b3e0f7799f8..990c93ba360 100644 --- a/src/python/pants/core/util_rules/archive_test.py +++ b/src/python/pants/core/util_rules/archive_test.py @@ -176,7 +176,7 @@ def test_create_tar_archive_in_root_dir(rule_runner: RuleRunner, format: Archive The specific requirements of creating a tar led to a situation where the CreateArchive code assumed the output file had a directory component, attempting to create a directory called "" if this assumption didn't hold. In 2.14 creating the "" directory became an error, which meant - CreateArchive broke. This guards against that break reoccuring. + CreateArchive broke. This guards against that break reoccurring. Issue: https://github.com/pantsbuild/pants/issues/17545 """ diff --git a/src/python/pants/core/util_rules/asdf.py b/src/python/pants/core/util_rules/asdf.py index d18678cf293..22ba0b7a113 100644 --- a/src/python/pants/core/util_rules/asdf.py +++ b/src/python/pants/core/util_rules/asdf.py @@ -233,7 +233,7 @@ async def _resolve_asdf_tool_paths( return tuple(asdf_paths) -# TODO: This rule is marked uncacheable because it directly accsses the filesystem to examine ASDF configuration. +# TODO: This rule is marked uncacheable because it directly accesses the filesystem to examine ASDF configuration. # See https://github.com/pantsbuild/pants/issues/10842 for potential future support for capturing from absolute # paths that could allow this rule to be cached. 
@_uncacheable_rule diff --git a/src/python/pants/core/util_rules/system_binaries.py b/src/python/pants/core/util_rules/system_binaries.py index 770ae1ab87c..71f8ef4b990 100644 --- a/src/python/pants/core/util_rules/system_binaries.py +++ b/src/python/pants/core/util_rules/system_binaries.py @@ -250,7 +250,7 @@ def for_paths( @dataclass(frozen=True) class BinaryShims: - """The shims created for a BinaryShimsRequest is placed in `bin_directory` of the `digest`. + """The shims created for a BinaryShimsRequest are placed in `bin_directory` of the `digest`. The purpose of these shims is so that a Process may be executed with `immutable_input_digests` provided to the `Process`, and `path_component` included in its `PATH` environment variable. @@ -637,7 +637,7 @@ async def find_tar( async def find_cat(system_binaries: SystemBinariesSubsystem.EnvironmentAware) -> CatBinary: request = BinaryPathRequest(binary_name="cat", search_path=system_binaries.system_binary_paths) paths = await Get(BinaryPaths, BinaryPathRequest, request) - first_path = paths.first_path_or_raise(request, rationale="outputing content from files") + first_path = paths.first_path_or_raise(request, rationale="outputting content from files") return CatBinary(first_path.path, first_path.fingerprint) @@ -731,7 +731,7 @@ async def find_readlink( binary_name="readlink", search_path=system_binaries.system_binary_paths ) paths = await Get(BinaryPaths, BinaryPathRequest, request) - first_path = paths.first_path_or_raise(request, rationale="defererence symlinks") + first_path = paths.first_path_or_raise(request, rationale="dereference symlinks") return ReadlinkBinary(first_path.path, first_path.fingerprint) diff --git a/src/python/pants/engine/console.py b/src/python/pants/engine/console.py index 411b7b269aa..8f65cee31f0 100644 --- a/src/python/pants/engine/console.py +++ b/src/python/pants/engine/console.py @@ -14,7 +14,7 @@ class Console(SideEffecting): """Class responsible for writing text to the console while Pants is running. - A SchedulerSession should always be set in production usage, in order to track side-effects, and + A SchedulerSession should always be set in production usage, in order to track side effects, and tear down any running UI before stdio is rendered. """ diff --git a/src/python/pants/engine/engine_aware.py b/src/python/pants/engine/engine_aware.py index 3291c098f09..6f063fecefe 100644 --- a/src/python/pants/engine/engine_aware.py +++ b/src/python/pants/engine/engine_aware.py @@ -67,7 +67,7 @@ def cacheable(self) -> bool: """Allows a return type to be conditionally marked uncacheable. An uncacheable value is recomputed in each Session: this can be useful if the level or - message should be rendered as sideeffects in each Session. + message should be rendered as side effects in each Session. """ return True @@ -93,9 +93,9 @@ class SideEffecting(ABC): """Marks a class as providing side-effecting APIs, which are handled specially in @rules. Implementers of SideEffecting classes should ensure that `def side_effected` is called before - the class causes side-effects. + the class causes side effects. - Note that logging is _not_ considered to be a side-effect, but other types of output to stdio + Note that logging is _not_ considered to be a side effect, but other types of output to stdio are. 
""" diff --git a/src/python/pants/engine/environment_test.py b/src/python/pants/engine/environment_test.py index ab1d7e2a8d3..8ffd3e67769 100644 --- a/src/python/pants/engine/environment_test.py +++ b/src/python/pants/engine/environment_test.py @@ -11,7 +11,7 @@ @pytest.mark.parametrize( "input_strs, expected", [ - # Test explicit variable and variable read from Pants' enivronment. + # Test explicit variable and variable read from Pants' environment. (["A=unrelated", "B"], {"A": "unrelated", "B": "b"}), # Test multi-word string. (["A=unrelated", "C=multi word"], {"A": "unrelated", "C": "multi word"}), diff --git a/src/python/pants/engine/fs.py b/src/python/pants/engine/fs.py index d0bbba20087..95fcaa4ebc0 100644 --- a/src/python/pants/engine/fs.py +++ b/src/python/pants/engine/fs.py @@ -300,7 +300,7 @@ def write_digest( You should not use this in a `for` loop due to slow performance. Instead, call `await Get(Digest, MergeDigests)` beforehand. - As an advanced usecase, if the digest is known to be written to a temporary or idempotent + As an advanced use-case, if the digest is known to be written to a temporary or idempotent location, side_effecting=False may be passed to avoid tracking this write as a side effect. """ if side_effecting: diff --git a/src/python/pants/engine/fs_test.py b/src/python/pants/engine/fs_test.py index b4d5efa667c..801ab8c1500 100644 --- a/src/python/pants/engine/fs_test.py +++ b/src/python/pants/engine/fs_test.py @@ -1316,7 +1316,7 @@ def read_file() -> str: def test_invalidated_after_parent_deletion(rule_runner: RuleRunner) -> None: - """Test that FileContent is invalidated after deleting parent directory.""" + """Test that FileContent is invalidated after deleting the parent directory.""" setup_fs_test_tar(rule_runner) def read_file() -> Optional[str]: diff --git a/src/python/pants/engine/goal.py b/src/python/pants/engine/goal.py index a8bd026ff48..64285b90a22 100644 --- a/src/python/pants/engine/goal.py +++ b/src/python/pants/engine/goal.py @@ -79,7 +79,7 @@ class List(Goal): ``` Since `@goal_rules` always run in order to produce side effects (generally: console output), - they are not cacheable, and the `Goal` product of a `@goal_rule` contains only a exit_code + they are not cacheable, and the `Goal` product of a `@goal_rule` contains only an exit_code value to indicate whether the rule exited cleanly. """ diff --git a/src/python/pants/engine/internals/build_files_test.py b/src/python/pants/engine/internals/build_files_test.py index 1ad6de651f6..ba39938c68c 100644 --- a/src/python/pants/engine/internals/build_files_test.py +++ b/src/python/pants/engine/internals/build_files_test.py @@ -672,9 +672,9 @@ def assert_bfa_resolved(address: Address) -> None: def test_build_files_share_globals() -> None: """Test that a macro in a prelude can reference another macro in another prelude. - At some point a change was made to separate the globals/locals dict (uninentional) which has the - unintended side-effect of having the `__globals__` of a macro not contain references to every - other symbol in every other prelude. + At some point a change was made to separate the globals/locals dict (unintentional) which has + the unintended side effect of having the `__globals__` of a macro not contain references to + every other symbol in every other prelude. 
""" symbols = run_rule_with_mocks( diff --git a/src/python/pants/engine/internals/scheduler.py b/src/python/pants/engine/internals/scheduler.py index 8f3145ee8c3..d531ea91689 100644 --- a/src/python/pants/engine/internals/scheduler.py +++ b/src/python/pants/engine/internals/scheduler.py @@ -210,7 +210,7 @@ def __init__( lease_time_millis=LOCAL_STORE_LEASE_TIME_SECS * 1000, shard_count=local_store_options.shard_count, ) - exec_stategy_opts = PyExecutionStrategyOptions( + exec_strategy_opts = PyExecutionStrategyOptions( local_cache=execution_options.local_cache, remote_cache_read=execution_options.remote_cache_read, remote_cache_write=execution_options.remote_cache_write, @@ -236,7 +236,7 @@ def __init__( watch_filesystem, remoting_options, py_local_store_options, - exec_stategy_opts, + exec_strategy_opts, ca_certs_path, ) diff --git a/src/python/pants/engine/internals/selectors.py b/src/python/pants/engine/internals/selectors.py index 12104d83c94..dc406df65e4 100644 --- a/src/python/pants/engine/internals/selectors.py +++ b/src/python/pants/engine/internals/selectors.py @@ -128,7 +128,7 @@ def __await__( class Effect(Generic[_Output], Awaitable[_Output]): """Asynchronous generator API for types which are SideEffecting. - Unlike `Get`s, `Effect`s can cause side-effects (writing files to the workspace, publishing + Unlike `Get`s, `Effect`s can cause side effects (writing files to the workspace, publishing things, printing to the console), and so they may only be used in `@goal_rule`s. See Get for more information on supported syntaxes. @@ -550,7 +550,7 @@ def render_arg(arg: Any) -> str | None: return repr(arg) return repr(arg) - likely_args_exlicitly_passed = tuple( + likely_args_explicitly_passed = tuple( reversed( [ render_arg(arg) @@ -558,12 +558,12 @@ def render_arg(arg: Any) -> str | None: ] ) ) - if any(arg is None for arg in likely_args_exlicitly_passed): + if any(arg is None for arg in likely_args_explicitly_passed): raise ValueError( softwrap( f""" Unexpected MultiGet None arguments: {', '.join( - map(str, likely_args_exlicitly_passed) + map(str, likely_args_explicitly_passed) )} When constructing a MultiGet from individual Gets, all leading arguments must be @@ -575,7 +575,7 @@ def render_arg(arg: Any) -> str | None: raise TypeError( softwrap( f""" - Unexpected MultiGet argument types: {', '.join(map(str, likely_args_exlicitly_passed))} + Unexpected MultiGet argument types: {', '.join(map(str, likely_args_explicitly_passed))} A MultiGet can be constructed in two ways: 1. MultiGet(Iterable[Get[T]]) -> Tuple[T] diff --git a/src/python/pants/engine/process.py b/src/python/pants/engine/process.py index 1fab9b033b2..882843a8825 100644 --- a/src/python/pants/engine/process.py +++ b/src/python/pants/engine/process.py @@ -230,7 +230,7 @@ def platform(self) -> Platform: def source(self, current_run_id: RunId) -> Source: """Given the current run_id, return the calculated "source" of the ProcessResult. - If a ProcessResult is consumed in any run_id other than the one it was created in, the its + If a ProcessResult is consumed in any run_id other than the one it was created in, the source implicitly becomes memoization, since the result was re-used in a new run without being recreated. 
""" diff --git a/src/python/pants/engine/rules_test.py b/src/python/pants/engine/rules_test.py index bfb3edf9d2c..f6230bbfc7c 100644 --- a/src/python/pants/engine/rules_test.py +++ b/src/python/pants/engine/rules_test.py @@ -473,8 +473,8 @@ def a_from_b(b: B) -> A: @pytest.mark.skip(reason="TODO(#10649): figure out if this tests is still relevant.") @pytest.mark.no_error_if_skipped def test_not_fulfillable_duplicated_dependency(self) -> None: - # If a rule depends on another rule+subject in two ways, and one of them is unfulfillable - # Only the unfulfillable one should be in the errors. + # If a rule depends on another rule+subject in two ways, and one of them is unfulfillable, + # only the unfulfillable one should be in the errors. @rule def a_from_c(c: C) -> A: diff --git a/src/python/pants/engine/target.py b/src/python/pants/engine/target.py index c4de1b598f8..02cd173b438 100644 --- a/src/python/pants/engine/target.py +++ b/src/python/pants/engine/target.py @@ -198,7 +198,7 @@ class FieldDefaults: TODO: This is to work around the fact that Field value defaulting cannot have arbitrary subsystem requirements, and so e.g. `JvmResolveField` and `PythonResolveField` have methods - which compute the true value of the field given a subsytem argument. Consumers need to + which compute the true value of the field given a subsystem argument. Consumers need to be type aware, and `@rules` cannot have dynamic requirements. Additionally, `__defaults__` should mean that computed default Field values should become @@ -515,7 +515,7 @@ def get(self, field: Type[_F], *, default_raw_value: Optional[Any] = None) -> _F grab the `Field`'s inner value, e.g. `tgt.get(Compatibility).value`. (For async fields like `SourcesField`, you may need to hydrate the value.). - This works with subclasses of `Field`s. For example, if you subclass `Tags` + This works with subclasses of `Field`. For example, if you subclass `Tags` to define a custom subclass `CustomTags`, both `tgt.get(Tags)` and `tgt.get(CustomTags)` will return the same `CustomTags` instance. @@ -550,7 +550,7 @@ def _has_fields( def has_field(self, field: Type[Field]) -> bool: """Check that this target has registered the requested field. - This works with subclasses of `Field`s. For example, if you subclass `Tags` to define a + This works with subclasses of `Field`. For example, if you subclass `Tags` to define a custom subclass `CustomTags`, both `tgt.has_field(Tags)` and `python_tgt.has_field(CustomTags)` will return True. """ @@ -560,7 +560,7 @@ def has_field(self, field: Type[Field]) -> bool: def has_fields(self, fields: Iterable[Type[Field]]) -> bool: """Check that this target has registered all of the requested fields. - This works with subclasses of `Field`s. For example, if you subclass `Tags` to define a + This works with subclasses of `Field`. For example, if you subclass `Tags` to define a custom subclass `CustomTags`, both `tgt.has_fields([Tags])` and `python_tgt.has_fields([CustomTags])` will return True. """ @@ -693,8 +693,8 @@ def expect_single(self) -> Target: # FilteredTargets`. That is necessary so that project-introspection goals like `list` which don't # use `FilteredTargets` still have filtering applied. class FilteredTargets(Collection[Target]): - """A heterogenous collection of Target instances that have been filtered with the global options - `--tag` and `--exclude-target-regexp`. + """A heterogeneous collection of Target instances that have been filtered with the global + options `--tag` and `--exclude-target-regexp`. 
Outside of the extra filtering, this type is identical to `Targets`, including its handling of target generators. @@ -788,7 +788,7 @@ def __call__( class CoarsenedTarget(EngineAwareParameter): def __init__(self, members: Iterable[Target], dependencies: Iterable[CoarsenedTarget]) -> None: - """A set of Targets which cyclicly reach one another, and are thus indivisible. + """A set of Targets which cyclically reach one another, and are thus indivisible. Instances of this class form a structure-shared DAG, and so a hashcode is pre-computed for the recursive portion. @@ -946,7 +946,7 @@ class TransitiveTargetsRequest: """A request to get the transitive dependencies of the input roots. Resolve the transitive targets with `await Get(TransitiveTargets, - TransitiveTargetsRequest([addr1, addr2])`. + TransitiveTargetsRequest([addr1, addr2]))`. """ roots: Tuple[Address, ...] @@ -1369,7 +1369,7 @@ class FieldSet(EngineAwareParameter, metaclass=ABCMeta): Subclasses must set `@dataclass(frozen=True)` for their declared fields to be recognized. - You can optionally set implement the classmethod `opt_out` so that targets have a + You can optionally implement the classmethod `opt_out` so that targets have a mechanism to not match with the FieldSet even if they have the `required_fields` registered. For example: @@ -1385,7 +1385,7 @@ class FortranTestFieldSet(FieldSet): def opt_out(cls, tgt: Target) -> bool: return tgt.get(MaybeSkipFortranTestsField).value - This field set may then created from a `Target` through the `is_applicable()` and `create()` + This field set may then be created from a `Target` through the `is_applicable()` and `create()` class methods: field_sets = [ @@ -2021,7 +2021,7 @@ class SourcesField(AsyncFieldMixin, Field): - `default_glob_match_error_behavior` -- Advanced option, should very rarely be used. Override glob match error behavior when using the default value. If setting this to `GlobMatchErrorBehavior.ignore`, make sure you have other validation in place in case the - default glob doesn't match any files if required, to alert the user appropriately. + default glob doesn't match any files, if required, to alert the user appropriately. """ expected_file_extensions: ClassVar[tuple[str, ...] | None] = None @@ -2785,7 +2785,7 @@ class DependenciesRuleApplicationRequest: @dataclass(frozen=True) class DependenciesRuleApplication: - """Maps all dependencies to their respective dependency rule application of a origin target + """Maps all dependencies to their respective dependency rule application of an origin target address. The `applications` will be empty and the `address` `None` if there is no dependency rule diff --git a/src/python/pants/engine/target_test.py b/src/python/pants/engine/target_test.py index d89e6273537..c2334c95147 100644 --- a/src/python/pants/engine/target_test.py +++ b/src/python/pants/engine/target_test.py @@ -326,7 +326,7 @@ def test_override_preexisting_field_via_new_target() -> None: # that still works where the original target was expected. # # However, this means that we must ensure `Target.get()` and `Target.has_fields()` will work - # with subclasses of the original `Field`s. + # with subclasses of the original `Field`. 
class CustomFortranExtensions(FortranExtensions): banned_extensions = ("FortranBannedExt",) @@ -526,7 +526,7 @@ def assert_closure(cts: Sequence[CoarsenedTarget], expected: Sequence[Target]) - def test_generated_targets_address_validation() -> None: - """Ensure that all addresses are well formed.""" + """Ensure that all addresses are well-formed.""" class MockTarget(Target): alias = "tgt" diff --git a/src/python/pants/init/options_initializer.py b/src/python/pants/init/options_initializer.py index f0a0ae3d69c..8faf7df7c63 100644 --- a/src/python/pants/init/options_initializer.py +++ b/src/python/pants/init/options_initializer.py @@ -43,7 +43,7 @@ def _initialize_build_configuration( """Initialize a BuildConfiguration for the given OptionsBootstrapper. NB: This method: - 1. has the side-effect of (idempotently) adding PYTHONPATH entries for this process + 1. has the side effect of (idempotently) adding PYTHONPATH entries for this process 2. is expensive to call, because it might resolve plugins from the network """ diff --git a/src/python/pants/jvm/dependency_inference/artifact_mapper_test.py b/src/python/pants/jvm/dependency_inference/artifact_mapper_test.py index 41c93901503..ba59560b7f1 100644 --- a/src/python/pants/jvm/dependency_inference/artifact_mapper_test.py +++ b/src/python/pants/jvm/dependency_inference/artifact_mapper_test.py @@ -149,7 +149,7 @@ def traverse(*children) -> FrozenTrieNode: node = new_node return node - # Provided by `JVM_ARTFACT_MAPPINGS.` + # Provided by `JVM_ARTIFACT_MAPPINGS.` assert set(traverse("org", "junit").addresses[DEFAULT_SYMBOL_NAMESPACE]) == { Address("", target_name="junit_junit"), } diff --git a/src/python/pants/jvm/jdk_rules_test.py b/src/python/pants/jvm/jdk_rules_test.py index cf95bb2f873..ea16b33545e 100644 --- a/src/python/pants/jvm/jdk_rules_test.py +++ b/src/python/pants/jvm/jdk_rules_test.py @@ -130,13 +130,13 @@ def test_parse_java_version() -> None: @maybe_skip_jdk_test -def test_inclue_default_heap_size_in_jvm_options(rule_runner: RuleRunner) -> None: +def test_include_default_heap_size_in_jvm_options(rule_runner: RuleRunner) -> None: proc = javac_version_proc(rule_runner) assert "-Xmx512m" in proc.argv @maybe_skip_jdk_test -def test_inclue_child_mem_constraint_in_jvm_options(rule_runner: RuleRunner) -> None: +def test_include_child_mem_constraint_in_jvm_options(rule_runner: RuleRunner) -> None: rule_runner.set_options( ["--process-per-child-memory-usage=1GiB"], env_inherit=PYTHON_BOOTSTRAP_ENV, diff --git a/src/python/pants/jvm/resources_test.py b/src/python/pants/jvm/resources_test.py index cf6750da295..3a10cd7187e 100644 --- a/src/python/pants/jvm/resources_test.py +++ b/src/python/pants/jvm/resources_test.py @@ -107,7 +107,7 @@ def test_resources(rule_runner: RuleRunner) -> None: @maybe_skip_jdk_test -def test_resources_jar_is_determinstic(rule_runner: RuleRunner) -> None: +def test_resources_jar_is_deterministic(rule_runner: RuleRunner) -> None: rule_runner.write_files( { "BUILD": "resources(name='root', sources=['**/*.txt'])", diff --git a/src/python/pants/jvm/shading/rules_integration_test.py b/src/python/pants/jvm/shading/rules_integration_test.py index 45902a8d81a..30fed7efb4d 100644 --- a/src/python/pants/jvm/shading/rules_integration_test.py +++ b/src/python/pants/jvm/shading/rules_integration_test.py @@ -87,7 +87,7 @@ def jarjar_lockfile( def _resolve_jar(rule_runner: RuleRunner, coord: Coordinate) -> ClasspathEntry: - jvm_arifact_field_sets = [ + jvm_artifact_field_sets = [ JvmArtifactFieldSet.create(tgt) for tgt in 
rule_runner.request(AllTargets, []) if JvmArtifactFieldSet.is_applicable(tgt) @@ -98,7 +98,7 @@ def _resolve_jar(rule_runner: RuleRunner, coord: Coordinate) -> ClasspathEntry: CoarsenedTargetsRequest( [ fs.address - for fs in jvm_arifact_field_sets + for fs in jvm_artifact_field_sets if fs.artifact.value == coord.artifact and fs.group.value == coord.group and fs.version.value == coord.version diff --git a/src/python/pants/notes/2.13.x.md b/src/python/pants/notes/2.13.x.md index 3b5403966cf..265e043a4d8 100644 --- a/src/python/pants/notes/2.13.x.md +++ b/src/python/pants/notes/2.13.x.md @@ -274,7 +274,7 @@ See our [blog post](https://blog.pantsbuild.org/introducing-pants-2-13/) for a s ### User API Changes -* Deprecate not setting `tailor_pex_binary_targets` explictly (Cherry-pick of #15962) ([#16023](https://github.com/pantsbuild/pants/pull/16023)) +* Deprecate not setting `tailor_pex_binary_targets` explicitly (Cherry-pick of #15962) ([#16023](https://github.com/pantsbuild/pants/pull/16023)) * Upgrade default iPython to 7.34, which drops Python 3.6 (Cherry-pick of #15934) ([#15938](https://github.com/pantsbuild/pants/pull/15938)) diff --git a/src/python/pants/notes/2.14.x.md b/src/python/pants/notes/2.14.x.md index 7673b6b66df..f17acd1e97a 100644 --- a/src/python/pants/notes/2.14.x.md +++ b/src/python/pants/notes/2.14.x.md @@ -212,7 +212,7 @@ The first stable release of the `2.14.x` series, with no changes since the previ ### New Features -* Add suport for string interpolation in Helm deployments (Cherry-pick of #16611) ([#16723](https://github.com/pantsbuild/pants/pull/16723)) +* Add support for string interpolation in Helm deployments (Cherry-pick of #16611) ([#16723](https://github.com/pantsbuild/pants/pull/16723)) * Support Docker custom tags via plugin hook (Cherry-pick of #16662) ([#16719](https://github.com/pantsbuild/pants/pull/16719)) @@ -310,7 +310,7 @@ The first stable release of the `2.14.x` series, with no changes since the previ ### Documentation -* More succint phrasing and better formatting for cli help. ([#16549](https://github.com/pantsbuild/pants/pull/16549)) +* More succinct phrasing and better formatting for cli help. ([#16549](https://github.com/pantsbuild/pants/pull/16549)) * Document `[python].resolves_to_constraints_file` and `[python].resolves_to_no_binary` ([#16548](https://github.com/pantsbuild/pants/pull/16548)) @@ -592,7 +592,7 @@ There was no 2.14.0.dev4 because of technical issues. * Default `python` `tailor_pex_binary_targets` to `False`. ([#16035](https://github.com/pantsbuild/pants/pull/16035)) -* Deprecate not setting `tailor_pex_binary_targets` explictly ([#15962](https://github.com/pantsbuild/pants/pull/15962)) +* Deprecate not setting `tailor_pex_binary_targets` explicitly ([#15962](https://github.com/pantsbuild/pants/pull/15962)) * `tailor` and `update-build-files` require specifying CLI arguments ([#15941](https://github.com/pantsbuild/pants/pull/15941)) diff --git a/src/python/pants/notes/2.15.x.md b/src/python/pants/notes/2.15.x.md index 3a442e461f8..344929e3c9d 100644 --- a/src/python/pants/notes/2.15.x.md +++ b/src/python/pants/notes/2.15.x.md @@ -254,7 +254,7 @@ The first stable release of the series, with no changes since the previous `rc`! 
* Add the `environment` field to most Python binary target types (Cherry-pick of #18144) ([#18150](https://github.com/pantsbuild/pants/pull/18150)) -* Run `pyupgrade` until convergance (Cherry-pick of #18128) ([#18130](https://github.com/pantsbuild/pants/pull/18130)) +* Run `pyupgrade` until convergence (Cherry-pick of #18128) ([#18130](https://github.com/pantsbuild/pants/pull/18130)) * Make mypy cache per-repo (Cherry-pick of #18043) ([#18061](https://github.com/pantsbuild/pants/pull/18061)) diff --git a/src/python/pants/notes/2.16.x.md b/src/python/pants/notes/2.16.x.md index 85f13115469..82c490a5b86 100644 --- a/src/python/pants/notes/2.16.x.md +++ b/src/python/pants/notes/2.16.x.md @@ -649,7 +649,7 @@ The first stable release of the series, with no changes since the previous `rc`! * go: fix backend to work when run in non-local environments ([#18140](https://github.com/pantsbuild/pants/pull/18140)) -* Run `pyupgrade` until convergance ([#18128](https://github.com/pantsbuild/pants/pull/18128)) +* Run `pyupgrade` until convergence ([#18128](https://github.com/pantsbuild/pants/pull/18128)) ### Documentation @@ -877,7 +877,7 @@ The first stable release of the series, with no changes since the previous `rc`! ### New Features -* Allow targets that implement `RunFieldSet` to be executed in the sandbox for side-effects ([#17716](https://github.com/pantsbuild/pants/pull/17716)) +* Allow targets that implement `RunFieldSet` to be executed in the sandbox for side effects ([#17716](https://github.com/pantsbuild/pants/pull/17716)) * `python_requirements` target generator can parse PEP 621 pyproject.toml files ([#16932](https://github.com/pantsbuild/pants/pull/16932)) diff --git a/src/python/pants/notes/2.17.x.md b/src/python/pants/notes/2.17.x.md index 74642cda4d0..0355b56ba93 100644 --- a/src/python/pants/notes/2.17.x.md +++ b/src/python/pants/notes/2.17.x.md @@ -354,7 +354,7 @@ NOTE: `2.17.0a0` was not released to PyPI due to infrastructure issues. ### New Features -* javascript: Implement dependency inference opt-out and improve resillience ([#18931](https://github.com/pantsbuild/pants/pull/18931)) +* javascript: Implement dependency inference opt-out and improve resilience ([#18931](https://github.com/pantsbuild/pants/pull/18931)) * javascript: Add support for the pnpm package manager ([#18864](https://github.com/pantsbuild/pants/pull/18864)) diff --git a/src/python/pants/option/config.py b/src/python/pants/option/config.py index 180a2ca911a..a7b2d54da47 100644 --- a/src/python/pants/option/config.py +++ b/src/python/pants/option/config.py @@ -242,7 +242,7 @@ def stringify(raw_val: _TomlValue, prefix: str = "") -> str: # Handle dict options, along with the special `my_dict_option.add`, `my_list_option.add` and # `my_list_option.remove` syntax. We only treat `add` and `remove` as the special syntax - # if the values have the approprate type, to reduce the risk of incorrectly special casing. + # if the values have the appropriate type, to reduce the risk of incorrectly special casing. 
has_add_dict = isinstance(option_value.get("add"), dict) has_add_list = isinstance(option_value.get("add"), list) has_remove_list = isinstance(option_value.get("remove"), list) diff --git a/src/python/pants/option/config_test.py b/src/python/pants/option/config_test.py index 0fe88d5c457..a6348896f28 100644 --- a/src/python/pants/option/config_test.py +++ b/src/python/pants/option/config_test.py @@ -34,7 +34,7 @@ class ConfigFile: that.''' [a] - # TODO: once TOML releases its new version with support for heterogenous lists, we should be + # TODO: once TOML releases its new version with support for heterogeneous lists, we should be # able to rewrite this to `[1, 2, 3, "%(answer)s"`. See # https://github.com/toml-lang/toml/issues/665. list = ["1", "2", "3", "%(answer)s"] diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index 907319b16b3..954ee7baedf 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -69,7 +69,7 @@ class DynamicUIRenderer(Enum): - """Which renderer to use for dyanmic UI.""" + """Which renderer to use for dynamic UI.""" indicatif_spinner = "indicatif-spinner" experimental_prodash = "experimental-prodash" @@ -1458,7 +1458,7 @@ class BootstrapOptions: """ The cache namespace for process execution. Change this value to invalidate every artifact's execution, or to prevent - process cache entries from being (re)used for different usecases or users. + process cache entries from being (re)used for different use-cases or users. """ ), ) diff --git a/src/python/pants/option/options.py b/src/python/pants/option/options.py index db0af826f6d..54e8ba28ce5 100644 --- a/src/python/pants/option/options.py +++ b/src/python/pants/option/options.py @@ -287,7 +287,7 @@ def _check_and_apply_deprecations(self, scope, values): 2) The entire ScopeInfo is deprecated (as in the case of deprecated SubsystemDependencies), meaning that the options live in one location. - In the first case, this method has the sideeffect of merging options values from deprecated + In the first case, this method has the side effect of merging options values from deprecated scopes into the given values. """ si = self.known_scope_to_info[scope] diff --git a/src/python/pants/option/options_bootstrapper.py b/src/python/pants/option/options_bootstrapper.py index 7c1fb66db6f..d94b6db59f8 100644 --- a/src/python/pants/option/options_bootstrapper.py +++ b/src/python/pants/option/options_bootstrapper.py @@ -117,7 +117,7 @@ def create( :param args: An args array. :param allow_pantsrc: True to allow pantsrc files to be used. Unless tests are expecting to consume pantsrc files, they should pass False in order to avoid reading files from - absolute paths. Production usecases should pass True to allow options values to make the + absolute paths. Production use-cases should pass True to allow options values to make the decision of whether to respect pantsrc files. 
""" with warnings.catch_warnings(record=True): diff --git a/src/python/pants/option/options_integration_test.py b/src/python/pants/option/options_integration_test.py index 7a95f16456f..ff259a6c871 100644 --- a/src/python/pants/option/options_integration_test.py +++ b/src/python/pants/option/options_integration_test.py @@ -54,7 +54,7 @@ class Options(Subsystem): deprecated = StrOption( default=None, - help="doens't matter", + help="doesn't matter", removal_version="999.99.9.dev0", removal_hint="blah", ) diff --git a/src/python/pants/option/subsystem.py b/src/python/pants/option/subsystem.py index 8ee2db591e5..786202493e1 100644 --- a/src/python/pants/option/subsystem.py +++ b/src/python/pants/option/subsystem.py @@ -184,7 +184,7 @@ def _construct_subsystem_rule(cls) -> Rule: # Global-level imports are conditional, we need to re-import here for runtime use from pants.engine.rules import TaskRule - partial_construct_subsystem: Any = functools.partial(_construct_subsytem, cls) + partial_construct_subsystem: Any = functools.partial(_construct_subsystem, cls) # NB: We must populate several dunder methods on the partial function because partial # functions do not have these defined by default and the engine uses these values to @@ -314,7 +314,7 @@ def __eq__(self, other: Any) -> bool: return bool(self.options == other.options) -async def _construct_subsytem(subsystem_typ: type[_SubsystemT]) -> _SubsystemT: +async def _construct_subsystem(subsystem_typ: type[_SubsystemT]) -> _SubsystemT: scoped_options = await Get(ScopedOptions, Scope(str(subsystem_typ.options_scope))) return subsystem_typ(scoped_options.options) diff --git a/src/python/pants/pantsd/pants_daemon_core.py b/src/python/pants/pantsd/pants_daemon_core.py index 158be42d4c0..1afa7d434d2 100644 --- a/src/python/pants/pantsd/pants_daemon_core.py +++ b/src/python/pants/pantsd/pants_daemon_core.py @@ -133,7 +133,7 @@ def prepare( scheduler_restart_explanation: str | None = None - # Because these options are computed dynamically via side-effects like reading from a file, + # Because these options are computed dynamically via side effects like reading from a file, # they need to be re-evaluated every run. We only reinitialize the scheduler if changes # were made, though. dynamic_remote_options, auth_plugin_result = DynamicRemoteOptions.from_options( diff --git a/src/python/pants/pantsd/service/pants_service.py b/src/python/pants/pantsd/service/pants_service.py index a895f6851a6..eab505ee7c7 100644 --- a/src/python/pants/pantsd/service/pants_service.py +++ b/src/python/pants/pantsd/service/pants_service.py @@ -234,7 +234,7 @@ def services(self) -> KeysView[PantsService]: def are_all_alive(self) -> bool: """Return true if all services threads are still alive, and false if any have died. - This method does not have sideeffects: if one service thread has died, the rest should be + This method does not have side effects: if one service thread has died, the rest should be killed and joined via `self.shutdown()`. """ for service, service_thread in self._service_threads.items(): diff --git a/src/python/pants/util/resources.py b/src/python/pants/util/resources.py index 40eeca7d60e..75a62328ee3 100644 --- a/src/python/pants/util/resources.py +++ b/src/python/pants/util/resources.py @@ -10,7 +10,7 @@ def read_resource(package_or_module: str, resource: str) -> bytes: """Reads a resource file from within the Pants package itself. 
This helper function is designed for compatibility with `pkgutil.get_data()` wherever possible, - but also allows compability with PEP302 pluggable importers such as included with PyOxidizer. + but also allows compatibility with PEP302 pluggable importers such as included with PyOxidizer. """ a = importlib.import_module(package_or_module) diff --git a/src/python/pants/util/strutil.py b/src/python/pants/util/strutil.py index 6d19d5c0ba0..0c1eab3b716 100644 --- a/src/python/pants/util/strutil.py +++ b/src/python/pants/util/strutil.py @@ -264,7 +264,7 @@ def softwrap(text: str) -> str: """ if not text: return text - # If callers didn't use a leading "\" thats OK. + # If callers didn't use a leading "\" that's OK. if text[0] == "\n": text = text[1:] diff --git a/src/python/pants/version.py b/src/python/pants/version.py index 4e781d279a0..1b6fbaf282f 100644 --- a/src/python/pants/version.py +++ b/src/python/pants/version.py @@ -8,7 +8,7 @@ import pants._version -# Generate a inferrable dependency on the `pants._version` package and its associated resources. +# Generate an inferrable dependency on the `pants._version` package and its associated resources. from pants.util.resources import read_resource diff --git a/src/rust/engine/fs/store/src/immutable_inputs.rs b/src/rust/engine/fs/store/src/immutable_inputs.rs index d17a73be008..1b7740f0040 100644 --- a/src/rust/engine/fs/store/src/immutable_inputs.rs +++ b/src/rust/engine/fs/store/src/immutable_inputs.rs @@ -66,14 +66,14 @@ impl ImmutableInputs { // We (might) need to initialize the value. // - // Because this code executes a side-effect which could be observed elsewhere within this + // Because this code executes a side effect which could be observed elsewhere within this // process (other threads can observe the contents of the temporary directory), we need to // ensure that if this method is cancelled (via async Drop), whether the cell has been - // initialized or not stays in sync with whether the side-effect is visible. + // initialized or not stays in sync with whether the side effect is visible. // // Making the initialization "cancellation safe", involves either: // - // 1. Adding a Drop guard to "undo" the side-effect if we're dropped before we fully + // 1. Adding a Drop guard to "undo" the side effect if we're dropped before we fully // initialize the cell. // * This is challenging to do correctly in this case, because the `Drop` guard cannot // be created until after initialization begins, but cannot be cleared until after the @@ -89,7 +89,7 @@ impl ImmutableInputs { // requested that Digest. // 3. Using anonymous destination paths, such that multiple attempts to initialize cannot // collide. - // * This means that although the side-effect is visible, it can never collide. + // * This means that although the side effect is visible, it can never collide. // // We take the final approach here currently (for simplicity's sake), but the advanced variant // of approach 2 might eventually be worthwhile. 
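[Editorial note, not part of the patch] The `immutable_inputs.rs` comment above describes making a side-effecting initialization safe under cancellation by writing each attempt to its own uniquely named path, so concurrent or cancelled attempts can never collide. Below is a minimal Python sketch of that "anonymous destination path" idea; it is not the Rust implementation in `immutable_inputs.rs`, and the `materialize` helper and `write_contents` callback are hypothetical names used only for illustration.

    # Illustrative sketch only (not part of this patch): the "anonymous destination
    # path" approach described in the comment above, expressed in Python.
    import tempfile
    from pathlib import Path
    from typing import Callable


    def materialize(base_dir: Path, digest: str, write_contents: Callable[[Path], None]) -> Path:
        """Write the contents identified by `digest` under a uniquely named directory.

        Because every attempt gets its own anonymous directory, concurrent or cancelled
        attempts never collide: a cancelled attempt leaves an unused directory at worst,
        never a partially initialized path that another consumer could observe.
        """
        dest = Path(tempfile.mkdtemp(dir=base_dir, prefix=f"{digest}-"))
        write_contents(dest)  # The side effect is visible, but only at this attempt's private path.
        return dest

A fixed destination plus an atomic rename would correspond to the "advanced variant of approach 2" the comment mentions; the trade-off is extra undo/cleanup logic there versus the occasional orphaned directory this sketch can leave behind.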
diff --git a/src/rust/engine/graph/README.md b/src/rust/engine/graph/README.md index f090e4e436c..de01b5b42b8 100644 --- a/src/rust/engine/graph/README.md +++ b/src/rust/engine/graph/README.md @@ -18,7 +18,7 @@ It's important to differentiate the "identity" of a node from its current value, ### Execution -When a external caller requests the output for a `Node` (via [get](https://github.com/pantsbuild/pants/blob/01372719f7e6f2a3aa0b9f3ce6909991388071ca/src/rust/engine/graph/src/lib.rs#L689-L704)), the `Graph` does a `HashMap` lookup for the `Node` to locate an `Entry` object for that node. The `Entry` object stores the current state of the node, including whether it is [`NotStarted`, `Running`, or `Completed`](https://github.com/pantsbuild/pants/blob/01372719f7e6f2a3aa0b9f3ce6909991388071ca/src/rust/engine/graph/src/entry.rs#L153-L191). To get the value of a `Node`, `Graph::get` calls [Entry::get_node_result](https://github.com/pantsbuild/pants/blob/01372719f7e6f2a3aa0b9f3ce6909991388071ca/src/rust/engine/graph/src/entry.rs#L361-L374). +When an external caller requests the output for a `Node` (via [get](https://github.com/pantsbuild/pants/blob/01372719f7e6f2a3aa0b9f3ce6909991388071ca/src/rust/engine/graph/src/lib.rs#L689-L704)), the `Graph` does a `HashMap` lookup for the `Node` to locate an `Entry` object for that node. The `Entry` object stores the current state of the node, including whether it is [`NotStarted`, `Running`, or `Completed`](https://github.com/pantsbuild/pants/blob/01372719f7e6f2a3aa0b9f3ce6909991388071ca/src/rust/engine/graph/src/entry.rs#L153-L191). To get the value of a `Node`, `Graph::get` calls [Entry::get_node_result](https://github.com/pantsbuild/pants/blob/01372719f7e6f2a3aa0b9f3ce6909991388071ca/src/rust/engine/graph/src/entry.rs#L361-L374). In the simplest case, if the `Node` is already `Completed`, the `Graph` immediately returns the `Node`'s result value from the `Entry`. If the `Node` is currently `Running`, the caller will be added as a waiter on the `Entry`, and pushed a value when it is ready. Finally, if the `Node` is `NotStarted`, its `run` method is launched (concurrently on a `tokio` task) and the `Entry` moves to the `Running` state. diff --git a/src/rust/engine/process_execution/src/bounded.rs b/src/rust/engine/process_execution/src/bounded.rs index e13f606082d..57aac67c0fb 100644 --- a/src/rust/engine/process_execution/src/bounded.rs +++ b/src/rust/engine/process_execution/src/bounded.rs @@ -373,7 +373,7 @@ pub(crate) fn balance(now: Instant, state: &mut State) -> usize { state.total_concurrency as isize - concurrency_used as isize; let mut prempted = 0; - // To reduce the number of tasks that we preempty, we preempt them in order by the amount of + // To reduce the number of tasks that we preempt, we preempt them in order by the amount of // concurrency that they desire or can relinquish. match desired_change_in_commitment.cmp(&0) { Ordering::Equal => { diff --git a/src/rust/engine/rule_graph/README.md b/src/rust/engine/rule_graph/README.md index 973afe11084..cf02239d474 100644 --- a/src/rust/engine/rule_graph/README.md +++ b/src/rust/engine/rule_graph/README.md @@ -55,7 +55,7 @@ But both of the goals are important because together they allow for an API that There are a few constraints that decide which `Rule`s are able to provide dependencies for one another: * `param_consumption` - When a `Rule` directly uses a `Param` as a positional argument, that `Param` is removed from scope for any of that `Rule`'s dependencies. 
- * For example, for a `Rule` `y` with a positional argument `A` and a `Get(B, C)`: if there is a `Param` `A` in scope at `y` and it is used to satisfy the positional argument, it cannot also be used to (transitively) to satisfy the `Get(B, C)` (i.e., a hyptothetical rule that consumes both `A` and `C` would not be eligible in that position). + * For example, for a `Rule` `y` with a positional argument `A` and a `Get(B, C)`: if there is a `Param` `A` in scope at `y` and it is used to satisfy the positional argument, it cannot also be used to (transitively) to satisfy the `Get(B, C)` (i.e., a hypothetical rule that consumes both `A` and `C` would not be eligible in that position). * On the other hand, for a `Rule` `w` with `Get(B, C)` and `Get(D, E)`, if there is a `Param` `A` in scope at `w`, two dependency `Rule`s that consume `A` (transitively) _can_ be used to satisfy those `Get`s. Only consuming a `Param` as a positional argument removes it from scope. * `provided_params` - When deciding whether one `Rule` can use another `Rule` to provide the output type of a `Get`, a constraint is applied that the candidate dependency must (transitively) consume the `Param` that is provided by the `Get`. * For example: if a `Rule` `z` has a `Get(A, B)`, only `Rule`s that compute an `A` and (transitively) consume a `B` are eligible to be used. This also means that a `Param` `A` which is already in scope for `Rule` `z` is not eligible to be used, because it would trivially not consume `B`. @@ -72,7 +72,7 @@ The construction algorithm is broken up into phases: * If we were to stop `RuleGraph` construction at this phase, it would be necessary to do a form of [dynamic dispatch](https://en.wikipedia.org/wiki/Dynamic_dispatch) at runtime to decide which source of a dependency to use based on the `Param`s that were currently in scope. And the sets of `Param`s used in the memoization key for each `Rule` would still be overly large, causing excess invalidation. 3. [monomorphize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L325-L353) - "Monomorphize" the polymorphic graph by using the out-set of available `Param`s (initialized during `initial_polymorphic`) and the in-set of consumed `Param`s (computed during `live_param_labeled`) to partition nodes (and their dependents) for each valid combination of their dependencies. Combinations of dependencies that would be invalid (see the Constraints section) are not generated, which causes some pruning of the graph to happen during this phase. * Continuing the example from above: the goal of monomorphize is to create one copy of `Rule` `x` per legal combination of its `DependencyKey`. Assuming that both of `x`'s dependencies remain legal (i.e. that all of `{A,B,C}` are still in scope in the dependents of `x`, etc), then two copies of `x` will be created: one that uses the first dependency and has an in-set of `{A,B}`, and another that uses the second dependency and has an in-set of `{B,C}`. -4. [prune_edges](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L836-L845) - Once the monomorphic graph has [converged](https://en.wikipedia.org/wiki/Data-flow_analysis#Convergence), each node in the graph will ideally have exactly one source of each `DependencyKey` (with the exception of `Query`s, which are not monomorphized). This phase validates that, and chooses the smallest input `Param` set to use for each `Query`. 
In cases where a node has more that one dependency per `DependencyKey`, it is because given a particular set of input `Params` there was more than one valid way to compute a dependency. This can happen either because there were too many `Param`s in scope, or because there were multiple `Rule`s with the same `Param` requirements. +4. [prune_edges](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L836-L845) - Once the monomorphic graph has [converged](https://en.wikipedia.org/wiki/Data-flow_analysis#Convergence), each node in the graph will ideally have exactly one source of each `DependencyKey` (except for `Query`s, which are not monomorphized). This phase validates that, and chooses the smallest input `Param` set to use for each `Query`. In cases where a node has more than one dependency per `DependencyKey`, it is because given a particular set of input `Params` there was more than one valid way to compute a dependency. This can happen either because there were too many `Param`s in scope, or because there were multiple `Rule`s with the same `Param` requirements. * This phase is the only phase that renders errors: all of the other phases mark nodes and edges "deleted" for particular reasons, and this phase consumes that record. A node that has been deleted indicates that that node is unsatisfiable for some reason, while an edge that has been deleted indicates that the source node was not able to consume the target node for some reason. * If a node has too many sources of a `DependencyKey`, this phase will recurse to attempt to locate the node in the `Rule` graph where the ambiguity was introduced. Likewise, if a node has no source of a `DependencyKey`, this phase will recurse on deleted nodes (which are preserved by the other phases) to attempt to locate the bottom-most `Rule` that was missing a `DependencyKey`. 5. [finalize](https://github.com/pantsbuild/pants/blob/3a188a1e06d8c27ff86d8c311ff1b2bdea0d39ff/src/rust/engine/rule_graph/src/builder.rs#L1064-L1068) - After `prune_edges` the graph is known to be valid, and this phase generates the final static `RuleGraph` for all `Rule`s reachable from `Query`s. diff --git a/src/rust/engine/workunit_store/src/lib.rs b/src/rust/engine/workunit_store/src/lib.rs index 78d67562fd1..7ba14127605 100644 --- a/src/rust/engine/workunit_store/src/lib.rs +++ b/src/rust/engine/workunit_store/src/lib.rs @@ -541,7 +541,7 @@ impl HeavyHittersData { impl WorkunitStore { pub fn new(log_starting_workunits: bool, max_level: Level) -> WorkunitStore { - // NB: Although it would be nice not to have seperate allocations per consumer, it is + // NB: Although it would be nice not to have separate allocations per consumer, it is // difficult to use a channel like `tokio::sync::broadcast` due to that channel being bounded. // Subscribers receive messages at very different rates, and adjusting the workunit level // affects the total number of messages that might be queued at any given time. diff --git a/tests/python/pants_test/pantsd/pantsd_integration_test.py b/tests/python/pants_test/pantsd/pantsd_integration_test.py index ba69ad555f1..810249c6422 100644 --- a/tests/python/pants_test/pantsd/pantsd_integration_test.py +++ b/tests/python/pants_test/pantsd/pantsd_integration_test.py @@ -524,7 +524,7 @@ def _assert_pantsd_keyboardinterrupt_signal( :param signum: The signal to send. :param regexps: Assert that all of these regexps match somewhere in stderr. 
:param not_regexps: Assert that all of these regexps do not match somewhere in stderr. - :param cleanup_wait_time: passed throught to waiter, dictated how long simulated cleanup will take + :param cleanup_wait_time: passed through to waiter, dictated how long simulated cleanup will take """ with self.pantsd_test_context() as (workdir, config, checker): client_handle, waiter_pid, child_pid, _ = launch_waiter( @@ -561,7 +561,7 @@ def _assert_pantsd_keyboardinterrupt_signal( checker.assert_running() def test_pantsd_graceful_shutdown(self): - """Test that SIGINT is propgated to child processes and they are given time to shutdown.""" + """Test that SIGINT is propagated to child processes and they are given time to shutdown.""" self._assert_pantsd_keyboardinterrupt_signal( signal.SIGINT, regexps=[