This is an automated email from the ASF dual-hosted git repository.

ephraimanierobi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 1571f80  Add pre-commit hook for common misspelling check in files 
(#18964)
1571f80 is described below

commit 1571f80546853688778c2a3ec5194e5c8be0edbd
Author: Ephraim Anierobi <[email protected]>
AuthorDate: Thu Oct 14 19:37:08 2021 +0100

    Add pre-commit hook for common misspelling check in files (#18964)
    
    This PR adds codespell to the pre-commit hooks. This will specifically help
    us a bit in resolving sphinx errors.
    
    From the project page:
    It does not check for word membership in a complete dictionary, but instead 
looks for a set of common misspellings.
    Therefore it should catch errors like "adn", but it will not catch 
"adnasdfasdf".
    This also means it shouldn't generate false-positives when you use a niche 
term it doesn't know about.
    
    This means the sphinx errors are not solved completely.
---
 .github/workflows/build-images.yml                           |  4 ++--
 .pre-commit-config.yaml                                      | 12 ++++++++++++
 BREEZE.rst                                                   |  2 +-
 STATIC_CODE_CHECKS.rst                                       |  2 ++
 UPDATING.md                                                  |  2 +-
 airflow/_vendor/connexion/__init__.py                        |  2 +-
 airflow/example_dags/plugins/workday.py                      |  2 +-
 airflow/providers/cncf/kubernetes/utils/pod_launcher.py      |  2 +-
 airflow/providers/google/cloud/hooks/bigquery.py             |  2 +-
 .../providers/microsoft/azure/operators/wasb_delete_blob.py  |  2 +-
 airflow/providers/microsoft/azure/sensors/wasb.py            |  2 +-
 airflow/providers/ssh/hooks/ssh.py                           |  8 ++++----
 airflow/settings.py                                          |  2 +-
 airflow/templates.py                                         |  2 +-
 airflow/utils/db.py                                          |  2 +-
 airflow/www/fab_security/manager.py                          |  6 +++---
 airflow/www/static/js/graph.js                               |  2 +-
 airflow/www/static/js/tree.js                                |  2 +-
 breeze                                                       |  2 +-
 breeze-complete                                              |  1 +
 chart/README.md                                              |  2 +-
 chart/tests/helm_template_generator.py                       |  2 +-
 dev/README_RELEASE_PROVIDER_PACKAGES.md                      |  2 +-
 dev/import_all_classes.py                                    |  2 +-
 .../secrets-backends/aws-secrets-manager.rst                 |  2 +-
 docs/apache-airflow-providers-ssh/connections/ssh.rst        |  2 +-
 docs/apache-airflow/howto/set-up-database.rst                |  4 ++--
 docs/apache-airflow/modules_management.rst                   |  2 +-
 docs/apache-airflow/pipeline_example.csv                     |  4 ++--
 docs/apache-airflow/security/kerberos.rst                    |  2 +-
 docs/apache-airflow/security/webserver.rst                   |  2 +-
 docs/helm-chart/index.rst                                    |  2 +-
 docs/helm-chart/production-guide.rst                         |  4 ++--
 docs/spelling_wordlist.txt                                   |  7 +++++++
 scripts/ci/libraries/_initialization.sh                      |  2 +-
 scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py              |  2 +-
 scripts/in_container/prod/entrypoint_prod.sh                 |  2 +-
 tests/dag_processing/test_manager.py                         |  2 +-
 tests/models/test_taskinstance.py                            |  2 +-
 tests/providers/ssh/hooks/test_ssh.py                        |  2 +-
 tests/sensors/test_external_task_sensor.py                   |  2 +-
 tests/task/task_runner/test_base_task_runner.py              |  2 +-
 42 files changed, 69 insertions(+), 47 deletions(-)

diff --git a/.github/workflows/build-images.yml 
b/.github/workflows/build-images.yml
index 612ffb5..3f7c51a 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -307,12 +307,12 @@ jobs:
       - name: "Build PROD images ${{ matrix.python-version }}:${{ 
env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
         env:
-          # GITHUB_REGISTRY_PULL_IMAGE_TAG is overriden to latest in order to 
build PROD image using "latest"
+          # GITHUB_REGISTRY_PULL_IMAGE_TAG is overridden to latest in order to 
build PROD image using "latest"
           GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
       - name: "Push PROD images ${{ matrix.python-version }}:${{ 
env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_push_production_images.sh
         env:
-          # GITHUB_REGISTRY_PULL_IMAGE_TAG is overriden to latest in order to 
build PROD image using "latest"
+          # GITHUB_REGISTRY_PULL_IMAGE_TAG is overridden to latest in order to 
build PROD image using "latest"
           GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
 
   cancel-on-ci-build:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0003071..896f9d3 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -258,6 +258,18 @@ repos:
         exclude: |
           (?x)
           ^airflow/_vendor/
+  - repo: https://github.com/codespell-project/codespell
+    rev: v2.1.0
+    hooks:
+      - id: codespell
+        name: Run codespell to check for common misspellings in files
+        entry: codespell
+        language: python
+        types: [text]
+        exclude: 
^airflow/_vendor/|^CHANGELOG.txt|^airflow/www/static/css/material-icons.css
+        args:
+          - --ignore-words=docs/spelling_wordlist.txt
+          - 
--skip=docs/*/commits.rst,airflow/providers/*/*.rst,*.lock,INTHEWILD.md,*.min.js
   - repo: local
     hooks:
       - id: lint-openapi
diff --git a/BREEZE.rst b/BREEZE.rst
index 711da47..e4a1ca0 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -2191,7 +2191,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
                  check-executables-have-shebangs check-extras-order 
check-hooks-apply
                  check-integrations check-merge-conflict check-xml 
daysago-import-check
                  debug-statements detect-private-key doctoc 
dont-use-safe-filter end-of-file-fixer
-                 fix-encoding-pragma flake8 flynt forbid-tabs helm-lint 
identity
+                 fix-encoding-pragma flake8 flynt codespell forbid-tabs 
helm-lint identity
                  incorrect-use-of-LoggingMixin insert-license isort 
json-schema language-matters
                  lint-dockerfile lint-openapi markdownlint mermaid 
mixed-line-ending mypy mypy-helm
                  no-providers-in-core-examples no-relative-imports 
pre-commit-descriptions
diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst
index 81bc6de..51391a5 100644
--- a/STATIC_CODE_CHECKS.rst
+++ b/STATIC_CODE_CHECKS.rst
@@ -188,6 +188,8 @@ require Breeze Docker images to be installed locally.
 ------------------------------------ 
---------------------------------------------------------------- ------------
 ``flynt``                              Runs flynt
 ------------------------------------ 
---------------------------------------------------------------- ------------
+``codespell``                          Checks for common misspellings in files.
+------------------------------------ 
---------------------------------------------------------------- ------------
 ``forbid-tabs``                        Fails if tabs are used in the project
 ------------------------------------ 
---------------------------------------------------------------- ------------
 ``helm-lint``                          Verifies if helm lint passes for the 
chart
diff --git a/UPDATING.md b/UPDATING.md
index 8ffd87d..cef97e1 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -283,7 +283,7 @@ No breaking changes.
 
 ### `activate_dag_runs` argument of the function `clear_task_instances` is 
replaced with `dag_run_state`
 
-To achieve the previous default behaviour of `clear_task_instances` with 
`activate_dag_runs=True`, no change is needed. To achieve the previous 
behaviour of `activate_dag_runs=False`, pass `dag_run_state=False` instead. 
(The previous paramater is still accepted, but is deprecated)
+To achieve the previous default behaviour of `clear_task_instances` with 
`activate_dag_runs=True`, no change is needed. To achieve the previous 
behaviour of `activate_dag_runs=False`, pass `dag_run_state=False` instead. 
(The previous parameter is still accepted, but is deprecated)
 
 ### `dag.set_dag_runs_state` is deprecated
 
diff --git a/airflow/_vendor/connexion/__init__.py 
b/airflow/_vendor/connexion/__init__.py
index 8286d6c..7852d49 100755
--- a/airflow/_vendor/connexion/__init__.py
+++ b/airflow/_vendor/connexion/__init__.py
@@ -6,7 +6,7 @@ from .apis import AbstractAPI  # NOQA
 from .apps import AbstractApp  # NOQA
 from .decorators.produces import NoContent  # NOQA
 from .exceptions import ProblemException  # NOQA
-# add operation for backwards compatability
+# add operation for backwards compatibility
 from .operations import compat
 from .problem import problem  # NOQA
 from .resolver import Resolution, Resolver, RestyResolver  # NOQA
diff --git a/airflow/example_dags/plugins/workday.py 
b/airflow/example_dags/plugins/workday.py
index 84a5ff8c..f856739 100644
--- a/airflow/example_dags/plugins/workday.py
+++ b/airflow/example_dags/plugins/workday.py
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-"""Plugin to demostrate timetable registration and accomdate example DAGs."""
+"""Plugin to demonstrate timetable registration and accommodate example 
DAGs."""
 
 # [START howto_timetable]
 from datetime import timedelta
diff --git a/airflow/providers/cncf/kubernetes/utils/pod_launcher.py 
b/airflow/providers/cncf/kubernetes/utils/pod_launcher.py
index 671d260..1eda041 100644
--- a/airflow/providers/cncf/kubernetes/utils/pod_launcher.py
+++ b/airflow/providers/cncf/kubernetes/utils/pod_launcher.py
@@ -263,7 +263,7 @@ class PodLauncher(LoggingMixin):
             )
         except BaseHTTPError:
             self.log.exception('There was an error reading the kubernetes 
API.')
-            # Reraise to be catched by self.monitor_pod.
+            # Reraise to be caught by self.monitor_pod.
             raise
 
     @tenacity.retry(stop=tenacity.stop_after_attempt(3), 
wait=tenacity.wait_exponential(), reraise=True)
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py 
b/airflow/providers/google/cloud/hooks/bigquery.py
index ba2ae50..1aa95ed 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -1422,7 +1422,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
             # Turn schema_field_updates into a dict keyed on field names
             schema_fields_updates = {field["name"]: field for field in 
deepcopy(schema_fields_updates)}
 
-            # Create a new dict for storing the new schema, initated based on 
the current_schema
+            # Create a new dict for storing the new schema, initiated based on 
the current_schema
             # as of Python 3.6, dicts retain order.
             new_schema = {field["name"]: field for field in 
deepcopy(current_schema)}
 
diff --git a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py 
b/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
index 3e8c1c5..8cc0849 100644
--- a/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
+++ b/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py
@@ -65,7 +65,7 @@ class WasbDeleteBlobOperator(BaseOperator):
         self.ignore_if_missing = ignore_if_missing
 
     def execute(self, context: dict) -> None:
-        self.log.info('Deleting blob: %s\nin wasb://%s', self.blob_name, 
self.container_name)
+        self.log.info('Deleting blob: %s\n in wasb://%s', self.blob_name, 
self.container_name)
         hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
 
         hook.delete_file(
diff --git a/airflow/providers/microsoft/azure/sensors/wasb.py 
b/airflow/providers/microsoft/azure/sensors/wasb.py
index 8a345bf..e78ec1d 100644
--- a/airflow/providers/microsoft/azure/sensors/wasb.py
+++ b/airflow/providers/microsoft/azure/sensors/wasb.py
@@ -57,7 +57,7 @@ class WasbBlobSensor(BaseSensorOperator):
         self.check_options = check_options
 
     def poke(self, context: dict):
-        self.log.info('Poking for blob: %s\nin wasb://%s', self.blob_name, 
self.container_name)
+        self.log.info('Poking for blob: %s\n in wasb://%s', self.blob_name, 
self.container_name)
         hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
         return hook.check_for_blob(self.container_name, self.blob_name, 
**self.check_options)
 
diff --git a/airflow/providers/ssh/hooks/ssh.py 
b/airflow/providers/ssh/hooks/ssh.py
index e0c5c96..cc4ef1c 100644
--- a/airflow/providers/ssh/hooks/ssh.py
+++ b/airflow/providers/ssh/hooks/ssh.py
@@ -261,13 +261,13 @@ class SSHHook(BaseHook):
 
         if not self.allow_host_key_change:
             self.log.warning(
-                'Remote Identification Change is not verified. '
-                'This wont protect against Man-In-The-Middle attacks'
+                "Remote Identification Change is not verified. "
+                "This won't protect against Man-In-The-Middle attacks"
             )
             client.load_system_host_keys()
 
         if self.no_host_key_check:
-            self.log.warning('No Host Key Verification. This wont protect 
against Man-In-The-Middle attacks')
+            self.log.warning("No Host Key Verification. This won't protect 
against Man-In-The-Middle attacks")
             # Default is RejectPolicy
             client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
         else:
@@ -400,7 +400,7 @@ class SSHHook(BaseHook):
         for pkey_class in self._pkey_loaders:
             try:
                 key = pkey_class.from_private_key(StringIO(private_key), 
password=passphrase)
-                # Test it acutally works. If Paramiko loads an openssh 
generated key, sometimes it will
+                # Test it actually works. If Paramiko loads an openssh 
generated key, sometimes it will
                 # happily load it as the wrong type, only to fail when 
actually used.
                 key.sign_ssh_data(b'')
                 return key
diff --git a/airflow/settings.py b/airflow/settings.py
index 9a456ba..3df8248 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -550,7 +550,7 @@ MASK_SECRETS_IN_LOGS = False
 
 # Display alerts on the dashboard
 # Useful for warning about setup issues or announcing changes to end users
-# List of UIAlerts, which allows for specifiying the message, category, and 
roles the
+# List of UIAlerts, which allows for specifying the message, category, and 
roles the
 # message should be shown to. For example:
 #   from airflow.www.utils import UIAlert
 #
diff --git a/airflow/templates.py b/airflow/templates.py
index 7cbd556..690951e 100644
--- a/airflow/templates.py
+++ b/airflow/templates.py
@@ -32,7 +32,7 @@ class 
SandboxedEnvironment(jinja2.sandbox.SandboxedEnvironment):
         Allow access to ``_`` prefix vars (but not ``__``).
 
         Unlike the stock SandboxedEnvironment, we allow access to "private" 
attributes (ones starting with
-        ``_``) whilst still blocking internal or truely private attributes 
(``__`` prefixed ones).
+        ``_``) whilst still blocking internal or truly private attributes 
(``__`` prefixed ones).
         """
         return not jinja2.sandbox.is_internal_attribute(obj, attr)
 
diff --git a/airflow/utils/db.py b/airflow/utils/db.py
index 17522d3..fc0dc44 100644
--- a/airflow/utils/db.py
+++ b/airflow/utils/db.py
@@ -771,7 +771,7 @@ def check_task_tables_without_matching_dagruns(session) -> 
Iterable[str]:
         try:
             metadata.reflect(only=[model.__tablename__])
         except exc.InvalidRequestError:
-            # Table doesn't exist, but try the other ones incase the user is 
upgrading from an _old_ DB
+            # Table doesn't exist, but try the other ones in case the user is 
upgrading from an _old_ DB
             # version
             pass
 
diff --git a/airflow/www/fab_security/manager.py 
b/airflow/www/fab_security/manager.py
index 85341e9..997d70b 100644
--- a/airflow/www/fab_security/manager.py
+++ b/airflow/www/fab_security/manager.py
@@ -857,7 +857,7 @@ class BaseSecurityManager:
         if len(self.auth_roles_mapping) > 0:
             request_fields.append(self.auth_ldap_group_field)
 
-        # preform the LDAP search
+        # perform the LDAP search
         log.debug(
             "LDAP search for '%s' with fields %s in scope '%s'"
             % (filter_str, request_fields, self.auth_ldap_search)
@@ -1017,7 +1017,7 @@ class BaseSecurityManager:
             user_attributes = {}
 
             # Flow 1 - (Indirect Search Bind):
-            #  - in this flow, special bind credentials are used to preform the
+            #  - in this flow, special bind credentials are used to perform the
             #    LDAP search
             #  - in this flow, AUTH_LDAP_SEARCH must be set
             if self.auth_ldap_bind_user:
@@ -1051,7 +1051,7 @@ class BaseSecurityManager:
 
             # Flow 2 - (Direct Search Bind):
             #  - in this flow, the credentials provided by the end-user are 
used
-            #    to preform the LDAP search
+            #    to perform the LDAP search
             #  - in this flow, we only search LDAP if AUTH_LDAP_SEARCH is set
             #     - features like AUTH_USER_REGISTRATION & 
AUTH_ROLES_SYNC_AT_LOGIN
             #       will only work if AUTH_LDAP_SEARCH is set
diff --git a/airflow/www/static/js/graph.js b/airflow/www/static/js/graph.js
index 097c861..6671eae 100644
--- a/airflow/www/static/js/graph.js
+++ b/airflow/www/static/js/graph.js
@@ -400,7 +400,7 @@ function startOrStopRefresh() {
 
 $('#auto_refresh').change(() => {
   if ($('#auto_refresh').is(':checked')) {
-    // Run an initial refesh before starting interval if manually turned on
+    // Run an initial refresh before starting interval if manually turned on
     handleRefresh();
     localStorage.removeItem('disableAutoRefresh');
   } else {
diff --git a/airflow/www/static/js/tree.js b/airflow/www/static/js/tree.js
index 741224d..de79cb3 100644
--- a/airflow/www/static/js/tree.js
+++ b/airflow/www/static/js/tree.js
@@ -473,7 +473,7 @@ document.addEventListener('DOMContentLoaded', () => {
 
   $('#auto_refresh').change(() => {
     if ($('#auto_refresh').is(':checked')) {
-      // Run an initial refesh before starting interval if manually turned on
+      // Run an initial refresh before starting interval if manually turned on
 
       handleRefresh();
       localStorage.removeItem('disableAutoRefresh');
diff --git a/breeze b/breeze
index 4a2c80d..2dd7fab 100755
--- a/breeze
+++ b/breeze
@@ -149,7 +149,7 @@ function breeze::setup_default_breeze_constants() {
     AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES_TO:="/opt/airflow"}
     export AIRFLOW_SOURCES_TO
 
-    # Unlike in CI scripts, in breeze by default production image ist 
installed from sources
+    # Unlike in CI scripts, in breeze by default production image is installed 
from sources
     export AIRFLOW_INSTALLATION_METHOD="."
 
     # If it set is set to specified version, then the source version of Airflow
diff --git a/breeze-complete b/breeze-complete
index 0b7e8ab..1e1eb99 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -102,6 +102,7 @@ end-of-file-fixer
 fix-encoding-pragma
 flake8
 flynt
+codespell
 forbid-tabs
 helm-lint
 identity
diff --git a/chart/README.md b/chart/README.md
index db2ffc5..b4e38b4 100644
--- a/chart/README.md
+++ b/chart/README.md
@@ -40,7 +40,7 @@ cluster using the [Helm](https://helm.sh) package manager.
 * Supported Airflow version: ``1.10+``, ``2.0+``
 * Supported database backend: ``PostgresSQL``, ``MySQL``
 * Autoscaling for ``CeleryExecutor`` provided by KEDA
-* PostgresSQL and PgBouncer with a battle-tested configuration
+* PostgreSQL and PgBouncer with a battle-tested configuration
 * Monitoring:
    * StatsD/Prometheus metrics for Airflow
    * Prometheus metrics for PgBouncer
diff --git a/chart/tests/helm_template_generator.py 
b/chart/tests/helm_template_generator.py
index 8c9aff5..af4d1ba 100644
--- a/chart/tests/helm_template_generator.py
+++ b/chart/tests/helm_template_generator.py
@@ -80,7 +80,7 @@ def create_validator(api_version, kind, kubernetes_version):
 
 
 def validate_k8s_object(instance, kubernetes_version):
-    # Skip PostgresSQL chart
+    # Skip PostgreSQL chart
     labels = jmespath.search("metadata.labels", instance)
     if "helm.sh/chart" in labels:
         chart = labels["helm.sh/chart"]
diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md 
b/dev/README_RELEASE_PROVIDER_PACKAGES.md
index 3edb68e..f5df6f8 100644
--- a/dev/README_RELEASE_PROVIDER_PACKAGES.md
+++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md
@@ -212,7 +212,7 @@ rm -rf ${AIRFLOW_REPO_ROOT}/dist/*
 ./breeze prepare-provider-packages --version-suffix-for-pypi rc1 
--package-format both
 ```
 
-if you ony build few packages, run:
+if you only build a few packages, run:
 
 ```shell script
 ./breeze prepare-provider-packages --version-suffix-for-pypi rc1 
--package-format both \
diff --git a/dev/import_all_classes.py b/dev/import_all_classes.py
index 67a76c8..54cd747 100755
--- a/dev/import_all_classes.py
+++ b/dev/import_all_classes.py
@@ -45,7 +45,7 @@ def import_all_classes(
     :param provider_ids - provider ids that should be loaded.
     :param print_imports - if imported class should also be printed in output
     :param print_skips - if skipped classes should also be printed in output
-    :return: tupple of list of all imported classes and all warnings generated
+    :return: tuple of list of all imported classes and all warnings generated
     """
     imported_classes = []
     tracebacks = []
diff --git 
a/docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst 
b/docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst
index 692b77c..5ceaa39 100644
--- 
a/docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst
+++ 
b/docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst
@@ -104,7 +104,7 @@ For example, if you want to set parameter 
``connections_prefix`` to ``"airflow/c
     backend = 
airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend
     backend_kwargs = {"connections_prefix": "airflow/connections", 
"variables_prefix": null, "profile_name": "default"}
 
-Example of storing Google Secrets in AWS Secrets Manger
+Example of storing Google Secrets in AWS Secrets Manager
 """"""""""""""""""""""""""""""""""""""""""""""""""""""""
 For connecting to a google cloud conn, all the fields must be in the extra 
field, and their names follow the pattern
 ``extra_google_cloud_platform__value``. For example:
diff --git a/docs/apache-airflow-providers-ssh/connections/ssh.rst 
b/docs/apache-airflow-providers-ssh/connections/ssh.rst
index c9c109a..3cb0e0d 100644
--- a/docs/apache-airflow-providers-ssh/connections/ssh.rst
+++ b/docs/apache-airflow-providers-ssh/connections/ssh.rst
@@ -51,7 +51,7 @@ Extra (optional)
     * ``timeout`` - Deprecated - use conn_timeout instead.
     * ``compress`` - ``true`` to ask the remote client/server to compress 
traffic; ``false`` to refuse compression. Default is ``true``.
     * ``no_host_key_check`` - Set to ``false`` to restrict connecting to hosts 
with no entries in ``~/.ssh/known_hosts`` (Hosts file). This provides maximum 
protection against trojan horse attacks, but can be troublesome when the 
``/etc/ssh/ssh_known_hosts`` file is poorly maintained or connections to new 
hosts are frequently made. This option forces the user to manually add all new 
hosts. Default is ``true``, ssh will automatically add new host keys to the 
user known hosts files.
-    * ``allow_host_key_change`` - Set to ``true`` if you want to allow 
connecting to hosts that has host key changed or when you get 'REMOTE HOST 
IDENTIFICATION HAS CHANGED' error.  This wont protect against Man-In-The-Middle 
attacks. Other possible solution is to remove the host entry from 
``~/.ssh/known_hosts`` file. Default is ``false``.
+    * ``allow_host_key_change`` - Set to ``true`` if you want to allow 
connecting to hosts that has host key changed or when you get 'REMOTE HOST 
IDENTIFICATION HAS CHANGED' error.  This won't protect against 
Man-In-The-Middle attacks. Another possible solution is to remove the host entry 
from ``~/.ssh/known_hosts`` file. Default is ``false``.
     * ``look_for_keys`` - Set to ``false`` if you want to disable searching 
for discoverable private key files in ``~/.ssh/``
     * ``host_key`` - The base64 encoded ssh-rsa public key of the host or 
"ssh-<key type> <key data>" (as you would find in the ``known_hosts`` file). 
Specifying this allows making the connection if and only if the public key of 
the endpoint matches this value.
 
diff --git a/docs/apache-airflow/howto/set-up-database.rst 
b/docs/apache-airflow/howto/set-up-database.rst
index 932f15a..0c2a61e 100644
--- a/docs/apache-airflow/howto/set-up-database.rst
+++ b/docs/apache-airflow/howto/set-up-database.rst
@@ -27,7 +27,7 @@ The document below describes the database engine 
configurations, the necessary c
 Choosing database backend
 -------------------------
 
-If you want to take a real test drive of Airflow, you should consider setting 
up a database backend to **MySQL**, **PostgresSQL** , **MsSQL**.
+If you want to take a real test drive of Airflow, you should consider setting 
up a database backend to **MySQL**, **PostgreSQL** , **MsSQL**.
 By default, Airflow uses **SQLite**, which is intended for development 
purposes only.
 
 Airflow supports the following database engine versions, so make sure which 
version you have. Old versions may not support all SQL statements.
@@ -230,7 +230,7 @@ If you use a current Postgres user with custom search_path, 
search_path can be c
 
    ALTER USER airflow_user SET search_path = public;
 
-For more information regarding setup of the PostgresSQL connection, see 
`PostgreSQL dialect 
<https://docs.sqlalchemy.org/en/13/dialects/postgresql.html>`__ in SQLAlchemy 
documentation.
+For more information regarding setup of the PostgreSQL connection, see 
`PostgreSQL dialect 
<https://docs.sqlalchemy.org/en/13/dialects/postgresql.html>`__ in SQLAlchemy 
documentation.
 
 .. note::
 
diff --git a/docs/apache-airflow/modules_management.rst 
b/docs/apache-airflow/modules_management.rst
index 8488a58..4e61d27 100644
--- a/docs/apache-airflow/modules_management.rst
+++ b/docs/apache-airflow/modules_management.rst
@@ -112,7 +112,7 @@ In the case above, there are the ways you could import the 
python files:
 .. code-block:: python
 
    from my_company.common_package.common_module import SomeClass
-   from my_company.common_package.subpackge.subpackaged_util_module import 
AnotherClass
+   from my_company.common_package.subpackage.subpackaged_util_module import 
AnotherClass
    from my_company.my_custom_dags.base_dag import BaseDag
 
 You can see the ``.airflowignore`` file at the root of your folder. This is a 
file that you can put in your
diff --git a/docs/apache-airflow/pipeline_example.csv 
b/docs/apache-airflow/pipeline_example.csv
index a1b9f2d..16df952 100644
--- a/docs/apache-airflow/pipeline_example.csv
+++ b/docs/apache-airflow/pipeline_example.csv
@@ -68,7 +68,7 @@ Serial Number,Company Name,Employee Markme,Description,Leave
 9.78174E+12,COLLABORATION IN LEARNING,MAL LEE AND LORRAE WARD,ACER PRESS,0
 9.78086E+12,RE-IMAGINING EDUCATIMarkL LEADERSHIP,BRIAN J.CALDWELL,ACER PRESS,0
 9.78086E+12,TOWARDS A MOVING SCHOOL,FLEMING & KLEINHENZ,ACER PRESS,0
-9.78086E+12,DESINGNING A THINKING A CURRICULAM,SUSAN WILKS,ACER PRESS,0
+9.78086E+12,DESIGNING A THINKING A CURRICULAM,SUSAN WILKS,ACER PRESS,0
 9.78086E+12,LEADING A DIGITAL SCHOOL,MAL LEE AND MICHEAL GAFFNEY,ACER PRESS,0
 9.78086E+12,NUMERACY,WESTWOOD,ACER PRESS,0
 9.78086E+12,TEACHING ORAL LANGUAGE,JOHN MUNRO,ACER PRESS,0
@@ -87,7 +87,7 @@ Serial Number,Company Name,Employee Markme,Description,Leave
 9.78818E+12,TULSIDAS ' RAMAYAMark,Mark,ACK,0
 9.78818E+12,TALES OF HANUMAN,-,ACK,0
 9.78818E+12,VALMIKI'S RAMAYAMark,A C K,ACK,1
-9.78818E+12,THE BEST OF INIDAN WIT AND WISDOM,Mark,ACK,0
+9.78818E+12,THE BEST OF INDIAN WIT AND WISDOM,Mark,ACK,0
 9.78818E+12,MORE TALES FROM THE PANCHTANTRA,AMarkNT PAL,ACK,0
 9.78818E+12,THE GREAT MUGHALS {5-IN-1},AMarkNT.,ACK,0
 9.78818E+12,FAMOUS SCIENTISTS,Mark,ACK,0
diff --git a/docs/apache-airflow/security/kerberos.rst 
b/docs/apache-airflow/security/kerberos.rst
index 190c2f1..1832645 100644
--- a/docs/apache-airflow/security/kerberos.rst
+++ b/docs/apache-airflow/security/kerberos.rst
@@ -78,7 +78,7 @@ If you need more granular options for your kerberos ticket 
the following options
     forwardable = True
 
     # Allow to include or remove local IP from kerberos token.
-    # This is particulary useful if you use Airflow inside a VM NATted behind 
host system IP.
+    # This is particularly useful if you use Airflow inside a VM NATted behind 
host system IP.
     include_ip = True
 
 Keep in mind that Kerberos ticket are generated via ``kinit`` and will your 
use your local ``krb5.conf`` by default.
diff --git a/docs/apache-airflow/security/webserver.rst 
b/docs/apache-airflow/security/webserver.rst
index 283d5aa..f168faa 100644
--- a/docs/apache-airflow/security/webserver.rst
+++ b/docs/apache-airflow/security/webserver.rst
@@ -220,7 +220,7 @@ webserver_config.py itself if you wish.
         ) -> Dict[str, Union[str, List[str]]]:
 
             # Creates the user info payload from Github.
-            # The user previously allowed your app to act on thier behalf,
+            # The user previously allowed your app to act on their behalf,
             #   so now we can query the user and teams endpoints for their 
data.
             # Username and team membership are added to the payload and 
returned to FAB.
 
diff --git a/docs/helm-chart/index.rst b/docs/helm-chart/index.rst
index 6910a73..ad4dd88 100644
--- a/docs/helm-chart/index.rst
+++ b/docs/helm-chart/index.rst
@@ -67,7 +67,7 @@ Features
 * Supported Airflow version: ``1.10+``, ``2.0+``
 * Supported database backend: ``PostgresSQL``, ``MySQL``
 * Autoscaling for ``CeleryExecutor`` provided by KEDA
-* PostgresSQL and PgBouncer with a battle-tested configuration
+* PostgreSQL and PgBouncer with a battle-tested configuration
 * Monitoring:
 
    * StatsD/Prometheus metrics for Airflow
diff --git a/docs/helm-chart/production-guide.rst 
b/docs/helm-chart/production-guide.rst
index 4d3c3a3..a1250f2 100644
--- a/docs/helm-chart/production-guide.rst
+++ b/docs/helm-chart/production-guide.rst
@@ -24,7 +24,7 @@ Database
 --------
 
 You will want to use an external database instead of the one deployed with the 
chart by default.
-Both **PostgresSQL** and **MySQL** are supported. Supported versions can be
+Both **PostgreSQL** and **MySQL** are supported. Supported versions can be
 found on the :doc:`Set up a Database Backend 
<apache-airflow:howto/set-up-database>` page.
 
 .. code-block:: yaml
@@ -48,7 +48,7 @@ found on the :doc:`Set up a Database Backend 
<apache-airflow:howto/set-up-databa
 PgBouncer
 ---------
 
-If you are using PostgresSQL as your database, you will likely want to enable 
`PgBouncer <https://www.pgbouncer.org/>`_ as well.
+If you are using PostgreSQL as your database, you will likely want to enable 
`PgBouncer <https://www.pgbouncer.org/>`_ as well.
 Airflow can open a lot of database connections due to its distributed nature 
and using a connection pooler can significantly
 reduce the number of open connections on the database.
 
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 39b88c7..998d203 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -400,6 +400,7 @@ Zsh
 Zymergen
 abc
 accessor
+aci
 ack
 ackIds
 acknowledgement
@@ -407,6 +408,7 @@ actionCard
 acyclic
 adhoc
 adls
+afterall
 airbnb
 airbyte
 airflowignore
@@ -527,6 +529,7 @@ changelog
 charset
 checklicence
 checksums
+childs
 chmod
 chown
 ci
@@ -570,6 +573,7 @@ configs
 conftest
 conn
 connectTimeoutMS
+connexion
 containerConfiguration
 contentUrl
 contextmgr
@@ -873,6 +877,7 @@ ish
 isn
 iterable
 iteratively
+iterm
 itertools
 izip
 javascript
@@ -1109,6 +1114,7 @@ psrp
 psycopg
 pty
 pubsub
+pullrequest
 py
 pyMongo
 pyarrow
@@ -1214,6 +1220,7 @@ sharded
 shellcheck
 shellcmd
 shm
+sie
 sigv
 skipable
 sku
diff --git a/scripts/ci/libraries/_initialization.sh 
b/scripts/ci/libraries/_initialization.sh
index 65c8f3b..cad80ff 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -938,7 +938,7 @@ function initialization::check_docker_version() {
     docker_version=$(docker version --format '{{.Client.Version}}' | sed 
's/\+.*$//' || true)
     if [ "${docker_version}" == "" ]; then
         echo
-        echo "${COLOR_YELLOW}Your version of docker is unknown. If the scripts 
faill, please make sure to install docker at least: ${min_docker_version} 
version.${COLOR_RESET}"
+        echo "${COLOR_YELLOW}Your version of docker is unknown. If the scripts 
fail, please make sure to install docker at least: ${min_docker_version} 
version.${COLOR_RESET}"
         echo
         return
     fi
diff --git a/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py 
b/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py
index 600c5de..5a66d6c 100755
--- a/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py
+++ b/scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py
@@ -17,7 +17,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-Module to covert Airflow configs in config.yml to default_airflow.cfg file
+Module to convert Airflow configs in config.yml to default_airflow.cfg file
 """
 
 import os
diff --git a/scripts/in_container/prod/entrypoint_prod.sh 
b/scripts/in_container/prod/entrypoint_prod.sh
index 2e57773..a7623b6 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -281,7 +281,7 @@ if [[ -n "${_PIP_ADDITIONAL_REQUIREMENTS=}" ]] ; then
     >&2 echo
     >&2 echo "!!!!!  Installing additional requirements: 
'${_PIP_ADDITIONAL_REQUIREMENTS}' !!!!!!!!!!!!"
     >&2 echo
-    >&2 echo "WARNING: This is a developpment/test feature only. NEVER use it 
in production!"
+    >&2 echo "WARNING: This is a development/test feature only. NEVER use it 
in production!"
     >&2 echo "         Instead, build a custom image as described in"
     >&2 echo
     >&2 echo "         https://airflow.apache.org/docs/docker-stack/build.html";
diff --git a/tests/dag_processing/test_manager.py 
b/tests/dag_processing/test_manager.py
index 2db282e..1dbd1c9 100644
--- a/tests/dag_processing/test_manager.py
+++ b/tests/dag_processing/test_manager.py
@@ -774,7 +774,7 @@ class TestDagFileProcessorManager:
         manager._refresh_dag_dir()
         # Assert dag not deleted in SDM
         assert SerializedDagModel.has_dag('test_zip_dag')
-        # assert code not delted
+        # assert code not deleted
         assert DagCode.has_dag(dag.fileloc)
 
 
diff --git a/tests/models/test_taskinstance.py 
b/tests/models/test_taskinstance.py
index 4f7d3f9..e705207 100644
--- a/tests/models/test_taskinstance.py
+++ b/tests/models/test_taskinstance.py
@@ -1916,7 +1916,7 @@ class TestTaskInstance:
             rtif_get_k8s_pod_yaml.assert_called_once_with(ti, session=session)
             render_k8s_pod_yaml.assert_not_called()
 
-            # Now test that when we _dont_ find it in the DB, it calles 
render_k8s_pod_yaml
+            # Now test that when we _dont_ find it in the DB, it calls 
render_k8s_pod_yaml
             rtif_get_k8s_pod_yaml.return_value = None
             render_k8s_pod_yaml.return_value = fake_spec
 
diff --git a/tests/providers/ssh/hooks/test_ssh.py 
b/tests/providers/ssh/hooks/test_ssh.py
index 89068fd..2351c4e 100644
--- a/tests/providers/ssh/hooks/test_ssh.py
+++ b/tests/providers/ssh/hooks/test_ssh.py
@@ -691,7 +691,7 @@ class TestSSHHook(unittest.TestCase):
     def test_openssh_private_key(self):
         # Paramiko behaves differently with OpenSSH generated keys to paramiko
         # generated keys, so we need a test one.
-        # This has been gernerated specifically to put here, it is not 
otherwise in use
+        # This has been generated specifically to put here, it is not 
otherwise in use
         TEST_OPENSSH_PRIVATE_KEY = "-----BEGIN OPENSSH " + textwrap.dedent(
             """\
         PRIVATE KEY-----
diff --git a/tests/sensors/test_external_task_sensor.py 
b/tests/sensors/test_external_task_sensor.py
index e507c7c..9580dc3 100644
--- a/tests/sensors/test_external_task_sensor.py
+++ b/tests/sensors/test_external_task_sensor.py
@@ -750,7 +750,7 @@ def dag_bag_cyclic():
                 )
                 task_a >> task_b
 
-        # Create the last dag wich loops back
+        # Create the last dag which loops back
         with DAG(f"dag_{depth}", start_date=DEFAULT_DATE, 
schedule_interval=None) as dag:
             dags.append(dag)
             task_a = ExternalTaskSensor(
diff --git a/tests/task/task_runner/test_base_task_runner.py 
b/tests/task/task_runner/test_base_task_runner.py
index 499bba7..96fd9f5 100644
--- a/tests/task/task_runner/test_base_task_runner.py
+++ b/tests/task/task_runner/test_base_task_runner.py
@@ -37,7 +37,7 @@ def test_config_copy_mode(tmp_configuration_copy, chown, 
subprocess_call, dag_ma
     ti = dr.task_instances[0]
     job = LocalTaskJob(ti)
     runner = BaseTaskRunner(job)
-    # So we don't try to delete it -- cos the file wont exist
+    # So we don't try to delete it -- cos the file won't exist
     del runner._cfg_path
 
     includes = bool(impersonation)

Reply via email to