diff --git a/airflow/providers/amazon/aws/example_dags/example_datasync_1.py b/airflow/providers/amazon/aws/example_dags/example_datasync_1.py
index 02c36fb3a496..9f2b9f5255ee 100644
--- a/airflow/providers/amazon/aws/example_dags/example_datasync_1.py
+++ b/airflow/providers/amazon/aws/example_dags/example_datasync_1.py
@@ -22,7 +22,7 @@
 
 This DAG relies on the following environment variables:
 
-* SOURCE_LOCATION_URI - Source location URI, usually on premisis SMB or NFS
+* SOURCE_LOCATION_URI - Source location URI, usually on premises SMB or NFS
 * DESTINATION_LOCATION_URI - Destination location URI, usually S3
 """
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_datasync_2.py b/airflow/providers/amazon/aws/example_dags/example_datasync_2.py
index 5db7ee33cbd4..6fb9f7352a00 100644
--- a/airflow/providers/amazon/aws/example_dags/example_datasync_2.py
+++ b/airflow/providers/amazon/aws/example_dags/example_datasync_2.py
@@ -25,7 +25,7 @@
 
 This DAG relies on the following environment variables:
 
-* SOURCE_LOCATION_URI - Source location URI, usually on premisis SMB or NFS
+* SOURCE_LOCATION_URI - Source location URI, usually on premises SMB or NFS
 * DESTINATION_LOCATION_URI - Destination location URI, usually S3
 * CREATE_TASK_KWARGS - Passed to boto3.create_task(**kwargs)
 * CREATE_SOURCE_LOCATION_KWARGS - Passed to boto3.create_location(**kwargs)
diff --git a/airflow/providers/apache/hive/sensors/named_hive_partition.py b/airflow/providers/apache/hive/sensors/named_hive_partition.py
index b8cb600cf9cb..1d5417ad6a8f 100644
--- a/airflow/providers/apache/hive/sensors/named_hive_partition.py
+++ b/airflow/providers/apache/hive/sensors/named_hive_partition.py
@@ -63,7 +63,7 @@ def __init__(
         self.hook = hook
         if self.hook and metastore_conn_id != 'metastore_default':
             self.log.warning(
-                'A hook was passed but a non defaul metastore_conn_id=%s was used', metastore_conn_id
+                'A hook was passed but a non default metastore_conn_id=%s was used', metastore_conn_id
             )
 
     @staticmethod
diff --git a/airflow/providers/google/cloud/example_dags/example_stackdriver.py b/airflow/providers/google/cloud/example_dags/example_stackdriver.py
index 9becdfb552be..68ac978e3da3 100644
--- a/airflow/providers/google/cloud/example_dags/example_stackdriver.py
+++ b/airflow/providers/google/cloud/example_dags/example_stackdriver.py
@@ -156,7 +156,7 @@
 
     # [START howto_operator_gcp_stackdriver_delete_alert_policy]
     delete_alert_policy = StackdriverDeleteAlertOperator(
-        task_id='delete-alert-polciy',
+        task_id='delete-alert-policy',
         name='test-alert',
     )
     # [END howto_operator_gcp_stackdriver_delete_alert_policy]
diff --git a/airflow/providers/google/cloud/hooks/kms.py b/airflow/providers/google/cloud/hooks/kms.py
index fe75e6dd84db..00f7216b99e0 100644
--- a/airflow/providers/google/cloud/hooks/kms.py
+++ b/airflow/providers/google/cloud/hooks/kms.py
@@ -141,7 +141,7 @@ def decrypt(
         """
         Decrypts a ciphertext message using Google Cloud KMS.
 
-        :param key_name: The Resource Name for the key to be used for decyption.
+        :param key_name: The Resource Name for the key to be used for decryption.
             Of the form ``projects/*/locations/*/keyRings/*/cryptoKeys/**``
         :type key_name: str
         :param ciphertext: The message to be decrypted.
diff --git a/airflow/providers/google/cloud/hooks/mlengine.py b/airflow/providers/google/cloud/hooks/mlengine.py
index a3e9d65fa908..c1c42351d97d 100644
--- a/airflow/providers/google/cloud/hooks/mlengine.py
+++ b/airflow/providers/google/cloud/hooks/mlengine.py
@@ -420,7 +420,7 @@ def create_model(
         self._append_label(model)
         try:
             request = hook.projects().models().create(parent=project, body=model)  # pylint: disable=no-member
-            respone = request.execute(num_retries=self.num_retries)
+            response = request.execute(num_retries=self.num_retries)
         except HttpError as e:
             if e.resp.status != 409:
                 raise e
@@ -441,9 +441,9 @@ def create_model(
                     or field_violation["description"] != "A model with the same name already exists."
                 ):
                     raise e
-            respone = self.get_model(model_name=model['name'], project_id=project_id)
+            response = self.get_model(model_name=model['name'], project_id=project_id)
 
-        return respone
+        return response
 
     @GoogleBaseHook.fallback_to_default_project_id
     def get_model(
diff --git a/airflow/providers/google/cloud/operators/speech_to_text.py b/airflow/providers/google/cloud/operators/speech_to_text.py
index 0bed07581f8b..3b5434ff114d 100644
--- a/airflow/providers/google/cloud/operators/speech_to_text.py
+++ b/airflow/providers/google/cloud/operators/speech_to_text.py
@@ -111,7 +111,7 @@ def execute(self, context):
             gcp_conn_id=self.gcp_conn_id,
             impersonation_chain=self.impersonation_chain,
         )
-        respones = hook.recognize_speech(
+        response = hook.recognize_speech(
             config=self.config, audio=self.audio, retry=self.retry, timeout=self.timeout
         )
-        return MessageToDict(respones)
+        return MessageToDict(response)
diff --git a/airflow/providers/google/cloud/transfers/local_to_gcs.py b/airflow/providers/google/cloud/transfers/local_to_gcs.py
index 63f20cd5655e..f8ace44656cd 100644
--- a/airflow/providers/google/cloud/transfers/local_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/local_to_gcs.py
@@ -120,7 +120,7 @@ def execute(self, context):
         if os.path.basename(self.dst):  # path to a file
             if len(filepaths) > 1:  # multiple file upload
                 raise ValueError(
-                    "'dst' parameter references filepath. Please specifiy "
+                    "'dst' parameter references filepath. Please specify "
                     "directory (with trailing backslash) to upload multiple "
                     "files. e.g. /path/to/directory/"
                 )
diff --git a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py
index 62af2c9ec7b9..68405049b70d 100644
--- a/airflow/providers/google/cloud/transfers/postgres_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py
@@ -141,10 +141,10 @@ def convert_type(self, value, schema_type):
         if isinstance(value, (datetime.datetime, datetime.date)):
             return pendulum.parse(value.isoformat()).float_timestamp
         if isinstance(value, datetime.time):
-            formated_time = time.strptime(str(value), "%H:%M:%S")
+            formatted_time = time.strptime(str(value), "%H:%M:%S")
             return int(
                 datetime.timedelta(
-                    hours=formated_time.tm_hour, minutes=formated_time.tm_min, seconds=formated_time.tm_sec
+                    hours=formatted_time.tm_hour, minutes=formatted_time.tm_min, seconds=formatted_time.tm_sec
                 ).total_seconds()
             )
         if isinstance(value, dict):
diff --git a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py
index 5ee837d4851d..4e197b0de82c 100644
--- a/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py
+++ b/airflow/providers/google/cloud/utils/mlengine_prediction_summary.py
@@ -127,7 +127,7 @@ def decode(x):
 
 @beam.ptransform_fn
 def MakeSummary(pcoll, metric_fn, metric_keys):  # pylint: disable=invalid-name
-    """Summary PTransofrm used in Dataflow."""
+    """Summary PTransform used in Dataflow."""
     return (
         pcoll
         | "ApplyMetricFnPerInstance" >> beam.Map(metric_fn)
diff --git a/airflow/providers/redis/operators/redis_publish.py b/airflow/providers/redis/operators/redis_publish.py
index be8fc5058b2f..97f2c364e82a 100644
--- a/airflow/providers/redis/operators/redis_publish.py
+++ b/airflow/providers/redis/operators/redis_publish.py
@@ -54,7 +54,7 @@ def execute(self, context: Dict) -> None:
         """
         redis_hook = RedisHook(redis_conn_id=self.redis_conn_id)
 
-        self.log.info('Sending messsage %s to Redis on channel %s', self.message, self.channel)
+        self.log.info('Sending message %s to Redis on channel %s', self.message, self.channel)
 
         result = redis_hook.get_conn().publish(channel=self.channel, message=self.message)
 
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 2ee883c305ed..7e3d66e61d3c 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -623,7 +623,6 @@ de
 decomissioning
 decrypt
 decrypted
-decyption
 deepcopy
 deidentify
 del
diff --git a/tests/providers/amazon/aws/sensors/test_sqs.py b/tests/providers/amazon/aws/sensors/test_sqs.py
index ee9b544f47fb..cbd6639da3c5 100644
--- a/tests/providers/amazon/aws/sensors/test_sqs.py
+++ b/tests/providers/amazon/aws/sensors/test_sqs.py
@@ -57,7 +57,7 @@ def test_poke_success(self):
         )
 
     @mock_sqs
-    def test_poke_no_messsage_failed(self):
+    def test_poke_no_message_failed(self):
         self.sqs_hook.create_queue('test')
 
         result = self.sensor.poke(self.mock_context)