

Commit 527b948

[misc] Replace XOR `^` conditions by `exactly_one` helper in providers (#27858)
1 parent 51c70a5 commit 527b948

File tree: 8 files changed, +73 −45 lines
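All eight files swap a hand-rolled XOR truthiness check for the `exactly_one` helper from `airflow.utils.helpers`. As a minimal sketch of its semantics (illustrative only; the upstream helper additionally rejects a single iterable argument and forces callers to unpack it):

    def exactly_one(*args) -> bool:
        """Return True if exactly one of *args is truthy, False otherwise."""
        return sum(map(bool, args)) == 1

    assert exactly_one("client", None)            # exactly one set -> True
    assert not exactly_one(None, None)            # none set -> False
    assert not exactly_one("client", "resource")  # both set -> False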

airflow/providers/amazon/aws/hooks/base_aws.py

Lines changed: 2 additions & 1 deletion
@@ -47,6 +47,7 @@
 from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.hooks.base import BaseHook
 from airflow.providers.amazon.aws.utils.connection_wrapper import AwsConnectionWrapper
+from airflow.utils.helpers import exactly_one
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.log.secrets_masker import mask_secret

@@ -493,7 +494,7 @@ def conn(self) -> BaseAwsConnection:

         :return: boto3.client or boto3.resource
         """
-        if not ((not self.client_type) ^ (not self.resource_type)):
+        if not exactly_one(self.client_type, self.resource_type):
             raise ValueError(
                 f"Either client_type={self.client_type!r} or "
                 f"resource_type={self.resource_type!r} must be provided, not both."

airflow/providers/amazon/aws/operators/emr.py

Lines changed: 2 additions & 1 deletion
@@ -26,6 +26,7 @@
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
 from airflow.providers.amazon.aws.links.emr import EmrClusterLink
+from airflow.utils.helpers import exactly_one

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -71,7 +72,7 @@ def __init__(
         wait_for_completion: bool = False,
         **kwargs,
     ):
-        if not (job_flow_id is None) ^ (job_flow_name is None):
+        if not exactly_one(job_flow_id is None, job_flow_name is None):
             raise AirflowException("Exactly one of job_flow_id or job_flow_name must be specified.")
         super().__init__(**kwargs)
         cluster_states = cluster_states or []
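Here the arguments are the `is None` tests themselves, so the condition reads "exactly one of the two must be left unset", i.e. exactly one of `job_flow_id` / `job_flow_name` is provided. Illustrating the accepted and rejected combinations (values are placeholders):

    from airflow.utils.helpers import exactly_one

    job_flow_id, job_flow_name = "j-8989898989", None
    assert exactly_one(job_flow_id is None, job_flow_name is None)      # one provided: valid

    job_flow_id = job_flow_name = None
    assert not exactly_one(job_flow_id is None, job_flow_name is None)  # neither: operator raises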

airflow/providers/amazon/aws/operators/s3.py

Lines changed: 3 additions & 2 deletions
@@ -26,6 +26,7 @@
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.utils.helpers import exactly_one

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -463,11 +464,11 @@ def __init__(
         self.aws_conn_id = aws_conn_id
         self.verify = verify

-        if not bool(keys is None) ^ bool(prefix is None):
+        if not exactly_one(prefix is None, keys is None):
             raise AirflowException("Either keys or prefix should be set.")

     def execute(self, context: Context):
-        if not bool(self.keys is None) ^ bool(self.prefix is None):
+        if not exactly_one(self.keys is None, self.prefix is None):
             raise AirflowException("Either keys or prefix should be set.")

         if isinstance(self.keys, (list, str)) and not bool(self.keys):
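The check runs in both `__init__` and `execute` because `keys` and `prefix` may be templated and only take their final values when the task runs; the reworked tests below emulate that by mutating the operator after construction. A hypothetical DAG-side usage where the prefix is only known at run time:

    from airflow.providers.amazon.aws.operators.s3 import S3DeleteObjectsOperator

    # hypothetical task: the prefix renders during execute, hence the second check
    cleanup = S3DeleteObjectsOperator(
        task_id="cleanup_staging",      # placeholder task id
        bucket="my-bucket",             # placeholder bucket
        prefix="staging/{{ ds }}/",     # templated; keys stays None
    )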

airflow/providers/google/cloud/operators/cloud_build.py

Lines changed: 2 additions & 1 deletion
@@ -40,6 +40,7 @@
 from airflow.providers.google.cloud.triggers.cloud_build import CloudBuildCreateBuildTrigger
 from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME
 from airflow.utils import yaml
+from airflow.utils.helpers import exactly_one

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -971,7 +972,7 @@ def __init__(self, build: dict | Build) -> None:
         self.build = deepcopy(build)

     def _verify_source(self) -> None:
-        if not (("storage_source" in self.build["source"]) ^ ("repo_source" in self.build["source"])):
+        if not exactly_one("storage_source" in self.build["source"], "repo_source" in self.build["source"]):
             raise AirflowException(
                 "The source could not be determined. Please choose one data source from: "
                 "storage_source and repo_source."

airflow/providers/slack/hooks/slack.py

Lines changed: 2 additions & 1 deletion
@@ -30,6 +30,7 @@
 from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.hooks.base import BaseHook
 from airflow.providers.slack.utils import ConnectionExtraConfig
+from airflow.utils.helpers import exactly_one
 from airflow.utils.log.secrets_masker import mask_secret

 if TYPE_CHECKING:
@@ -268,7 +269,7 @@ def send_file(
         - `Slack API files.upload method <https://api.slack.com/methods/files.upload>`_
         - `File types <https://api.slack.com/types/file#file_types>`_
         """
-        if not ((not file) ^ (not content)):
+        if not exactly_one(file, content):
            raise ValueError("Either `file` or `content` must be provided, not both.")
        elif file:
            file = Path(file)
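Callers must therefore supply `file` or `content`, never both or neither. A hedged usage sketch (the connection id and channel are placeholders):

    from airflow.providers.slack.hooks.slack import SlackHook

    hook = SlackHook(slack_conn_id="slack_api_default")        # placeholder connection
    hook.send_file(channels="#alerts", file="./report.csv")    # OK: file only
    hook.send_file(channels="#alerts", content="inline text")  # OK: content only
    # Passing both file= and content= raises ValueError, as before.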

tests/providers/amazon/aws/operators/test_emr_add_steps.py

Lines changed: 18 additions & 3 deletions
@@ -19,7 +19,6 @@

 import json
 import os
-import unittest
 from datetime import timedelta
 from unittest.mock import MagicMock, call, patch

@@ -41,7 +40,7 @@
 )


-class TestEmrAddStepsOperator(unittest.TestCase):
+class TestEmrAddStepsOperator:
     # When
     _config = [
         {
@@ -54,7 +53,7 @@ class TestEmrAddStepsOperator(unittest.TestCase):
         }
     ]

-    def setUp(self):
+    def setup_method(self):
         self.args = {"owner": "airflow", "start_date": DEFAULT_DATE}

         # Mock out the emr_client (moto has incorrect response)
@@ -79,6 +78,22 @@ def test_init(self):
         assert self.operator.job_flow_id == "j-8989898989"
         assert self.operator.aws_conn_id == "aws_default"

+    @pytest.mark.parametrize(
+        "job_flow_id, job_flow_name",
+        [
+            pytest.param("j-8989898989", "test_cluster", id="both-specified"),
+            pytest.param(None, None, id="both-none"),
+        ],
+    )
+    def test_validate_mutually_exclusive_args(self, job_flow_id, job_flow_name):
+        error_message = r"Exactly one of job_flow_id or job_flow_name must be specified\."
+        with pytest.raises(AirflowException, match=error_message):
+            EmrAddStepsOperator(
+                task_id="test_validate_mutually_exclusive_args",
+                job_flow_id=job_flow_id,
+                job_flow_name=job_flow_name,
+            )
+
     def test_render_template(self):
         dag_run = DagRun(dag_id=self.operator.dag.dag_id, execution_date=DEFAULT_DATE, run_id="test")
         ti = TaskInstance(task=self.operator)
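Alongside the new parametrized test, the class migrates from `unittest.TestCase` to a plain pytest class: `setUp` becomes `setup_method` and the `unittest` import goes away. A minimal standalone sketch of the same pattern, with hypothetical names:

    import pytest

    class TestMutuallyExclusiveArgs:
        def setup_method(self):
            # runs before each test method, like unittest's setUp
            self.default_args = {"owner": "airflow"}

        @pytest.mark.parametrize(
            "a, b",
            [pytest.param("x", "y", id="both-set"), pytest.param(None, None, id="both-none")],
        )
        def test_rejects_invalid_combinations(self, a, b):
            with pytest.raises(ValueError, match="Exactly one"):
                if (a is None) == (b is None):
                    raise ValueError("Exactly one of a or b must be specified.")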

tests/providers/amazon/aws/operators/test_s3_object.py

Lines changed: 39 additions & 35 deletions
@@ -22,6 +22,7 @@
 from unittest import mock

 import boto3
+import pytest
 from moto import mock_s3

 from airflow import AirflowException
@@ -95,8 +96,8 @@ def test_s3_copy_object_arg_combination_2(self):
         assert objects_in_dest_bucket["Contents"][0]["Key"] == self.dest_key


-class TestS3DeleteObjectsOperator(unittest.TestCase):
-    @mock_s3
+@mock_s3
+class TestS3DeleteObjectsOperator:
     def test_s3_delete_single_object(self):
         bucket = "testbucket"
         key = "path/data.txt"
@@ -116,7 +117,6 @@ def test_s3_delete_single_object(self):
         # There should be no object found in the bucket created earlier
         assert "Contents" not in conn.list_objects(Bucket=bucket, Prefix=key)

-    @mock_s3
     def test_s3_delete_multiple_objects(self):
         bucket = "testbucket"
         key_pattern = "path/data"
@@ -139,7 +139,6 @@ def test_s3_delete_multiple_objects(self):
         # There should be no object found in the bucket created earlier
         assert "Contents" not in conn.list_objects(Bucket=bucket, Prefix=key_pattern)

-    @mock_s3
     def test_s3_delete_prefix(self):
         bucket = "testbucket"
         key_pattern = "path/data"
@@ -162,7 +161,6 @@ def test_s3_delete_prefix(self):
         # There should be no object found in the bucket created earlier
         assert "Contents" not in conn.list_objects(Bucket=bucket, Prefix=key_pattern)

-    @mock_s3
     def test_s3_delete_empty_list(self):
         bucket = "testbucket"
         key_of_test = "path/data.txt"
@@ -185,7 +183,6 @@ def test_s3_delete_empty_list(self):
         # the object found should be consistent with dest_key specified earlier
         assert objects_in_dest_bucket["Contents"][0]["Key"] == key_of_test

-    @mock_s3
     def test_s3_delete_empty_string(self):
         bucket = "testbucket"
         key_of_test = "path/data.txt"
@@ -208,50 +205,57 @@ def test_s3_delete_empty_string(self):
         # the object found should be consistent with dest_key specified earlier
         assert objects_in_dest_bucket["Contents"][0]["Key"] == key_of_test

-    @mock_s3
-    def test_assert_s3_both_keys_and_prifix_given(self):
-        bucket = "testbucket"
-        keys = "path/data.txt"
-        key_pattern = "path/data"
-
-        conn = boto3.client("s3")
-        conn.create_bucket(Bucket=bucket)
-        conn.upload_fileobj(Bucket=bucket, Key=keys, Fileobj=io.BytesIO(b"input"))
-
-        # The object should be detected before the DELETE action is tested
-        objects_in_dest_bucket = conn.list_objects(Bucket=bucket, Prefix=keys)
-        assert len(objects_in_dest_bucket["Contents"]) == 1
-        assert objects_in_dest_bucket["Contents"][0]["Key"] == keys
-        with self.assertRaises(AirflowException):
-            op = S3DeleteObjectsOperator(
-                task_id="test_assert_s3_both_keys_and_prifix_given",
-                bucket=bucket,
+    @pytest.mark.parametrize(
+        "keys, prefix",
+        [
+            pytest.param("path/data.txt", "path/data", id="single-key-and-prefix"),
+            pytest.param(["path/data.txt"], "path/data", id="multiple-keys-and-prefix"),
+            pytest.param(None, None, id="both-none"),
+        ],
+    )
+    def test_validate_keys_and_prefix_in_constructor(self, keys, prefix):
+        with pytest.raises(AirflowException, match=r"Either keys or prefix should be set\."):
+            S3DeleteObjectsOperator(
+                task_id="test_validate_keys_and_prefix_in_constructor",
+                bucket="foo-bar-bucket",
                 keys=keys,
-                prefix=key_pattern,
+                prefix=prefix,
             )
-            op.execute(None)
-
-        # The object found in the bucket created earlier should still be there
-        assert len(objects_in_dest_bucket["Contents"]) == 1
-        # the object found should be consistent with dest_key specified earlier
-        assert objects_in_dest_bucket["Contents"][0]["Key"] == keys

-    @mock_s3
-    def test_assert_s3_no_keys_or_prifix_given(self):
+    @pytest.mark.parametrize(
+        "keys, prefix",
+        [
+            pytest.param("path/data.txt", "path/data", id="single-key-and-prefix"),
+            pytest.param(["path/data.txt"], "path/data", id="multiple-keys-and-prefix"),
+            pytest.param(None, None, id="both-none"),
+        ],
+    )
+    def test_validate_keys_and_prefix_in_execute(self, keys, prefix):
         bucket = "testbucket"
         key_of_test = "path/data.txt"

         conn = boto3.client("s3")
         conn.create_bucket(Bucket=bucket)
         conn.upload_fileobj(Bucket=bucket, Key=key_of_test, Fileobj=io.BytesIO(b"input"))

+        # Set valid values in the constructor, then change them to emulate template rendering
+        op = S3DeleteObjectsOperator(
+            task_id="test_validate_keys_and_prefix_in_execute",
+            bucket=bucket,
+            keys="keys-exists",
+            prefix=None,
+        )
+        op.keys = keys
+        op.prefix = prefix
+
         # The object should be detected before the DELETE action is tested
         objects_in_dest_bucket = conn.list_objects(Bucket=bucket, Prefix=key_of_test)
         assert len(objects_in_dest_bucket["Contents"]) == 1
         assert objects_in_dest_bucket["Contents"][0]["Key"] == key_of_test
-        with self.assertRaises(AirflowException):
-            op = S3DeleteObjectsOperator(task_id="test_assert_s3_no_keys_or_prifix_given", bucket=bucket)
+
+        with pytest.raises(AirflowException, match=r"Either keys or prefix should be set\."):
             op.execute(None)
+
         # The object found in the bucket created earlier should still be there
         assert len(objects_in_dest_bucket["Contents"]) == 1
         # the object found should be consistent with dest_key specified earlier
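Two structural changes ride along here: `@mock_s3` moves from each method to the class, so moto patches S3 once for every test method, and the two hand-written mutual-exclusion tests become parametrized ones. A minimal sketch of the class-level decorator, assuming moto is installed:

    import boto3
    from moto import mock_s3

    @mock_s3  # applies the S3 mock to every test method in the class
    class TestBucketSketch:
        def test_create_and_list(self):
            conn = boto3.client("s3", region_name="us-east-1")
            conn.create_bucket(Bucket="testbucket")
            assert "Contents" not in conn.list_objects(Bucket="testbucket")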

tests/providers/google/cloud/operators/test_cloud_build.py

Lines changed: 5 additions & 1 deletion
@@ -304,9 +304,13 @@ def test_update_build_trigger(self, mock_hook):

 class TestBuildProcessor(TestCase):
     def test_verify_source(self):
-        with pytest.raises(AirflowException, match="The source could not be determined."):
+        error_message = r"The source could not be determined."
+        with pytest.raises(AirflowException, match=error_message):
             BuildProcessor(build={"source": {"storage_source": {}, "repo_source": {}}}).process_body()

+        with pytest.raises(AirflowException, match=error_message):
+            BuildProcessor(build={"source": {}}).process_body()
+
     @parameterized.expand(
         [
             (
