Skip to content

Commit 94d0790

Browse files
authored
Refactor: Simplify comparisons (#34181)
1 parent 36eba18 commit 94d0790

File tree

9 files changed

+13
-17
lines changed

9 files changed

+13
-17
lines changed

airflow/dag_processing/manager.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1081,7 +1081,7 @@ def start_new_processes(self):
10811081
# needs to be done before this process is forked to create the DAG parsing processes.
10821082
SecretCache.init()
10831083

1084-
while self._parallelism - len(self._processors) > 0 and self._file_path_queue:
1084+
while self._parallelism > len(self._processors) and self._file_path_queue:
10851085
file_path = self._file_path_queue.popleft()
10861086
# Stop creating duplicate processor i.e. processor with the same filepath
10871087
if file_path in self._processors:

airflow/plugins_manager.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -340,9 +340,8 @@ def ensure_plugins_loaded():
340340
for plugin in plugins:
341341
registered_hooks.extend(plugin.hooks)
342342

343-
num_loaded = len(plugins)
344-
if num_loaded > 0:
345-
log.debug("Loading %d plugin(s) took %.2f seconds", num_loaded, timer.duration)
343+
if plugins:
344+
log.debug("Loading %d plugin(s) took %.2f seconds", len(plugins), timer.duration)
346345

347346

348347
def initialize_web_ui_plugins():

airflow/providers/amazon/aws/hooks/datasync.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -57,9 +57,10 @@ def __init__(self, wait_interval_seconds: int = 30, *args, **kwargs) -> None:
5757
self.locations: list = []
5858
self.tasks: list = []
5959
# wait_interval_seconds = 0 is used during unit tests
60-
if wait_interval_seconds < 0 or wait_interval_seconds > 15 * 60:
60+
if 0 <= wait_interval_seconds <= 15 * 60:
61+
self.wait_interval_seconds = wait_interval_seconds
62+
else:
6163
raise ValueError(f"Invalid wait_interval_seconds {wait_interval_seconds}")
62-
self.wait_interval_seconds = wait_interval_seconds
6364

6465
def create_location(self, location_uri: str, **create_location_kwargs) -> str:
6566
"""

airflow/providers/apache/hive/hooks/hive.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,7 @@ def _validate_beeline_parameters(self, conn):
172172
)
173173
try:
174174
int_port = int(conn.port)
175-
if int_port <= 0 or int_port > 65535:
175+
if not 0 < int_port <= 65535:
176176
raise Exception(f"The port used in beeline command ({conn.port}) should be in range 0-65535)")
177177
except (ValueError, TypeError) as e:
178178
raise Exception(f"The port used in beeline command ({conn.port}) should be a valid integer: {e})")

airflow/providers/google/cloud/operators/bigquery_dts.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -356,7 +356,7 @@ def execute(self, context: Context):
356356
)
357357

358358
def _wait_for_transfer_to_be_done(self, run_id: str, transfer_config_id: str, interval: int = 10):
359-
if interval < 0:
359+
if interval <= 0:
360360
raise ValueError("Interval must be > 0")
361361

362362
while True:

airflow/providers/google/cloud/sensors/dataproc_metastore.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,4 +113,4 @@ def poke(self, context: Context) -> bool:
113113

114114
# Return True if we got all requested partitions.
115115
# If no partitions were given in the request, then we expect to find at least one.
116-
return found_partitions > 0 and found_partitions >= len(set(self.partitions))
116+
return found_partitions >= max(1, len(set(self.partitions)))

airflow/providers/google/cloud/transfers/gcs_to_gcs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -307,7 +307,7 @@ def _ignore_existing_files(self, hook, prefix, **kwargs):
307307
]
308308

309309
objects = set(objects) - set(existing_objects)
310-
if len(objects) > 0:
310+
if objects:
311311
self.log.info("%s files are going to be synced: %s.", len(objects), objects)
312312
else:
313313
self.log.info("There are no new files to sync. Have a nice day!")

airflow/providers/snowflake/utils/sql_api_generate_jwt.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -102,14 +102,10 @@ def prepare_account_name_for_jwt(self, raw_account: str) -> str:
102102
account = raw_account
103103
if ".global" not in account:
104104
# Handle the general case.
105-
idx = account.find(".")
106-
if idx > 0:
107-
account = account[0:idx]
105+
account = account.partition(".")[0]
108106
else:
109107
# Handle the replication case.
110-
idx = account.find("-")
111-
if idx > 0:
112-
account = account[0:idx] # pragma: no cover
108+
account = account.partition("-")[0]
113109
# Use uppercase for the account identifier.
114110
return account.upper()
115111

tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ def _cases(self):
9292
@staticmethod
9393
def _is_valid_pod_id(name):
9494
regex = r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
95-
return len(name) <= 253 and all(ch.lower() == ch for ch in name) and re.match(regex, name)
95+
return len(name) <= 253 and name.islower() and re.match(regex, name)
9696

9797
@staticmethod
9898
def _is_safe_label_value(value):

0 commit comments

Comments (0)
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy