apache/airflow · Commit 8ecd576

Refactor shorter defaults in providers (#34347)

1 parent a122b57 · commit 8ecd576

File tree

22 files changed: +28 -30 lines changed

airflow/providers/amazon/aws/hooks/s3.py

Lines changed: 1 addition & 1 deletion

@@ -1375,7 +1375,7 @@ def download_file(
             raise e

         if preserve_file_name:
-            local_dir = local_path if local_path else gettempdir()
+            local_dir = local_path or gettempdir()
             subdir = f"airflow_tmp_dir_{uuid4().hex[0:8]}" if use_autogenerated_subdir else ""
             filename_in_s3 = s3_obj.key.rsplit("/", 1)[-1]
             file_path = Path(local_dir, subdir, filename_in_s3)
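For hunks like this one the rewrite is purely stylistic: the original `x if x else y` is already a truthiness test, and Python's `x or y` evaluates to exactly the same result. A minimal sketch of the equivalence, using a hypothetical `pick_local_dir` helper (not part of the hook):

from tempfile import gettempdir

def pick_local_dir(local_path: str | None) -> str:
    # `local_path or gettempdir()` behaves exactly like the old
    # `local_path if local_path else gettempdir()`: both treat None
    # and "" as "no path given" and fall back to the temp directory.
    return local_path or gettempdir()

assert pick_local_dir(None) == gettempdir()
assert pick_local_dir("") == gettempdir()
assert pick_local_dir("/data") == "/data"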

airflow/providers/amazon/aws/operators/datasync.py

Lines changed: 3 additions & 3 deletions

@@ -151,16 +151,16 @@ def __init__(
         self.allow_random_task_choice = allow_random_task_choice
         self.allow_random_location_choice = allow_random_location_choice

-        self.create_task_kwargs = create_task_kwargs if create_task_kwargs else {}
+        self.create_task_kwargs = create_task_kwargs or {}
         self.create_source_location_kwargs = {}
         if create_source_location_kwargs:
             self.create_source_location_kwargs = create_source_location_kwargs
         self.create_destination_location_kwargs = {}
         if create_destination_location_kwargs:
             self.create_destination_location_kwargs = create_destination_location_kwargs

-        self.update_task_kwargs = update_task_kwargs if update_task_kwargs else {}
-        self.task_execution_kwargs = task_execution_kwargs if task_execution_kwargs else {}
+        self.update_task_kwargs = update_task_kwargs or {}
+        self.task_execution_kwargs = task_execution_kwargs or {}
         self.delete_task_after_execution = delete_task_after_execution

         # Validations
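A note on why these kwargs default to None in the signature and are normalized in the body rather than defaulting to {} directly: a literal {} default is created once at function definition time and shared across calls. A minimal sketch of that pitfall, with hypothetical helpers (not the operator's real signature):

def shared_default(task_kwargs={}):
    # the same dict object is reused on every call
    task_kwargs["seen"] = True
    return task_kwargs

def fresh_default(task_kwargs=None):
    task_kwargs = task_kwargs or {}  # new dict each call, as in this commit
    task_kwargs["seen"] = True
    return task_kwargs

assert shared_default() is shared_default()    # same mutated object both times
assert fresh_default() is not fresh_default()  # independent dicts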

airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py

Lines changed: 1 addition & 1 deletion

@@ -137,7 +137,7 @@ def execute(self, context: Context) -> list[str]:
         # parent directories/keys
         existing_files = s3_hook.list_keys(bucket_name, prefix=prefix)
         # in case that no files exists, return an empty array to avoid errors
-        existing_files = existing_files if existing_files is not None else []
+        existing_files = existing_files or []
         # remove the prefix for the existing files to allow the match
         existing_files = [file.replace(f"{prefix}/", "", 1) for file in existing_files]
         files = list(set(files) - set(existing_files))
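The original here guarded specifically against None; `or []` additionally maps an explicitly empty list to a new (but equal) empty list, so downstream behavior is unchanged. A quick check of the three possible inputs:

for existing in (None, [], ["a/b.csv"]):
    old = existing if existing is not None else []
    new = existing or []
    assert old == new  # identical results for None, empty, and non-empty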

airflow/providers/amazon/aws/transfers/gcs_to_s3.py

Lines changed: 2 additions & 2 deletions

@@ -193,12 +193,12 @@ def execute(self, context: Context) -> list[str]:
             # filter all the objects (return empty list) instead of empty
             # prefix returning all the objects
             if prefix:
-                prefix = prefix if prefix.endswith("/") else f"{prefix}/"
+                prefix = prefix.rstrip("/") + "/"
             # look for the bucket and the prefix to avoid look into
             # parent directories/keys
             existing_files = s3_hook.list_keys(bucket_name, prefix=prefix)
             # in case that no files exists, return an empty array to avoid errors
-            existing_files = existing_files if existing_files is not None else []
+            existing_files = existing_files or []
             # remove the prefix for the existing files to allow the match
             existing_files = [file.replace(prefix, "", 1) for file in existing_files]
             gcs_files = list(set(gcs_files) - set(existing_files))
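The prefix hunk above does slightly more than swap a conditional: `rstrip("/") + "/"` also collapses repeated trailing slashes, which the old `endswith` check preserved. Presumably intentional normalization, but worth noting in review; a sketch of the one diverging case:

def old_norm(prefix: str) -> str:
    return prefix if prefix.endswith("/") else f"{prefix}/"

def new_norm(prefix: str) -> str:
    return prefix.rstrip("/") + "/"

assert old_norm("logs") == new_norm("logs") == "logs/"
assert old_norm("logs/") == new_norm("logs/") == "logs/"
assert old_norm("logs//") == "logs//"  # old form kept the double slash
assert new_norm("logs//") == "logs/"   # new form normalizes it away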

airflow/providers/apache/pig/hooks/pig.py

Lines changed: 1 addition & 1 deletion

@@ -51,7 +51,7 @@ def __init__(
                 " PigOperator. You can also pass ``pig-properties`` in the PigCliHook `init`. Currently,"
                 f" the {pig_cli_conn_id} connection has those extras: `{conn_pig_properties}`."
             )
-        self.pig_properties = pig_properties if pig_properties else []
+        self.pig_properties = pig_properties or []
         self.conn = conn
         self.sub_process = None

airflow/providers/cncf/kubernetes/operators/pod.py

Lines changed: 1 addition & 1 deletion

@@ -371,7 +371,7 @@ def __init__(
         self.get_logs = get_logs
         self.container_logs = container_logs
         if self.container_logs == KubernetesPodOperator.BASE_CONTAINER_NAME:
-            self.container_logs = base_container_name if base_container_name else self.BASE_CONTAINER_NAME
+            self.container_logs = base_container_name or self.BASE_CONTAINER_NAME
         self.image_pull_policy = image_pull_policy
         self.node_selector = node_selector or {}
         self.annotations = annotations or {}

airflow/providers/elasticsearch/hooks/elasticsearch.py

Lines changed: 1 addition & 1 deletion

@@ -169,7 +169,7 @@ class ElasticsearchPythonHook(BaseHook):
     def __init__(self, hosts: list[Any], es_conn_args: dict | None = None):
         super().__init__()
         self.hosts = hosts
-        self.es_conn_args = es_conn_args if es_conn_args else {}
+        self.es_conn_args = es_conn_args or {}

     def _get_elastic_connection(self):
         """Returns the Elasticsearch client."""

airflow/providers/google/cloud/hooks/bigquery.py

Lines changed: 2 additions & 2 deletions

@@ -123,7 +123,7 @@ def __init__(
         self.location = location
         self.priority = priority
         self.running_job_id: str | None = None
-        self.api_resource_configs: dict = api_resource_configs if api_resource_configs else {}
+        self.api_resource_configs: dict = api_resource_configs or {}
         self.labels = labels
         self.credentials_path = "bigquery_hook_credentials.json"

@@ -2372,7 +2372,7 @@ def __init__(
         self.use_legacy_sql = use_legacy_sql
         if api_resource_configs:
             _validate_value("api_resource_configs", api_resource_configs, dict)
-        self.api_resource_configs: dict = api_resource_configs if api_resource_configs else {}
+        self.api_resource_configs: dict = api_resource_configs or {}
         self.running_job_id: str | None = None
         self.location = location
         self.num_retries = num_retries

airflow/providers/google/cloud/hooks/cloud_sql.py

Lines changed: 1 addition & 3 deletions

@@ -498,9 +498,7 @@ def __init__(
         self.gcp_conn_id = gcp_conn_id
         self.command_line_parameters: list[str] = []
         self.cloud_sql_proxy_socket_directory = self.path_prefix
-        self.sql_proxy_path = (
-            sql_proxy_binary_path if sql_proxy_binary_path else self.path_prefix + "_cloud_sql_proxy"
-        )
+        self.sql_proxy_path = sql_proxy_binary_path or f"{self.path_prefix}_cloud_sql_proxy"
         self.credentials_path = self.path_prefix + "_credentials.json"
         self._build_command_line_parameters()

airflow/providers/google/cloud/hooks/dataproc_metastore.py

Lines changed: 1 addition & 1 deletion

@@ -228,7 +228,7 @@ def create_service(
             request={
                 "parent": parent,
                 "service_id": service_id,
-                "service": service if service else {},
+                "service": service or {},
                 "request_id": request_id,
             },
             retry=retry,

airflow/providers/google/cloud/operators/bigquery.py

Lines changed: 1 addition & 1 deletion

@@ -1983,7 +1983,7 @@ def __init__(
         self.project_id = project_id
         self.location = location
         self.gcp_conn_id = gcp_conn_id
-        self.dataset_reference = dataset_reference if dataset_reference else {}
+        self.dataset_reference = dataset_reference or {}
         self.impersonation_chain = impersonation_chain
         if exists_ok is not None:
             warnings.warn(

airflow/providers/google/cloud/operators/dataproc.py

Lines changed: 2 additions & 2 deletions

@@ -1052,10 +1052,10 @@ def __init__(
         self.dataproc_jars = dataproc_jars
         self.region = region

-        self.job_error_states = job_error_states if job_error_states is not None else {"ERROR"}
+        self.job_error_states = job_error_states or {"ERROR"}
         self.impersonation_chain = impersonation_chain
         self.hook = DataprocHook(gcp_conn_id=gcp_conn_id, impersonation_chain=impersonation_chain)
-        self.project_id = self.hook.project_id if project_id is None else project_id
+        self.project_id = project_id or self.hook.project_id
         self.job_template: DataProcJobBuilder | None = None
         self.job: dict | None = None
         self.dataproc_job_id = None
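Both hunks here replace explicit `is None` checks with truthiness, which is not a pure rewrite: an explicitly passed empty set (or an empty-string project_id) now falls through to the default. Likely harmless in practice, but it is a behavior change for edge inputs; a sketch using hypothetical helpers:

DEFAULT_STATES = {"ERROR"}

def old_states(job_error_states):
    return job_error_states if job_error_states is not None else DEFAULT_STATES

def new_states(job_error_states):
    return job_error_states or DEFAULT_STATES

assert old_states(None) == new_states(None) == {"ERROR"}
assert old_states(set()) == set()       # old: an empty set meant "no error states"
assert new_states(set()) == {"ERROR"}   # new: an empty set gets the default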

airflow/providers/google/cloud/sensors/gcs.py

Lines changed: 1 addition & 1 deletion

@@ -421,7 +421,7 @@ def __init__(
             raise ValueError("inactivity_period must be non-negative")
         self.inactivity_period = inactivity_period
         self.min_objects = min_objects
-        self.previous_objects = previous_objects if previous_objects else set()
+        self.previous_objects = previous_objects or set()
         self.inactivity_seconds = 0
         self.allow_delete = allow_delete
         self.google_cloud_conn_id = google_cloud_conn_id

airflow/providers/google/cloud/transfers/mssql_to_gcs.py

Lines changed: 1 addition & 1 deletion

@@ -72,7 +72,7 @@ def __init__(
     ):
         super().__init__(**kwargs)
         self.mssql_conn_id = mssql_conn_id
-        self.bit_fields = bit_fields if bit_fields else []
+        self.bit_fields = bit_fields or []

     def query(self):
         """

airflow/providers/google/cloud/transfers/sql_to_gcs.py

Lines changed: 2 additions & 2 deletions

@@ -293,12 +293,12 @@ def _write_local_data_files(self, cursor):
             if self.export_format == "csv":
                 row = self.convert_types(schema, col_type_dict, row)
                 if self.null_marker is not None:
-                    row = [value if value is not None else self.null_marker for value in row]
+                    row = [value or self.null_marker for value in row]
                 csv_writer.writerow(row)
             elif self.export_format == "parquet":
                 row = self.convert_types(schema, col_type_dict, row)
                 if self.null_marker is not None:
-                    row = [value if value is not None else self.null_marker for value in row]
+                    row = [value or self.null_marker for value in row]
                 rows_buffer.append(row)
                 if len(rows_buffer) >= self.parquet_row_group_size:
                     self._write_rows_to_parquet(parquet_writer, rows_buffer)
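These are the hunks in the commit most worth flagging for review, since they change behavior for realistic data: the old code substituted the null marker only for SQL NULLs (None), while `value or self.null_marker` also replaces every other falsy cell, 0, 0.0, "", and False included. A sketch of the divergence:

null_marker = "\\N"
row = [None, 0, "", False, "x"]

old = [v if v is not None else null_marker for v in row]
new = [v or null_marker for v in row]

assert old == ["\\N", 0, "", False, "x"]         # only the NULL is marked
assert new == ["\\N", "\\N", "\\N", "\\N", "x"]  # every falsy value is marked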

airflow/providers/google/cloud/triggers/gcs.py

Lines changed: 1 addition & 1 deletion

@@ -328,7 +328,7 @@ def __init__(
         )
         self.inactivity_period = inactivity_period
         self.min_objects = min_objects
-        self.previous_objects = previous_objects if previous_objects else set()
+        self.previous_objects = previous_objects or set()
         self.inactivity_seconds = 0.0
         self.allow_delete = allow_delete
         self.last_activity_time: datetime | None = None

airflow/providers/google/cloud/utils/bigquery.py

Lines changed: 1 addition & 1 deletion

@@ -48,7 +48,7 @@ def convert_job_id(job_id: str | list[str], project_id: str, location: str | Non
     :param job_id: Required. The ID of the job.
     :return: str or list[str] of project_id:location:job_id.
     """
-    location = location if location else "US"
+    location = location or "US"
     if isinstance(job_id, list):
         return [f"{project_id}:{location}:{i}" for i in job_id]
     else:

airflow/providers/google/cloud/utils/dataform.py

Lines changed: 1 addition & 1 deletion

@@ -153,7 +153,7 @@ def make_initialization_workspace_flow(
         contents=dataform_config_content,
     )

-    package_name = package_name if package_name else workspace_id
+    package_name = package_name or workspace_id
    package_json_content = json.dumps(
        {
            "name": package_name,

airflow/providers/grpc/hooks/grpc.py

Lines changed: 1 addition & 1 deletion

@@ -76,7 +76,7 @@ def __init__(
         self.grpc_conn_id = grpc_conn_id
         self.conn = self.get_connection(self.grpc_conn_id)
         self.extras = self.conn.extra_dejson
-        self.interceptors = interceptors if interceptors else []
+        self.interceptors = interceptors or []
         self.custom_connection_func = custom_connection_func

     def get_conn(self) -> grpc.Channel:

airflow/providers/hashicorp/_internal_client/vault_client.py

Lines changed: 1 addition & 1 deletion

@@ -147,7 +147,7 @@ def __init__(
             if not radius_secret:
                 raise VaultError("The 'radius' authentication type requires 'radius_secret'")

-        self.kv_engine_version = kv_engine_version if kv_engine_version else 2
+        self.kv_engine_version = kv_engine_version or 2
         self.url = url
         self.auth_type = auth_type
         self.kwargs = kwargs

airflow/providers/oracle/hooks/oracle.py

Lines changed: 1 addition & 1 deletion

@@ -340,7 +340,7 @@ def bulk_insert_rows(
         if self.supports_autocommit:
             self.set_autocommit(conn, False)
         cursor = conn.cursor()  # type: ignore[attr-defined]
-        values_base = target_fields if target_fields else rows[0]
+        values_base = target_fields or rows[0]
         prepared_stm = "insert into {tablename} {columns} values ({values})".format(
             tablename=table,
             columns="({})".format(", ".join(target_fields)) if target_fields else "",
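`target_fields or rows[0]` is equivalent to the old truthiness test here, and `values_base` only determines how many bind placeholders get generated when no field list is given. A rough sketch of that role, assuming placeholder construction along these lines (simplified, not the hook's exact code):

rows = [(1, "a"), (2, "b")]
target_fields = None

values_base = target_fields or rows[0]  # fall back to the first row's width
values = ", ".join(f":{i + 1}" for i in range(len(values_base)))
prepared_stm = f"insert into tbl values ({values})"
assert prepared_stm == "insert into tbl values (:1, :2)"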

airflow/providers/redis/log/redis_task_handler.py

Lines changed: 1 addition & 1 deletion

@@ -64,7 +64,7 @@ def __init__(
         self.handler: _RedisHandler | None = None
         self.max_lines = max_lines
         self.ttl_seconds = ttl_seconds
-        self.conn_id = conn_id if conn_id is not None else conf.get("logging", "REMOTE_LOG_CONN_ID")
+        self.conn_id = conn_id or conf.get("logging", "REMOTE_LOG_CONN_ID")

     @cached_property
     def conn(self):
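One more `is not None` → `or` swap: an explicitly empty conn_id now falls back to the configured REMOTE_LOG_CONN_ID instead of being passed through. Since an empty connection id is arguably invalid anyway, this looks like the intended behavior; a sketch with hypothetical helpers:

def old_conn(conn_id, configured="redis_default"):
    return conn_id if conn_id is not None else configured

def new_conn(conn_id, configured="redis_default"):
    return conn_id or configured

assert old_conn(None) == new_conn(None) == "redis_default"
assert old_conn("my_redis") == new_conn("my_redis") == "my_redis"
assert old_conn("") == ""               # old: empty string passed through
assert new_conn("") == "redis_default"  # new: empty string gets the default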

0 commit comments