
Inclusive Language (#18349) · apache/airflow@e25eea0
https://github.com/apache/airflow/commit/e25eea052fd54c94b490a377de05c6bae4c24dbb

Commit e25eea0

Inclusive Language (#18349)
1 parent e81f14b commit e25eea0

File tree: 23 files changed, +34 −34 lines changed


COMMITTERS.rst

Lines changed: 1 addition & 1 deletion
@@ -158,7 +158,7 @@ can become the Mentor and guide the proposed candidates on how they can become a
 
 If the committee does not have enough information, requires more time, or requires more evidence of
 candidate's eligibility, a mentor, who is not the proposer, is selected to help mentor the candidate
-The mentor should try to remain impartial -- his/her goal is to provide the missing evidence and to
+The mentor should try to remain impartial -- their goal is to provide the missing evidence and to
 try to coach/mentor the candidate to success.
 
 In order to re-raise a candidate vote, both Proposer and Mentor must be in favor. Again,

airflow/models/serialized_dag.py

Lines changed: 1 addition & 1 deletion
@@ -160,7 +160,7 @@ def read_all_dags(cls, session: Session = None) -> Dict[str, 'SerializedDAG']:
             log.debug("Deserializing DAG: %s", row.dag_id)
             dag = row.dag
 
-            # Sanity check.
+            # Coherence check
             if dag.dag_id == row.dag_id:
                 dags[row.dag_id] = dag
             else:

airflow/providers/amazon/aws/example_dags/example_emr_job_flow_automatic_steps.py

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@
     'Instances': {
         'InstanceGroups': [
             {
-                'Name': 'Master node',
+                'Name': 'Primary node',
                 'Market': 'SPOT',
                 'InstanceRole': 'MASTER',
                 'InstanceType': 'm1.medium',

airflow/providers/amazon/aws/example_dags/example_emr_job_flow_manual_steps.py

Lines changed: 1 addition & 1 deletion
@@ -48,7 +48,7 @@
     'Instances': {
         'InstanceGroups': [
             {
-                'Name': 'Master node',
+                'Name': 'Primary node',
                 'Market': 'SPOT',
                 'InstanceRole': 'MASTER',
                 'InstanceType': 'm1.medium',
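
For context, the renamed instance group sits inside the JOB_FLOW_OVERRIDES dict that these example DAGs pass to EmrCreateJobFlowOperator. A minimal hedged sketch of such a config follows; everything outside the lines shown in the diff (release label, roles, counts, job-flow name) is an illustrative placeholder, not taken from this commit:

# Hedged sketch of an EMR RunJobFlow config using the renamed instance group.
JOB_FLOW_OVERRIDES = {
    'Name': 'example-job-flow',           # placeholder job flow name
    'ReleaseLabel': 'emr-5.29.0',         # placeholder EMR release
    'Instances': {
        'InstanceGroups': [
            {
                'Name': 'Primary node',   # display name renamed in this commit
                'Market': 'SPOT',
                'InstanceRole': 'MASTER',  # EMR API enum value, unchanged by the rename
                'InstanceType': 'm1.medium',
                'InstanceCount': 1,
            },
        ],
        'KeepJobFlowAliveWhenNoSteps': False,
        'TerminationProtected': False,
    },
    'JobFlowRole': 'EMR_EC2_DefaultRole',  # placeholder IAM instance profile
    'ServiceRole': 'EMR_DefaultRole',      # placeholder IAM service role
}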

airflow/providers/google/cloud/hooks/cloud_memorystore.py

Lines changed: 1 addition & 1 deletion
@@ -291,7 +291,7 @@ def failover_instance(
         metadata: Optional[Sequence[Tuple[str, str]]] = None,
     ):
         """
-        Initiates a failover of the master node to current replica node for a specific STANDARD tier Cloud
+        Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud
         Memorystore for Redis instance.
 
         :param location: The location of the Cloud Memorystore instance (for example europe-west1)

airflow/providers/google/cloud/operators/cloud_memorystore.py

Lines changed: 1 addition & 1 deletion
@@ -315,7 +315,7 @@ def execute(self, context: dict) -> None:
 
 class CloudMemorystoreFailoverInstanceOperator(BaseOperator):
     """
-    Initiates a failover of the master node to current replica node for a specific STANDARD tier Cloud
+    Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud
     Memorystore for Redis instance.
 
     .. seealso::
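
For orientation (not part of this commit), the operator whose docstring changes above is typically wired into a DAG roughly as follows. This is a hedged sketch: the region, instance id and project are placeholders, and the FailoverInstanceRequest import assumes the google-cloud-redis 2.x client.

from google.cloud.redis_v1 import FailoverInstanceRequest

from airflow.providers.google.cloud.operators.cloud_memorystore import (
    CloudMemorystoreFailoverInstanceOperator,
)

# Promotes the replica of a STANDARD tier instance to primary.
failover_instance = CloudMemorystoreFailoverInstanceOperator(
    task_id="failover-instance",
    location="europe-north1",            # placeholder region
    instance="my-redis-instance",        # placeholder instance id
    data_protection_mode=FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
    project_id="my-gcp-project",         # placeholder project
)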

airflow/providers/google/cloud/operators/dataproc.py

Lines changed: 3 additions & 3 deletions
@@ -135,14 +135,14 @@ class ClusterGenerator:
     :type optional_components: list[str]
     :param num_masters: The # of master nodes to spin up
     :type num_masters: int
-    :param master_machine_type: Compute engine machine type to use for the master node
+    :param master_machine_type: Compute engine machine type to use for the primary node
     :type master_machine_type: str
-    :param master_disk_type: Type of the boot disk for the master node
+    :param master_disk_type: Type of the boot disk for the primary node
         (default is ``pd-standard``).
         Valid values: ``pd-ssd`` (Persistent Disk Solid State Drive) or
         ``pd-standard`` (Persistent Disk Hard Disk Drive).
     :type master_disk_type: str
-    :param master_disk_size: Disk size for the master node
+    :param master_disk_size: Disk size for the primary node
     :type master_disk_size: int
     :param worker_machine_type: Compute engine machine type to use for the worker nodes
     :type worker_machine_type: str
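
As a hedged usage sketch of the parameters documented above, ClusterGenerator builds the cluster config dict that a Dataproc create-cluster task consumes. Only parameters named in the docstring are shown; the project id and the concrete machine and disk values are illustrative, not the provider's defaults.

from airflow.providers.google.cloud.operators.dataproc import ClusterGenerator

# make() returns a plain dict describing the cluster (primary and worker groups).
cluster_config = ClusterGenerator(
    project_id="my-gcp-project",       # placeholder project
    num_masters=1,
    master_machine_type="n1-standard-4",
    master_disk_type="pd-standard",
    master_disk_size=500,
    num_workers=2,
    worker_machine_type="n1-standard-4",
).make()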

airflow/providers/yandex/operators/yandexcloud_dataproc.py

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ class DataprocCreateClusterOperator(BaseOperator):
         Service account can be created inside the folder.
     :type service_account_id: Optional[str]
     :param masternode_resource_preset: Resources preset (CPU+RAM configuration)
-        for the master node of the cluster.
+        for the primary node of the cluster.
     :type masternode_resource_preset: str
     :param masternode_disk_size: Masternode storage size in GiB.
     :type masternode_disk_size: int

airflow/providers_manager.py

Lines changed: 2 additions & 2 deletions
@@ -107,7 +107,7 @@ def _create_customized_form_field_behaviours_schema_validator():
 
 def _sanity_check(provider_package: str, class_name: str) -> bool:
     """
-    Performs sanity check on provider classes.
+    Performs coherence check on provider classes.
     For apache-airflow providers - it checks if it starts with appropriate package. For all providers
     it tries to import the provider - checking that there are no exceptions during importing.
     It logs appropriate warning in case it detects any problems.
@@ -121,7 +121,7 @@ def _sanity_check(provider_package: str, class_name: str) -> bool:
     provider_path = provider_package[len("apache-") :].replace("-", ".")
     if not class_name.startswith(provider_path):
         log.warning(
-            "Sanity check failed when importing '%s' from '%s' package. It should start with '%s'",
+            "Coherence check failed when importing '%s' from '%s' package. It should start with '%s'",
             class_name,
             provider_package,
             provider_path,

airflow/utils/db.py

Lines changed: 1 addition & 1 deletion
@@ -231,7 +231,7 @@ def create_default_connections(session=None):
                         "InstanceCount": 1
                     },
                     {
-                        "Name": "Slave nodes",
+                        "Name": "Core nodes",
                         "Market": "ON_DEMAND",
                         "InstanceRole": "CORE",
                         "InstanceType": "r3.2xlarge",

airflow/utils/process_utils.py

Lines changed: 6 additions & 6 deletions
@@ -159,19 +159,19 @@ def execute_interactive(cmd: List[str], **kwargs):
     tty.setraw(sys.stdin.fileno())
 
     # open pseudo-terminal to interact with subprocess
-    master_fd, slave_fd = pty.openpty()
+    primary_fd, secondary_fd = pty.openpty()
     try:
         # use os.setsid() make it run in a new process group, or bash job control will not be enabled
         with subprocess.Popen(
-            cmd, stdin=slave_fd, stdout=slave_fd, stderr=slave_fd, universal_newlines=True, **kwargs
+            cmd, stdin=secondary_fd, stdout=secondary_fd, stderr=secondary_fd, universal_newlines=True, **kwargs
         ) as proc:
             while proc.poll() is None:
-                readable_fbs, _, _ = select.select([sys.stdin, master_fd], [], [])
+                readable_fbs, _, _ = select.select([sys.stdin, primary_fd], [], [])
                 if sys.stdin in readable_fbs:
                     input_data = os.read(sys.stdin.fileno(), 10240)
-                    os.write(master_fd, input_data)
-                if master_fd in readable_fbs:
-                    output_data = os.read(master_fd, 10240)
+                    os.write(primary_fd, input_data)
+                if primary_fd in readable_fbs:
+                    output_data = os.read(primary_fd, 10240)
                     if output_data:
                         os.write(sys.stdout.fileno(), output_data)
     finally:
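
The renamed descriptors follow the standard pty pattern: the subprocess is attached to the secondary end of the pseudo-terminal while the parent relays bytes through the primary end. Below is a minimal standalone sketch of that pattern based on the diff above; it is not the Airflow function itself, and the final bash command is just an illustration.

import os
import pty
import select
import subprocess
import sys
import termios
import tty


def run_interactive(cmd):
    """Run cmd on a pseudo-terminal, relaying stdin/stdout through the primary fd."""
    old_tty = termios.tcgetattr(sys.stdin)
    tty.setraw(sys.stdin.fileno())
    primary_fd, secondary_fd = pty.openpty()
    try:
        with subprocess.Popen(
            cmd, stdin=secondary_fd, stdout=secondary_fd, stderr=secondary_fd
        ) as proc:
            while proc.poll() is None:
                readable, _, _ = select.select([sys.stdin, primary_fd], [], [])
                if sys.stdin in readable:
                    # Forward keystrokes to the subprocess via the primary end.
                    os.write(primary_fd, os.read(sys.stdin.fileno(), 10240))
                if primary_fd in readable:
                    # Echo subprocess output back to the real terminal.
                    data = os.read(primary_fd, 10240)
                    if data:
                        os.write(sys.stdout.fileno(), data)
    finally:
        # Restore the terminal settings changed by tty.setraw().
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)


if __name__ == "__main__":
    run_interactive(["bash"])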

docs/apache-airflow-providers-google/operators/cloud/dataproc.rst

Lines changed: 1 addition & 1 deletion
@@ -105,7 +105,7 @@ The list currently includes Spark, Hadoop, Pig and Hive.
 For more information on versions and images take a look at `Cloud Dataproc Image version list <https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions>`__
 
 To submit a job to the cluster you need a provide a job source file. The job source file can be on GCS, the cluster or on your local
-file system. You can specify a file:/// path to refer to a local file on a cluster's master node.
+file system. You can specify a file:/// path to refer to a local file on a cluster's primary node.
 
 The job configuration can be submitted by using:
 :class:`~airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator`.
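
A hedged sketch of a job config that uses such a file:/// URI is shown below; the job dict follows the Dataproc Job layout, and the project, cluster and script path are placeholders rather than values from this commit. The dict is then passed as the job argument of DataprocSubmitJobOperator.

# Pig job whose script already exists on the cluster's primary node.
PIG_JOB = {
    "reference": {"project_id": "my-gcp-project"},          # placeholder project
    "placement": {"cluster_name": "my-dataproc-cluster"},   # placeholder cluster
    "pig_job": {"query_file_uri": "file:///home/dataproc/job.pig"},  # local file on the primary node
}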

scripts/docker/install_airflow.sh

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@
 . "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
 
 function install_airflow() {
-    # Sanity check for editable installation mode.
+    # Coherence check for editable installation mode.
     if [[ ${AIRFLOW_INSTALLATION_METHOD} != "." && \
         ${AIRFLOW_INSTALL_EDITABLE_FLAG} == "--editable" ]]; then
         echo

scripts/in_container/bin/install_aws.sh

Lines changed: 1 addition & 1 deletion
@@ -58,7 +58,7 @@ pushd "${TMP_DIR}" && unzip "${TMP_DIR}/awscliv2.zip" && cd aws && \
         --bin-dir "/files/bin/" && \
     popd
 
-# Sanity check
+# Coherence check
 if ! command -v aws > /dev/null; then
     echo 'Installation failed. The command "aws" was not found.'
     exit 1

scripts/in_container/bin/install_az.sh

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ chmod a+x /files/opt/az/az
 
 ln -s /files/opt/az/az "${BIN_PATH}"
 
-# Sanity check
+# Coherence check
 if ! command -v az > /dev/null; then
     echo 'Installation failed. The command "az" was not found.'
     exit 1

scripts/in_container/bin/install_gcloud.sh

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ while IFS='' read -r line; do
     ln -sf "${line}" "/files/bin/${BIN_NAME}"
 done < <(find "${INSTALL_DIR}/bin/" -type f)
 
-# Sanity check
+# Coherence check
 if ! command -v gcloud > /dev/null; then
     echo 'Installation failed. The command "gcloud" was not found.'
     exit 1

scripts/in_container/bin/install_imgcat.sh

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ echo "Downloading from ${DOWNLOAD_URL}"
 curl -# --fail "${DOWNLOAD_URL}" --output "${BIN_PATH}"
 chmod +x "${BIN_PATH}"
 
-# Sanity check
+# Coherence check
 if ! command -v imgcat > /dev/null; then
     echo 'Installation failed. The command "imgcat" was not found.'
     exit 1

scripts/in_container/bin/install_java.sh

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ while IFS='' read -r line; do
     ln -s "${line}" "/files/bin/${BIN_NAME}"
 done < <(find "${INSTALL_DIR}/bin/" -type f)
 
-# Sanity check
+# Coherence check
 if ! command -v java > /dev/null; then
     echo 'Installation failed. The command "java" was not found.'
     exit 1

scripts/in_container/bin/install_kubectl.sh

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ echo "Downloading from ${DOWNLOAD_URL}"
 curl -# --fail "${DOWNLOAD_URL}" --output "${BIN_PATH}"
 chmod +x "${BIN_PATH}"
 
-# Sanity check
+# Coherence check
 if ! command -v kubectl > /dev/null; then
     echo 'Installation failed. The command "kubectl" was not found.'
     exit 1

scripts/in_container/bin/install_terraform.sh

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ curl -# --fail "${DOWNLOAD_URL}" --output "${TMP_DIR}/terraform.zip"
 echo "Extracting archive"
 unzip "${TMP_DIR}/terraform.zip" -d /files/bin
 
-# Sanity check
+# Coherence check
 if ! command -v terraform > /dev/null; then
     echo 'Installation failed. The command "terraform" was not found.'
     exit 1

tests/core/test_providers_manager.py

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ def test_providers_are_loaded(self):
                 version = provider_manager.providers[provider][0]
                 assert re.search(r'[0-9]*\.[0-9]*\.[0-9]*.*', version)
                 assert package_name == provider
-            # just a sanity check - no exact number as otherwise we would have to update
+            # just a coherence check - no exact number as otherwise we would have to update
             # several tests if we add new connections/provider which is not ideal
             assert len(provider_list) > 65
             assert [] == self._caplog.records

tests/models/test_dagbag.py

Lines changed: 4 additions & 4 deletions
@@ -419,7 +419,7 @@ def subdag_1():
             return dag
 
         test_dag = standard_subdag()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
         assert len(test_dag.subdags) == 2
 
         # Perform processing dag
@@ -503,7 +503,7 @@ def subdag_1():
             return dag
 
         test_dag = nested_subdags()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
        assert len(test_dag.subdags) == 6
 
         # Perform processing dag
@@ -541,7 +541,7 @@ def basic_cycle():
             return dag
 
         test_dag = basic_cycle()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
        assert len(test_dag.subdags) == 0
 
         # Perform processing dag
@@ -628,7 +628,7 @@ def subdag_1():
             return dag
 
         test_dag = nested_subdag_cycle()
-        # sanity check to make sure DAG.subdag is still functioning properly
+        # coherence check to make sure DAG.subdag is still functioning properly
        assert len(test_dag.subdags) == 6
 
         # Perform processing dag

tests/models/test_dagrun.py

Lines changed: 1 addition & 1 deletion
@@ -737,7 +737,7 @@ def test_next_dagruns_to_examine_only_unpaused(self, state):
     def test_no_scheduling_delay_for_nonscheduled_runs(self, stats_mock):
         """
         Tests that dag scheduling delay stat is not called if the dagrun is not a scheduled run.
-        This case is manual run. Simple test for sanity check.
+        This case is manual run. Simple test for coherence check.
         """
         dag = DAG(dag_id='test_dagrun_stats', start_date=days_ago(1))
         dag_task = DummyOperator(task_id='dummy', dag=dag)

0 commit comments








