Content-Length: 906886 | pFad | https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db

[AIRFLOW-7081] Remove env variables from GCP guide (#7755) · apache/airflow@73305c7 · GitHub
Skip to content

Commit 73305c7

Browse files
authored
[AIRFLOW-7081] Remove env variables from GCP guide (#7755)
1 parent 4e626be commit 73305c7

25 files changed

+29
-660
lines changed

airflow/providers/google/cloud/example_dags/example_bigtable.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,6 @@
5656
from airflow.providers.google.cloud.sensors.bigtable import BigtableTableReplicationCompletedSensor
5757
from airflow.utils.dates import days_ago
5858

59-
# [START howto_operator_gcp_bigtable_args]
6059
GCP_PROJECT_ID = getenv('GCP_PROJECT_ID', 'example-project')
6160
CBT_INSTANCE_ID = getenv('CBT_INSTANCE_ID', 'some-instance-id')
6261
CBT_INSTANCE_DISPLAY_NAME = getenv('CBT_INSTANCE_DISPLAY_NAME', 'Human-readable name')
@@ -69,7 +68,6 @@
6968
CBT_CLUSTER_STORAGE_TYPE = getenv('CBT_CLUSTER_STORAGE_TYPE', '2')
7069
CBT_TABLE_ID = getenv('CBT_TABLE_ID', 'some-table-id')
7170
CBT_POKE_INTERVAL = getenv('CBT_POKE_INTERVAL', '60')
72-
# [END howto_operator_gcp_bigtable_args]
7371

7472
default_args = {
7573
'start_date': days_ago(1)

airflow/providers/google/cloud/example_dags/example_cloud_build.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -37,14 +37,10 @@
3737
from airflow.providers.google.cloud.operators.cloud_build import CloudBuildCreateOperator
3838
from airflow.utils import dates
3939

40-
# [START howto_operator_gcp_common_variables]
4140
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
42-
# [END howto_operator_gcp_common_variables]
4341

44-
# [START howto_operator_gcp_create_build_variables]
4542
GCP_SOURCE_ARCHIVE_URL = os.environ.get("GCP_CLOUD_BUILD_ARCHIVE_URL", "gs://example-bucket/file")
4643
GCP_SOURCE_REPOSITORY_NAME = os.environ.get("GCP_CLOUD_BUILD_REPOSITORY_NAME", "")
47-
# [END howto_operator_gcp_create_build_variables]
4844

4945
GCP_SOURCE_ARCHIVE_URL_PARTS = urlparse(GCP_SOURCE_ARCHIVE_URL)
5046
GCP_SOURCE_BUCKET_NAME = GCP_SOURCE_ARCHIVE_URL_PARTS.netloc

airflow/providers/google/cloud/example_dags/example_cloud_sql.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -41,25 +41,19 @@
4141
)
4242
from airflow.utils.dates import days_ago
4343

44-
# [START howto_operator_cloudsql_arguments]
4544
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
4645
INSTANCE_NAME = os.environ.get('GCSQL_MYSQL_INSTANCE_NAME', 'test-mysql')
4746
INSTANCE_NAME2 = os.environ.get('GCSQL_MYSQL_INSTANCE_NAME2', 'test-mysql2')
4847
DB_NAME = os.environ.get('GCSQL_MYSQL_DATABASE_NAME', 'testdb')
49-
# [END howto_operator_cloudsql_arguments]
5048

51-
# [START howto_operator_cloudsql_export_import_arguments]
5249
EXPORT_URI = os.environ.get('GCSQL_MYSQL_EXPORT_URI', 'gs://bucketName/fileName')
5350
IMPORT_URI = os.environ.get('GCSQL_MYSQL_IMPORT_URI', 'gs://bucketName/fileName')
54-
# [END howto_operator_cloudsql_export_import_arguments]
5551

5652
# Bodies below represent Cloud SQL instance resources:
5753
# https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances
5854

59-
# [START howto_operator_cloudsql_create_arguments]
6055
FAILOVER_REPLICA_NAME = INSTANCE_NAME + "-failover-replica"
6156
READ_REPLICA_NAME = INSTANCE_NAME + "-read-replica"
62-
# [END howto_operator_cloudsql_create_arguments]
6357

6458
# [START howto_operator_cloudsql_create_body]
6559
body = {

airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,6 @@
4545
from airflow.providers.google.cloud.operators.cloud_sql import CloudSQLExecuteQueryOperator
4646
from airflow.utils.dates import days_ago
4747

48-
# [START howto_operator_cloudsql_query_arguments]
49-
5048
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
5149
GCP_REGION = os.environ.get('GCP_REGION', 'europe-west-1b')
5250

@@ -89,7 +87,6 @@
8987
'DROP TABLE TABLE_TEST2',
9088
]
9189

92-
# [END howto_operator_cloudsql_query_arguments]
9390
default_args = {
9491
'start_date': days_ago(1)
9592
}

airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,6 @@
6060
)
6161
from airflow.utils.dates import days_ago
6262

63-
# [START howto_operator_gcp_transfer_common_variables]
6463
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
6564
GCP_DESCRIPTION = os.environ.get('GCP_DESCRIPTION', 'description')
6665
GCP_TRANSFER_TARGET_BUCKET = os.environ.get('GCP_TRANSFER_TARGET_BUCKET')
@@ -73,7 +72,6 @@
7372
GCP_TRANSFER_SECOND_TARGET_BUCKET = os.environ.get(
7473
'GCP_TRANSFER_SECOND_TARGET_BUCKET', 'gcp-transfer-second-target'
7574
)
76-
# [END howto_operator_gcp_transfer_common_variables]
7775

7876
# [START howto_operator_gcp_transfer_create_job_body_aws]
7977
aws_to_gcs_transfer_body = {

airflow/providers/google/cloud/example_dags/example_compute.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -48,12 +48,7 @@
4848
'start_date': days_ago(1),
4949
}
5050

51-
# [START howto_operator_gce_args_set_machine_type]
5251
GCE_SHORT_MACHINE_TYPE_NAME = os.environ.get('GCE_SHORT_MACHINE_TYPE_NAME', 'n1-standard-1')
53-
SET_MACHINE_TYPE_BODY = {
54-
'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)
55-
}
56-
# [END howto_operator_gce_args_set_machine_type]
5752

5853

5954
with models.DAG(
@@ -99,7 +94,9 @@
9994
project_id=GCP_PROJECT_ID,
10095
zone=GCE_ZONE,
10196
resource_id=GCE_INSTANCE,
102-
body=SET_MACHINE_TYPE_BODY,
97+
body={
98+
'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)
99+
},
103100
task_id='gcp_compute_set_machine_type'
104101
)
105102
# [END howto_operator_gce_set_machine_type]
@@ -108,7 +105,9 @@
108105
gce_set_machine_type2 = ComputeEngineSetMachineTypeOperator(
109106
zone=GCE_ZONE,
110107
resource_id=GCE_INSTANCE,
111-
body=SET_MACHINE_TYPE_BODY,
108+
body={
109+
'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)
110+
},
112111
task_id='gcp_compute_set_machine_type2'
113112
)
114113
# [END howto_operator_gce_set_machine_type_no_project_id]

airflow/providers/google/cloud/example_dags/example_compute_igm.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,10 +46,8 @@
4646
)
4747
from airflow.utils.dates import days_ago
4848

49-
# [START howto_operator_compute_igm_common_args]
5049
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
5150
GCE_ZONE = os.environ.get('GCE_ZONE', 'europe-west1-b')
52-
# [END howto_operator_compute_igm_common_args]
5351

5452
default_args = {
5553
'start_date': days_ago(1)

airflow/providers/google/cloud/example_dags/example_datastore.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,11 +48,11 @@
4848
overwrite_existing=True,
4949
)
5050

51-
bucket = "{{ task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[2] }}"
52-
file = "{{ '/'.join(task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[3:]) }}"
53-
5451
import_task = CloudDatastoreImportEntitiesOperator(
55-
task_id="import_task", bucket=bucket, file=file, project_id=GCP_PROJECT_ID
52+
task_id="import_task",
53+
bucket="{{ task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[2] }}",
54+
file="{{ '/'.join(task_instance.xcom_pull('export_task')['response']['outputUrl'].split('/')[3:]) }}",
55+
project_id=GCP_PROJECT_ID
5656
)
5757

5858
export_task >> import_task

airflow/providers/google/cloud/example_dags/example_functions.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -49,16 +49,13 @@
4949
)
5050
from airflow.utils import dates
5151

52-
# [START howto_operator_gcf_common_variables]
5352
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
5453
GCP_LOCATION = os.environ.get('GCP_LOCATION', 'europe-west1')
5554
GCF_SHORT_FUNCTION_NAME = os.environ.get('GCF_SHORT_FUNCTION_NAME', 'hello').\
5655
replace("-", "_") # make sure there are no dashes in function name (!)
5756
FUNCTION_NAME = 'projects/{}/locations/{}/functions/{}'.format(GCP_PROJECT_ID,
5857
GCP_LOCATION,
5958
GCF_SHORT_FUNCTION_NAME)
60-
# [END howto_operator_gcf_common_variables]
61-
# [START howto_operator_gcf_deploy_variables]
6259
GCF_SOURCE_ARCHIVE_URL = os.environ.get('GCF_SOURCE_ARCHIVE_URL', '')
6360
GCF_SOURCE_UPLOAD_URL = os.environ.get('GCF_SOURCE_UPLOAD_URL', '')
6461
GCF_SOURCE_REPOSITORY = os.environ.get(
@@ -69,7 +66,6 @@
6966
GCF_ENTRYPOINT = os.environ.get('GCF_ENTRYPOINT', 'helloWorld')
7067
GCF_RUNTIME = 'nodejs6'
7168
GCP_VALIDATE_BODY = os.environ.get('GCP_VALIDATE_BODY', True)
72-
# [END howto_operator_gcf_deploy_variables]
7369

7470
# [START howto_operator_gcf_deploy_body]
7571
body = {

airflow/providers/google/cloud/example_dags/example_gcs.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,13 +34,11 @@
3434

3535
default_args = {"start_date": days_ago(1)}
3636

37-
# [START howto_operator_gcs_acl_args_common]
3837
PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-id")
3938
BUCKET_1 = os.environ.get("GCP_GCS_BUCKET_1", "test-gcs-example-bucket")
4039
GCS_ACL_ENTITY = os.environ.get("GCS_ACL_ENTITY", "allUsers")
4140
GCS_ACL_BUCKET_ROLE = "OWNER"
4241
GCS_ACL_OBJECT_ROLE = "OWNER"
43-
# [END howto_operator_gcs_acl_args_common]
4442

4543
BUCKET_2 = os.environ.get("GCP_GCS_BUCKET_1", "test-gcs-example-bucket-2")
4644

airflow/providers/google/cloud/example_dags/example_spanner.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,6 @@
4242
)
4343
from airflow.utils.dates import days_ago
4444

45-
# [START howto_operator_spanner_arguments]
4645
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
4746
GCP_SPANNER_INSTANCE_ID = os.environ.get('GCP_SPANNER_INSTANCE_ID', 'testinstance')
4847
GCP_SPANNER_DATABASE_ID = os.environ.get('GCP_SPANNER_DATABASE_ID', 'testdatabase')
@@ -52,7 +51,6 @@
5251
GCP_SPANNER_DISPLAY_NAME = os.environ.get('GCP_SPANNER_DISPLAY_NAME', 'Test Instance')
5352
# OPERATION_ID should be unique per operation
5453
OPERATION_ID = 'unique_operation_id'
55-
# [END howto_operator_spanner_arguments]
5654

5755
default_args = {
5856
'start_date': days_ago(1)

airflow/providers/google/cloud/example_dags/example_speech.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,10 +32,8 @@
3232
from airflow.providers.google.cloud.operators.translate_speech import GcpTranslateSpeechOperator
3333
from airflow.utils import dates
3434

35-
# [START howto_operator_text_to_speech_env_variables]
3635
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
3736
BUCKET_NAME = os.environ.get("GCP_SPEECH_TEST_BUCKET", "gcp-speech-test-bucket")
38-
# [END howto_operator_text_to_speech_env_variables]
3937

4038
# [START howto_operator_text_to_speech_gcp_filename]
4139
FILENAME = "gcp-speech-test-file"

airflow/providers/google/cloud/example_dags/example_video_intelligence.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
"""
2727
import os
2828

29-
# [START howto_operator_vision_retry_import]
3029
from google.api_core.retry import Retry
3130

3231
from airflow import models
@@ -37,9 +36,6 @@
3736
)
3837
from airflow.utils.dates import days_ago
3938

40-
# [END howto_operator_vision_retry_import]
41-
42-
4339
default_args = {"start_date": days_ago(1)}
4440

4541
# [START howto_operator_video_intelligence_os_args]

airflow/providers/google/cloud/example_dags/example_vision.py

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -65,26 +65,13 @@
6565

6666
default_args = {'start_date': days_ago(1)}
6767

68-
# [START howto_operator_vision_args_common]
6968
GCP_VISION_LOCATION = os.environ.get('GCP_VISION_LOCATION', 'europe-west1')
70-
# [END howto_operator_vision_args_common]
7169

72-
# [START howto_operator_vision_product_set_explicit_id]
7370
GCP_VISION_PRODUCT_SET_ID = os.environ.get('GCP_VISION_PRODUCT_SET_ID', 'product_set_explicit_id')
74-
# [END howto_operator_vision_product_set_explicit_id]
75-
76-
# [START howto_operator_vision_product_explicit_id]
7771
GCP_VISION_PRODUCT_ID = os.environ.get('GCP_VISION_PRODUCT_ID', 'product_explicit_id')
78-
# [END howto_operator_vision_product_explicit_id]
79-
80-
# [START howto_operator_vision_reference_image_args]
8172
GCP_VISION_REFERENCE_IMAGE_ID = os.environ.get('GCP_VISION_REFERENCE_IMAGE_ID', 'reference_image_explicit_id')
8273
GCP_VISION_REFERENCE_IMAGE_URL = os.environ.get('GCP_VISION_REFERENCE_IMAGE_URL', 'gs://bucket/image1.jpg')
83-
# [END howto_operator_vision_reference_image_args]
84-
85-
# [START howto_operator_vision_annotate_image_url]
8674
GCP_VISION_ANNOTATE_IMAGE_URL = os.environ.get('GCP_VISION_ANNOTATE_IMAGE_URL', 'gs://bucket/image2.jpg')
87-
# [END howto_operator_vision_annotate_image_url]
8875

8976
# [START howto_operator_vision_product_set]
9077
product_set = ProductSet(display_name='My Product Set')

docs/howto/operator/gcp/bigtable.rst

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -30,16 +30,6 @@ Prerequisite Tasks
3030
.. include:: _partials/prerequisite_tasks.rst
3131

3232

33-
Environment variables
34-
---------------------
35-
36-
All examples below rely on the following variables, which can be passed via environment variables.
37-
38-
.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_bigtable.py
39-
:language: python
40-
:start-after: [START howto_operator_gcp_bigtable_args]
41-
:end-before: [END howto_operator_gcp_bigtable_args]
42-
4333
.. _howto/operator:BigtableCreateInstanceOperator:
4434

4535
BigtableCreateInstanceOperator

0 commit comments

Comments (0)








Apply Sandwich Strip

pFad - (p)hone/(F)rame/(a)nonymizer/(d)eclutterfier!      Saves Data!


--- a PPN by Garber Painting Akron. With Image Size Reduction included!

Fetched URL: https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db

Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy