
Commit 9e283c4

samples: tests refactored due to overwhelmed resources (#79)
* samples: tests refactored due to overwhelmed resources
* fixed the lint issues
* lint

1 parent bb5effc commit 9e283c4

9 files changed

+66
-116
lines changed
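
The refactored tests no longer start a real training operation and then cancel it. Instead, each test points the sample's create_model at a placeholder dataset ID and asserts that the service reports the dataset as missing, whether that message is printed or raised, so the suite consumes no training capacity. Below is a minimal sketch of the shared pattern; the module name `sample_create_model` is hypothetical and stands in for the per-product samples changed in this commit.

    # Sketch of the refactored test shape. `sample_create_model` is a hypothetical
    # stand-in for modules like translate_create_model; the placeholder DATASET_ID
    # mirrors the fake IDs used in the diffs below.
    import os

    import sample_create_model

    PROJECT_ID = os.environ["AUTOML_PROJECT_ID"]
    DATASET_ID = "TST00000000000000000"  # placeholder ID that does not exist


    def test_create_model(capsys):
        try:
            # Expected to fail fast instead of launching real training.
            sample_create_model.create_model(PROJECT_ID, DATASET_ID, "test_create_model")
            out, _ = capsys.readouterr()
            assert "Dataset does not exist." in out
        except Exception as e:
            # The refactored tests assume the raised exception carries the
            # server message on `.message`.
            assert "Dataset does not exist." in e.message
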
Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-google-cloud-automl==1.0.1
+google-cloud-automl==2.0.0

packages/google-cloud-automl/samples/beta/video_classification_create_model_test.py

Lines changed: 9 additions & 25 deletions
@@ -13,36 +13,20 @@
 # limitations under the License.
 
 import os
-import uuid
-
-from google.cloud import automl_v1beta1 as automl
-import pytest
 
 import video_classification_create_model
 
 PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
-DATASET_ID = "VCN510437278078730240"
+DATASET_ID = "VCN00000000000000000"
 OPERATION_ID = None
 
 
-@pytest.fixture(scope="function", autouse=True)
-def teardown():
-    yield
-
-    # Cancel the training operation
-    client = automl.AutoMlClient()
-    client._transport.operations_client.cancel_operation(OPERATION_ID)
-
-
 def test_video_classification_create_model(capsys):
-    model_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32]
-    video_classification_create_model.create_model(
-        PROJECT_ID, DATASET_ID, model_name
-    )
-
-    out, _ = capsys.readouterr()
-    assert "Training started" in out
-
-    # Cancel the operation
-    global OPERATION_ID
-    OPERATION_ID = out.split("Training operation name: ")[1].split("\n")[0]
+    try:
+        video_classification_create_model.create_model(
+            PROJECT_ID, DATASET_ID, "video_class_test_create_model"
+        )
+        out, _ = capsys.readouterr()
+        assert "Dataset does not exist." in out
+    except Exception as e:
+        assert "Dataset does not exist." in e.message

packages/google-cloud-automl/samples/beta/video_object_tracking_create_model_test.py

Lines changed: 9 additions & 24 deletions
@@ -13,35 +13,20 @@
 # limitations under the License.
 
 import os
-import uuid
-
-from google.cloud import automl_v1beta1 as automl
-import pytest
 
 import video_object_tracking_create_model
 
 PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
-DATASET_ID = "VOT2823376535338090496"
+DATASET_ID = "VOT00000000000000000000"
 OPERATION_ID = None
 
 
-@pytest.fixture(scope="function", autouse=True)
-def teardown():
-    yield
-
-    # Cancel the training operation
-    client = automl.AutoMlClient()
-    client._transport.operations_client.cancel_operation(OPERATION_ID)
-
-
 def test_video_classification_create_model(capsys):
-    model_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32]
-    video_object_tracking_create_model.create_model(
-        PROJECT_ID, DATASET_ID, model_name
-    )
-    out, _ = capsys.readouterr()
-    assert "Training started" in out
-
-    # Cancel the operation
-    global OPERATION_ID
-    OPERATION_ID = out.split("Training operation name: ")[1].split("\n")[0]
+    try:
+        video_object_tracking_create_model.create_model(
+            PROJECT_ID, DATASET_ID, "video_object_test_create_model"
+        )
+        out, _ = capsys.readouterr()
+        assert "Dataset does not exist." in out
+    except Exception as e:
+        assert "Dataset does not exist." in e.message

packages/google-cloud-automl/samples/snippets/language_sentiment_analysis_create_model_test.py

Lines changed: 9 additions & 12 deletions
@@ -14,21 +14,18 @@
 
 import os
 
-import pytest
-
 import language_sentiment_analysis_create_model
 
 PROJECT_ID = os.environ["AUTOML_PROJECT_ID"]
-DATASET_ID = os.environ["SENTIMENT_ANALYSIS_DATASET_ID"]
+DATASET_ID = "TST00000000000000000"
 
 
-@pytest.mark.slow
 def test_sentiment_analysis_create_model(capsys):
-    operation = language_sentiment_analysis_create_model.create_model(
-        PROJECT_ID, DATASET_ID, "sentiment_test_create_model"
-    )
-    out, _ = capsys.readouterr()
-    assert "Training started" in out
-
-    # Cancel the operation
-    operation.cancel()
+    try:
+        language_sentiment_analysis_create_model.create_model(
+            PROJECT_ID, DATASET_ID, "lang_sent_test_create_model"
+        )
+        out, _ = capsys.readouterr()
+        assert "Dataset does not exist." in out
+    except Exception as e:
+        assert "Dataset does not exist." in e.message

packages/google-cloud-automl/samples/snippets/language_text_classification_create_model_test.py

Lines changed: 9 additions & 15 deletions
@@ -14,24 +14,18 @@
 
 import os
 
-from google.cloud import automl
-import pytest
-
 import language_text_classification_create_model
 
 PROJECT_ID = os.environ["AUTOML_PROJECT_ID"]
-DATASET_ID = os.environ["TEXT_CLASSIFICATION_DATASET_ID"]
+DATASET_ID = "TCN00000000000000000000"
 
 
-@pytest.mark.slow
 def test_text_classification_create_model(capsys):
-    language_text_classification_create_model.create_model(
-        PROJECT_ID, DATASET_ID, "classification_test_create_model"
-    )
-    out, _ = capsys.readouterr()
-    assert "Training started" in out
-
-    # Cancel the operation
-    operation_id = out.split("Training operation name: ")[1].split("\n")[0]
-    client = automl.AutoMlClient()
-    client._transport.operations_client.cancel_operation(operation_id)
+    try:
+        language_text_classification_create_model.create_model(
+            PROJECT_ID, DATASET_ID, "lang_text_test_create_model"
+        )
+        out, _ = capsys.readouterr()
+        assert "Dataset does not exist." in out
+    except Exception as e:
+        assert "Dataset does not exist." in e.message
Lines changed: 2 additions & 2 deletions
@@ -1,3 +1,3 @@
-google-cloud-translate==2.0.2
+google-cloud-translate==3.0.1
 google-cloud-storage==1.31.2
-google-cloud-automl==1.0.1
+google-cloud-automl==2.0.0

packages/google-cloud-automl/samples/snippets/translate_create_model_test.py

Lines changed: 9 additions & 13 deletions
@@ -14,22 +14,18 @@
 
 import os
 
-from google.cloud import automl
-
 import translate_create_model
 
 PROJECT_ID = os.environ["AUTOML_PROJECT_ID"]
-DATASET_ID = os.environ["TRANSLATION_DATASET_ID"]
+DATASET_ID = "TRL00000000000000000"
 
 
 def test_translate_create_model(capsys):
-    translate_create_model.create_model(
-        PROJECT_ID, DATASET_ID, "translate_test_create_model"
-    )
-    out, _ = capsys.readouterr()
-    assert "Training started" in out
-
-    # Cancel the operation
-    operation_id = out.split("Training operation name: ")[1].split("\n")[0]
-    client = automl.AutoMlClient()
-    client._transport.operations_client.cancel_operation(operation_id)
+    try:
+        translate_create_model.create_model(
+            PROJECT_ID, DATASET_ID, "translate_test_create_model"
+        )
+        out, _ = capsys.readouterr()
+        assert "Dataset does not exist." in out
+    except Exception as e:
+        assert "Dataset does not exist." in e.message

packages/google-cloud-automl/samples/snippets/vision_classification_create_model_test.py

Lines changed: 9 additions & 12 deletions
@@ -14,21 +14,18 @@
 
 import os
 
-import pytest
-
 import vision_classification_create_model
 
 PROJECT_ID = os.environ["AUTOML_PROJECT_ID"]
-DATASET_ID = os.environ["VISION_CLASSIFICATION_DATASET_ID"]
+DATASET_ID = "ICN000000000000000000"
 
 
-@pytest.mark.slow
 def test_vision_classification_create_model(capsys):
-    operation = vision_classification_create_model.create_model(
-        PROJECT_ID, DATASET_ID, "classification_test_create_model"
-    )
-    out, _ = capsys.readouterr()
-    assert "Training started" in out
-
-    # Cancel the operation
-    operation.cancel()
+    try:
+        vision_classification_create_model.create_model(
+            PROJECT_ID, DATASET_ID, "classification_test_create_model"
+        )
+        out, _ = capsys.readouterr()
+        assert "Dataset does not exist." in out
+    except Exception as e:
+        assert "Dataset does not exist." in e.message

packages/google-cloud-automl/samples/snippets/vision_object_detection_create_model_test.py

Lines changed: 9 additions & 12 deletions
@@ -14,21 +14,18 @@
 
 import os
 
-import pytest
-
 import vision_object_detection_create_model
 
 PROJECT_ID = os.environ["AUTOML_PROJECT_ID"]
-DATASET_ID = os.environ["OBJECT_DETECTION_DATASET_ID"]
+DATASET_ID = "IOD0000000000000000"
 
 
-@pytest.mark.slow
 def test_vision_object_detection_create_model(capsys):
-    operation = vision_object_detection_create_model.create_model(
-        PROJECT_ID, DATASET_ID, "object_test_create_model"
-    )
-    out, _ = capsys.readouterr()
-    assert "Training started" in out
-
-    # Cancel the operation
-    operation.cancel()
+    try:
+        vision_object_detection_create_model.create_model(
+            PROJECT_ID, DATASET_ID, "object_test_create_model"
+        )
+        out, _ = capsys.readouterr()
+        assert "Dataset does not exist." in out
+    except Exception as e:
+        assert "Dataset does not exist." in e.message
