Skip to content

Commit 089ee19

Browse files
authored
ppai/land-cover: simplify tests and pin python version (#8501)
* rebase
* remove constraints file
* pin python version
* add copyright
* add python version
1 parent c879b3a commit 089ee19

File tree

12 files changed

+90
-102
lines changed

12 files changed

+90
-102
lines changed

people-and-planet-ai/conftest.py

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ def unique_name(test_name: str, unique_id: str) -> str:
6565

6666

6767
@pytest.fixture(scope="session")
68-
def bucket_name(test_name: str, location: str, unique_id: str) -> str:
68+
def bucket_name(test_name: str, location: str, unique_id: str) -> Iterable[str]:
6969
# Override for local testing.
7070
if "GOOGLE_CLOUD_BUCKET" in os.environ:
7171
bucket_name = os.environ["GOOGLE_CLOUD_BUCKET"]
@@ -192,7 +192,6 @@ def aiplatform_cleanup(model_name: str, location: str, versions: list[str]) -> N
192192
)
193193

194194

195-
196195
def run_notebook(
197196
ipynb_file: str,
198197
prelude: str = "",
@@ -203,7 +202,8 @@ def run_notebook(
203202
skip_shell_commands: bool = False,
204203
until_end: bool = False,
205204
) -> None:
206-
import nbclient
205+
from nbclient.client import NotebookClient
206+
from nbclient.exceptions import CellExecutionError
207207
import nbformat
208208

209209
def notebook_filter_section(
@@ -283,10 +283,10 @@ def notebook_filter_section(
283283

284284
# Run the notebook.
285285
error = ""
286-
client = nbclient.NotebookClient(nb)
286+
client = NotebookClient(nb)
287287
try:
288288
client.execute()
289-
except nbclient.exceptions.CellExecutionError as e:
289+
except CellExecutionError as e:
290290
# Remove colors and other escape characters to make it easier to read in the logs.
291291
# https://stackoverflow.com/a/33925425
292292
error = re.sub(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]", "", str(e))
@@ -299,22 +299,24 @@ def notebook_filter_section(
299299

300300
def run_notebook_parallel(
301301
ipynb_file: str,
302-
prelude: str,
303-
sections: list[str],
304-
variables: dict[str, dict] = {},
305-
replace: dict[str, dict[str, str]] = {},
306-
skip_shell_commands: list[str] = [],
302+
sections: dict[str, dict],
303+
prelude: str = "",
304+
variables: dict = {},
305+
replace: dict[str, str] = {},
306+
skip_shell_commands: bool = False,
307307
) -> None:
308308
args = [
309309
{
310310
"ipynb_file": ipynb_file,
311311
"section": section,
312-
"prelude": prelude,
313-
"variables": variables.get(section, {}),
314-
"replace": replace.get(section, {}),
315-
"skip_shell_commands": section in skip_shell_commands,
312+
"prelude": params.get("prelude", prelude),
313+
"variables": {**variables, **params.get("variables", {})},
314+
"replace": {**replace, **params.get("replace", {})},
315+
"skip_shell_commands": params.get(
316+
"skip_shell_commands", skip_shell_commands
317+
),
316318
}
317-
for section in sections
319+
for section, params in sections.items()
318320
]
319321
with multiprocessing.Pool(len(args)) as pool:
320322
pool.map(_run_notebook_section, args)

people-and-planet-ai/land-cover-classification/README.md

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,10 @@
11
# 🌍 Land cover classification -- _image segmentation_
22

3-
[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/README.ipynb)
3+
## [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/README.ipynb) 🌍 TensorFlow with Earth Engine introduction
4+
5+
## [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/cloud-tensorflow.ipynb) ☁️ Scaling TensorFlow with Cloud
6+
7+
## [![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/GoogleCloudPlatform/python-docs-samples/blob/main/people-and-planet-ai/land-cover-classification/land-cover-change.ipynb) 🗺️ Visualizing land cover change
48

59
> [Watch the video in YouTube<br> ![thumbnail](http://img.youtube.com/vi/zImQf91ffFo/0.jpg)](https://youtu.be/zImQf91ffFo)
610

people-and-planet-ai/land-cover-classification/cloud-tensorflow.ipynb

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -162,8 +162,7 @@
162162
"source": [
163163
"repo_url = \"https://raw.githubusercontent.com/GoogleCloudPlatform/python-docs-samples/main/people-and-planet-ai/land-cover-classification\"\n",
164164
"\n",
165-
"!wget --quiet -O requirements.txt {repo_url}/requirements.txt\n",
166-
"!wget --quiet -O constraints.txt {repo_url}/constraints.txt"
165+
"!wget --quiet {repo_url}/requirements.txt"
167166
]
168167
},
169168
{
@@ -172,7 +171,7 @@
172171
"id": "rPWUYDKCwibV"
173172
},
174173
"source": [
175-
"> 💡 For more information about the `requirements.txt` and `constraints.txt` files, see the [`pip` user guide](https://pip.pypa.io/en/stable/user_guide/)."
174+
"> 💡 For more information about the `requirements.txt` files, see the [`pip` user guide](https://pip.pypa.io/en/stable/user_guide/)."
176175
]
177176
},
178177
{
@@ -195,7 +194,7 @@
195194
"outputs": [],
196195
"source": [
197196
"# Install the dependencies.\n",
198-
"!pip install --quiet -r requirements.txt -c constraints.txt\n",
197+
"!pip install --quiet -r requirements.txt\n",
199198
"\n",
200199
"# Restart the runtime by ending the process.\n",
201200
"exit()"

people-and-planet-ai/land-cover-classification/constraints.txt

Lines changed: 0 additions & 20 deletions
This file was deleted.

people-and-planet-ai/land-cover-classification/e2e_test.py

Lines changed: 48 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
from __future__ import annotations
1616

17+
from collections.abc import Iterable
1718
import tempfile
1819
import textwrap
1920

@@ -61,15 +62,15 @@ def model_path(bucket_name: str) -> str:
6162

6263

6364
@pytest.fixture(scope="session")
64-
def cloud_run_service_name(unique_name: str, location: str) -> str:
65+
def cloud_run_service_name(unique_name: str, location: str) -> Iterable[str]:
6566
# The notebook itself creates the service.
6667
service_name = unique_name
6768
yield service_name
6869
conftest.cloud_run_cleanup(service_name, location)
6970

7071

7172
@pytest.fixture(scope="session")
72-
def aiplatform_model_name(unique_name: str, location: str) -> str:
73+
def aiplatform_model_name(unique_name: str, location: str) -> Iterable[str]:
7374
# The notebook itself creates the service.
7475
model_name = unique_name.replace("-", "_")
7576
yield model_name
@@ -164,60 +165,62 @@ def test_land_cover_tensorflow(
164165
ee_init()
165166
"""
166167
),
167-
sections=[
168-
"# 📚 Understand the data",
169-
"# 🗄 Create the dataset",
170-
"# 🧠 Train the model",
171-
"## 💻 Local predictions",
172-
"## ☁️ Cloud Run predictions",
173-
"## 🧺 Dataflow batch prediction",
174-
"## 🌍 Earth Engine with AI Platform",
175-
],
176-
variables={
168+
sections={
169+
"# 📚 Understand the data": {},
177170
"# 🗄 Create the dataset": {
178-
"points_per_class": 1,
179-
"--data-path": f"gs://{bucket_name}/dataflow-data",
171+
"variables": {
172+
"points_per_class": 1,
173+
"--data-path": f"gs://{bucket_name}/dataflow-data",
174+
},
175+
"replace": {
176+
'--runner="DataflowRunner"': " ".join(
177+
[
178+
'--runner="DataflowRunner"',
179+
f"--job_name={unique_name}-dataset",
180+
"--max-requests=1",
181+
]
182+
)
183+
},
180184
},
181185
"# 🧠 Train the model": {
182-
"display_name": unique_name,
183-
"data_path": data_path,
184-
"epochs": 1,
186+
"variables": {
187+
"display_name": unique_name,
188+
"data_path": data_path,
189+
"epochs": 1,
190+
},
185191
},
186192
"## 💻 Local predictions": {
187-
"model_path": model_path,
193+
"variables": {
194+
"model_path": model_path,
195+
},
188196
},
189197
"## ☁️ Cloud Run predictions": {
190-
"service_name": cloud_run_service_name,
191-
"model_path": model_path,
192-
"identity_token": identity_token,
198+
"variables": {
199+
"service_name": cloud_run_service_name,
200+
"model_path": model_path,
201+
"identity_token": identity_token,
202+
},
193203
},
194204
"## 🧺 Dataflow batch prediction": {
195-
"model_path": model_path,
205+
"variables": {
206+
"model_path": model_path,
207+
},
208+
"replace": {
209+
'--runner="DataflowRunner"': " ".join(
210+
[
211+
'--runner="DataflowRunner"',
212+
f"--job_name={unique_name}-predict",
213+
"--max-requests=1",
214+
f"--locations-file={locations_file.name}",
215+
]
216+
)
217+
},
196218
},
197219
"## 🌍 Earth Engine with AI Platform": {
198-
"model_name": aiplatform_model_name,
199-
"model_path": model_path,
200-
},
201-
},
202-
replace={
203-
"# 🗄 Create the dataset": {
204-
'--runner="DataflowRunner"': " ".join(
205-
[
206-
'--runner="DataflowRunner"',
207-
f"--job_name={unique_name}-dataset",
208-
"--max-requests=1",
209-
]
210-
)
211-
},
212-
"## 🧺 Dataflow batch prediction": {
213-
'--runner="DataflowRunner"': " ".join(
214-
[
215-
'--runner="DataflowRunner"',
216-
f"--job_name={unique_name}-predict",
217-
"--max-requests=1",
218-
f"--locations-file={locations_file.name}",
219-
]
220-
)
220+
"variables": {
221+
"model_name": aiplatform_model_name,
222+
"model_path": model_path,
223+
},
221224
},
222225
},
223226
)

people-and-planet-ai/land-cover-classification/noxfile_config.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,8 @@
2222

2323
TEST_CONFIG_OVERRIDE = {
2424
# You can opt out from the test for specific Python versions.
25-
# ℹ️ Testing only in Python 3.9 since it's the latest supported version for Dataflow.
25+
# ℹ️ Test only in Python 3.9 since that's what Dataflow currently supports:
26+
# https://cloud.google.com/dataflow/docs/support/beam-runtime-support
2627
"ignored_versions": ["2.7", "3.6", "3.7", "3.8", "3.10"],
2728
# Old samples are opted out of enforcing Python type hints
2829
# All new samples should feature them
@@ -39,7 +40,5 @@
3940
"pip_version_override": None,
4041
# A dictionary you want to inject into your test. Don't put any
4142
# secrets here. These values will override predefined values.
42-
"envs": {
43-
"PYTEST_ADDOPTS": "-n=16", # not CPU-bound, it's bound by Cloud requests.
44-
},
43+
"envs": {},
4544
}

people-and-planet-ai/land-cover-classification/predict_batch.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -107,11 +107,11 @@ def run_tensorflow(
107107
import tensorflow as tf
108108

109109
class LandCoverModel(ModelHandler[np.ndarray, np.ndarray, tf.keras.Model]):
110-
def load_model(self) -> tf.keras.Model: # noqa: ANN101
110+
def load_model(self) -> tf.keras.Model:
111111
return tf.keras.models.load_model(model_path)
112112

113113
def run_inference(
114-
self, # noqa: ANN101
114+
self,
115115
batch: Sequence[np.ndarray],
116116
model: tf.keras.Model,
117117
inference_args: Optional[dict] = None,
Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Requirements to run tests.
2-
apache-beam[interactive]==2.41.0
2+
apache-beam[interactive]==2.42.0
33
importnb==2022.10.24
4-
ipykernel==6.16.0
4+
ipykernel==6.17.1
55
nbclient==0.7.0
6-
pytest-xdist==2.5.0
7-
pytest==7.1.2
6+
pytest-xdist==3.0.2
7+
pytest==7.2.0
Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
# Requirements to run the notebooks.
2-
apache-beam[gcp]==2.41.0
3-
earthengine-api==0.1.325
2+
apache-beam[gcp]==2.42.0
3+
earthengine-api==0.1.331
44
folium==0.13.0
5-
google-cloud-aiplatform==1.17.1
6-
imageio==2.22.0
5+
google-cloud-aiplatform==1.18.3
6+
imageio==2.22.4
77
plotly==5.11.0
88
tensorflow==2.10.0
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
3.10.x
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# Requirements for the prediction web service.
22
Flask==2.2.2
3-
earthengine-api==0.1.325
3+
earthengine-api==0.1.331
44
gunicorn==20.1.0
55
tensorflow==2.10.0

people-and-planet-ai/land-cover-classification/setup.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,8 @@
2020
url="https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/people-and-planet-ai/land-cover-classification",
2121
packages=["serving"],
2222
install_requires=[
23-
"apache-beam[gcp]==2.41.0",
24-
"earthengine-api==0.1.325",
23+
"apache-beam[gcp]==2.42.0",
24+
"earthengine-api==0.1.331",
2525
"tensorflow==2.10.0",
2626
],
2727
)

0 commit comments

Comments
 (0)