Skip to content

Commit 19a4997

Browse files
committed
Added quickstart test
1 parent 194ba5b commit 19a4997

File tree

1 file changed

+70
-0
lines changed

1 file changed

+70
-0
lines changed
Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
# Copyright 2019 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import os
16+
import uuid
17+
import pytest
18+
19+
from google.cloud import dataproc_v1 as dataproc
20+
from google.cloud import storage
21+
22+
import quickstart
23+
24+
25+
# Test configuration. The uuid4 suffixes keep concurrent test runs from
# colliding on cluster and bucket names.
PROJECT_ID = os.environ['GCLOUD_PROJECT']
REGION = 'us-central1'
CLUSTER_NAME = 'test-cluster-{}'.format(str(uuid.uuid4()))
STAGING_BUCKET = 'test-bucket-{}'.format(str(uuid.uuid4()))
JOB_FILE_NAME = 'sum.py'
# GCS URI of the job file that the quickstart sample submits to Dataproc.
JOB_FILE_PATH = 'gs://{}/{}'.format(STAGING_BUCKET, JOB_FILE_NAME)
# Minimal PySpark job uploaded to the staging bucket: sums 1..5 on the cluster.
SORT_CODE = (
    "import pyspark\n"
    "sc = pyspark.SparkContext()\n"
    "rdd = sc.parallelize((1,2,3,4,5))\n"
    "sum = rdd.reduce(lambda x, y: x + y)\n"
)
37+
38+
39+
@pytest.fixture(autouse=True)
def setup_teardown():
    """Provision test resources around each test in this module.

    Setup: creates the staging bucket and uploads the PySpark job file.
    Teardown: removes any cluster the test left behind, then deletes the
    job file and the staging bucket.
    """
    storage_client = storage.Client()
    bucket = storage_client.create_bucket(STAGING_BUCKET)
    blob = bucket.blob(JOB_FILE_NAME)
    blob.upload_from_string(SORT_CODE)

    yield

    cluster_client = dataproc.ClusterControllerClient(client_options={
        'api_endpoint': '{}-dataproc.googleapis.com:443'.format(REGION)
    })

    # The quickstart sample deletes the cluster, but if the test fails
    # before cluster deletion occurs, clean up the leftover cluster here.
    clusters = cluster_client.list_clusters(PROJECT_ID, REGION)
    for cluster in clusters:
        if cluster.cluster_name == CLUSTER_NAME:
            cluster_client.delete_cluster(PROJECT_ID, REGION, CLUSTER_NAME)

    # Delete the job file AND the bucket itself; previously only the blob
    # was removed, leaking one staging bucket per test run.
    blob.delete()
    bucket.delete()
61+
62+
63+
def test_quickstart(capsys):
    """Run the quickstart sample end to end and check its console output."""
    quickstart.quickstart(PROJECT_ID, REGION, CLUSTER_NAME, JOB_FILE_PATH)

    output, _ = capsys.readouterr()

    # The sample must report every lifecycle stage: create, submit,
    # completion, and deletion.
    expected_messages = (
        'Cluster created successfully',
        'Submitted job',
        'finished with state DONE:',
        'successfully deleted',
    )
    for message in expected_messages:
        assert message in output

0 commit comments

Comments
 (0)