1
1
#!/usr/bin/env python
2
2
3
- # Copyright 2018 Google Inc. All Rights Reserved.
3
+ # Copyright 2018 Google LLC
4
4
#
5
5
# Licensed under the Apache License, Version 2.0 (the "License");
6
6
# you may not use this file except in compliance with the License.
21
21
https://cloud.google.com/natural-language/automl/docs/
22
22
"""
23
23
24
- # [START automl_natural_language_import]
25
24
import argparse
26
25
import os
27
26
28
- from google .cloud import automl_v1beta1 as automl
29
27
30
- # [END automl_natural_language_import]
28
+ def create_dataset (project_id , compute_region , dataset_name , multilabel = False ):
29
+ """Create a dataset."""
30
+ # [START automl_natural_language_create_dataset]
31
+ # TODO(developer): Uncomment and set the following variables
32
+ # project_id = 'PROJECT_ID_HERE'
33
+ # compute_region = 'COMPUTE_REGION_HERE'
34
+ # dataset_name = 'DATASET_NAME_HERE'
35
+ # multilabel = True for multilabel or False for multiclass
31
36
37
+ from google .cloud import automl_v1beta1 as automl
32
38
33
- # [START automl_natural_language_create_dataset]
34
- def create_dataset (project_id , compute_region , dataset_name , multilabel = False ):
35
- """Create a dataset.
36
- Args:
37
- project_id: Id of the project.
38
- compute_region: Region name.
39
- dataset_name: Name of the dataset.
40
- multilabel: Type of the classification problem.
41
- False - MULTICLASS, True - MULTILABEL.
42
- Default is False.
43
- """
44
39
client = automl .AutoMlClient ()
45
40
46
41
# A resource that represents Google Cloud Platform location.
@@ -74,18 +69,19 @@ def create_dataset(project_id, compute_region, dataset_name, multilabel=False):
74
69
print ("\t seconds: {}" .format (dataset .create_time .seconds ))
75
70
print ("\t nanos: {}" .format (dataset .create_time .nanos ))
76
71
77
-
78
- # [END automl_natural_language_create_dataset]
72
+ # [END automl_natural_language_create_dataset]
79
73
80
74
81
- # [START automl_natural_language_list_datasets]
82
75
def list_datasets (project_id , compute_region , filter_ ):
83
- """List all datasets.
84
- Args:
85
- project_id: Id of the project.
86
- compute_region: Region name.
87
- filter_: Filter expression.
88
- """
76
+ """List all datasets."""
77
+ # [START automl_natural_language_list_datasets]
78
+ # TODO(developer): Uncomment and set the following variables
79
+ # project_id = 'PROJECT_ID_HERE'
80
+ # compute_region = 'COMPUTE_REGION_HERE'
81
+ # filter_ = 'filter expression here'
82
+
83
+ from google .cloud import automl_v1beta1 as automl
84
+
89
85
client = automl .AutoMlClient ()
90
86
91
87
# A resource that represents Google Cloud Platform location.
@@ -107,18 +103,19 @@ def list_datasets(project_id, compute_region, filter_):
107
103
print ("\t seconds: {}" .format (dataset .create_time .seconds ))
108
104
print ("\t nanos: {}" .format (dataset .create_time .nanos ))
109
105
106
+ # [END automl_natural_language_list_datasets]
110
107
111
- # [END automl_natural_language_list_datasets]
112
108
113
-
114
- # [START automl_natural_language_get_dataset]
115
109
def get_dataset (project_id , compute_region , dataset_id ):
116
- """Get the dataset.
117
- Args:
118
- project_id: Id of the project.
119
- compute_region: Region name.
120
- dataset_id: Id of the dataset.
121
- """
110
+ """Get the dataset."""
111
+ # [START automl_natural_language_get_dataset]
112
+ # TODO(developer): Uncomment and set the following variables
113
+ # project_id = 'PROJECT_ID_HERE'
114
+ # compute_region = 'COMPUTE_REGION_HERE'
115
+ # dataset_id = 'DATASET_ID_HERE'
116
+
117
+ from google .cloud import automl_v1beta1 as automl
118
+
122
119
client = automl .AutoMlClient ()
123
120
124
121
# Get the full path of the dataset
@@ -140,21 +137,20 @@ def get_dataset(project_id, compute_region, dataset_id):
140
137
print ("\t seconds: {}" .format (dataset .create_time .seconds ))
141
138
print ("\t nanos: {}" .format (dataset .create_time .nanos ))
142
139
143
-
144
- # [END automl_natural_language_get_dataset]
140
+ # [END automl_natural_language_get_dataset]
145
141
146
142
147
- # [START automl_natural_language_import_data]
148
143
def import_data (project_id , compute_region , dataset_id , path ):
149
- """Import labeled items.
150
- Args:
151
- project_id: Id of the project.
152
- compute_region: Region name.
153
- dataset_id: ID of the dataset into which the training content are to
154
- be imported.
155
- path: Google Cloud Storage URIs.
156
- Target files must be in AutoML Natural Language CSV format.
157
- """
144
+ """Import labeled items."""
145
+ # [START automl_natural_language_import_data]
146
+ # TODO(developer): Uncomment and set the following variables
147
+ # project_id = 'PROJECT_ID_HERE'
148
+ # compute_region = 'COMPUTE_REGION_HERE'
149
+ # dataset_id = 'DATASET_ID_HERE'
150
+ # path = 'gs://path/to/file.csv'
151
+
152
+ from google .cloud import automl_v1beta1 as automl
153
+
158
154
client = automl .AutoMlClient ()
159
155
160
156
# Get the full path of the dataset.
@@ -173,19 +169,20 @@ def import_data(project_id, compute_region, dataset_id, path):
173
169
# synchronous check of operation status.
174
170
print ("Data imported. {}" .format (response .result ()))
175
171
176
-
177
- # [END automl_natural_language_import_data]
172
+ # [END automl_natural_language_import_data]
178
173
179
174
180
- # [START automl_natural_language_export_data]
181
175
def export_data (project_id , compute_region , dataset_id , output_uri ):
182
- """Export a dataset to a Google Cloud Storage bucket.
183
- Args:
184
- project_id: Id of the project.
185
- compute_region: Region name.
186
- dataset_id: Id of the dataset to which will be exported.
187
- output_uri: Google Cloud Storage URI for the export directory.
188
- """
176
+ """Export a dataset to a Google Cloud Storage bucket."""
177
+ # [START automl_natural_language_export_data]
178
+ # TODO(developer): Uncomment and set the following variables
179
+ # project_id = 'PROJECT_ID_HERE'
180
+ # compute_region = 'COMPUTE_REGION_HERE'
181
+ # dataset_id = 'DATASET_ID_HERE'
182
+ # output_uri = 'gs://location/to/export/data'
183
+
184
+ from google .cloud import automl_v1beta1 as automl
185
+
189
186
client = automl .AutoMlClient ()
190
187
191
188
# Get the full path of the dataset.
@@ -203,18 +200,19 @@ def export_data(project_id, compute_region, dataset_id, output_uri):
203
200
# synchronous check of operation status.
204
201
print ("Data exported. {}" .format (response .result ()))
205
202
203
+ # [END automl_natural_language_export_data]
206
204
207
- # [END automl_natural_language_export_data]
208
205
209
-
210
- # [START automl_natural_language_delete_dataset]
211
206
def delete_dataset (project_id , compute_region , dataset_id ):
212
- """Delete a dataset.
213
- Args:
214
- project_id: Id of the project.
215
- compute_region: Region name.
216
- dataset_id: Id of the dataset.
217
- """
207
+ """Delete a dataset."""
208
+ # [START automl_natural_language_delete_dataset]
209
+ # TODO(developer): Uncomment and set the following variables
210
+ # project_id = 'PROJECT_ID_HERE'
211
+ # compute_region = 'COMPUTE_REGION_HERE'
212
+ # dataset_id = 'DATASET_ID_HERE'
213
+
214
+ from google .cloud import automl_v1beta1 as automl
215
+
218
216
client = automl .AutoMlClient ()
219
217
220
218
# Get the full path of the dataset.
@@ -228,8 +226,7 @@ def delete_dataset(project_id, compute_region, dataset_id):
228
226
# synchronous check of operation status.
229
227
print ("Dataset deleted. {}" .format (response .result ()))
230
228
231
-
232
- # [END automl_natural_language_delete_dataset]
229
+ # [END automl_natural_language_delete_dataset]
233
230
234
231
235
232
if __name__ == "__main__" :
0 commit comments