@@ -34,5 +34,6 @@
dataset_id = "test_dataset_{}".format(uuid.uuid4())
fhir_store_id = "test_fhir_store-{}".format(uuid.uuid4())
+test_fhir_store_id = "test_fhir_store-{}".format(uuid.uuid4())

gcs_uri = os.environ["CLOUD_STORAGE_BUCKET"]
RESOURCES = os.path.join(os.path.dirname(__file__), "resources")
@@ -53,6 +54,39 @@ def test_dataset():
    datasets.delete_dataset(service_account_json, project_id, cloud_region, dataset_id)


+@pytest.fixture(scope="module")
+def test_fhir_store():
+    resp = fhir_stores.create_fhir_store(
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
+    )
+
+    yield resp
+
+    fhir_stores.delete_fhir_store(
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
+    )
+
+
+@pytest.fixture(scope="module")
+def test_blob():
+    storage_client = storage.Client()
+    bucket = storage_client.get_bucket(gcs_uri)
+    blob = bucket.blob(source_file_name)
+
+    blob.upload_from_filename(resource_file)
+
+    yield blob
+
+    # Clean up
+    try:
+        blob.delete()
+    # If blob not found, then it's already been deleted, so no need to clean up.
+    except exceptions.NotFound as e:
+        print(f'Ignoring 404: {str(e)}')
+
+
def test_CRUD_fhir_store(test_dataset, capsys):
    fhir_stores.create_fhir_store(
        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
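The two fixtures added above follow pytest's module-scoped yield-fixture pattern: setup runs once before the first test in the module that requests the fixture, the value after yield is injected into each such test, and the code after yield runs once as teardown. A minimal, self-contained sketch of the pattern, with hypothetical create_resource/delete_resource helpers standing in for the fhir_stores calls:

    import uuid

    import pytest


    # Hypothetical helpers standing in for fhir_stores.create_fhir_store /
    # fhir_stores.delete_fhir_store; any setup/teardown pair works the same way.
    def create_resource(resource_id):
        print(f"created {resource_id}")
        return resource_id


    def delete_resource(resource_id):
        print(f"deleted {resource_id}")


    @pytest.fixture(scope="module")
    def shared_resource():
        # Setup: runs once, before the first test in the module that uses the fixture.
        resource_id = "resource-{}".format(uuid.uuid4())
        resp = create_resource(resource_id)

        # Each test that names `shared_resource` as a parameter receives `resp`.
        yield resp

        # Teardown: runs once, after the last test in the module.
        delete_resource(resource_id)


    def test_uses_shared_resource(shared_resource):
        # pytest injects the yielded value; no per-test create/delete needed.
        assert shared_resource.startswith("resource-")

This is why the tests below can drop their inline create_fhir_store/delete_fhir_store calls and simply list test_fhir_store (and test_blob) in their signatures.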
@@ -79,110 +113,67 @@ def test_CRUD_fhir_store(test_dataset, capsys):
    assert "Deleted FHIR store" in out


-def test_patch_fhir_store(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
+def test_patch_fhir_store(test_dataset, test_fhir_store, capsys):
    fhir_stores.patch_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
-    # Clean up
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
    )

    out, _ = capsys.readouterr()

    assert "Patched FHIR store" in out


-def test_import_fhir_store_gcs(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
-    storage_client = storage.Client()
-    bucket = storage_client.get_bucket(gcs_uri)
-    blob = bucket.blob(source_file_name)
-
-    blob.upload_from_filename(resource_file)
+def test_import_fhir_store_gcs(
+        test_dataset, test_fhir_store, test_blob, capsys):

    time.sleep(10)  # Give new blob time to propagate
+
    fhir_stores.import_fhir_resources(
        service_account_json,
        project_id,
        cloud_region,
        dataset_id,
-        fhir_store_id,
+        test_fhir_store_id,
        import_object,
    )

-    # Clean up
-    try:
-        blob.delete()
-    # If blob not found, then it's already been deleted, so no need to clean up.
-    except exceptions.NotFound:
-        pass
-
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
    out, _ = capsys.readouterr()

    assert "Imported FHIR resources" in out


-def test_export_fhir_store_gcs(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
+def test_export_fhir_store_gcs(test_dataset, test_fhir_store, capsys):
    fhir_stores.export_fhir_store_gcs(
        service_account_json,
        project_id,
        cloud_region,
        dataset_id,
-        fhir_store_id,
+        test_fhir_store_id,
        gcs_uri,
    )

-    # Clean up
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
    out, _ = capsys.readouterr()

    assert "Exported FHIR resources to bucket" in out


-def test_get_set_fhir_store_iam_policy(test_dataset, capsys):
-    fhir_stores.create_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-
+def test_get_set_fhir_store_iam_policy(test_dataset, test_fhir_store, capsys):
    get_response = fhir_stores.get_fhir_store_iam_policy(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
+        service_account_json, project_id, cloud_region, dataset_id,
+        test_fhir_store_id
    )

    set_response = fhir_stores.set_fhir_store_iam_policy(
        service_account_json,
        project_id,
        cloud_region,
        dataset_id,
-        fhir_store_id,
+        test_fhir_store_id,
        "serviceAccount:[email protected]",
        "roles/viewer",
    )

-    # Clean up
-    fhir_stores.delete_fhir_store(
-        service_account_json, project_id, cloud_region, dataset_id, fhir_store_id
-    )
-

    out, _ = capsys.readouterr()

    assert "etag" in get_response