@@ -17,6 +17,8 @@
 import sys
 import time

+from google.cloud import storage
+
 # Add datasets for bootstrapping datasets for testing
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'datasets'))  # noqa
 import datasets
@@ -30,7 +32,12 @@
 dataset_id = 'test_dataset_{}'.format(int(time.time()))
 fhir_store_id = 'test_fhir_store-{}'.format(int(time.time()))
 pubsub_topic = 'test_pubsub_topic-{}'.format(int(time.time()))
-resource_type = 'Patient'
+
+gcs_uri = os.environ['CLOUD_STORAGE_BUCKET']
+RESOURCES = os.path.join(os.path.dirname(__file__), 'resources')
+source_file_name = 'Patient.json'
+resource_file = os.path.join(RESOURCES, source_file_name)
+import_object = gcs_uri + '/' + source_file_name


 @pytest.fixture(scope='module')
@@ -126,3 +133,75 @@ def test_patch_fhir_store(test_dataset, capsys):
     out, _ = capsys.readouterr()

     assert 'Patched FHIR store' in out
+
+
+def test_import_fhir_store_gcs(test_dataset, capsys):
+    fhir_stores.create_fhir_store(
+        service_account_json,
+        api_key,
+        project_id,
+        cloud_region,
+        dataset_id,
+        fhir_store_id)
+
+    storage_client = storage.Client()
+    bucket = storage_client.get_bucket(gcs_uri)
+    blob = bucket.blob(source_file_name)
+
+    blob.upload_from_filename(resource_file)
+
+    fhir_stores.import_fhir_store(
+        service_account_json,
+        api_key,
+        project_id,
+        cloud_region,
+        dataset_id,
+        fhir_store_id,
+        import_object)
+
+    # Clean up
+    blob.delete()
+
+    fhir_stores.delete_fhir_store(
+        service_account_json,
+        api_key,
+        project_id,
+        cloud_region,
+        dataset_id,
+        fhir_store_id)
+
+    out, _ = capsys.readouterr()
+
+    assert 'Imported FHIR resources' in out
+
+
+def test_export_fhir_store_gcs(test_dataset, capsys):
+    fhir_stores.create_fhir_store(
+        service_account_json,
+        api_key,
+        project_id,
+        cloud_region,
+        dataset_id,
+        fhir_store_id)
+
+    fhir_stores.export_fhir_store_gcs(
+        service_account_json,
+        api_key,
+        project_id,
+        cloud_region,
+        dataset_id,
+        fhir_store_id,
+        gcs_uri)
+
+    # Clean up
+    fhir_stores.delete_fhir_store(
+        service_account_json,
+        api_key,
+        project_id,
+        cloud_region,
+        dataset_id,
+        fhir_store_id)
+
+    out, _ = capsys.readouterr()
+
+    assert 'Exported FHIR resources to bucket' in out
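For context: the `test_dataset` fixture consumed by both new tests is the module-scoped fixture whose decorator appears in the unchanged context above. A minimal sketch of what it presumably looks like, assuming the bootstrapping `datasets` samples expose `create_dataset` / `delete_dataset` with the same argument order as the `fhir_stores` helpers used here:

# Hypothetical reconstruction; the real fixture lives in the unchanged part of
# the test file and is not part of this diff.
@pytest.fixture(scope='module')
def test_dataset():
    # Create the parent Healthcare dataset once for the whole test module.
    dataset = datasets.create_dataset(
        service_account_json,
        api_key,
        project_id,
        cloud_region,
        dataset_id)

    yield dataset

    # Clean up: tear the dataset down after all tests have run.
    datasets.delete_dataset(
        service_account_json,
        api_key,
        project_id,
        cloud_region,
        dataset_id)

Because the fixture is module-scoped, the dataset is created and deleted once, while each test above creates and deletes its own FHIR store.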