Skip to content

Commit 89430b1

Browse files
mikegough authored and nikmolnar committed
Added support for Layer Package (lpk) imports (#1)
* Added support for Layer Package (lpk) imports
* Code cleanup. Changed "tmp_job" references to "job".
* Added test function for LPK imports
1 parent fa7817a commit 89430b1

File tree

2 files changed

+65
-0
lines changed

2 files changed

+65
-0
lines changed

databasin/client.py

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -149,7 +149,51 @@ def get_temporary_file(self, uuid):
149149
except HTTPException as e:
150150
raise_for_authorization(e.response, self.username is not None)
151151
raise
152+
153+
def import_lpk(self, lpk_file):
    """Import an ArcGIS Layer Package (``.lpk``) file into Data Basin.

    Uploads the file as a temporary file, runs the two-step import
    (``create_import_job`` followed by ``finalize_import_job``), and
    returns the resulting dataset.

    :param lpk_file: path to a file with a ``.lpk`` extension.
    :return: the imported dataset (via ``self.get_dataset``).
    :raises ValueError: if ``lpk_file`` does not end in ``.lpk``.
    :raises DatasetImportError: if the finalize job does not succeed, or
        if the layer package lacks the metadata required for one-step
        import (the partial import is cancelled before raising).
    """
    # Guard clause: validate the extension before touching the filesystem.
    if not lpk_file.endswith('.lpk'):
        raise ValueError('File must be an ArcGIS Layer Package with a .lpk extension')

    filename = os.path.basename(lpk_file)

    # Context manager guarantees the handle is closed even if the upload
    # raises; the original opened the file manually and leaked the handle
    # on upload failure.
    with open(lpk_file, 'rb') as f:
        tmp_file = self.upload_temporary_file(f, filename=filename)

    job_args = {
        'file': tmp_file.uuid,
        'url': None,
        'dataset_type': 'ArcGIS_Native'
    }

    job = self.create_job('create_import_job', job_args=job_args, block=True)
    # The job message is a URL; the second-to-last path segment is the
    # import id needed by the finalize job.
    uri = job.message.split("/")[-2]

    final_job_args = {
        'import_id': uri
    }

    final_job = self.create_job('finalize_import_job', job_args=final_job_args, block=True)

    if final_job.status != 'succeeded':
        raise DatasetImportError('Import failed: {0}'.format(final_job.message))

    data = json.loads(final_job.message)
    next_uri = data['next_uri']
    if '/import/' in next_uri:
        # A '/import/' next_uri means the import needs additional manual
        # metadata entry; LPK imports must complete in one step, so cancel
        # the partial import and fail.
        dataset_import_id = DATASET_IMPORT_ID_RE.search(next_uri).group(1)
        dataset_import = self.get_import(dataset_import_id)
        dataset_import.cancel()

        raise DatasetImportError(
            'Layer Package imports must have all necessary metadata information necessary for one-step import.'
        )

    # Otherwise next_uri points at the finished dataset; its id is the
    # last path segment.
    dataset_id = next_uri.strip('/').split('/')[-1]
    return self.get_dataset(dataset_id)
196+
153197
def import_netcdf_dataset(self, nc_or_zip_file, style=None):
154198
if nc_or_zip_file.endswith('.zip'):
155199
f = open(nc_or_zip_file, 'a+b')

tests/test_client.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -121,6 +121,27 @@ def test_login_no_redirect():
121121
assert m.call_count == 2
122122
assert not any(r.url for r in m.request_history if r.url == 'https://databasin.org/redirect/')
123123

124+
def test_import_lpk(import_job_data, dataset_data, tmp_file_data):
    """Exercise the full LPK import flow against mocked HTTP endpoints."""
    with requests_mock.mock() as m:
        # Stub every endpoint the import flow touches, in call order:
        # temp-file upload, temp-file lookup, job creation, job polling,
        # and the final dataset fetch.
        m.post('https://databasin.org/uploads/upload-temporary-file/', text=json.dumps({'uuid': 'abcd'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/abcd/', text=json.dumps(tmp_file_data))
        m.post('https://databasin.org/api/v1/jobs/', headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'})
        m.get('https://databasin.org/api/v1/jobs/1234/', text=json.dumps(import_job_data))
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/', text=json.dumps(dataset_data))

        # Patch builtins.open so no real file is needed.
        buf = six.BytesIO()
        with mock.patch.object(builtins, 'open', mock.Mock(return_value=buf)) as mocked_open:
            client = Client()
            client._session.cookies['csrftoken'] = 'abcd'
            dataset = client.import_lpk('test.lpk')

        mocked_open.assert_called_once_with('test.lpk', 'rb')
        assert m.call_count == 7
        assert dataset.id == 'a1b2c3'

        # The third request is the create_import_job submission; verify
        # its payload references the uploaded temp file.
        create_job_request = json.loads(m.request_history[2].text)
        assert create_job_request['job_name'] == 'create_import_job'
        assert create_job_request['job_args']['file'] == 'abcd'
        assert create_job_request['job_args']['dataset_type'] == 'ArcGIS_Native'
124145

125146
def test_import_netcdf_dataset_with_zip(import_job_data, dataset_data, tmp_file_data):
126147
with requests_mock.mock() as m:

0 commit comments

Comments (0)