Complete Workflow Example

The script below strings together token generation, catalog discovery, coverage queries, and tile downloads. Use it as a reference implementation or adapt pieces into your own services.

import requests
from typing import Dict, List

# Root endpoint of the NSL tile service; every v2 route below is built on this.
BASE_TILE_URL = "https://api.nearspacelabs.net/tile"


def get_auth_token(nsl_id: str, nsl_secret: str, timeout: float = 30.0) -> str:
    """
    Request an OAuth access token from the NSL auth server using the
    client-credentials grant.

    Args:
        nsl_id: Client ID issued by Near Space Labs.
        nsl_secret: Client secret paired with ``nsl_id``.
        timeout: Seconds to wait for the HTTP request. ``requests`` has no
            default timeout, so omitting one can hang the caller forever.

    Returns:
        The bearer token string for use in subsequent API calls.

    Raises:
        requests.HTTPError: If the token endpoint returns an error status.
    """
    auth_headers = {'content-type': 'application/json'}
    # NOTE(review): the audience uses the .com domain while the API host is
    # .net — presumably intentional on the OAuth server side; confirm.
    post_body = {
        'client_id': nsl_id,
        'client_secret': nsl_secret,
        'audience': 'https://api.nearspacelabs.com',
        'grant_type': 'client_credentials',
    }
    resp = requests.post(
        'https://api.nearspacelabs.net/oauth/token',
        json=post_body,
        headers=auth_headers,
        timeout=timeout,
    )
    resp.raise_for_status()
    return resp.json()['access_token']


def list_surveys_page(page: int, auth_token: str, timeout: float = 30.0) -> Dict:
    """
    Return one page of the survey catalog (footprints are NOT included;
    fetch those per-survey via ``get_survey_footprint``).

    Args:
        page: 1-based page number to request.
        auth_token: Bearer token from ``get_auth_token``.
        timeout: Seconds to wait for the HTTP request before aborting.

    Returns:
        The decoded JSON page payload (observed keys include ``results``
        and ``has_next`` — see ``main`` below).

    Raises:
        requests.HTTPError: If the endpoint returns an error status.
    """
    url = f"{BASE_TILE_URL}/v2/surveys"
    headers = {'Authorization': f'Bearer {auth_token}'}
    params = {'page': page}
    # Closing the response promptly returns the connection to the pool.
    with requests.get(url, headers=headers, params=params, timeout=timeout) as resp:
        resp.raise_for_status()
        return resp.json()


def get_survey_footprint(survey_id: str, auth_token: str, timeout: float = 30.0) -> Dict:
    """
    Retrieve the footprint payload for a single survey.

    Args:
        survey_id: Identifier of the survey (e.g. from the catalog listing).
        auth_token: Bearer token from ``get_auth_token``.
        timeout: Seconds to wait for the HTTP request before aborting.

    Returns:
        The decoded JSON footprint payload.

    Raises:
        requests.HTTPError: If the endpoint returns an error status.
    """
    url = f"{BASE_TILE_URL}/v2/{survey_id}/footprint"
    headers = {'Authorization': f'Bearer {auth_token}'}
    with requests.get(url, headers=headers, timeout=timeout) as resp:
        resp.raise_for_status()
        return resp.json()


def get_tile_metadata_by_polygon(
    survey_id: str,
    polygon_wkt: str,
    zoom_level: int,
    auth_token: str,
    timeout: float = 30.0,
) -> List[Dict]:
    """
    Return metadata for every tile of a survey that falls within the given
    polygon (specified in WKT format) at the requested zoom level.

    Args:
        survey_id: Identifier of the survey to query.
        polygon_wkt: Area of interest as a WKT POLYGON string.
        zoom_level: Web-map zoom level for the tile grid.
        auth_token: Bearer token from ``get_auth_token``.
        timeout: Seconds to wait for the HTTP request before aborting.

    Returns:
        A list of tile-metadata dicts (each carries a ``static_url`` —
        see ``main`` below).

    Raises:
        requests.HTTPError: If the endpoint returns an error status.
    """
    url = f"{BASE_TILE_URL}/v2/{survey_id}/coverage"
    headers = {'Authorization': f'Bearer {auth_token}'}
    params = {'wkt': polygon_wkt, 'zoom': zoom_level}
    with requests.get(url, headers=headers, params=params, timeout=timeout) as resp:
        resp.raise_for_status()
        return resp.json()


def download_tile(tile_url: str, auth_token: str, timeout: float = 30.0) -> str:
    """
    Stream the specified tile to disk, naming the file from the z/x/y
    path segments of ``tile_url``.

    Args:
        tile_url: Full URL of the tile image (ends in .../z/x/y[.ext][?query]).
        auth_token: Bearer token from ``get_auth_token``.
        timeout: Seconds to wait for the HTTP connection/read before aborting.

    Returns:
        The local file name the tile was written to ("{z}_{x}_{y}.png").

    Raises:
        requests.HTTPError: If the tile request returns an error status.
    """
    _, z, x, y = tile_url.rsplit('/', 3)
    # The final path segment may already carry an extension and/or a query
    # string (e.g. '456.png?token=...'); strip both so the output name does
    # not end up as '..._456.png?token=....png'.
    y = y.split('?', 1)[0]
    if '.' in y:
        y = y.rsplit('.', 1)[0]
    local_file = f'{z}_{x}_{y}.png'
    headers = {'Authorization': f'Bearer {auth_token}'}
    # stream=True + fixed-size chunks keeps memory flat for large images.
    with requests.get(tile_url, headers=headers, stream=True, timeout=timeout) as resp:
        resp.raise_for_status()
        with open(local_file, 'wb') as f:
            for chunk in resp.iter_content(chunk_size=8192):
                f.write(chunk)
    return local_file


def main():
    """
    End-to-end example: authenticate, discover a survey, fetch its
    footprint, query tile coverage for an AOI, and download one tile.
    """
    nsl_id = '< your NSL ID >'
    nsl_secret = '< your NSL SECRET >'

    # Request an authorization token
    auth_token = get_auth_token(nsl_id, nsl_secret)

    # Page through the survey catalog and request footprints separately
    surveys_page = list_surveys_page(page=1, auth_token=auth_token)
    if not surveys_page['results']:
        raise RuntimeError('No surveys were returned')
    survey_id = surveys_page['results'][0]['id']
    footprint_payload = get_survey_footprint(survey_id, auth_token)
    print(f"Using survey {survey_id} (has_next_page={surveys_page['has_next']})")
    print(f"Footprint available: {bool(footprint_payload.get('footprint'))}")

    # Define an area of interest around Port Charlotte, FL
    aoi_polygon = (
        'POLYGON((-82.146 27.018, -82.036 27.018, '
        '-82.036 26.946, -82.146 26.946, -82.146 27.018))'
    )

    # Request metadata for all tiles within the region of interest.
    # Fixed: this previously queried a hard-coded survey ('2024Q4-FL-PTCH')
    # even though `survey_id` was just discovered above — use the selected
    # survey consistently. NOTE(review): the first catalog survey may not
    # cover this Florida AOI; check the footprint printed above.
    zoom_level = 14
    tiles = get_tile_metadata_by_polygon(
        survey_id, aoi_polygon, zoom_level, auth_token
    )
    if not tiles:
        print('No tiles were found that intersect the given area')
        return

    # Each metadata object contains a full URL to the tile image
    local_file = download_tile(tiles[0]['static_url'], auth_token)
    print(f'Tile downloaded to {local_file}')


if __name__ == '__main__':
    main()

What This Script Does

  1. Authenticates using client credentials to get an OAuth token
  2. Lists surveys from the paginated catalog and picks the first one
  3. Fetches the footprint for that survey (GeoJSON geometry)
  4. Queries coverage for a specific polygon (Port Charlotte, FL) at zoom 14
  5. Downloads the first matching tile to a local PNG file

You can adapt any of these functions individually into your own codebase.