How to upload predictions on geospatial assets in a model run

Open this Colab notebook to go through the exercise of importing geospatial predictions.

Supported predictions

To upload predictions to Labelbox, you need to create a predictions payload. This section provides example payloads for every supported prediction type.

Labelbox supports two formats for the predictions payload:

  • Python annotation types (recommended)
  • NDJSON

Both are described below.

📘

Uploading confidence scores is optional

If you do not specify a confidence score, the prediction will be treated as if it had a confidence score of 1.
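
For example, the same point prediction shown below can also be created without a confidence score; a minimal sketch based on that example:

# A point prediction without a confidence score; it is treated as confidence = 1
point_prediction_default_confidence = lb_types.ObjectAnnotation(
  name = "point_geo",
  value = lb_types.Point(x=-122.31741025134123, y=37.87355669249922),
)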

Point

point_prediction = lb_types.ObjectAnnotation(
  name = "point_geo",
  confidence = 0.5,
  value = lb_types.Point(x=-122.31741025134123, y=37.87355669249922),
)

point_prediction_ndjson = {
    "name": "point_geo",
    "confidence": 0.5,
    "point": {
        "x": -122.31741025134123,
        "y": 37.87355669249922
    }
}

Polyline

coords = [
    [-122.31757789012927, 37.87396317833991],
    [-122.31639782443663, 37.87396741226917],
    [-122.31638977853417, 37.87277872707839]
]

line_points = []

for sub in coords:
  line_points.append(lb_types.Point(x=sub[0], y=sub[1]))

polyline_prediction = lb_types.ObjectAnnotation(
  name = "polyline_geo",
  confidence = 0.5,
  value = lb_types.Line(points=line_points),
)

coords = [
    [-122.31757789012927, 37.87396317833991],
    [-122.31639782443663, 37.87396741226917],
    [-122.31638977853417, 37.87277872707839]
]

line_points_ndjson = []

for sub in coords:
  line_points_ndjson.append({"x": sub[0], "y": sub[1]})

polyline_prediction_ndjson = {
    "name": "polyline_geo",
    "confidence": 0.5,
    "line": line_points_ndjson
}

Polygon

coords_polygon = [
    [-122.31691812612837, 37.873289980495024],
    [-122.31710184090099, 37.87304335144298],
    [-122.31680146054286, 37.87303594197371],
    [-122.31691812612837, 37.873289980495024]
]

polygon_points = []

for sub in coords_polygon:
  polygon_points.append(lb_types.Point(x=sub[0], y=sub[1]))

polygon_prediction = lb_types.ObjectAnnotation(
  name = "polygon_geo",
  confidence = 0.5,
  value = lb_types.Polygon(points=polygon_points),
)

coords_polygon = [
    [-122.31691812612837, 37.873289980495024],
    [-122.31710184090099, 37.87304335144298],
    [-122.31680146054286, 37.87303594197371],
    [-122.31691812612837, 37.873289980495024]
]

polygon_points_ndjson = []

for sub in coords_polygon:
  polygon_points_ndjson.append({"x": sub[0], "y": sub[1]})

polygon_prediction_ndjson = {
    "name": "polygon_geo",
    "confidence": 0.5,
    "polygon": polygon_points_ndjson
}

Bounding box

bbox_top_left = lb_types.Point(x=-122.31734455895823, y=37.873713376083884)
bbox_bottom_right = lb_types.Point(x=-122.31673038840458, y=37.87385944699745)

bbox_prediction = lb_types.ObjectAnnotation(
  name = "bbox_geo",
  confidence = 0.5,
  value = lb_types.Rectangle(start=bbox_top_left, end=bbox_bottom_right)
)

coord_object = {
    "coordinates": [[
        [-122.31734455895823, 37.873713376083884],
        [-122.31734455895823, 37.87385944699745],
        [-122.31673038840458, 37.87385944699745],
        [-122.31673038840458, 37.873713376083884],
        [-122.31734455895823, 37.873713376083884]
    ]]
}

bbox_prediction_ndjson = {
    "name": "bbox_geo",
    "confidence": 0.5,
    "bbox": {
        "top": coord_object["coordinates"][0][1][1],
        "left": coord_object["coordinates"][0][1][0],
        "height": coord_object["coordinates"][0][3][1] - coord_object["coordinates"][0][1][1],
        "width": coord_object["coordinates"][0][3][0] - coord_object["coordinates"][0][1][0]
    }
}

Classification: Radio (single choice)

radio_prediction = lb_types.ClassificationAnnotation(
    name="radio_question_geo", 
    confidence = 0.5,
    value=lb_types.Radio(answer=lb_types.ClassificationAnswer(name="first_radio_answer"))
)

radio_prediction_ndjson = {
    "name": "radio_question_geo",
    "confidence": 0.5,
    "answer": {"name": "first_radio_answer"}
}

Bounding box with nested checklist classification

bbox_with_checklist_subclass = lb_types.ObjectAnnotation(
    name="bbox_checklist_geo",
    confidence = 0.5,
    value=lb_types.Rectangle(
        start=lb_types.Point(x=-122.31711256877092, y=37.87340218056304), # Top left
        end=lb_types.Point(x=-122.31665529331502, y=37.87360752741479), # Bottom right
    ),
    classifications=[
        lb_types.ClassificationAnnotation(
            name="checklist_class_name",
            value=lb_types.Checklist(
                answer=[lb_types.ClassificationAnswer(name="first_checklist_answer", confidence = 0.5)]
            )
        )
    ]
)

coord_object_checklist = {
    "coordinates": [[
        [-122.31711256877092, 37.87340218056304],
        [-122.31711256877092, 37.87360752741479],
        [-122.31665529331502, 37.87360752741479],
        [-122.31665529331502, 37.87340218056304],
        [-122.31711256877092, 37.87340218056304]
    ]]
}

bbox_with_checklist_subclass_ndjson = {
    "name": "bbox_checklist_geo",
    "confidence": 0.5,
    "classifications": [{
        "name": "checklist_class_name",
        "answer": [
            {"name": "first_checklist_answer", "confidence": 0.5}
        ]
    }],
    "bbox": {
        "top": coord_object_checklist["coordinates"][0][1][1],
        "left": coord_object_checklist["coordinates"][0][1][0],
        "height": coord_object_checklist["coordinates"][0][3][1] - coord_object_checklist["coordinates"][0][1][1],
        "width": coord_object_checklist["coordinates"][0][3][0] - coord_object_checklist["coordinates"][0][1][0]
    }
}

Bounding box with nested free-text classification

bbox_with_free_text_subclass = lb_types.ObjectAnnotation(
    name="bbox_text_geo",
    value=lb_types.Rectangle(
        start=lb_types.Point(x=-122.31750814315438, y=37.87318201423049), # Top left
        end=lb_types.Point(x=-122.31710049991725, y=37.87337992476082), # Bottom right
    ),
    classifications=[
        lb_types.ClassificationAnnotation(
            name="free_text_geo",
            value=lb_types.Text(answer="sample text")
        )
    ]
)

coord_object_text = {
    "coordinates": [[
        [-122.31750814315438, 37.87318201423049],
        [-122.31750814315438, 37.87337992476082],
        [-122.31710049991725, 37.87337992476082],
        [-122.31710049991725, 37.87318201423049],
        [-122.31750814315438, 37.87318201423049]
    ]]
}

bbox_with_free_text_subclass_ndjson = {
    "name": "bbox_text_geo",
    "classifications": [{
        "name": "free_text_geo",
        "answer": "sample text"
    }],
    "bbox": {
        "top": coord_object_text["coordinates"][0][1][1],
        "left": coord_object_text["coordinates"][0][1][0],
        "height": coord_object_text["coordinates"][0][3][1] - coord_object_text["coordinates"][0][1][1],
        "width": coord_object_text["coordinates"][0][3][0] - coord_object_text["coordinates"][0][1][0]
    }
}

Classification: Checklist (multi-choice)

checklist_prediction = lb_types.ClassificationAnnotation(
    name="checklist_question_geo",
    confidence = 0.5,
    value=lb_types.Checklist(answer = [
        lb_types.ClassificationAnswer(name = "first_checklist_answer", confidence = 0.5),
        lb_types.ClassificationAnswer(name = "second_checklist_answer", confidence = 0.5),
        lb_types.ClassificationAnswer(name = "third_checklist_answer", confidence = 0.5)
    ])
  )

checklist_prediction_ndjson = {
    "name": "checklist_question_geo",
    "confidence": 0.5,
    "answer": [
        {"name": "first_checklist_answer", "confidence": 0.5},
        {"name": "second_checklist_answer", "confidence": 0.5},
        {"name": "third_checklist_answer", "confidence": 0.5}
    ]
}

End-to-end example: Upload predictions to a model run

Follow the steps below to upload predictions to a model run.

Before you start

You will need to import these libraries to use the code examples in this section.

import os
import uuid

import numpy as np
from PIL import Image
import cv2
import ndjson

import labelbox as lb
import labelbox.types as lb_types

Replace with your API key

To learn how to create an API key, please follow the instructions on this page.

API_KEY = ""
client = lb.Client(API_KEY)
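
If you prefer not to hard-code the key, you can read it from an environment variable instead; a minimal sketch, assuming you have exported it as LABELBOX_API_KEY (the variable name is only an example):

# Read the API key from an environment variable instead of hard-coding it
API_KEY = os.environ.get("LABELBOX_API_KEY")
client = lb.Client(api_key=API_KEY)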

Step 1: Import data rows into Catalog

top_left_bound = lb_types.Point(x=-122.31764674186705, y=37.87276155898985)
bottom_right_bound = lb_types.Point(x=-122.31635199317932, y=37.87398109727749)

epsg = lb_types.EPSG.EPSG4326
bounds = lb_types.TiledBounds(epsg=epsg, bounds=[top_left_bound, bottom_right_bound])

tile_layer = lb_types.TileLayer(
    url="https://api.mapbox.com/styles/v1/mapbox/satellite-streets-v11/tiles/{z}/{x}/{y}?access_token=pk.eyJ1IjoibWFwYm94IiwiYSI6ImNpejY4NXVycTA2emYycXBndHRqcmZ3N3gifQ.rJcFIG214AriISLbB6B5aw"
)

tiled_image_data = lb_types.TiledImageData(tile_layer=tile_layer,
                                  tile_bounds=bounds,
                                  zoom_levels=[17, 23])

asset = {
    "row_data": tiled_image_data.asdict(),
    "global_key": str(uuid.uuid4()),
    "media_type": "TMS_GEO"
}

dataset = client.create_dataset(name="geo_demo_dataset")
data_row = dataset.create_data_row(asset)
print(data_row)
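
If you need several tiled assets, you can also create data rows in bulk with create_data_rows, which runs as an asynchronous task; a sketch under that assumption (each asset must carry its own unique global_key):

# Alternative: create data rows in bulk as an asynchronous task
task = dataset.create_data_rows([asset])
task.wait_till_done()
print(task.errors)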

Step 2: Create/select an ontology for your model predictions

Your model run must be set up with an ontology that contains all the tools and classifications used by your predictions.

Here is an example of creating an ontology programmatically for all the example predictions above:

ontology_builder = lb.OntologyBuilder(
    tools=[
        lb.Tool(tool=lb.Tool.Type.POINT, name="point_geo"),
        lb.Tool(tool=lb.Tool.Type.LINE, name="polyline_geo"),
        lb.Tool(tool=lb.Tool.Type.POLYGON, name="polygon_geo"),
        lb.Tool(tool=lb.Tool.Type.POLYGON, name="polygon_geo_2"),
        lb.Tool(tool=lb.Tool.Type.BBOX, name="bbox_geo"), 
        lb.Tool( 
          tool=lb.Tool.Type.BBOX, 
          name="bbox_checklist_geo",
          classifications=[
                lb.Classification(
                    class_type=lb.Classification.Type.CHECKLIST,
                    name="checklist_class_name",
                    options=[
                      lb.Option(value="first_checklist_answer")
                    ]
                ),
            ]
          ),
        lb.Tool( 
          tool=lb.Tool.Type.BBOX, 
          name="bbox_text_geo",
          classifications=[
                lb.Classification(
                    class_type=lb.Classification.Type.TEXT,
                    name="free_text_geo"
                ),
            ]
          )    
      ],
      classifications = [
          lb.Classification(
              class_type=lb.Classification.Type.CHECKLIST, 
              name="checklist_question_geo",
              options=[
                  lb.Option(value="first_checklist_answer"),
                  lb.Option(value="second_checklist_answer"), 
                  lb.Option(value="third_checklist_answer")
              ]
          ), 
          lb.Classification(
              class_type=lb.Classification.Type.RADIO, 
              name="radio_question_geo",
              options=[
                  lb.Option(value="first_radio_answer")
              ]
          )
          
      ]
)

ontology = client.create_ontology("Ontology Geospatial Annotations",
                                  ontology_builder.asdict(), 
                                  media_type=lb.MediaType.Geospatial_Tile)
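
If a suitable ontology already exists in your organization, you can select it by ID instead of creating a new one (a minimal sketch; the placeholder ID is an assumption about your setup):

# Alternative: select an existing ontology by its ID
ontology = client.get_ontology("<your_ontology_id>")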

Step 3: Create a Model and model run

# create Model
model = client.create_model(name="geospatial_model_run_" + str(uuid.uuid4()), 
                            ontology_id=ontology.uid)
# create model run
model_run = model.create_model_run("iteration 1")
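
If the model already exists from a previous session, you can fetch it by ID and add another iteration instead (a sketch; the model ID placeholder is an assumption):

# Alternative: reuse an existing model and create a new model run on it
model = client.get_model("<your_model_id>")
model_run = model.create_model_run("iteration 2")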

Step 4: Send data rows to the model run

model_run.upsert_data_rows([data_row.uid])
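
Depending on your SDK version, upsert_data_rows can also take global keys, which lets you skip looking up data row IDs; a sketch, assuming the global key assigned to the asset in Step 1:

# Alternative: attach the data row to the model run by its global key
model_run.upsert_data_rows(global_keys=[asset["global_key"]])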

Step 5: Create the predictions payload

Create the predictions payload using the code snippets in the Supported predictions section above.

The resulting payload has exactly the same content whether you build it from the NDJSON dictionaries or from the Python annotation types (except for the UUID strings, which are generated at upload time).

Optional: a polygon prediction generated with the cv2 and PIL libraries, along with the NDJSON version of the same prediction

polygon_prediction_two_ndjson = {
    "name": "polygon_geo_2",
    "confidence": 0.5,
    "polygon": [
        {'x': -122.31703039689702, 'y': 37.87397804081582},
        {'x': -122.31702351036107, 'y': 37.87393525033866},
        {'x': -122.31698907768116, 'y': 37.87389857276706},
        {'x': -122.3169787478772, 'y': 37.87385883871054},
        {'x': -122.31695808826926, 'y': 37.87385578224377},
        {'x': -122.31695464500127, 'y': 37.873816048164166},
        {'x': -122.31692021232138, 'y': 37.873779370533214},
        {'x': -122.31690988251741, 'y': 37.87373352346883},
        {'x': -122.3168857796415, 'y': 37.873696845796786},
        {'x': -122.3168547902296, 'y': 37.873684619902065},
        {'x': -122.31682035754969, 'y': 37.873611264491025},
        {'x': -122.31676526526188, 'y': 37.87355013492598},
        {'x': -122.3167583787259, 'y': 37.87351651364362},
        {'x': -122.31671017297403, 'y': 37.87348900531027},
        {'x': -122.31671017297403, 'y': 37.873452327516496},
        {'x': -122.31667918356217, 'y': 37.87344010158117},
        {'x': -122.31663442107829, 'y': 37.87335451997715},
        {'x': -122.31660343166638, 'y': 37.87334840700161},
        {'x': -122.31659998839841, 'y': 37.873320898605485},
        {'x': -122.31654489611057, 'y': 37.87329033370888},
        {'x': -122.31652767977064, 'y': 37.87319863894286},
        {'x': -122.31648980382273, 'y': 37.8731833564708},
        {'x': -122.31648980382273, 'y': 37.873161961004534},
        {'x': -122.31641749519497, 'y': 37.87309166157168},
        {'x': -122.316410608659, 'y': 37.873054983580076},
        {'x': -122.31639683558704, 'y': 37.873039701078184},
        {'x': -122.31635551637117, 'y': 37.873039701078184},
        {'x': -122.31635551637117, 'y': 37.87398109727749},
        {'x': -122.31703039689702, 'y': 37.87397804081582}
    ]
}
# Create the same polygon as a Python annotation type by deriving a mask from the
# tile imagery with cv2, then converting the geometry to geographic coordinates

# Threshold the imagery in HSV space and clean up the mask with erosion/dilation
hsv = cv2.cvtColor(tiled_image_data.value, cv2.COLOR_RGB2HSV)
mask = cv2.inRange(hsv, (10, 25, 25), (100, 150, 255))
kernel = np.ones((15, 20), np.uint8)
mask = cv2.erode(mask, kernel)
mask = cv2.dilate(mask, kernel)
mask_annotation = lb_types.MaskData.from_2D_arr(mask)
mask_data = lb_types.Mask(mask=mask_annotation, color=[255, 255, 255])

# Build a transformer from pixel space to the geographic (EPSG4326) bounds of the asset
h, w, _ = tiled_image_data.value.shape
pixel_bounds = lb_types.TiledBounds(epsg=lb_types.EPSG.SIMPLEPIXEL,
                                    bounds=[lb_types.Point(x=0, y=0),
                                            lb_types.Point(x=w, y=h)])
transformer = lb_types.EPSGTransformer.create_pixel_to_geo_transformer(
    src_epsg=pixel_bounds.epsg,
    pixel_bounds=pixel_bounds,
    geo_bounds=tiled_image_data.tile_bounds,
    zoom=23)

# Simplify the mask geometry and convert each pixel-space polygon to geo coordinates
pixel_polygons = mask_data.shapely.simplify(3)
list_of_polygons = [transformer(lb_types.Polygon.from_shapely(p)) for p in pixel_polygons.geoms]
polygon_prediction_two = lb_types.ObjectAnnotation(value=list_of_polygons[0], name="polygon_geo_2", confidence=0.5)

Prediction payload generation

# Build the NDJSON payload and attach the target data row to each prediction
label_ndjson_method_2 = []

for prediction in [
    radio_prediction_ndjson,
    checklist_prediction_ndjson,
    bbox_with_free_text_subclass_ndjson, 
    bbox_with_checklist_subclass_ndjson,
    bbox_prediction_ndjson,
    point_prediction_ndjson,
    polyline_prediction_ndjson, 
    polygon_prediction_ndjson,
    polygon_prediction_two_ndjson
]:
  prediction.update({
      'dataRow': {'id': data_row.uid},
  })
  label_ndjson_method_2.append(prediction)

# Build the equivalent payload with Python annotation types
tiled_image_data_row_id = next(dataset.export_data_rows()).uid

label = lb_types.Label(
    data=lb_types.TiledImageData(
        uid=tiled_image_data_row_id,
        tile_layer=tile_layer,
        tile_bounds=bounds,
        zoom_levels=[17, 23]
    ),
    annotations = [
        point_prediction,
        polyline_prediction,
        polygon_prediction,
        bbox_prediction,
        radio_prediction,
        bbox_with_checklist_subclass,  
        bbox_with_free_text_subclass,
        checklist_prediction,
        polygon_prediction_two
    ]
)
label_list = [label]
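
If you build the payload with Python annotation types, the SDK can serialize the Label objects into the equivalent NDJSON dictionaries; a minimal sketch, assuming your SDK version ships the NDJsonConverter helper:

# Serialize the annotation-type labels into NDJSON dictionaries
from labelbox.data.serialization import NDJsonConverter

label_ndjson_method_1 = list(NDJsonConverter.serialize(label_list))

Either payload (label_ndjson_method_1 or label_ndjson_method_2) should then produce the same predictions when uploaded in the next step.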

Step 6: Upload the predictions payload to the Model Run

# Upload the prediction label to the model run
upload_job_prediction = model_run.add_predictions(
    name="prediction_upload_job" + str(uuid.uuid4()),
    predictions=label_ndjson_method_2)

# Wait for the upload to finish; errors will appear for predictions that failed to import
upload_job_prediction.wait_until_done()
print("Errors:", upload_job_prediction.errors)

Step 7: Send annotations to the Model Run (Optional)

# 7.1. Create a Labelbox project
project = client.create_project(name="geospatial_prediction_demo",                                    
                                    queue_mode=lb.QueueMode.Batch,
                                    # Quality Settings setup 
                                    auto_audit_percentage=1,
                                    auto_audit_number_of_labels=1,
                                    media_type=lb.MediaType.Geospatial_Tile)
project.setup_editor(ontology)

# 7.2. Create a batch to send to the project

project.create_batch(
  "batch_geospatial_prediction_demo", # Each batch in a project must have a unique name
  dataset.export_data_rows(), # A list of data rows or data row IDs
  5 # Priority between 1 (highest) and 5 (lowest)
)


# 7.3 Create the annotations payload as explained in:
# https://docs.labelbox.com/reference/import-geospatial-annotations#supported-annotations
point_annotation_ndjson...
polyline_annotation_ndjson...
polygon_annotation_ndjson...
polygon_annotation_two_ndjson...
bbox_annotation_ndjson...
radio_annotation_ndjson...
bbox_with_checklist_subclass_annotation_ndjson...
bbox_with_free_text_subclass_annotation_ndjson...
checklist_annotation_ndjson...

# 7.4 Attach the data row ID to each annotation and collect the payload
ndjson_annotation = []
for annot in [
    radio_annotation_ndjson,
    checklist_annotation_ndjson,
    bbox_with_free_text_subclass_annotation_ndjson, 
    bbox_with_checklist_subclass_annotation_ndjson,
    bbox_annotation_ndjson,
    point_annotation_ndjson,
    polyline_annotation_ndjson, 
    polygon_annotation_ndjson,
    polygon_annotation_two_ndjson
]:
  annot.update({
      'dataRow': {'id': data_row.uid},
  })
  ndjson_annotation.append(annot) 

# 7.5 Upload annotations to the project using Label Import
upload_job_annotation = lb.LabelImport.create_from_objects(
    client = client,
    project_id = project.uid,
    name="geospatial_annotations_import_" + str(uuid.uuid4()),
    labels=ndjson_annotation)

upload_job_annotation.wait_until_done()
# Errors will appear for annotation uploads that failed.
print("Errors:", upload_job_annotation.errors)

# 7.6. Send the annotations to the Model Run

# Get the label IDs from the project
label_ids = [x['ID'] for x in project.export_labels(download=True)]
model_run.upsert_labels(label_ids)

End-to-end Python tutorial

Open this Colab notebook to go through the full exercise of uploading geospatial predictions.