How to upload predictions on images in a model run

Open In Colab

Open this Colab for an interactive tutorial about uploading predictions on images in a model run.

Supported predictions

To upload predictions in Labelbox, you need to create a predictions payload. In this section, we provide this payload for every supported prediction type.

Labelbox supports two formats for the predictions payload:

  • Python Annotation types (recommended)
  • NDJSON

Both are described below.

📘

Uploading confidence scores is optional

If you do not specify a confidence score, the prediction will be treated as if it had a confidence score of 1.

Classification: Radio (single-choice)

# Radio (single-choice) classification prediction, Python annotation format.
radio_prediction = lb_types.ClassificationAnnotation(
    name="radio_question",  # must match the ontology feature's name
    value=lb_types.Radio(
        answer=lb_types.ClassificationAnswer(
            name="second_radio_answer",
            confidence=0.5,
        )
    ),
)
# Same radio prediction expressed in NDJSON format.
radio_prediction_ndjson = {
    "name": "radio_question",
    "answer": {"name": "second_radio_answer", "confidence": 0.5},
}

Nested classifications: Radio and checklist

# Nested radio prediction in NDJSON format. The sub-classification list is
# nested inside the selected answer object, matching the nested-checklist
# payload below and the Labelbox NDJSON import format. (The original placed
# "classifications" at the top level with misleading indentation that made it
# look nested.)
nested_radio_prediction_ndjson = {
  "name": "nested_radio_question",
  "confidence": 0.5,
  "answer": {
    "name": "first_radio_answer",
    "confidence": 0.5,
    "classifications": [
      {
        "name": "sub_radio_question",
        "answer": {"name": "first_sub_radio_answer", "confidence": 0.5}
      }
    ]
  }
}


# Nested checklist prediction in NDJSON format: the sub-classification is
# nested inside the selected checklist answer.
nested_checklist_prediction_ndjson = {
    "name": "nested_checklist_question",
    "confidence": 0.5,
    "answer": [
        {
            "name": "first_checklist_answer",
            "confidence": 0.5,
            "classifications": [
                {
                    "name": "sub_checklist_question",
                    "answer": {
                        "name": "first_sub_checklist_answer",
                        "confidence": 0.5,
                    },
                },
            ],
        },
    ],
}

Classification: Checklist (multi-choice)


# Checklist (multi-choice) classification prediction, Python annotation format.
checklist_prediction = lb_types.ClassificationAnnotation(
    name="checklist_question",  # must match the ontology feature's name
    value=lb_types.Checklist(
        answer=[
            lb_types.ClassificationAnswer(
                name="first_checklist_answer",
                confidence=0.5,
            ),
            lb_types.ClassificationAnswer(
                name="second_checklist_answer",
                confidence=0.5,
            ),
        ]
    ),
)
# Same checklist prediction expressed in NDJSON format.
checklist_prediction_ndjson = {
    "name": "checklist_question",
    "answer": [
        {"name": "first_checklist_answer", "confidence": 0.5},
        {"name": "second_checklist_answer", "confidence": 0.5},
    ],
}

Classification: Free-form text

# Free-form text classification prediction, Python annotation format.
# Confidence scores are not yet supported for free-form text classifications.
text_annotation = lb_types.ClassificationAnnotation(
    name="free_text",  # must match the ontology feature's name
    value=lb_types.Text(answer="sample text"),
)

# Same free-form text prediction expressed in NDJSON format.
# Confidence scores are not yet supported for free-form text classifications.
text_prediction_ndjson = {
    "name": "free_text",
    "answer": "sample text",
}

Bounding box

 
# Bounding-box prediction, Python annotation format.
bbox_prediction = lb_types.ObjectAnnotation(
    name="bounding_box",  # must match the ontology feature's name
    confidence=0.5,
    value=lb_types.Rectangle(
        # NOTE(review): the start point (977, 1690) is not above/left of the
        # end point (330, 225); the corner comments in the original look
        # swapped relative to the coordinates — verify against the image.
        start=lb_types.Point(x=977, y=1690),  # labeled "Top left" in the original
        end=lb_types.Point(x=330, y=225),     # labeled "Bottom right" in the original
    ),
)

####### Bounding box with nested classification #######
# Bounding box carrying a nested radio sub-classification, Python annotation
# format.
bbox_with_radio_subclass_prediction = lb_types.ObjectAnnotation(
    name="bbox_with_radio_subclass", # must match your ontology feature's name
    confidence=0.5,
    value=lb_types.Rectangle(
        # NOTE(review): start (933, 541) is not above/left of end (191, 330);
        # the corner comments look swapped relative to the coordinates — verify.
        start=lb_types.Point(x=933, y=541), # Top left
        end=lb_types.Point(x=191, y=330), # Bottom right
    ),
    # Sub-classification attached to this bounding box; names must match the
    # nested classification declared on the "bbox_with_radio_subclass" tool.
    classifications=[
        lb_types.ClassificationAnnotation(
            name="sub_radio_question",
            value=lb_types.Radio(answer=lb_types.ClassificationAnswer(name="first_sub_radio_answer", confidence=0.5))
    )
  ]
)
# Bounding-box prediction in NDJSON format. The bbox values here are
# placeholders and intentionally do not match the coordinates used in the
# Python-annotation example above.
bbox_prediction_ndjson = {
    "name": "bounding_box",
    "classifications": [],
    "bbox": {"top": 0.0, "left": 0.0, "height": 10.0, "width": 10.0},
    "confidence": 0.5,
}

# Bounding box with a nested radio sub-classification, NDJSON format.
bbox_with_radio_subclass_prediction_ndjson = {
  # Fixed: the name must match the ontology tool name "bbox_with_radio_subclass";
  # the original used 'bounding_box_with_radio_subclass', which matches no
  # feature in the ontology defined below.
  'name': 'bbox_with_radio_subclass',
  'classifications': [
    {
      'name': 'sub_radio_question',
      'answer': {'name': 'first_sub_radio_answer', 'confidence': 0.5}
    }
  ],
  'bbox': {'top': 0.0, 'left': 0.0, 'height': 10.0, 'width': 10.0},
  'confidence': 0.5
}

Polygon

# Polygon prediction, Python annotation format.
polygon_prediction = lb_types.ObjectAnnotation(
    name="polygon",  # must match the ontology feature's name
    confidence=0.5,
    value=lb_types.Polygon(
        # Vertices of the polygon, in order.
        points=[
            lb_types.Point(x=x, y=y)
            for x, y in [
                (1489.581, 183.934), (2278.306, 256.885), (2428.197, 200.437),
                (2560.0, 335.419), (2557.386, 503.165), (2320.596, 503.103),
                (2156.083, 628.943), (2161.111, 785.519), (2002.115, 894.647),
                (1838.456, 877.874), (1436.53, 874.636), (1411.403, 758.579),
                (1353.853, 751.74), (1345.264, 453.461), (1426.011, 421.129),
            ]
        ]
    ),
)
# Same polygon prediction in NDJSON format. The ring is closed by repeating
# the first vertex at the end.
_polygon_ring = [
    (1489.581, 183.934), (2278.306, 256.885), (2428.197, 200.437),
    (2560.0, 335.419), (2557.386, 503.165), (2320.596, 503.103),
    (2156.083, 628.943), (2161.111, 785.519), (2002.115, 894.647),
    (1838.456, 877.874), (1436.53, 874.636), (1411.403, 758.579),
    (1353.853, 751.74), (1345.264, 453.461), (1426.011, 421.129),
    (1489.581, 183.934),
]
polygon_prediction_ndjson = {
    "name": "polygon",
    "classifications": [],
    "polygon": [{"x": x, "y": y} for x, y in _polygon_ring],
    "confidence": 0.5,
}

Segmentation mask

MaskData is mask data in a uint8 array of [H, W, 3]. You can also convert a polygon annotation or a 2D array to MaskData, or reference a mask by URL hosted on any cloud provider.


# RGB values in the numpy array that correspond to the mask annotation.
# NOTE(review): drawing with color (0, 0, 0) onto a zero-initialized array
# produces an all-zero mask — confirm this color is intended before reusing.
color = (0, 0, 0)

# Option 1: rasterize the polygon prediction into mask data; the source
# image's height and width must be provided.
im_height, im_width = 100, 100
mask_data = lb_types.MaskData(
    arr=polygon_prediction.value.draw(height=im_height, width=im_width, color=color)
)

# Option 2: promote a 2D array to the expected [H, W, 3] layout.
arr_2d = np.zeros((100, 100), dtype='uint8')
mask_data = lb_types.MaskData.from_2D_arr(arr_2d)

# Option 3: supply a [H, W, 3] array directly (third axis holds RGB values).
# This last assignment is the one used by the prediction below.
mask_data = lb_types.MaskData(arr=np.zeros([400, 450, 3], dtype='uint8'))

# Segmentation-mask prediction, Python annotation format.
mask_prediction = lb_types.ObjectAnnotation(
    name="mask",  # must match the ontology feature's name
    confidence=0.5,
    value=lb_types.Mask(mask=mask_data, color=color),
)

# Segmentation-mask prediction in NDJSON format: the mask pixels are
# referenced by a signed, expiring URL rather than embedded inline.
mask_prediction_ndjson = {
    'name': 'mask',
    'classifications': [],
    'mask': {
        'instanceURI': 'https://storage.labelbox.com/cjhfn5y6s0pk507024nz1ocys%2F1d60856c-59b7-3060-2754-83f7e93e0d01-1?Expires=1666901963361&KeyName=labelbox-assets-key-3&Signature=t-2s2DB4YjFuWEFak0wxYqfBfZA',
        'colorRGB': (0, 0, 0),
    },
    'confidence': 0.5,
}

Point

# Point prediction, Python annotation format.
point_prediction = lb_types.ObjectAnnotation(
    name="point",  # must match the ontology feature's name
    confidence=0.5,
    value=lb_types.Point(x=1166.606, y=1441.768),
)
# Point prediction in NDJSON format (placeholder coordinates that do not
# match the Python-annotation example above).
point_prediction_ndjson = {
    "name": "point",
    "classifications": [],
    "point": {"x": 10.0, "y": 10.0},
    "confidence": 0.5,
}

Polyline

# Polyline prediction, Python annotation format.
# Confidence scores are not yet supported for polylines, so none is set here.
# (The original passed confidence=0.5 while its own comment noted the value is
# unsupported for this tool — that contradictory argument has been removed.)
polyline_prediction = lb_types.ObjectAnnotation(
    name="polyline",  # must match the ontology feature's name
    value=lb_types.Line(
        # Keypoints of the polyline, in order.
        points=[
            lb_types.Point(x=x, y=y)
            for x, y in [
                (2534.353, 249.471), (2429.492, 182.092), (2294.322, 221.962),
                (2224.491, 180.463), (2136.123, 204.716), (1712.247, 173.949),
                (1703.838, 84.438), (1579.772, 82.61), (1583.442, 167.552),
                (1478.869, 164.903), (1418.941, 318.149), (1243.128, 400.815),
                (1022.067, 319.007), (892.367, 379.216), (670.273, 364.408),
                (613.114, 288.16), (377.559, 238.251), (368.087, 185.064),
                (246.557, 167.286), (236.648, 285.61), (90.929, 326.412),
            ]
        ]
    ),
)
# Polyline prediction in NDJSON format. Confidence scores are not yet
# supported for polylines, so none is included.
_polyline_points = [
    (2534.353, 249.471), (2429.492, 182.092), (2294.322, 221.962),
    (2224.491, 180.463), (2136.123, 204.716), (1712.247, 173.949),
    (1703.838, 84.438), (1579.772, 82.61), (1583.442, 167.552),
    (1478.869, 164.903), (1418.941, 318.149), (1243.128, 400.815),
    (1022.067, 319.007), (892.367, 379.216), (670.273, 364.408),
    (613.114, 288.16), (377.559, 238.251), (368.087, 185.064),
    (246.557, 167.286), (236.648, 285.61), (90.929, 326.412),
]
polyline_prediction_ndjson = {
    "name": "polyline",
    "classifications": [],
    "line": [{"x": x, "y": y} for x, y in _polyline_points],
}

End-to-end example: Upload predictions to a model run

Here are the steps to upload predictions to a model run.

Before you start

You will need to import these libraries to use the code examples in this section:

import labelbox as lb
import labelbox.data.annotation_types as lb_types
import labelbox.data.serialization as lb_serializers
import uuid
import numpy as np

Replace with your API key

To learn how to create an API key, please follow the instructions on this page.

# Paste your Labelbox API key here (see the instructions linked above) and
# open an authenticated client with it.
API_KEY = ""
client = lb.Client(API_KEY)

Step 1: Import data rows into Catalog

# Build one data row pointing at a publicly hosted sample image. The global
# key only needs to be unique, so a random UUID string is used.
test_img_url = {
    "row_data": "https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg",
    "global_key": str(uuid.uuid4()),
}

# Create a dataset in Catalog and attach the data row to it.
dataset = client.create_dataset(name="image_prediction_demo")
data_row = dataset.create_data_row(test_img_url)
print(data_row)

Step 2: Create an ontology for your model predictions

Your model run should have the correct ontology set up with all the tools and classifications supported for your predictions.

Here is an example of creating an ontology programmatically for all the example predictions above:

# Step 2 ontology: every classification/tool name below must match the `name`
# used in the corresponding prediction payloads above.
ontology_builder = lb.OntologyBuilder(
  classifications=[ # List of Classification objects
    lb.Classification( # Radio classification "radio_question" with options "first_radio_answer" and "second_radio_answer"
      class_type=lb.Classification.Type.RADIO,
      name="radio_question", 
      options=[
        lb.Option(value="first_radio_answer"),
        lb.Option(value="second_radio_answer")
      ]
    ),
    lb.Classification( # Checklist classification "checklist_question" with two options
      class_type=lb.Classification.Type.CHECKLIST,
      name="checklist_question", 
      options=[
        lb.Option(value="first_checklist_answer"),
        lb.Option(value="second_checklist_answer")
      ]
    ), 
    lb.Classification( # Free-form text classification "free_text"
      class_type=lb.Classification.Type.TEXT,
      name="free_text"
    ),
    lb.Classification( # Radio classification with a nested radio sub-classification
        class_type=lb.Classification.Type.RADIO,
        name="nested_radio_question",
        options=[
            lb.Option("first_radio_answer",
                options=[
                    lb.Classification(
                        class_type=lb.Classification.Type.RADIO,
                        name="sub_radio_question",
                        options=[lb.Option("first_sub_radio_answer")]
                    )
                ]
            )
          ] 
        ),
    lb.Classification( # Checklist classification with a nested checklist sub-classification
      class_type=lb.Classification.Type.CHECKLIST,
      name="nested_checklist_question",
      options=[
          lb.Option("first_checklist_answer",
            options=[
              lb.Classification(
                  class_type=lb.Classification.Type.CHECKLIST,
                  name="sub_checklist_question", 
                  options=[lb.Option("first_sub_checklist_answer")]
              )
          ]
        )
      ]
    ),      
  ],
  tools=[ # List of Tool objects
    lb.Tool( # Bounding-box tool given the name "bounding_box"
      tool=lb.Tool.Type.BBOX,
      name="bounding_box"), 
    lb.Tool( # Bounding-box tool with a nested radio sub-classification
      tool=lb.Tool.Type.BBOX,
      name="bbox_with_radio_subclass",
      classifications=[
            lb.Classification(
                class_type=lb.Classification.Type.RADIO,
                name="sub_radio_question",
                options=[
                  lb.Option(value="first_sub_radio_answer")
                ]
              ),
        ]
      ), 
    lb.Tool( # Polygon tool given the name "polygon"
      tool=lb.Tool.Type.POLYGON,
      name="polygon"),
    lb.Tool( # Segmentation mask tool given the name "mask"
      tool=lb.Tool.Type.SEGMENTATION,
      name="mask"),
      lb.Tool( # Point tool given the name "point"
      tool=lb.Tool.Type.POINT,
      name="point"), 
    lb.Tool( # Polyline tool given the name "polyline"
      tool=lb.Tool.Type.LINE,
      name="polyline")]
)

# Register the ontology with Labelbox for image media.
ontology = client.create_ontology("Image Prediction Import Demo",
                                  ontology_builder.asdict(),
                                  media_type=lb.MediaType.Image)

Step 3: Create a Model and a model run

Create a Model using the ontology and a model run.

# create Model (the name is suffixed with a UUID so repeated tutorial runs
# don't collide on the model name)
model = client.create_model(name="image_model_run_" + str(uuid.uuid4()),
                            ontology_id=ontology.uid)
# create Model Run (one iteration of the model above)
model_run = model.create_model_run("iteration 1")

Step 4: Send a set of data rows to the model run

# Attach the data row created in step 1 to the model run.
model_run.upsert_data_rows([data_row.uid])

Step 5: Create the predictions payload

Create the predictions payload using the snippets of code shown above.

Labelbox supports two formats for the annotations payload: NDJSON and Python annotation types. Both approaches are described below with instructions to compose annotations into Labels attached to the data rows.

The resulting ndjson_prediction_method and label_list_prediction payloads should have exactly the same prediction content (with the exception of the uuid strings that are generated).

# Assemble the NDJSON predictions payload: attach the target data row id to
# each prediction dict defined above.
ndjson_prediction_method = []

for annot in [
    radio_prediction_ndjson,
    checklist_prediction_ndjson,
    bbox_prediction_ndjson,
    bbox_with_radio_subclass_prediction_ndjson,
    polygon_prediction_ndjson,
    mask_prediction_ndjson,
    point_prediction_ndjson,
    polyline_prediction_ndjson,
    text_prediction_ndjson,  # fixed: the original listed `text_annotation_ndjson`, an undefined name (NameError)
    nested_radio_prediction_ndjson,
    nested_checklist_prediction_ndjson,
]:
    annot.update({
        'dataRow': {'id': data_row.uid},
    })
    ndjson_prediction_method.append(annot)
# Create a Label for predictions
# (Python-annotation-type equivalent of the NDJSON payload built above: one
# Label carrying all prediction objects for the data row.)
# NOTE(review): the nested radio/checklist predictions are absent from this
# list because this guide only defines them in NDJSON form.
label_prediction = lb_types.Label(
    data=lb_types.ImageData(uid=data_row.uid),
    annotations = [
      radio_prediction,
      checklist_prediction, 
      bbox_prediction, 
      bbox_with_radio_subclass_prediction, 
      polyline_prediction,
      polygon_prediction, 
      mask_prediction, 
      point_prediction,
      text_annotation
      ]
)

# Create a label list 
label_list_prediction = [label_prediction]

Step 6: Upload the predictions payload to the model run

# Upload the prediction payload to the Model Run.
upload_job_prediction = model_run.add_predictions(
    name="prediction_upload_job" + str(uuid.uuid4()),
    predictions=ndjson_prediction_method)

# Block until the upload finishes so the error list read below is complete
# (the original read .errors immediately; the annotation-import flow in
# step 7.5 already waits this way).
upload_job_prediction.wait_until_done()

# Errors will appear for prediction uploads that failed.
print("Errors:", upload_job_prediction.errors)

Step 7: Send annotations to a model run (optional)

We will create a project with ground truth annotations to visualize both annotations and predictions in the model run.

To send annotations to a model run, we must first import them into a project, create a label payload, and then send them to the model run.

# 7.1 Create a labelbox project 
# NOTE(review): this project name duplicates the dataset name from step 1
# ("image_prediction_demo"); rename one if that is confusing in your org.
project = client.create_project(name="image_prediction_demo",
                                    # Quality Settings setup 
                                    auto_audit_percentage=1,
                                    auto_audit_number_of_labels=1,
                                    media_type=lb.MediaType.Image)
# Connect the ontology created in step 2 to the project's editor.
project.setup_editor(ontology)


# 7.2 Create a batch of the dataset's data rows to label in this project.
project.create_batch(
  "batch_predictions_demo", # Each batch in a project must have a unique name
  dataset.export_data_rows(), # A list of data rows or data row ids
  5 # priority between 1 (highest) and 5 (lowest)
)

# 7.3 Create your annotation payload as explained in:
# https://docs.labelbox.com/reference/import-image-annotations#supported-annotations
# These are placeholders: replace each `...` with the real NDJSON dict shown
# in the linked guide. (`= ...` keeps this snippet syntactically valid Python;
# the original bare `name ...` lines were syntax errors.)
radio_annotation_ndjson = ...
checklist_annotation_ndjson = ...
bbox_annotation_ndjson = ...
bbox_with_radio_subclass_ndjson = ...
polygon_annotation_ndjson = ...
mask_annotation_ndjson = ...
point_annotation_ndjson = ...
polyline_annotation_ndjson = ...
nested_radio_annotation_ndjson = ...
nested_checklist_annotation_ndjson = ...
text_annotation_ndjson = ...

# 7.4  Create a Label object by identifying the applicable data row in Labelbox and providing a list of annotations
# Attach the data row id to each annotation payload, mirroring the prediction
# loop in step 5.
ndjson_annotation = []
for annot in [
    radio_annotation_ndjson, 
    checklist_annotation_ndjson, 
    bbox_annotation_ndjson, 
    bbox_with_radio_subclass_ndjson, 
    polygon_annotation_ndjson, 
    mask_annotation_ndjson, 
    point_annotation_ndjson, 
    polyline_annotation_ndjson,
    nested_radio_annotation_ndjson,
    nested_checklist_annotation_ndjson,
    text_annotation_ndjson
]:
  annot.update({
      'dataRow': {'id': data_row.uid},
  })
  ndjson_annotation.append(annot) 

# 7.5 Upload annotations to the project using Label import 
upload_job_annotation = lb.LabelImport.create_from_objects(
    client = client,
    project_id = project.uid,
    name="annotation_import_" + str(uuid.uuid4()),
    labels=ndjson_annotation)

# Block until the import finishes so the error list below is complete.
upload_job_annotation.wait_until_done()
# Errors will appear for annotation uploads that failed.
print("Errors:", upload_job_annotation.errors)

# 7.6 Send the annotations to the model run 

# get the labels id from the project
# NOTE(review): assumes each exported label dict exposes its id under the
# 'ID' key — verify against the SDK version in use.
label_ids = [x['ID'] for x in project.export_labels(download=True)]
model_run.upsert_labels(label_ids)


End-to-end Python tutorial

Open this Colab for an end-to-end tutorial on importing predictions on images (Steps 1-7).

Open In Colab