- 17 Jul 2025
- Print
- DarkLight
- PDF
Video Annotation JSON
- Updated On 17 Jul 2025
- Print
- DarkLight
- PDF
Overview
This page outlines the Video JSON format, which is used to represent video annotation data in JavaScript Object Notation (JSON). The JSON structure encapsulates both the annotation entities generated within the Dataloop platform and their associated metadata.
The Video Annotation JSON file includes:
KeyFrames: Frames where an annotation has been explicitly created or modified.
Automated Frames: Interpolated frames based on changes between KeyFrames.
Read more about different types of frames at KeyFrames and Advanced Tracking Tools.
To download the JSON file of an item’s annotations, see Export selected/queried items.
Important
Dataloop recommends reading frames through the SDK, as it allows you to read annotations at any frame/point in time, even in reflected frames (which are not recorded in the JSON).
The JSON does not include “reflected” or unchanged frames. If an annotation remains identical to the one in the previous frame (no change in location, label, attributes, etc.), that frame is excluded from the JSON to reduce redundancy.
Video Annotation JSON Format
{
"id": "64ed888abcdc9f8e1dde8bca", // Item Id (unique identifier for the video file)
"datasetId": "64a671e0d854daf23d6ed2b9", // Dataset Id (the dataset this item belongs to)
"url": "https://rc-gate.dataloop.ai/api/v1/items/64ed888abcdc9f8e1dde8bca", // API URL of the item
"dataset": "https://rc-gate.dataloop.ai/api/v1/datasets/64a671e0d854daf23d6ed2b9", // API URL of the dataset
"createdAt": "2023-08-29T05:56:26.677Z", // Date and time when the item was created
"dir": "/", // Directory path of the item within the dataset
"filename": "/sampleVideo.webm", // Name of the file
"type": "file", // Type of the item (file, folder, etc.)
"hidden": false, // Whether the item is hidden in the UI
"metadata": { // Information about the item
"fps": 25, // Frames per second
"startTime": 0, // Start time of the annotation (seconds)
"system": { // System-generated metadata
"duration": 13.64, // Duration of the video in seconds
"encoding": "7bit", // Encoding type
"ffmpeg": { // ffmpeg-specific metadata
"avg_frame_rate": "25/1", // Average frame rate
"chroma_location": "left", // Chroma location
"closed_captions": 0, // Closed captions present (0 = no)
"codec_long_name": "Google VP9", // Codec long name
"codec_name": "vp9", // Codec short name
"codec_tag": "0x0000", // Codec tag (hex)
"codec_tag_string": "[0][0][0][0]", // Codec tag as string
"codec_type": "video", // Type of codec
"coded_height": 720, // Encoded height in pixels
"coded_width": 1280, // Encoded width in pixels
"color_primaries": "bt709", // Color primaries
"color_range": "tv", // Color range
"color_space": "bt709", // Color space
"color_transfer": "bt709", // Color transfer characteristic
"display_aspect_ratio": "16:9", // Display aspect ratio
"disposition": { // Disposition flags (various video properties)
"attached_pic": 0,
"clean_effects": 0,
"comment": 0,
"default": 1,
"dub": 0,
"forced": 0,
"hearing_impaired": 0,
"karaoke": 0,
"lyrics": 0,
"original": 0,
"timed_thumbnails": 0,
"visual_impaired": 0
},
"field_order": "progressive", // Field order (progressive/interlaced)
"has_b_frames": 0, // Number of B-frames
"height": 720, // Height in pixels
"index": 0, // Stream index
"level": -99, // Codec level
"nb_read_frames": "341", // Number of frames read
"nb_read_packets": "341", // Number of packets read
"pix_fmt": "yuv420p", // Pixel format
"profile": "Profile 0", // Codec profile
"r_frame_rate": "25/1", // Real frame rate
"refs": 1, // Number of reference frames
"sample_aspect_ratio": "1:1", // Sample aspect ratio
"start_pts": 0, // Start presentation timestamp
"start_time": "0.000000", // Start time of the video in seconds
"tags": { // Additional tags
"DURATION": "00:00:13.640000000", // Duration as string
"ENCODER": "Lavc60.3.100 libvpx-vp9", // Encoder used
"HANDLER_NAME": "ISO Media file produced by Google Inc. Created on: 06/14/2019.", // Handler name
"VENDOR_ID": "[0][0][0][0]" // Vendor ID
},
"time_base": "1/1000", // Time base for timestamps
"width": 1280 // Width in pixels
},
"fps": 25, // Frames per second (repeated)
"height": 720, // Height in pixels
"isBinary": true, // Whether the file is binary
"mimetype": "video/webm", // The mime type of the file
"nb_streams": 2, // Number of streams (e.g., video + audio)
"originalname": "sampleVideo.webm", // Original file name
"refs": [], // The references of the task
"size": 2101456, // File size in bytes
"startTime": 0, // Start time of the annotation (seconds) (repeated)
"taskStatusLog": [ // Logs from the task
{
"action": "created", // Action performed
"status": { // Status details
"assignmentId": "65091cc8d93bfcb8594f187a", // Assignment ID the status was set in
"creator": "user@dataloop.ai", // User who created the status
"status": "completed", // Annotation status (for example, null/issue/review)
"taskId": "65091cc7d93bfc635d4f1878", // Task ID
"timestamp": "2024-01-04T20:32:39.731Z" // Timestamp of the status update
}
}
],
"thumbnailId": "64ed8b298573791669938b32", // Thumbnail ID for the video
"width": 1280 // Width in pixels (repeated)
}
},
"name": "sampleVideo.webm", // Name of the file
"creator": "user@dataloop.ai", // User who created the item
"updatedAt": "2024-01-04T20:32:39.758Z", // Date and time when item was last updated
"updatedBy": "user@dataloop.ai", // User who updated the item
"stream": "https://rc-gate.dataloop.ai/api/v1/items/64ed888abcdc9f8e1dde8bca/stream", // API URL to stream the video
"thumbnail": "https://rc-gate.dataloop.ai/api/v1/items/64ed888abcdc9f8e1dde8bca/thumbnail", // API URL to get the thumbnail image
"annotations": [ // Array of annotation objects for this video
{
"id": "66eeb3388d03082895553608", // Annotation Id
"datasetId": "64a671e0d854daf23d6ed2b9", // Dataset Id
"itemId": "64ed888abcdc9f8e1dde8bca", // Id of the item
"url": "https://rc-gate.dataloop.ai/api/v1/annotations/66eeb3388d03082895553608", // API URL of the annotation
"item": "https://rc-gate.dataloop.ai/api/v1/items/64ed888abcdc9f8e1dde8bca", // API URL of the item
"dataset": "https://rc-gate.dataloop.ai/api/v1/datasets/64a671e0d854daf23d6ed2b9", // API URL of the dataset
"type": "box", // Annotation type (e.g., box, polygon, etc.)
"label": "standing", // Annotation label
"attributes": [], // Annotation attributes
"coordinates": [ // Coordinates (for 2D box or 3D cube)
{ "x": 925.28, "y": 34.53, "z": 0 },
{ "x": 949.03, "y": 125.68, "z": 0 }
// For 3D cube: may include position, rotation, scale
],
"metadata": { // Information about the annotation
"system": { // Annotation system information
"attributes": {}, // Information about the annotation attributes
"automated": false, // True = Annotation created by automation process
"clientId": "cfbfe7e3-b4b1-4d4c-b936-e5c0cc56586c", // Annotation created in UI (clientId when not saved)
"coordinateVersion": "v2", // Version of coordinate system
"endFrame": 340, // Last frame the annotation appears in the video
"endTime": 13.63, // End time of the annotation in the video
"frame": 38, // First frame in which the annotation appears in the video
"isOnlyLocal": false, // Whether annotation is only local (not uploaded)
"isOpen": false, // Whether the annotation is open (being edited)
"itemLinks": [], // ItemLinks connect a ‘snapshot’ annotation to the newly created image item
"openAnnotationVersion": "1.84.1-rc.25", // Platform version
"recipeId": "64a671e03432e001f5fb592f", // Id of the recipe
"snapshots_": [ // Snapshot information relevant to video annotation
{
"attributes": [], // Snapshots attributes
"data": [ // Coordinates in each snapshot
{ "x": 225.628, "y": 316.292, "z": 0 },
{ "x": 341.638, "y": 441.692, "z": 0 }
],
"fixed": false, // Shows if the frame is a fixed frame
"frame": 39, // Snapshot frame number
"label": "standing", // Snapshot label
"namedAttributes": {}, // The attributes that appear on all annotations
"objectVisible": true, // Status of annotation (true = visible, false = hidden)
"type": "LINEAR" // Type of snapshot (e.g., LINEAR, ATTRIBUTE_CHANGE)
}
// ... more snapshots ...
],
"startTime": 1.52, // Start time of the annotation in the video
"system": false // Shows if annotation is a system annotation
},
"user": {} // Metadata added by user via SDK
},
"creator": "user@dataloop.ai", // User who created the annotation
"createdAt": "2024-09-21T11:51:20.266Z", // Date and time when annotation was created
"updatedBy": "user@dataloop.ai", // User who updated the annotation
"updatedAt": "2024-11-20T12:13:59.504Z", // Date and time when annotation was last updated
"hash": "96ff6d8fb841b353849fde8235e68699da3c19a0", // Hash string derived from the annotation’s data
"source": "ui" // Where the annotation was created: UI/SDK
// Additional fields (if present):
// "interpolation": true, // Indicates an interpolation was done
// "ref": "taskId", // Reference task ID
// "mode": "overwrite", // The write mode applied (for example, overwrite)
// "refType": "id", // The type of the reference is an ID
// "_id": "itemId", // The item ID
// "filename": "sampleVideo.webm", // Name of the file
// "mimetype": "video/webm", // The mime type of the file
// "refs": [], // The references of the task
// "shebang dltype": "lidar", // Allows UI to present the annotation as LiDAR instead of a JSON type
// "size": 2101456 // File size
}
// ... more annotation objects ...
],
"annotationsCount": 4, // Total number of annotations on this item
"annotated": true // Whether the item has been annotated (true/false)
}