From 4ccd8e533d7f6e120a6ccd1aed6bf6bb4ec1e499 Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Fri, 22 Mar 2024 21:42:09 +0000 Subject: [PATCH] v1.5.6 --- README.md | 18 ++- RELEASE.md | 5 + VERSION | 2 +- configurations/camera.conf | 2 +- configurations/countSensors.conf | 2 +- configurations/fakeRTSP-nomedia.conf | 2 +- configurations/fakeRTSP.conf | 2 +- configurations/selectFileRTSP.conf | 2 +- .../SIOOnDemandAnalytics/docker-compose.yml | 8 +- .../docker-compose.yml | 8 +- .../docker-compose-rtsp.yml | 6 +- .../docker-compose.yml | 6 +- .../VideoStreamsConsumer/docker-compose.yml | 6 +- .../VideoStreamsRecorder/docker-compose.yml | 12 +- docs/schemas/anypipe/anypipe.html | 2 +- examples/MCPEvents/MCPEvents.py | 23 +++- examples/lib/MCP.py | 32 +++++- scripts/sh-services | 107 ++++++++++++++---- services/sio/conf/default.env | 2 +- 19 files changed, 183 insertions(+), 64 deletions(-) diff --git a/README.md b/README.md index 5b1891d..c5ce69b 100644 --- a/README.md +++ b/README.md @@ -250,18 +250,30 @@ For more advanced options visit [VehicleAnalytics Documentation](https://dev.sig ## Changing Docker env variables -If you need to modify the `.env` file of a service, you can either `./scripts/sh-services edit all` or create a new `.env file` like this +The `.env` [file](https://docs.docker.com/compose/environment-variables/set-environment-variables/#substitute-with-an-env-file) is generated by the sh-services script at runtime. 
To modify the environment via CLI, you can either run `./scripts/sh-services edit `, or add a user-specific file with .env extension in sio/conf/, for example `/conf/user.env` or `/conf/0009-debug.env`: ```bash -echo "SIO_DOCKER_TAG=r221202" > sio/conf/0009-debug.env +echo "MY_VARIABLE=24" > sio/conf/user.env +echo "SIO_DOCKER_TAG=r240318" > sio/conf/0009-debug.env ``` -and then update the services: +And then update the services (create the .env file for docker-compose) by running: ```bash ./scripts/sh-services merge all ``` +### Modifying SIO release version + + +In case you need to change the release version of SIO: +Execute `./scripts/sh-services edit sio`, then select `Edit service (.env)`, find the variable `SIO_DOCKER_TAG`, set it to whatever value you need, and save the file. + +That creates a `sio/conf/0001-edit.env` file containing your edits while keeping `sio/conf/default.env` intact. +The result is stored in the `sio/.env` file with the merged contents of `default.env` and `0001-edit.env`. +`0001-edit.env` takes precedence over the default file: the order follows UNIX lexicographic sorting, where the character `0` of `0001-edit.env` sorts before the `d` of `default`. 
+ + ## Deployment ```bash diff --git a/RELEASE.md b/RELEASE.md index 95a65f5..378860c 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,5 +1,10 @@ # Release Notes +## v1.5.6 +- Update SIO to r240318 +- Update .env editing for better understanding (add banners and more) +- Remove SIO images when disk is full + ## v1.5.5 - Initial version of on-demand analytics sample - Sighthound REST API Gateway - Docker Compose Updates diff --git a/VERSION b/VERSION index b671d40..54a3d96 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v1.5.5 +v1.5.6 diff --git a/configurations/camera.conf b/configurations/camera.conf index 12e2c64..035350b 100644 --- a/configurations/camera.conf +++ b/configurations/camera.conf @@ -7,5 +7,5 @@ select_example sio camera up live555 up rabbitmq up mcp -test_rtsp_stream rtsp://sh-camera-rtsp:8555/live 5 +test_rtsp_stream rtsp://localhost:8555/live 5 restart sio \ No newline at end of file diff --git a/configurations/countSensors.conf b/configurations/countSensors.conf index 0aebd43..dcd2bdc 100644 --- a/configurations/countSensors.conf +++ b/configurations/countSensors.conf @@ -8,5 +8,5 @@ select_example sio count-sensor-nomedia up live555 up rabbitmq up mcp -test_rtsp_stream rtsp://live555/StreetVideo1.mkv 5 +test_rtsp_stream rtsp://localhost/StreetVideo1.mkv 5 restart sio \ No newline at end of file diff --git a/configurations/fakeRTSP-nomedia.conf b/configurations/fakeRTSP-nomedia.conf index e3f4adb..4f05147 100644 --- a/configurations/fakeRTSP-nomedia.conf +++ b/configurations/fakeRTSP-nomedia.conf @@ -8,5 +8,5 @@ select_example sio live555-nomedia up live555 up rabbitmq up mcp -test_rtsp_stream rtsp://live555/StreetVideo1.mkv 5 +test_rtsp_stream rtsp://localhost/StreetVideo1.mkv 5 restart sio \ No newline at end of file diff --git a/configurations/fakeRTSP.conf b/configurations/fakeRTSP.conf index defa87b..197412f 100644 --- a/configurations/fakeRTSP.conf +++ b/configurations/fakeRTSP.conf @@ -7,5 +7,5 @@ select_example sio live555 up live555 up 
rabbitmq up mcp -test_rtsp_stream rtsp://live555/StreetVideo1.mkv 5 +test_rtsp_stream rtsp://localhost/StreetVideo1.mkv 5 restart sio \ No newline at end of file diff --git a/configurations/selectFileRTSP.conf b/configurations/selectFileRTSP.conf index 43abd29..868c278 100644 --- a/configurations/selectFileRTSP.conf +++ b/configurations/selectFileRTSP.conf @@ -8,5 +8,5 @@ select_example sio file-rtsp up live555 up rabbitmq up mcp -test_rtsp_stream rtsp://live555/data/my-video.mkv 5 +test_rtsp_stream rtsp://localhost/data/my-video.mkv 5 restart sio \ No newline at end of file diff --git a/deployment-examples/SIOOnDemandAnalytics/docker-compose.yml b/deployment-examples/SIOOnDemandAnalytics/docker-compose.yml index 165dcd6..600fe51 100644 --- a/deployment-examples/SIOOnDemandAnalytics/docker-compose.yml +++ b/deployment-examples/SIOOnDemandAnalytics/docker-compose.yml @@ -1,4 +1,4 @@ -version: "3" +version: "2.3" services: # By default pipelines.json will point to streams served by this container. @@ -13,11 +13,11 @@ services: # The actual analytics container analytics: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240117}${SIO_DOCKER_TAG_VARIANT-} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240318}${SIO_DOCKER_TAG_VARIANT-} restart: unless-stopped environment: # Location where SIO will place generated model engine files - - SIO_DATA_DIR=/data/sio-cache + - SIO_DATA_DIR=/data/.sio - PYTHONUNBUFFERED=1 # Container runtime defaults to `runc` if SIO_DOCKER_RUNTIME not set. Use `nvidia` if GPU is installed. runtime: ${SIO_DOCKER_RUNTIME-runc} @@ -27,7 +27,7 @@ services: - ./config:/config:ro # Writable shared folder for data exchange with host # We'll use it for storing the generated model files, data exchange folder, etc. 
- - ./data:/data + - ${HOME-./data}:/data # Shared memory-backed folder for data exchange with other containers - runvol:/tmp/runvol entrypoint: diff --git a/deployment-examples/SighthoundRestApiGateway/docker-compose.yml b/deployment-examples/SighthoundRestApiGateway/docker-compose.yml index 062a95c..cc6a34a 100644 --- a/deployment-examples/SighthoundRestApiGateway/docker-compose.yml +++ b/deployment-examples/SighthoundRestApiGateway/docker-compose.yml @@ -2,11 +2,11 @@ version: "3" services: analytics: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240117}${SIO_DOCKER_TAG_VARIANT-} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240318}${SIO_DOCKER_TAG_VARIANT-} restart: unless-stopped environment: # Location where SIO will place generated model engine files - - SIO_DATA_DIR=/data/sio-cache + - SIO_DATA_DIR=/data/.sio # Container runtime defaults to `runc` if SIO_DOCKER_RUNTIME not set. Use `nvidia` if GPU is installed. runtime: ${SIO_DOCKER_RUNTIME-runc} volumes: @@ -15,7 +15,7 @@ services: - ./config:/config:ro # Writable shared folder for data exchange with host # We'll use it for storing the generated model files, data exchange folder, etc. 
- - ./data:/data + - ${HOME-./data}:/data # Shared memory-backed folder for data exchange with other containers - run_vol:/tmp/inputFiles entrypoint: @@ -40,7 +40,7 @@ services: # Overrides default config - ./config/gateway/service.json:/cloudvx/config/local.json:ro # Writable shared folder for data exchange with host - - ./data:/data + - ${HOME-./data}:/data # Shared memory-backed folder for data exchange with other containers - run_vol:/tmp/inputFiles depends_on: diff --git a/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml b/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml index e649abe..8275234 100644 --- a/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml +++ b/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml @@ -13,11 +13,11 @@ services: analytics: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240318}${SIO_DOCKER_TAG_VARIANT} restart: unless-stopped environment: # Location where SIO will place generated model engine files - - SIO_DATA_DIR=/data/sio-cache + - SIO_DATA_DIR=/data/.sio # We need this to see output from Python extension module - PYTHONUNBUFFERED=1 # Container runtime defaults to `runc` if SIO_DOCKER_RUNTIME not set. Use `nvidia` if GPU is installed. @@ -28,7 +28,7 @@ services: - ./config:/config:ro # Writable shared folder for data exchange with host # We'll use it for storing the generated model files, data exchange folder, etc. 
- - ./data:/data + - ${HOME-./data}:/data entrypoint: - /sighthound/sio/bin/runPipelineSet # Pipeline configuration file diff --git a/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml b/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml index 34d75cb..4312df8 100644 --- a/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml +++ b/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml @@ -2,11 +2,11 @@ version: "2.3" services: analytics: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240318}${SIO_DOCKER_TAG_VARIANT} restart: unless-stopped environment: # Location where SIO will place generated model engine files - - SIO_DATA_DIR=/data/sio-cache + - SIO_DATA_DIR=/data/.sio # We need this to see output from Python extension module - PYTHONUNBUFFERED=1 # Container runtime defaults to `runc` if SIO_DOCKER_RUNTIME not set. Use `nvidia` if GPU is installed. @@ -17,7 +17,7 @@ services: - ./config:/config:ro # Writable shared folder for data exchange with host # We'll use it for storing the generated model files, data exchange folder, etc. 
- - ./data:/data + - ${HOME-./data}:/data entrypoint: - /sighthound/sio/bin/runPipelineSet # Pipeline configuration file diff --git a/deployment-examples/VideoStreamsConsumer/docker-compose.yml b/deployment-examples/VideoStreamsConsumer/docker-compose.yml index 906bcc2..38c0c93 100644 --- a/deployment-examples/VideoStreamsConsumer/docker-compose.yml +++ b/deployment-examples/VideoStreamsConsumer/docker-compose.yml @@ -31,12 +31,12 @@ services: # The SIO analytics container, consuming the streams and analyzing them analytics_svc: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240318}${SIO_DOCKER_TAG_VARIANT} container_name: sample-sio restart: unless-stopped environment: # Location where SIO will place generated model engine files - - SIO_DATA_DIR=/data/sio-cache + - SIO_DATA_DIR=/data/.sio # Container runtime defaults to `runc` if SIO_DOCKER_RUNTIME not set. Use `nvidia` if GPU is installed. runtime: ${SIO_DOCKER_RUNTIME-runc} volumes: @@ -45,7 +45,7 @@ services: - ./config:/config:ro # Writable shared folder for data exchange with host / other containers. # We'll use it for storing the generated model files, data exchange folder, etc. 
- - ./data:/data + - ${HOME-./data}:/data entrypoint: - /sighthound/sio/bin/runPipelineSet # Pipeline configuration file diff --git a/deployment-examples/VideoStreamsRecorder/docker-compose.yml b/deployment-examples/VideoStreamsRecorder/docker-compose.yml index ff9bb2e..bd97241 100644 --- a/deployment-examples/VideoStreamsRecorder/docker-compose.yml +++ b/deployment-examples/VideoStreamsRecorder/docker-compose.yml @@ -38,11 +38,11 @@ services: mem_reservation: 512M volumes: # Location of recorded media; should match that specified for SIO's pipeline configuration - - ./data/media:/data/sighthound/media:rw + - ${HOME-./data}/media:/data/sighthound/media:rw # Location for MCP logs - - ./data/logs/mcp:/data/sighthound/logs/mcp:rw + - ${HOME-./data}/logs/mcp:/data/sighthound/logs/mcp:rw # Location of MCP database - - ./data/mcp/db:/data/sighthound/db:rw + - ${HOME-./data}/mcp/db:/data/sighthound/db:rw # MCP configuration - ./config/mcp/mcp.yml:/etc/mcpd/default.json:ro ports: @@ -52,12 +52,12 @@ services: # The SIO analytics container, consuming the streams and analyzing them analytics_svc: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r240318}${SIO_DOCKER_TAG_VARIANT} container_name: sample-sio restart: unless-stopped environment: # Location where SIO will place generated model engine files - - SIO_DATA_DIR=/data/sio-cache + - SIO_DATA_DIR=/data/.sio # Container runtime defaults to `runc` if SIO_DOCKER_RUNTIME not set. Use `nvidia` if GPU is installed. runtime: ${SIO_DOCKER_RUNTIME-runc} volumes: @@ -66,7 +66,7 @@ services: - ./config:/config:ro # Writable shared folder for data exchange with host / other containers. # We'll use it for storing the generated model files, data exchange folder, etc. 
- - ./data:/data + - ${HOME-./data}:/data entrypoint: - /sighthound/sio/bin/runPipelineSet # Pipeline configuration file diff --git a/docs/schemas/anypipe/anypipe.html b/docs/schemas/anypipe/anypipe.html index b0db8b8..046dcde 100644 --- a/docs/schemas/anypipe/anypipe.html +++ b/docs/schemas/anypipe/anypipe.html @@ -1 +1 @@ - Sighthound Analytics

Sighthound Analytics

Type: object

Analytics data sent by the Sighthound video/image analysis pipeline. This data is sent based on configuration when the number of detected objects or attributes of detected objects changes, the confidence of detected objects or their attributes improves, or a configurable timeout occurs.

No Additional Properties

Type: object

Type: integer

Timestamp the frame corresponding to this analytics data was processed at, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

A global unique ID representing the media source, for
instance a specific video stream from a camera sensor or RTSP feed, , or input source location for images or videos

Type: string

An ID corresponding to this frame, which may be used to
access the image corresponding to all box coordinates and object
detections represented in this object, via the Media Service API.

Type: object

The dimensions (width and height) of the frame represented by frameId. Also used as the coordinate base for all bounding box coordinates.

Type: number

Width in pixels

Value must be greater or equal to 0

Type: number

Height in pixels

Value must be greater or equal to 0

Type: integer

Timestamp of the frame corresponding to this analytics data, acccording to the source, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

Type: object

Meta classes include objects such as vehicles, license plates, and people. These are high-level classifications.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

An plural MetaClass name. Supported MetaClasses
include:
vehicles - Objects including cars, buses, trucks, motorbikes.
Vehicles include objects which may potentially include license
plates, may include links to licensePlates.
licensePlates - Objects which are detected/classified as license plates.
people - Pedestrians or people riding skateboards, electric
scooter, wheelchairs,etc.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A Unique ID representing this object, used to map
additional object properties. This ID is guaranteed unique
for each object, regardless of streamId. It will change the object drops out of
detection/tracking

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: string

Object specific class returned by the model. For objects of the vehicles metaclass this may include car, truck, bus, motorbike, etc based on model capabilities

Type: object

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A map of attributes for this object. Not all atributes are supported for all object types. Example attributes include:
color - The color of an object
lpString - A string representing license plate text
and numbers
lpRegion - A string representing license plate region
vehicleType - Make model and generation of the vehicle in a single string

No Additional Properties

Type: number

Confidence score for attribute detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.

Value must be greater or equal to 0 and lesser or equal to 1

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.When included in an attribute, this score represents the
object Detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Value must be greater or equal to 0 and lesser or equal to 1

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.


A value of the attribute. The value is specific to the attribute type.

Type: object

Information about the detected vehicle, including its make, model, and generation.

Type: string

The manufacturer of the detected vehicle, e.g., 'Toyota'.

Type: string

The specific model of the detected vehicle, e.g., 'Camry'.

Type: string

The generation or variant of the detected vehicle, e.g., '2020'.

Type: string

The category to which the detected vehicle belongs, e.g., 'Sedan'.

Additional Properties of any type are allowed.

Type: object

Type: object

Debug information, subject to change
between releases. Do not use this object in an
application.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: string

Type: string

An object hash which uniquely identifies this object and associated attributes. Will change when attributes change. Reserved for future use

Type: object

The bounding box containing this object, in
pixel coordinates where the top left corner of the
image is represented by pixel 0,0, corresponding to the image referenced by imageRef

No Additional Properties

Type: integer

Height of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

Width of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

X coordinate of the top left corner
of the bounding box.

Value must be greater or equal to 0

Type: integer

Y coordinate of the top left corner of
the bounding box

Value must be greater or equal to 0

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.When included in an attribute, this score represents the
object Detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Same definition as detectionScore

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.

Same definition as updated

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: object

A map of maps describing an event type.
- The top level map key is a name describing the event type. Supported types are presenceSensor, lineCrossingEvent, speedEvent.
- The sub level map key is a Unique ID representing the event, used to map
additional object properties. This ID is guaranteed unique
for each event for a given stream ID.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A name describing an event type.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: array

A Unique ID representing this event

No Additional Items

Each item of this array must be:


Type: object

Describes an event where one or more objects are present in a region of interest.
The event starts when the first object enters a region of interest. Updates are sent for each change in status, with updateCount incremented for each update. When the last object exits and the region is empty, the sensor event will become immutable and will track the total amount of time at least one object was present in the region of interest. An entry of an object will start a new event and reset the updateCount to 1. Region definitons, object filtering and other items related to sensor definitions are tracked as a part of the sensorId associated with the event.

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Type: integer

The total number of objects of a specific type detected within a region of interest, excluding those filtered out based on sensor configuration.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by metaclasses.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by metaclass. Metaclasses represent higher-level categories that objects may belong to, such as 'vehicle' or 'people,' while classes represent more specific types, such as 'car' or 'person'.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by classes.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by class. For example, if the sensor is configured to detect vehicles, this property may include counts of 'car,' 'bus,' and 'truck'.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Value must be greater or equal to 0

Type: integer

The cumulative number of updates sent for this sensor, starting with 1 for the initial update and incremented once for each update sent for each unique sensor event ID. An update refers to a change in the state of the sensor due to a corresponding sensor event (entry, exit, crossing, ...). For sensors which include multiple updates per sensor event (presense sensors), the updateCount will be reset to 1 to indicate the first update for a given event. For sensors (count) which only include 1 update per event, updateCount will be cumulative and count the total number of events per sensor.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event ended.

Value must be greater or equal to 0

Type: object

Describes an event where one object crosses a line

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Same definition as eventId

Type: string

The direction of an object's trajectory relative to the sensor's line, with the first point (A) as the pivot point. 'Clockwise' means the object is moving in a clockwise direction relative to the line, while 'counterclockwise' means the object is moving in a counterclockwise direction.

Type: integer

Number of clockwise crossings.

Value must be greater or equal to 0

Type: integer

Number of counterclockwise crossings.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Same definition as startedAt

Type: array of object
No Additional Items

Each item of this array must be:

Type: object

Type: string

Media Event type: Ex: image,video

Type: string

Message content

Type: integer

Start of Event Timestamp

Value must be greater or equal to 0

Type: integer

End of Event Timestamp

Value must be greater or equal to 0

Type: string

Message format. Ex: json, jpeg, mp4, ts...

\ No newline at end of file + Sighthound Analytics

Sighthound Analytics

Type: object

Analytics data sent by the Sighthound video/image analysis pipeline. This data is sent based on configuration when the number of detected objects or attributes of detected objects changes, the confidence of detected objects or their attributes improves, or a configurable timeout occurs.

No Additional Properties

Type: object

Type: integer

Timestamp the frame corresponding to this analytics data was processed at, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

A global unique ID representing the media source, for
instance a specific video stream from a camera sensor or RTSP feed, or input source location for images or videos

Type: string

An ID corresponding to this frame, which may be used to
access the image corresponding to all box coordinates and object
detections represented in this object, via the Media Service API.

Type: object

The dimensions (width and height) of the frame represented by frameId. Also used as the coordinate base for all bounding box coordinates.

Type: number

Width in pixels

Value must be greater or equal to 0

Type: number

Height in pixels

Value must be greater or equal to 0

Type: integer

Timestamp of the frame corresponding to this analytics data, according to the source, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

Type: object

Meta classes include objects such as vehicles, license plates, and people. These are high-level classifications.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A plural MetaClass name. Supported MetaClasses
include:
vehicles - Objects including cars, buses, trucks, motorbikes.
Vehicles include objects which may potentially include license
plates, may include links to licensePlates.
licensePlates - Objects which are detected/classified as license plates.
people - Pedestrians or people riding skateboards, electric
scooter, wheelchairs,etc.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A Unique ID representing this object, used to map
additional object properties. This ID is guaranteed unique
for each object, regardless of streamId. It will change when the object drops out of
detection/tracking

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: string

Object specific class returned by the model. For objects of the vehicles metaclass this may include car, truck, bus, motorbike, etc based on model capabilities

Type: object

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A map of attributes for this object. Not all attributes are supported for all object types. Example attributes include:
color - The color of an object
lpString - A string representing license plate text
and numbers
lpRegion - A string representing license plate region
vehicleType - Make model and generation of the vehicle in a single string

No Additional Properties

Type: number

Confidence score for attribute detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.

Value must be greater or equal to 0 and lesser or equal to 1

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence. When included in an attribute, this score represents the
object Detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Value must be greater or equal to 0 and lesser or equal to 1

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.


A value of the attribute. The value is specific to the attribute type.

Type: object

Information about the detected vehicle, including its make, model, and generation.

Type: string

The manufacturer of the detected vehicle, e.g., 'Toyota'.

Type: string

The specific model of the detected vehicle, e.g., 'Camry'.

Type: string

The generation or variant of the detected vehicle, e.g., '2020'.

Type: string

The category to which the detected vehicle belongs, e.g., 'Sedan'.

Additional Properties of any type are allowed.

Type: object

Type: object

Debug information, subject to change
between releases. Do not use this object in an
application.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: string

Type: string

An object hash which uniquely identifies this object and associated attributes. Will change when attributes change. Reserved for future use

Type: object

The bounding box containing this object, in
pixel coordinates where the top left corner of the
image is represented by pixel 0,0, corresponding to the image referenced by imageRef

No Additional Properties

Type: integer

Height of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

Width of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

X coordinate of the top left corner
of the bounding box.

Value must be greater or equal to 0

Type: integer

Y coordinate of the top left corner of
the bounding box

Value must be greater or equal to 0

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.When included in an attribute, this score represents the
object Detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Same definition as detectionScore

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.

Same definition as updated

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: object

A map of maps describing an event type.
- The top level map key is a name describing the event type. Supported types are presenceSensor, lineCrossingEvent, speedEvent.
- The sub level map key is a Unique ID representing the event, used to map
additional object properties. This ID is guaranteed unique
for each event for a given stream ID.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A name describing an event type.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: array

A Unique ID representing this event

No Additional Items

Each item of this array must be:


Type: object

Describes an event where one or more objects are present in a region of interest.
The event starts when the first object enters a region of interest. Updates are sent for each change in status, with updateCount incremented for each update. When the last object exits and the region is empty, the sensor event will become immutable and will track the total amount of time at least one object was present in the region of interest. An entry of an object will start a new event and reset the updateCount to 1. Region definitions, object filtering and other items related to sensor definitions are tracked as a part of the sensorId associated with the event.

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Type: integer

The total number of objects of a specific type detected within a region of interest, excluding those filtered out based on sensor configuration.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by metaclasses.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by metaclass. Metaclasses represent higher-level categories that objects may belong to, such as 'vehicle' or 'people,' while classes represent more specific types, such as 'car' or 'person'.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by classes.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by class. For example, if the sensor is configured to detect vehicles, this property may include counts of 'car,' 'bus,' and 'truck'.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Value must be greater or equal to 0

Type: integer

The cumulative number of updates sent for this sensor, starting with 1 for the initial update and incremented once for each update sent for each unique sensor event ID. An update refers to a change in the state of the sensor due to a corresponding sensor event (entry, exit, crossing, ...). For sensors which include multiple updates per sensor event (presence sensors), the updateCount will be reset to 1 to indicate the first update for a given event. For sensors (count) which only include 1 update per event, updateCount will be cumulative and count the total number of events per sensor.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event ended.

Value must be greater or equal to 0

Type: object

Describes an event where one object crosses a line

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Same definition as eventId

Type: string

The direction of an object's trajectory relative to the sensor's line, with the first point (A) as the pivot point. 'Clockwise' means the object is moving in a clockwise direction relative to the line, while 'counterclockwise' means the object is moving in a counterclockwise direction.

Type: integer

Number of clockwise crossings.

Value must be greater or equal to 0

Type: integer

Number of counterclockwise crossings.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Same definition as startedAt

Type: array of object
No Additional Items

Each item of this array must be:

Type: object

Type: string

Media Event type. Ex: image, video

Type: string

Message content

Type: integer

Start of Event Timestamp

Value must be greater or equal to 0

Type: integer

End of Event Timestamp

Value must be greater or equal to 0

Type: string

Message format. Ex: json, jpeg, mp4, ts...

\ No newline at end of file diff --git a/examples/MCPEvents/MCPEvents.py b/examples/MCPEvents/MCPEvents.py index 4b4531a..4c0bb58 100644 --- a/examples/MCPEvents/MCPEvents.py +++ b/examples/MCPEvents/MCPEvents.py @@ -10,6 +10,7 @@ import datetime from pathlib import Path import m3u8 +from cachetools import TTLCache class MCPEvents: def get_args(self, args): @@ -42,6 +43,8 @@ def __init__(self, args): self.current_event_seg = {} # A dict of lists of completed event segments by sourceId, waiting to be written to disk when video is available self.completed_event_seg = {} + # A dict of TTLCache objects representing media events, which expire automatically when not used. + self.video_cache = {} # Group events into a single segment when separated # by less than this number of milliseconds. Only valid when use_events is not specified. self.group_events_separation_ms = 5*1000 @@ -103,6 +106,12 @@ def event_segment_complete(self, source, event_segment): video_name = filepath_ts.relative_to(filepath_ts.parent.parent) print(f"Downloading {video_name}") self.mcp_client.download_video(source, video_name, filepath_ts) + if source in self.video_cache and str(video_name) in self.video_cache[source]: + event_segment.videos.append(self.video_cache[source][str(video_name)]) + else: + print(f"Could not find {video_name} in video cache for {source}") + + vidfile = dirpath / Path(f"{filename_base}.m3u8") print(f"Writing {vidfile}") with open(vidfile, "w") as file: @@ -122,14 +131,16 @@ def handle_media_event_callback(self, media_event, sourceId): # If the media event is a video_file_closed event, add it to the current event segment # for the source ID, or to the completed event segments if it's already completed if type == "video_file_closed": - if sourceId in self.current_event_seg: - event_seg = self.current_event_seg[sourceId] - event_seg.videos.append(media_event) + if not sourceId in self.video_cache: + self.video_cache[sourceId] = TTLCache(maxsize=100, ttl=60*2) + 
self.video_cache[sourceId][msg] = media_event completed_event_segments = self.completed_event_seg.get(sourceId, []) for event_seg in completed_event_segments: - event_seg.videos.append(media_event) - self.event_segment_complete(sourceId, event_seg) - self.completed_event_seg[sourceId].remove(event_seg) + try: + self.event_segment_complete(sourceId, event_seg) + finally: + # Always complete the event segment, even if we couldn't download vids + self.completed_event_seg[sourceId].remove(event_seg) diff --git a/examples/lib/MCP.py b/examples/lib/MCP.py index 33672ae..07dea4d 100644 --- a/examples/lib/MCP.py +++ b/examples/lib/MCP.py @@ -2,6 +2,7 @@ from PIL import Image import numpy as np from io import BytesIO +from requests.adapters import HTTPAdapter, Retry class MCPClient: def __init__(self, conf): @@ -14,12 +15,33 @@ def __init__(self, conf): else: print(f"Connecting to mcp://{self.host}:{self.port}") - def get(self, url): + # See https://www.peterbe.com/plog/best-practice-with-retries-with-requests + def requests_retry_session( + retries=2, + backoff_factor=0.3, + status_forcelist=(500, 502, 504), + session=None,): + + session = session or requests.Session() + retry = Retry( + total=retries, + read=retries, + connect=retries, + backoff_factor=backoff_factor, + status_forcelist=status_forcelist, + ) + adapter = HTTPAdapter(max_retries=retry) + session.mount('http://', adapter) + session.mount('https://', adapter) + return session + + def get(self, url, timeout=5): if self.user and self.password: auth = (self.user, self.password) else: auth = None - response = requests.get(url, auth=auth) + # Retry periodic timeouts, abort any response over 5 seconds + response = self.requests_retry_session().get(url, auth=auth, timeout=timeout) if response.status_code == 401: raise Exception("Unauthorized") @@ -55,7 +77,7 @@ def get_image(self, source_id, image): # curl mcp:9097/hlsfs/source//segment/