diff --git a/README.md b/README.md index b1b49fd..5b1891d 100644 --- a/README.md +++ b/README.md @@ -135,7 +135,7 @@ Enter the path of an MKV file for the example live555 video: Finally, enable the live555 SIO configuration: ```bash -$ ./scripts/sh-services select_example sio live555 +$ ./scripts/sh-services select_example sio file-rtsp ``` ### Configure SIO diff --git a/RELEASE.md b/RELEASE.md index 20db8a5..f1b250c 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,6 +1,12 @@ # Release Notes -## v1.5.3 +## v1.5.4 +- Add more sio examples and rename them +- Fix MCPEvents example +- Update SIO to r231204 that fixes hanging issue +- sh-services: add rabbitmq check fn + +## v1.5.3 - Add Aqueduct API and UI examples - Refine Aqueduct runner example - Add fakeRTSP example configuration @@ -10,7 +16,6 @@ - Fix samples' compatibility with DNNCam/DNNNode - Allow samples to define SIO image version via SIO_RELEASE define - ## v1.5.2 - docs: Add anypipe schemas diff --git a/VERSION b/VERSION index f1a2e63..f074f24 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v1.5.3 +v1.5.4 diff --git a/configurations/camera.conf b/configurations/camera.conf new file mode 100644 index 0000000..12e2c64 --- /dev/null +++ b/configurations/camera.conf @@ -0,0 +1,11 @@ +apply_to_services disable all +enable live555 +enable mcp +enable rabbitmq +enable sio +select_example sio camera +up live555 +up rabbitmq +up mcp +test_rtsp_stream rtsp://sh-camera-rtsp:8555/live 5 +restart sio \ No newline at end of file diff --git a/configurations/countSensors.conf b/configurations/countSensors.conf new file mode 100644 index 0000000..0aebd43 --- /dev/null +++ b/configurations/countSensors.conf @@ -0,0 +1,12 @@ +apply_to_services disable all +enable live555 +enable mcp +enable rabbitmq +enable sio +clean_media +select_example sio count-sensor-nomedia +up live555 +up rabbitmq +up mcp +test_rtsp_stream rtsp://live555/StreetVideo1.mkv 5 +restart sio \ No newline at end of file diff --git 
a/configurations/fakeRTSP-nomedia.conf b/configurations/fakeRTSP-nomedia.conf new file mode 100644 index 0000000..e3f4adb --- /dev/null +++ b/configurations/fakeRTSP-nomedia.conf @@ -0,0 +1,12 @@ +apply_to_services disable all +enable live555 +enable mcp +enable rabbitmq +enable sio +clean_media +select_example sio live555-nomedia +up live555 +up rabbitmq +up mcp +test_rtsp_stream rtsp://live555/StreetVideo1.mkv 5 +restart sio \ No newline at end of file diff --git a/configurations/fakeRTSP.conf b/configurations/fakeRTSP.conf index fb54113..defa87b 100644 --- a/configurations/fakeRTSP.conf +++ b/configurations/fakeRTSP.conf @@ -1,6 +1,4 @@ -select_live555_video -disable amqp-stats -remove_orphans +apply_to_services disable all enable live555 enable mcp enable rabbitmq @@ -9,5 +7,5 @@ select_example sio live555 up live555 up rabbitmq up mcp -test_rtsp_stream rtsp://localhost:7554/data/my-video.mkv 5 +test_rtsp_stream rtsp://live555/StreetVideo1.mkv 5 restart sio \ No newline at end of file diff --git a/configurations/selectFileRTSP.conf b/configurations/selectFileRTSP.conf new file mode 100644 index 0000000..43abd29 --- /dev/null +++ b/configurations/selectFileRTSP.conf @@ -0,0 +1,12 @@ +select_live555_video +apply_to_services disable all +enable live555 +enable mcp +enable rabbitmq +enable sio +select_example sio file-rtsp +up live555 +up rabbitmq +up mcp +test_rtsp_stream rtsp://live555/data/my-video.mkv 5 +restart sio \ No newline at end of file diff --git a/deployment-examples/SighthoundRestApiGateway/docker-compose.yml b/deployment-examples/SighthoundRestApiGateway/docker-compose.yml index 50dc31e..2aa0085 100644 --- a/deployment-examples/SighthoundRestApiGateway/docker-compose.yml +++ b/deployment-examples/SighthoundRestApiGateway/docker-compose.yml @@ -2,7 +2,7 @@ version: "3" services: analytics: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231120}${SIO_DOCKER_TAG_VARIANT} + image: 
us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} restart: unless-stopped environment: # Location where SIO will place generated model engine files diff --git a/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml b/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml index 3e5a514..e649abe 100644 --- a/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml +++ b/deployment-examples/StandaloneSIOWithExtension/docker-compose-rtsp.yml @@ -13,7 +13,7 @@ services: analytics: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231120}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} restart: unless-stopped environment: # Location where SIO will place generated model engine files diff --git a/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml b/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml index 3b6a438..34d75cb 100644 --- a/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml +++ b/deployment-examples/StandaloneSIOWithExtension/docker-compose.yml @@ -2,7 +2,7 @@ version: "2.3" services: analytics: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231120}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} restart: unless-stopped environment: # Location where SIO will place generated model engine files diff --git a/deployment-examples/VideoStreamsConsumer/docker-compose.yml b/deployment-examples/VideoStreamsConsumer/docker-compose.yml index e123944..906bcc2 100644 --- a/deployment-examples/VideoStreamsConsumer/docker-compose.yml +++ b/deployment-examples/VideoStreamsConsumer/docker-compose.yml @@ -31,7 +31,7 @@ services: # The SIO analytics container, consuming the streams and analyzing 
them analytics_svc: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231120}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} container_name: sample-sio restart: unless-stopped environment: diff --git a/deployment-examples/VideoStreamsRecorder/docker-compose.yml b/deployment-examples/VideoStreamsRecorder/docker-compose.yml index e53af16..ff9bb2e 100644 --- a/deployment-examples/VideoStreamsRecorder/docker-compose.yml +++ b/deployment-examples/VideoStreamsRecorder/docker-compose.yml @@ -52,7 +52,7 @@ services: # The SIO analytics container, consuming the streams and analyzing them analytics_svc: - image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231120}${SIO_DOCKER_TAG_VARIANT} + image: us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio:${SIO_RELEASE-r231204}${SIO_DOCKER_TAG_VARIANT} container_name: sample-sio restart: unless-stopped environment: diff --git a/docs/schemas/anypipe/anypipe.html b/docs/schemas/anypipe/anypipe.html index 4397ee2..24bc8e5 100644 --- a/docs/schemas/anypipe/anypipe.html +++ b/docs/schemas/anypipe/anypipe.html @@ -1 +1 @@ - Sighthound Analytics

Sighthound Analytics

Type: object

Analytics data sent by the Sighthound video/image analysis pipeline. This data is sent based on configuration when the number of detected objects or attributes of detected objects changes, the confidence of detected objects or their attributes improves, or a configurable timeout occurs.

No Additional Properties

Type: object

Type: integer

Timestamp the frame corresponding to this analytics data was processed at, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

A global unique ID representing the media source, for
instance a specific video stream from a camera sensor or RTSP feed, , or input source location for images or videos

Type: string

An ID corresponding to this frame, which may be used to
access the image corresponding to all box coordinates and object
detections represented in this object, via the Media Service API.

Type: object

The dimensions (width and height) of the frame represented by frameId. Also used as the coordinate base for all bounding box coordinates.

Type: number

Width in pixels

Value must be greater or equal to 0

Type: number

Height in pixels

Value must be greater or equal to 0

Type: integer

Timestamp of the frame corresponding to this analytics data, acccording to the source, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

Type: object

Meta classes include objects such as vehicles, license plates, and people. These are high-level classifications.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

An plural MetaClass name. Supported MetaClasses
include:
vehicles - Objects including cars, buses, trucks, motorbikes.
Vehicles include objects which may potentially include license
plates, may include links to licensePlates.
licensePlates - Objects which are detected/classified as license plates.
people - Pedestrians or people riding skateboards, electric
scooter, wheelchairs,etc.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A Unique ID representing this object, used to map
additional object properties. This ID is guaranteed unique
for each object, regardless of streamId. It will change the object drops out of
detection/tracking

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: string

Object specific class returned by the model. For objects of the vehicles metaclass this may include car, truck, bus, motorbike, etc based on model capabilities

Type: object

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A map of attributes for this object. Not all atributes are supported for all object types. Example attributes include:
color - The color of an object
lpString - A string representing license plate text
and numbers
lpRegion - A string representing license plate region
vehicleType - Make model and generation of the vehicle in a single string

No Additional Properties

Type: number

Confidence score for attribute detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.

Value must be greater or equal to 0 and lesser or equal to 1

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.When included in an attribute, this score represents the
object Detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Value must be greater or equal to 0 and lesser or equal to 1

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.


A value of the attribute. The value is specific to the attribute type.

Type: object

Information about the detected vehicle, including its make, model, and generation.

Type: string

The manufacturer of the detected vehicle, e.g., 'Toyota'.

Type: string

The specific model of the detected vehicle, e.g., 'Camry'.

Type: string

The generation or variant of the detected vehicle, e.g., '2020'.

Type: string

The category to which the detected vehicle belongs, e.g., 'Sedan'.

Additional Properties of any type are allowed.

Type: object

Type: object

Debug information, subject to change
between releases. Do not use this object in an
application.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: string

Type: string

An object hash which uniquely identifies this object and associated attributes. Will change when attributes change. Reserved for future use

Type: object

The bounding box containing this object, in
pixel coordinates where the top left corner of the
image is represented by pixel 0,0, corresponding to the image referenced by imageRef

No Additional Properties

Type: integer

Height of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

Width of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

X coordinate of the top left corner
of the bounding box.

Value must be greater or equal to 0

Type: integer

Y coordinate of the top left corner of
the bounding box

Value must be greater or equal to 0

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.When included in an attribute, this score represents the
object Detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Same definition as detectionScore

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.

Same definition as updated

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: object

A map of maps describing an event type.
- The top level map key is a name describing the event type. Supported types are presenceSensor, lineCrossingEvent, speedEvent.
- The sub level map key is a Unique ID representing the event, used to map
additional object properties. This ID is guaranteed unique
for each event for a given stream ID.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A name describing an event type.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: array

A Unique ID representing this event

No Additional Items

Each item of this array must be:


Type: object

Describes an event where one or more objects are present in a region of interest.
The event starts when the first object enters a region of interest. Updates are sent for each change in status, with updateCount incremented for each update. When the last object exits and the region is empty, the sensor event will become immutable and will track the total amount of time at least one object was present in the region of interest. An entry of an object will start a new event and reset the updateCount to 1. Region definitons, object filtering and other items related to sensor definitions are tracked as a part of the sensorId associated with the event.

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Type: integer

The total number of objects of a specific type detected within a region of interest, excluding those filtered out based on sensor configuration.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by metaclasses.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by metaclass. Metaclasses represent higher-level categories that objects may belong to, such as 'vehicle' or 'people,' while classes represent more specific types, such as 'car' or 'person'.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by classes.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by class. For example, if the sensor is configured to detect vehicles, this property may include counts of 'car,' 'bus,' and 'truck'.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Value must be greater or equal to 0

Type: integer

The cumulative number of updates sent for this sensor, starting with 1 for the initial update and incremented once for each update sent for each unique sensor event ID. An update refers to a change in the state of the sensor due to a corresponding sensor event (entry, exit, crossing, ...). For sensors which include multiple updates per sensor event (presense sensors), the updateCount will be reset to 1 to indicate the first update for a given event. For sensors (count) which only include 1 update per event, updateCount will be cumulative and count the total number of events per sensor.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event ended.

Value must be greater or equal to 0

Type: object

Describes an event where one object crosses a line

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Same definition as eventId

Type: string

The direction of an object's trajectory relative to the sensor's line, with the first point (A) as the pivot point. 'Clockwise' means the object is moving in a clockwise direction relative to the line, while 'counterclockwise' means the object is moving in a counterclockwise direction.

Type: integer

Number of clockwise crossings.

Value must be greater or equal to 0

Type: integer

Number of counterclockwise crossings.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Same definition as startedAt

Type: array of object
No Additional Items

Each item of this array must be:

Type: object

Type: string

Media Event type: Ex: image,video

Type: string

Message content

Type: integer

Start of Event Timestamp

Value must be greater or equal to 0

Type: integer

End of Event Timestamp

Value must be greater or equal to 0

Type: string

Message format. Ex: json, jpeg, mp4, ts...

\ No newline at end of file + Sighthound Analytics

Sighthound Analytics

Type: object

Analytics data sent by the Sighthound video/image analysis pipeline. This data is sent based on configuration when the number of detected objects or attributes of detected objects changes, the confidence of detected objects or their attributes improves, or a configurable timeout occurs.

No Additional Properties

Type: object

Type: integer

Timestamp the frame corresponding to this analytics data was processed at, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

A global unique ID representing the media source, for
instance a specific video stream from a camera sensor or RTSP feed, or input source location for images or videos

Type: string

An ID corresponding to this frame, which may be used to
access the image corresponding to all box coordinates and object
detections represented in this object, via the Media Service API.

Type: object

The dimensions (width and height) of the frame represented by frameId. Also used as the coordinate base for all bounding box coordinates.

Type: number

Width in pixels

Value must be greater or equal to 0

Type: number

Height in pixels

Value must be greater or equal to 0

Type: integer

Timestamp of the frame corresponding to this analytics data, according to the source, in milliseconds since the epoch and GMT timezone.

Value must be greater or equal to 0

Type: string

Type: object

Meta classes include objects such as vehicles, license plates, and people. These are high-level classifications.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A plural MetaClass name. Supported MetaClasses
include:
vehicles - Objects including cars, buses, trucks, motorbikes.
Vehicles include objects which may potentially include license
plates, and may include links to licensePlates.
licensePlates - Objects which are detected/classified as license plates.
people - Pedestrians or people riding skateboards, electric
scooters, wheelchairs, etc.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A Unique ID representing this object, used to map
additional object properties. This ID is guaranteed unique
for each object, regardless of streamId. It will change when the object drops out of
detection/tracking

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: string

Object specific class returned by the model. For objects of the vehicles metaclass this may include car, truck, bus, motorbike, etc based on model capabilities

Type: object

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A map of attributes for this object. Not all attributes are supported for all object types. Example attributes include:
color - The color of an object
lpString - A string representing license plate text
and numbers
lpRegion - A string representing license plate region
vehicleType - Make model and generation of the vehicle in a single string

No Additional Properties

Type: number

Confidence score for attribute detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence.

Value must be greater or equal to 0 and lesser or equal to 1

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence. When included in an attribute, this score represents the
object detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Value must be greater or equal to 0 and lesser or equal to 1

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.


A value of the attribute. The value is specific to the attribute type.

Type: object

Information about the detected vehicle, including its make, model, and generation.

Type: string

The manufacturer of the detected vehicle, e.g., 'Toyota'.

Type: string

The specific model of the detected vehicle, e.g., 'Camry'.

Type: string

The generation or variant of the detected vehicle, e.g., '2020'.

Type: string

The category to which the detected vehicle belongs, e.g., 'Sedan'.

Additional Properties of any type are allowed.

Type: object

Type: object

Debug information, subject to change
between releases. Do not use this object in an
application.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: string

Type: string

An object hash which uniquely identifies this object and associated attributes. Will change when attributes change. Reserved for future use

Type: object

The bounding box containing this object, in
pixel coordinates where the top left corner of the
image is represented by pixel 0,0, corresponding to the image referenced by imageRef

No Additional Properties

Type: integer

Height of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

Width of the bounding box in pixels

Value must be greater or equal to 0

Type: integer

X coordinate of the top left corner
of the bounding box.

Value must be greater or equal to 0

Type: integer

Y coordinate of the top left corner of
the bounding box

Value must be greater or equal to 0

Type: number

Confidence score for object detection, ranging from 0.0 to 1.0. A score of 1.0 indicates 100% confidence. When included in an attribute, this score represents the
object detection score for the parent object corresponding to the
timestamp when the attribute value was determined.

Same definition as detectionScore

Type: boolean

Flag to indicate if the attribute is updated. True means updated, False means not updated.

Same definition as updated

Type: integer

The analyticsTimestamp with highest confidence score for this object.

Value must be greater or equal to 0

Type: object

A map of maps describing an event type.
- The top level map key is a name describing the event type. Supported types are presenceSensor, lineCrossingEvent, speedEvent.
- The sub level map key is a Unique ID representing the event, used to map
additional object properties. This ID is guaranteed unique
for each event for a given stream ID.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: object

A name describing an event type.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: array

A Unique ID representing this event

No Additional Items

Each item of this array must be:


Type: object

Describes an event where one or more objects are present in a region of interest.
The event starts when the first object enters a region of interest. Updates are sent for each change in status, with updateCount incremented for each update. When the last object exits and the region is empty, the sensor event will become immutable and will track the total amount of time at least one object was present in the region of interest. An entry of an object will start a new event and reset the updateCount to 1. Region definitions, object filtering and other items related to sensor definitions are tracked as a part of the sensorId associated with the event.

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Type: integer

The total number of objects of a specific type detected within a region of interest, excluding those filtered out based on sensor configuration.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by metaclasses.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by metaclass. Metaclasses represent higher-level categories that objects may belong to, such as 'vehicle' or 'people,' while classes represent more specific types, such as 'car' or 'person'.

Value must be greater or equal to 0

Type: object

The total number of detected objects in a region grouped by classes.

All properties whose name matches the following regular expression must respect the following conditions

Property name regular expression: ^.*$
Type: integer

The total number of objects detected within a region of interest grouped by class. For example, if the sensor is configured to detect vehicles, this property may include counts of 'car,' 'bus,' and 'truck'.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Value must be greater or equal to 0

Type: integer

The cumulative number of updates sent for this sensor, starting with 1 for the initial update and incremented once for each update sent for each unique sensor event ID. An update refers to a change in the state of the sensor due to a corresponding sensor event (entry, exit, crossing, ...). For sensors which include multiple updates per sensor event (presence sensors), the updateCount will be reset to 1 to indicate the first update for a given event. For sensors (count) which only include 1 update per event, updateCount will be cumulative and count the total number of events per sensor.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event ended.

Value must be greater or equal to 0

Type: object

Describes an event where one object crosses a line

No Additional Properties

Type: string

The globally unique event ID corresponding to this event.

Same definition as eventId

Type: string

The direction of an object's trajectory relative to the sensor's line, with the first point (A) as the pivot point. 'Clockwise' means the object is moving in a clockwise direction relative to the line, while 'counterclockwise' means the object is moving in a counterclockwise direction.

Type: integer

Number of clockwise crossings.

Value must be greater or equal to 0

Type: integer

Number of counterclockwise crossings.

Value must be greater or equal to 0

Type: integer

The time in milliseconds since the epoch (GMT) when the event started, or when a link was established.

Same definition as startedAt

Type: array of object
No Additional Items

Each item of this array must be:

Type: object

Type: string

Media Event type: Ex: image,video

Type: string

Message content

Type: integer

Start of Event Timestamp

Value must be greater or equal to 0

Type: integer

End of Event Timestamp

Value must be greater or equal to 0

Type: string

Message format. Ex: json, jpeg, mp4, ts...

\ No newline at end of file diff --git a/examples/MCPEvents/EventSegment.py b/examples/MCPEvents/EventSegment.py index 25b753b..6b5fa75 100644 --- a/examples/MCPEvents/EventSegment.py +++ b/examples/MCPEvents/EventSegment.py @@ -8,6 +8,12 @@ def __init__(self, start_ts): self.end_ts = start_ts self.videos = [] + def add_event(self, event): + self.events_list.append(event) + + def set_end_ts(self, end_ts): + self.end_ts = end_ts + def write_json(self, path): with open(path, 'w') as f: json.dump(self, f, indent=4, default=vars) diff --git a/examples/MCPEvents/MCPEvents.py b/examples/MCPEvents/MCPEvents.py index a21a33a..4b4531a 100644 --- a/examples/MCPEvents/MCPEvents.py +++ b/examples/MCPEvents/MCPEvents.py @@ -59,6 +59,8 @@ def __init__(self, args): if args.sensors_json: print("Generating events based on sensors.json") self.roi_filter = ROIFilter(args.sensors_json) + else: + print("Generating events based on all events") if args.annotate: self.annotator = MCPEventAnnotator(capture_dir = capture_dir, sensors_json = args.sensors_json) @@ -99,20 +101,24 @@ def event_segment_complete(self, source, event_segment): filepath_ts = dirpath / Path(segment.uri) filepath_ts.parent.mkdir(parents=True, exist_ok=True) video_name = filepath_ts.relative_to(filepath_ts.parent.parent) + print(f"Downloading {video_name}") self.mcp_client.download_video(source, video_name, filepath_ts) vidfile = dirpath / Path(f"{filename_base}.m3u8") + print(f"Writing {vidfile}") with open(vidfile, "w") as file: file.write(m3u8_content) json_file = dirpath_json / Path(f"{filename_base}.json") + print(f"Writing {json_file}") event_segment.write_json(json_file) if self.annotator: self.annotator.create_annotation(json_file, vidfile) + print(f"Event segment complete") # This method is called when a media event is received from the MCP def handle_media_event_callback(self, media_event, sourceId): # Get the type and message of the media event type = media_event.get("type", "unknown") - msg = 
media_event.get("msg", "unknown") + # msg = media_event.get("msg", "unknown") # If the media event is a video_file_closed event, add it to the current event segment # for the source ID, or to the completed event segments if it's already completed if type == "video_file_closed": @@ -128,14 +134,21 @@ def handle_media_event_callback(self, media_event, sourceId): def json_callback(self, data): + if 'frameTimestamp' not in data or 'sourceId' not in data: + print(f"Invalid message received: {data}") + return + sourceId = data.get("sourceId", "unknown") + frameTimestamp = data.get("frameTimestamp", 0) mediaEvents = data.get("mediaEvents", {}) + for event in mediaEvents: self.handle_media_event_callback(event, sourceId) + # Record only sensor events (count, region, etc) and ignore other events if self.use_events: if 'sensorEvents' in data: - start_ts = data['frameTimestamp'] + start_ts = frameTimestamp # The end timestamp for all active events end_ts = None event_in_progress = False @@ -150,42 +163,40 @@ def json_callback(self, data): else: event_in_progress = True if event_in_progress: - if not data['sourceId'] in self.current_event_seg: - self.current_event_seg[data['sourceId']] = EventSegment(start_ts) - elif data['sourceId'] in self.current_event_seg: - current_event_seg = self.current_event_seg[data['sourceId']] - current_event_seg.events_list.append(data) - current_event_seg.end_ts = data['frameTimestamp'] if end_ts is None else end_ts + if not sourceId in self.current_event_seg: + self.current_event_seg[sourceId] = EventSegment(start_ts) + elif sourceId in self.current_event_seg: + current_event_seg = self.current_event_seg[sourceId] + current_event_seg.add_event(data) + current_event_seg.end_ts = frameTimestamp if end_ts is None else end_ts if end_ts is not None: - self.new_event_segment(data['sourceId'], current_event_seg) - del self.current_event_seg[data['sourceId']] - if data['sourceId'] in self.current_event_seg: - 
self.current_event_seg[data['sourceId']].events_list.append(data) + self.new_event_segment(sourceId, current_event_seg) + del self.current_event_seg[sourceId] + if sourceId in self.current_event_seg: + self.current_event_seg[sourceId].add_event(data) else: - if 'frameTimestamp' in data and 'sourceId' in data and \ - data['sourceId'] in self.current_event_seg: - current_event_seg = self.current_event_seg[data['sourceId']] - if data['frameTimestamp'] - current_event_seg.start_ts > \ - self.group_events_max_length : + if sourceId in self.current_event_seg: + current_event_seg = self.current_event_seg[sourceId] + if frameTimestamp - current_event_seg.start_ts > self.group_events_max_length : print(f"Event length exceeded {self.group_events_max_length} ms, restarting event segment") - self.new_event_segment(data['sourceId'],current_event_seg) - del self.current_event_seg[data['sourceId']] + self.new_event_segment(sourceId,current_event_seg) + del self.current_event_seg[sourceId] - elif data['frameTimestamp'] - current_event_seg.end_ts > \ - self.group_events_separation_ms : + elif frameTimestamp - current_event_seg.end_ts > self.group_events_separation_ms : print(f"More than {self.group_events_separation_ms} ms between events, restarting event segment") - print(f"Current frame timestamp is {self.frame_timestamp_to_timestr(data['frameTimestamp'])}"\ + print(f"Current frame timestamp is {self.frame_timestamp_to_timestr(frameTimestamp)}"\ f" last event timestamp was {self.frame_timestamp_to_timestr(current_event_seg.end_ts)}") - self.new_event_segment(data['sourceId'],current_event_seg) - del self.current_event_seg[data['sourceId']] - - if 'metaClasses' in data and 'sourceId' in data: - if not self.roi_filter or self.roi_filter.objects_in_roi(data): - if not data['sourceId'] in self.current_event_seg: - self.current_event_seg[data['sourceId']] = EventSegment(data['frameTimestamp']) - current_event_seg = self.current_event_seg[data['sourceId']] - 
current_event_seg.events_list.append(data) - current_event_seg.end_ts = data['frameTimestamp'] + self.new_event_segment(sourceId,current_event_seg) + del self.current_event_seg[sourceId] + + if 'metaClasses' in data: + if not self.roi_filter or self.roi_filter.objects_in_roi(data): + if not sourceId in self.current_event_seg: + self.current_event_seg[sourceId] = EventSegment(frameTimestamp) + current_event_seg = self.current_event_seg[sourceId] + current_event_seg.add_event(data) + current_event_seg.end_ts = frameTimestamp + def start(self): self.path_prefix.mkdir(parents=True, exist_ok=True) diff --git a/scripts/sh-services b/scripts/sh-services index f1a9b91..2ab3044 100755 --- a/scripts/sh-services +++ b/scripts/sh-services @@ -11,6 +11,8 @@ SERVICES_PATH="$(realpath "${SCRIPT_PATH}"/../services)" export SERVICES_PATH EXAMPLES_PATH="$(realpath "${SCRIPT_PATH}"/../examples)" export EXAMPLES_PATH +DEPLOYMENT_EXAMPLES_PATH="$(realpath "${SCRIPT_PATH}"/../deployment-examples)" +export DEPLOYMENT_EXAMPLES_PATH MEDIA_PATH="${SH_BASE}"/media export MEDIA_PATH DB_PATH="${SH_BASE}"/db @@ -39,11 +41,8 @@ fi test_rtsp_stream() { local rtsp_url="$1" local timeout_seconds="$2" - if ! command -v ffprobe &> /dev/null; then - echo "FFmpeg is not installed. Please install FFmpeg first." 
- return 1 - fi - local status_code=$(ffprobe -v quiet -select_streams v:0 -show_entries stream=codec_name -of default=nokey=1:noprint_wrappers=1 -rtsp_transport tcp "$rtsp_url" -timeout "$timeout_seconds" 2>/dev/null) + + local status_code=$(docker run --entrypoint=/usr/local/bin/ffprobe --network=core_sighthound linuxserver/ffmpeg -v quiet -select_streams v:0 -show_entries stream=codec_name -of default=nokey=1:noprint_wrappers=1 -rtsp_transport tcp "$rtsp_url" -timeout "$timeout_seconds" 2>/dev/null) if [[ -n $status_code ]]; then echo "RTSP stream $rtsp_url is up" @@ -54,6 +53,13 @@ test_rtsp_stream() { fi } +check_rabbitmq() { + echo "RabbitMQ exchanges:" + curl -s -u guest:guest http://localhost:15672/api/exchanges | tr ',' '\n' | tac | grep -E '"name"|publish_in_details|publish_out_details' | sed 's/"name":/- Exchange: /' | sed 's/"publish_in_details":/ Publish in: /' | sed 's/"publish_out_details":/ Publish out: /' + echo "RabbitMQ queues:" + curl -s -u guest:guest http://localhost:15672/api/queues | tr ',' '\n' | tac | grep -E '"name"|message_bytes_.*' | sed 's/"name":/- Queue: /' +} + check_endpoint() { curl --connect-timeout 2 --output /dev/null --silent --head $@ "$1" } @@ -183,6 +189,15 @@ LICENSE_PATH=${LICENSE_PATH} EOF } +function get_l4t_version() { + L4TBASE_VERSION=32.7.3 + if command -v bai-osinfo &>/dev/null; then + L4TBASE_VERSION=$(bai-osinfo -l) + [[ "$L4TBASE_VERSION" == 32.7.* ]] && L4TBASE_VERSION=32.7.3 + fi + echo "$L4TBASE_VERSION" +} + function save_arch_info() { if [ ! 
-d "${SERVICES_PATH}"/conf ]; then mkdir -p "${SERVICES_PATH}"/conf @@ -196,10 +211,8 @@ EOF echo "SH_ARCH=-amd64" >>"${SERVICES_PATH}"/conf/0010-arch.env echo "SH_NODE=false" >>"${SERVICES_PATH}"/conf/0010-arch.env elif [ "$(uname -m)" == "aarch64" ]; then - L4TBASE_VERSION=32.7.3 + L4TBASE_VERSION=$(get_l4t_version) if command -v bai-osinfo &>/dev/null; then - L4TBASE_VERSION=$(bai-osinfo -l) - [[ "$L4TBASE_VERSION" == 32.7.* ]] && L4TBASE_VERSION=32.7.3 SH_MACHINE=$(bai-osinfo -m) SH_SERIAL=$(bai-osinfo -s) if [[ $SH_MACHINE == *"node"* ]]; then @@ -298,6 +311,11 @@ function config() { function start_example() { local example=$1 + if [ "$example" == "" ]; then + echo "No example specified. Available Options:" + find "${EXAMPLES_PATH}"/* -maxdepth 0 -type d -exec basename {} \; | awk '{print " - "$0}' + return + fi local example_full_path="${EXAMPLES_PATH}/${example}" if [ ! -f "${example_full_path}/docker-compose.yml" ]; then echo "No docker-compose.yml file found for ${example}" @@ -311,6 +329,30 @@ function start_example() { ${DOCKER_COMPOSE} ps } +function deploy_example() { + local example=$1 + if [ "$example" == "" ]; then + echo "No example specified. Available Options:" + find "${DEPLOYMENT_EXAMPLES_PATH}"/* -maxdepth 0 -type d -exec basename {} \;| awk '{print " - "$0}' + return + fi + local example_full_path="${DEPLOYMENT_EXAMPLES_PATH}/${example}" + if [ ! 
-f "${example_full_path}/docker-compose.yml" ]; then + echo "No docker-compose.yml file found for ${example}" + return + fi + echo "Starting ${example}" + cd "${example_full_path}" || exit 1 + if [ "$(uname -m)" == "aarch64" ]; then + L4TBASE_VERSION=$(get_l4t_version) + SIO_DOCKER_RUNTIME=nvidia SIO_DOCKER_TAG_VARIANT="-r${L4TBASE_VERSION}-arm64v8" ${DOCKER_COMPOSE} up -d + else + ${DOCKER_COMPOSE} up -d + fi + sleep 1 + ${DOCKER_COMPOSE} ps +} + function edit() { local service=$1 local service_full_path="${SERVICES_PATH}/${service}" @@ -446,6 +488,11 @@ function create_network() { fi } +function create_default_networks() { + create_network "sh-device-ui_sh-ui-net" + create_network "core_sighthound" +} + function depends() { local service=$1 local service_full_path="${SERVICES_PATH}/${service}" @@ -539,6 +586,7 @@ function start(){ function up() { local service=$1 + local background=$2 local service_full_path="${SERVICES_PATH}/${service}" if [ ! -f "${service_full_path}/docker-compose.yml" ]; then echo "Service ${service} not found." @@ -549,10 +597,20 @@ function up() { echo "Skipping ${service} : disabled file found" return fi + if [ "${service}" == "sio" ] && [ ! -f "${service_full_path}/conf/${service}.json" ]; then + echo "WARN: sio.json file not found: using the default one from the example" + select_example "${service}" "default" + echo "${service_full_path}"/conf/"${service}".json + return 1 + fi echo "Bringing up ${service}" merge "${service}" - compose_up "${service}" & - sleep 1 + if [ -n "${background}" ] && [ "${background}" = "true" ]; then + compose_up "${service}" & + sleep 1 + else + compose_up "${service}" + fi } function compose_up() { @@ -649,7 +707,7 @@ function license() { cat "${LICENSE_PATH}/sighthound-license.json" else echo "No license file found" - read -rep "Do you want to create alicense file: ${LICENSE_PATH}/sighthound-license.json? 
(y/n)" answer + read -rep "Do you want to create a license file: ${LICENSE_PATH}/sighthound-license.json? (y/n)" answer if [[ $answer == [yY] ]]; then EDITOR=$(set_text_editor) ${EDITOR} "${LICENSE_PATH}/sighthound-license.json" @@ -808,8 +866,7 @@ function apply_to_services() { exit 1 fi # Create networks - create_network "sh-device-ui_sh-ui-net" - create_network "core_sighthound" + create_default_networks # Check all services if element_in_list "all" "${SERVICES[@]}"; then @@ -840,13 +897,18 @@ function apply_to_services() { echo "###############################################" echo "Applying command '${COMMAND}' to ${service}" fi - $COMMAND "${service}" + # If services is an array of length > 1 then run in background, otherwise send false + if [ "${#SERVICES[@]}" -gt 1 ]; then + ${COMMAND} "${service}" "true" + else + ${COMMAND} "${service}" "false" + fi else echo "Unknown service: ${service}" echo "Available services: ${AVAILABLE_SERVICES}" fi done - if [ "${COMMAND}" == "restart" ] || [ "${COMMAND}" == "up" ] || [ "${COMMAND}" == "down" ]; then + if [ "${COMMAND}" == "restart" ] || [ "${COMMAND}" == "up" ] ; then echo "Waiting for services to finish: ${COMMAND}..." 
fi wait @@ -861,6 +923,11 @@ function apply() { echo "File not found: $filename" exit 1 fi + # Pre apply commands + create_default_networks + remove_orphans + license + # Apply commands set -e while IFS= read -r line || [[ -n "$line" ]] do @@ -900,13 +967,13 @@ LIST_OF_COMMANDS=" apply - Apply a configuration file COMMAND=$1 shift 1 || true case $COMMAND in -"select_example" | "set_example" | "select_live555_video" | "apply" | "test_rtsp_stream" | "start_example") +"select_example" | "set_example" | "select_live555_video" | "apply" | "test_rtsp_stream" | "start_example" | "deploy_example" ) "$COMMAND" $@ ;; "merge" | "up" | "start" | "down" | "stop" | "restart" | "enable" | "disable" | "depends" | "edit" | "config" | "show" | "status") apply_to_services "$COMMAND" $@ ;; -"clean_media" | "clean_logs" | "clean_rabbitmq" | "remove_orphans" | "ps" | "license") +"clean_media" | "clean_logs" | "clean_rabbitmq" | "remove_orphans" | "check_rabbitmq"| "ps" | "license") "$COMMAND" ;; "" | "help" | "-h" | "--help") diff --git a/services/sio/README.md b/services/sio/README.md index 7c35092..dcbc77c 100644 --- a/services/sio/README.md +++ b/services/sio/README.md @@ -37,3 +37,7 @@ cp examples/plugins/* conf ../scripts/sh-services up sio ``` +### Setting SIO sensors + +If you need to set count, presence or speed sensors, use one of the following examples: +- [Count Sensor Example](./examples/count-sensor-nomedia/) `./scripts/sh-services select_example sio count-sensor-nomedia` diff --git a/services/sio/conf/default.env b/services/sio/conf/default.env index e5b8b9f..fec84c9 100644 --- a/services/sio/conf/default.env +++ b/services/sio/conf/default.env @@ -1,5 +1,5 @@ SIO_DOCKER_IMAGE=us-central1-docker.pkg.dev/ext-edge-analytics/docker/sio -SIO_DOCKER_TAG=r231120 +SIO_DOCKER_TAG=r231204 SIO_LOG_LEVEL=info SIO_USER_PLUGINS_DIR=/lib/sio/plugins/ SIO_APP=runPipelineSet diff --git a/services/sio/docker-compose.yml b/services/sio/docker-compose.yml index 23bfaf4..0e60313 100644 --- 
a/services/sio/docker-compose.yml +++ b/services/sio/docker-compose.yml @@ -9,9 +9,11 @@ services: - ${LICENSE_PATH}/sighthound-license.json:/sighthound/sio/share/sighthound-license.json:ro - ${LICENSE_PATH}/sighthound-license.json:/sighthound/sio/share/license.json:ro - ./conf/:/etc/sio/:ro + - ./conf/:/sighthound/sio/conf:ro - ./conf/plugins/:/lib/sio/plugins/:ro - ${MEDIA_PATH}:/data/sighthound/media:rw - ${SH_BASE}/.cache:/root/.sio/modelCache + # working_dir: /sighthound/sio # This is default env_file: - .env command: sh -c 'eval "/sighthound/sio/bin/${SIO_APP} ${SIO_ENTRYPOINT} --log ${SIO_LOG_LEVEL} ${SIO_EXTRA_ARGS}"' diff --git a/services/sio/examples/count-sensor-nomedia/sensors.json b/services/sio/examples/count-sensor-nomedia/sensors.json new file mode 100644 index 0000000..0a87d2a --- /dev/null +++ b/services/sio/examples/count-sensor-nomedia/sensors.json @@ -0,0 +1,46 @@ +{ + "countSensors": [ + { + "id": "15CCEB3C-54D5-420C-B9ED-55DDED300EF9", + "name": "Vehicles lateral movement", + "classes": [ + "vehicles" + ], + "clockwiseName": "going-left", + "counterclockwiseName": "going-right", + "referencePoint": "center", + "sendUpdate": "onChange", + "lineCoordinates": [ + { + "x": 0.5, + "y": 0.0 + }, + { + "x": 0.5, + "y": 1.0 + } + ] + }, + { + "id": "15CCEB3C-54D5-420C-B9ED-55DDED300EF9", + "name": "Vehicles vertical movement", + "classes": [ + "vehicles" + ], + "clockwiseName": "going-down", + "counterclockwiseName": "going-up", + "referencePoint": "center", + "sendUpdate": "onChange", + "lineCoordinates": [ + { + "x": 0.0, + "y": 0.5 + }, + { + "x": 1.0, + "y": 0.5 + } + ] + } + ] + } \ No newline at end of file diff --git a/services/sio/examples/count-sensor-nomedia/sio.json b/services/sio/examples/count-sensor-nomedia/sio.json new file mode 100644 index 0000000..c56ddda --- /dev/null +++ b/services/sio/examples/count-sensor-nomedia/sio.json @@ -0,0 +1,20 @@ +{ + "one-person-one-car" : { + "pipeline" : 
"./share/pipelines/TrafficAnalytics/TrafficAnalyticsRTSP.yaml", + "restartPolicy" : "restart", + "parameters" : { + "VIDEO_IN" : "rtsp://live555/StreetVideo1.mkv", + "sourceId" : "StreetVideo1-nomedia", + "recordTo":"", + "imageSaveDir":"", + "amqpHost":"rabbitmq", + "amqpPort":"5672", + "amqpExchange":"anypipe", + "amqpUser":"guest", + "amqpPassword":"guest", + "amqpErrorOnFailure":"true", + "useTracker":"true", + "sensorsConfigFile":"./conf/sensors.json" + } + } +} \ No newline at end of file diff --git a/services/sio/conf/sio.json b/services/sio/examples/default/sio.json similarity index 100% rename from services/sio/conf/sio.json rename to services/sio/examples/default/sio.json diff --git a/services/sio/examples/file-rtsp/sio.json b/services/sio/examples/file-rtsp/sio.json new file mode 100644 index 0000000..cd4b27d --- /dev/null +++ b/services/sio/examples/file-rtsp/sio.json @@ -0,0 +1,18 @@ +{ + "one-person-one-car" : { + "pipeline" : "./share/pipelines/VehicleAnalytics/VehicleAnalyticsRTSP.yaml", + "restartPolicy" : "restart", + "parameters" : { + "VIDEO_IN" : "rtsp://live555/data/my-video.mkv", + "sourceId" : "my-video", + "recordTo":"/data/sighthound/media/output/video/my-video/", + "imageSaveDir":"/data/sighthound/media/output/image/my-video/", + "amqpHost":"rabbitmq", + "amqpPort":"5672", + "amqpExchange":"anypipe", + "amqpUser":"guest", + "amqpPassword":"guest", + "amqpErrorOnFailure":"true" + } + } +} \ No newline at end of file diff --git a/services/sio/examples/live555-nomedia/sio.json b/services/sio/examples/live555-nomedia/sio.json new file mode 100644 index 0000000..a7a48f9 --- /dev/null +++ b/services/sio/examples/live555-nomedia/sio.json @@ -0,0 +1,18 @@ +{ + "one-person-one-car" : { + "pipeline" : "./share/pipelines/VehicleAnalytics/VehicleAnalyticsRTSP.yaml", + "restartPolicy" : "restart", + "parameters" : { + "VIDEO_IN" : "rtsp://live555/StreetVideo1.mkv", + "sourceId" : "StreetVideo1-nomedia", + "recordTo":"", + "imageSaveDir":"", + 
"amqpHost":"rabbitmq", + "amqpPort":"5672", + "amqpExchange":"anypipe", + "amqpUser":"guest", + "amqpPassword":"guest", + "amqpErrorOnFailure":"true" + } + } +} \ No newline at end of file diff --git a/services/sio/examples/live555/sio.json b/services/sio/examples/live555/sio.json index 7a7f5f2..c4e708e 100644 --- a/services/sio/examples/live555/sio.json +++ b/services/sio/examples/live555/sio.json @@ -3,10 +3,10 @@ "pipeline" : "./share/pipelines/VehicleAnalytics/VehicleAnalyticsRTSP.yaml", "restartPolicy" : "restart", "parameters" : { - "VIDEO_IN" : "rtsp://live555/my-video.mkv", - "sourceId" : "my-video", - "recordTo":"/data/sighthound/media/output/video/my-video/", - "imageSaveDir":"/data/sighthound/media/output/image/my-video/", + "VIDEO_IN" : "rtsp://live555/StreetVideo1.mkv", + "sourceId" : "StreetVideo1", + "recordTo":"/data/sighthound/media/output/video/StreetVideo1/", + "imageSaveDir":"/data/sighthound/media/output/image/StreetVideo1/", "amqpHost":"rabbitmq", "amqpPort":"5672", "amqpExchange":"anypipe",