diff --git a/CMakeLists.txt b/CMakeLists.txt
index 930a5dbbc..e02e2f0a7 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -203,9 +203,11 @@ if (CUDA_FOUND)
# target_include_directories(nv_vid_codec_h265_enc SYSTEM PUBLIC deps/nvenc)
endif ()
-mrover_add_nodelet(gst_websocket_streamer src/esw/gst_websocket_streamer/*.c* src/esw/gst_websocket_streamer src/esw/gst_websocket_streamer/pch.hpp)
-mrover_nodelet_link_libraries(gst_websocket_streamer PRIVATE websocket_server ${Gst_LIBRARIES} ${GstApp_LIBRARIES})
-mrover_nodelet_include_directories(gst_websocket_streamer ${Gst_INCLUDE_DIRS} ${GstApp_INCLUDE_DIRS})
+if (Gst_FOUND AND GstApp_FOUND AND LibUsb_FOUND AND LibUdev_FOUND)
+ mrover_add_nodelet(gst_websocket_streamer src/esw/gst_websocket_streamer/*.c* src/esw/gst_websocket_streamer src/esw/gst_websocket_streamer/pch.hpp)
+ mrover_nodelet_link_libraries(gst_websocket_streamer PRIVATE websocket_server ${Gst_LIBRARIES} ${GstApp_LIBRARIES} ${LibUsb_LIBRARIES} ${LibUdev_LIBRARIES} opencv_core opencv_imgproc)
+ mrover_nodelet_include_directories(gst_websocket_streamer ${Gst_INCLUDE_DIRS} ${GstApp_INCLUDE_DIRS} ${LibUsb_INCLUDE_DIRS} ${LibUdev_INCLUDE_DIRS})
+endif ()
if (ZED_FOUND)
mrover_add_nodelet(object_detector src/perception/object_detector/*.c* src/perception/object_detector src/perception/object_detector/pch.hpp)
diff --git a/ansible/roles/esw/tasks/main.yml b/ansible/roles/esw/tasks/main.yml
index f9a81d7a5..9ae41142b 100644
--- a/ansible/roles/esw/tasks/main.yml
+++ b/ansible/roles/esw/tasks/main.yml
@@ -22,4 +22,4 @@
- name: Install Moteus GUI
pip:
- name: moteus_gui
+ name: moteus-gui
diff --git a/cmake/deps.cmake b/cmake/deps.cmake
index b73637ce8..b3bb4a520 100644
--- a/cmake/deps.cmake
+++ b/cmake/deps.cmake
@@ -69,3 +69,5 @@ pkg_search_module(NetLink libnl-3.0 QUIET)
pkg_search_module(NetLinkRoute libnl-route-3.0 QUIET)
pkg_search_module(Gst gstreamer-1.0 QUIET)
pkg_search_module(GstApp gstreamer-app-1.0 QUIET)
+pkg_search_module(LibUsb libusb-1.0 QUIET)
+pkg_search_module(LibUdev libudev QUIET)
diff --git a/launch/esw_base.launch b/launch/esw_base.launch
index 0bcbf81c8..3ac09785b 100644
--- a/launch/esw_base.launch
+++ b/launch/esw_base.launch
@@ -1,77 +1,73 @@
-
+
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
-
+
-
-
-
-
-
+
+
+
+
+
-
+
diff --git a/src/esw/gst_websocket_streamer/gst_websocket_streamer.cpp b/src/esw/gst_websocket_streamer/gst_websocket_streamer.cpp
index 61a75f304..477839ae3 100644
--- a/src/esw/gst_websocket_streamer/gst_websocket_streamer.cpp
+++ b/src/esw/gst_websocket_streamer/gst_websocket_streamer.cpp
@@ -2,6 +2,8 @@
namespace mrover {
+ using namespace std::string_view_literals;
+
template
auto gstCheck(T* t) -> T* {
if (!t) throw std::runtime_error{"Failed to create"};
@@ -40,16 +42,16 @@ namespace mrover {
// Source
std::string launch;
- if (mCaptureDevice.empty()) {
+ if (mDeviceNode.empty()) {
// App source is pushed to when we get a ROS BGRA image message
// is-live prevents frames from being pushed when the pipeline is in READY
launch += "appsrc name=imageSource is-live=true ";
} else {
- launch += std::format("v4l2src device={} ", mCaptureDevice);
+ launch += std::format("v4l2src device={} ", mDeviceNode);
}
// Source format
- std::string captureFormat = mCaptureDevice.empty() ? "video/x-raw,format=BGRA" : mDecodeJpegFromDevice ? "image/jpeg"
- : "video/x-raw,format=YUY2";
+ std::string captureFormat = mDeviceNode.empty() ? "video/x-raw,format=BGRA" : mDecodeJpegFromDevice ? "image/jpeg"
+ : "video/x-raw,format=YUY2";
launch += std::format("! {},width={},height={},framerate={}/1 ",
captureFormat, mImageWidth, mImageHeight, mImageFramerate);
// Source decoder and H265 encoder
@@ -99,7 +101,7 @@ namespace mrover {
ROS_INFO_STREAM(std::format("GStreamer launch: {}", launch));
mPipeline = gstCheck(gst_parse_launch(launch.c_str(), nullptr));
- if (mCaptureDevice.empty()) mImageSource = gstCheck(gst_bin_get_by_name(GST_BIN(mPipeline), "imageSource"));
+ if (mDeviceNode.empty()) mImageSource = gstCheck(gst_bin_get_by_name(GST_BIN(mPipeline), "imageSource"));
mStreamSink = gstCheck(gst_bin_get_by_name(GST_BIN(mPipeline), "streamSink"));
if (gst_element_set_state(mPipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
@@ -125,14 +127,22 @@ namespace mrover {
if (mStreamServer->clientCount() == 0) return;
if (msg->encoding != sensor_msgs::image_encodings::BGRA8) throw std::runtime_error{"Unsupported encoding"};
- if (msg->width != mImageWidth || msg->height != mImageHeight) throw std::runtime_error{"Unsupported resolution"};
+
+ cv::Size receivedSize{static_cast(msg->width), static_cast(msg->height)};
+ cv::Mat bgraFrame{receivedSize, CV_8UC4, const_cast(msg->data.data()), msg->step};
+
+ if (cv::Size targetSize{static_cast(mImageWidth), static_cast(mImageHeight)};
+ receivedSize != targetSize) {
+ ROS_WARN_ONCE("Image size does not match pipeline app source size, will resize");
+ resize(bgraFrame, bgraFrame, targetSize);
+ }
// "step" is the number of bytes (NOT pixels) in an image row
- std::size_t size = msg->step * msg->height;
+ std::size_t size = bgraFrame.step * bgraFrame.rows;
GstBuffer* buffer = gstCheck(gst_buffer_new_allocate(nullptr, size, nullptr));
GstMapInfo info;
gst_buffer_map(buffer, &info, GST_MAP_WRITE);
- std::memcpy(info.data, msg->data.data(), size);
+ std::memcpy(info.data, bgraFrame.data, size);
gst_buffer_unmap(buffer, &info);
gst_app_src_push_buffer(GST_APP_SRC(mImageSource), buffer);
@@ -150,12 +160,48 @@ namespace mrover {
throw std::runtime_error{"Unsupported resolution"};
}
+    // Resolve a stable sysfs device path (from "udevadm info -q path") to its
+    // current /dev/video* node by enumerating video4linux devices via udev.
+    // Throws std::runtime_error if udev fails or no device matches devicePath.
+    auto findDeviceNode(std::string_view devicePath) -> std::string {
+        udev* udevContext = udev_new();
+        if (!udevContext) throw std::runtime_error{"Failed to initialize udev"};
+
+        udev_enumerate* enumerate = udev_enumerate_new(udevContext);
+        if (!enumerate) throw std::runtime_error{"Failed to create udev enumeration"};
+
+        udev_enumerate_add_match_subsystem(enumerate, "video4linux");
+        udev_enumerate_scan_devices(enumerate);
+
+        udev_list_entry* devices = udev_enumerate_get_list_entry(enumerate);
+        if (!devices) throw std::runtime_error{"Failed to get udev device list"};
+
+        udev_device* device{};
+        udev_list_entry* entry;
+        udev_list_entry_foreach(entry, devices) {
+            // Null-check the candidate BEFORE dereferencing it, and unref
+            // non-matching candidates so they neither leak nor linger in
+            // "device" and defeat the not-found check after the loop.
+            udev_device* candidate = udev_device_new_from_syspath(udevContext, udev_list_entry_get_name(entry));
+            if (!candidate) throw std::runtime_error{"Failed to get udev device"};
+            if (udev_device_get_devpath(candidate) == devicePath) {
+                device = candidate;
+                break;
+            }
+            udev_device_unref(candidate);
+        }
+        if (!device) throw std::runtime_error{"Failed to find udev device"};
+        std::string deviceNode = udev_device_get_devnode(device);
+        udev_device_unref(device);
+        udev_enumerate_unref(enumerate);
+        udev_unref(udevContext);
+        return deviceNode;
+    }
+
auto GstWebsocketStreamerNodelet::onInit() -> void {
try {
mNh = getMTNodeHandle();
mPnh = getMTPrivateNodeHandle();
- mCaptureDevice = mPnh.param("device", "");
+ mDeviceNode = mPnh.param("dev_node", "");
+ mDevicePath = mPnh.param("dev_path", "1");
mDecodeJpegFromDevice = mPnh.param("decode_jpeg_from_device", true);
mImageTopic = mPnh.param("image_topic", "image");
mImageWidth = mPnh.param("width", 640);
@@ -167,6 +213,8 @@ namespace mrover {
mChunkHeader.resolution = widthAndHeightToResolution(mImageWidth, mImageHeight);
+ if (!mDevicePath.empty()) mDeviceNode = findDeviceNode(mDevicePath);
+
gst_init(nullptr, nullptr);
initPipeline();
@@ -201,7 +249,7 @@ namespace mrover {
ROS_INFO("Stopped GStreamer pipeline");
});
- if (mCaptureDevice.empty()) {
+ if (mDeviceNode.empty()) {
mImageSubscriber = mNh.subscribe(mImageTopic, 1, &GstWebsocketStreamerNodelet::imageCallback, this);
}
diff --git a/src/esw/gst_websocket_streamer/gst_websocket_streamer.hpp b/src/esw/gst_websocket_streamer/gst_websocket_streamer.hpp
index afe50b346..69520febe 100644
--- a/src/esw/gst_websocket_streamer/gst_websocket_streamer.hpp
+++ b/src/esw/gst_websocket_streamer/gst_websocket_streamer.hpp
@@ -25,9 +25,19 @@ namespace mrover {
ros::NodeHandle mNh, mPnh;
- std::string mCaptureDevice;
+ // For example, /dev/video0
+ // These device paths are not guaranteed to stay the same between reboots
+ // Prefer sys path for non-debugging purposes
+ std::string mDeviceNode;
bool mDecodeJpegFromDevice{}; // Uses less USB hub bandwidth, which is limited since we are using 2.0
std::string mImageTopic;
+ // To find the sys path:
+ // 1) Disconnect all cameras
+ // 2) Confirm there are no /dev/video* devices
+ // 3) Connect the camera you want to use
+ // 4) Run "ls /dev/video*" to verify the device is connected
+ // 5) Run "udevadm info -q path -n /dev/video0" to get the sys path
+ std::string mDevicePath;
std::uint64_t mBitrate{};
std::uint32_t mImageWidth{}, mImageHeight{}, mImageFramerate{};
diff --git a/src/esw/gst_websocket_streamer/pch.hpp b/src/esw/gst_websocket_streamer/pch.hpp
index 2c224a3ec..2769438ad 100644
--- a/src/esw/gst_websocket_streamer/pch.hpp
+++ b/src/esw/gst_websocket_streamer/pch.hpp
@@ -1,20 +1,26 @@
#pragma once
-#include
#include
#include
+#include
#include
-#include
-#include
#include
-#include
#include
#include
#include
+#include
+#include
+#include
#include
#include
#include
+#include
+
+#include
+
+#include
+
#include