add GStreamer
CMakeLists.txt
@@ -11,6 +11,21 @@ set(VIZIONSDK_LIB_DIR "${VIZIONSDK_ROOT}/lib")
 # Add VizionSDK include directory
 include_directories(${VIZIONSDK_INCLUDE_DIR})
 
+# ---------------- GStreamer integration ----------------
+find_package(PkgConfig REQUIRED)
+pkg_check_modules(GSTREAMER REQUIRED gstreamer-1.0)
+pkg_check_modules(GSTREAMER_APP REQUIRED gstreamer-app-1.0)
+
+include_directories(
+    ${GSTREAMER_INCLUDE_DIRS}
+    ${GSTREAMER_APP_INCLUDE_DIRS}
+)
+
+link_directories(
+    ${GSTREAMER_LIBRARY_DIRS}
+    ${GSTREAMER_APP_LIBRARY_DIRS}
+)
+
 # Find VizionSDK library
 find_library(VIZIONSDK_LIBRARY
     NAMES VizionSDK
@@ -24,10 +39,16 @@ add_executable(vizionStreamer
     main.cpp
     SocketServer.cpp
     CameraController.cpp
+    GStreamerPipeline.cpp
+    StreamingEngine.cpp
 )
 
-# Link VizionSDK library
-target_link_libraries(vizionStreamer PRIVATE ${VIZIONSDK_LIBRARY})
+# Link libraries
+target_link_libraries(vizionStreamer PRIVATE
+    ${VIZIONSDK_LIBRARY}
+    ${GSTREAMER_LIBRARIES}
+    ${GSTREAMER_APP_LIBRARIES}
+)
 
 # Set RPATH so the executable can find the SDK .so at runtime without needing LD_LIBRARY_PATH
 set_target_properties(vizionStreamer PROPERTIES
CameraController.cpp
@@ -3,7 +3,9 @@
 #include <iostream>
 
 CameraController::CameraController(std::shared_ptr<VxCamera> camera)
-    : camera_(camera), streaming_(false) {}
+    : camera_(camera), gstPipeline_("videoconvert ! autovideosink") {
+    streamingEngine_ = std::make_shared<StreamingEngine>(camera);
+}
 
 std::string CameraController::processCommand(const std::string& jsonCommand) {
     std::lock_guard<std::mutex> lock(mutex_);
@@ -71,6 +73,8 @@ std::string CameraController::processCommand(const std::string& jsonCommand) {
         return handleStartStream();
     } else if (command == "stop_stream") {
         return handleStopStream();
+    } else if (command == "set_pipeline") {
+        return handleSetPipeline(getParam("pipeline"));
     } else {
         return createErrorResponse("Unknown command: " + command);
     }
@@ -78,7 +82,7 @@ std::string CameraController::processCommand(const std::string& jsonCommand) {
 
 std::string CameraController::handleSetFormat(const std::string& width, const std::string& height,
                                               const std::string& framerate, const std::string& format) {
-    if (streaming_) {
+    if (streamingEngine_->isRunning()) {
         return createErrorResponse("Cannot change format while streaming");
     }
 
@@ -94,6 +98,7 @@ std::string CameraController::handleSetFormat(const std::string& width, const st
             return createErrorResponse("Failed to set format");
         }
 
+        streamingEngine_->setFormat(fmt);
         return createSuccessResponse("Format set successfully");
     } catch (const std::exception& e) {
         return createErrorResponse(std::string("Invalid parameters: ") + e.what());
@@ -231,34 +236,44 @@ std::string CameraController::handleSetGain(const std::string& value) {
 
 std::string CameraController::handleGetStatus() {
     std::ostringstream oss;
-    oss << "{\"status\":\"success\",\"streaming\":" << (streaming_ ? "true" : "false") << "}";
+    oss << "{\"status\":\"success\",\"streaming\":" << (streamingEngine_->isRunning() ? "true" : "false")
+        << ",\"pipeline\":\"" << gstPipeline_ << "\"}";
     return oss.str();
 }
 
 std::string CameraController::handleStartStream() {
-    if (streaming_) {
+    if (streamingEngine_->isRunning()) {
        return createErrorResponse("Already streaming");
     }
 
-    if (VxStartStreaming(camera_) != 0) {
+    if (!streamingEngine_->start(gstPipeline_)) {
         return createErrorResponse("Failed to start streaming");
     }
 
-    streaming_ = true;
     return createSuccessResponse("Streaming started");
 }
 
 std::string CameraController::handleStopStream() {
-    if (!streaming_) {
+    if (!streamingEngine_->isRunning()) {
         return createErrorResponse("Not streaming");
     }
 
-    if (VxStopStreaming(camera_) != 0) {
-        return createErrorResponse("Failed to stop streaming");
-    }
-
-    streaming_ = false;
-    return createSuccessResponse("Streaming stopped");
+    streamingEngine_->stop();
+    return createSuccessResponse("Streaming stopped");
 }
 
+std::string CameraController::handleSetPipeline(const std::string& pipeline) {
+    if (streamingEngine_->isRunning()) {
+        return createErrorResponse("Cannot change pipeline while streaming");
+    }
+
+    if (pipeline.empty()) {
+        return createErrorResponse("Pipeline cannot be empty");
+    }
+
+    gstPipeline_ = pipeline;
+    streamingEngine_->setPipelineDescription(pipeline);
+    return createSuccessResponse("Pipeline set successfully");
+}
+
 VX_IMAGE_FORMAT CameraController::stringToFormat(const std::string& format) {
CameraController.h
@@ -1,6 +1,7 @@
 #pragma once
 
 #include <vizionsdk/VizionSDK.h>
+#include "StreamingEngine.h"
 #include <memory>
 #include <string>
 #include <mutex>
@@ -12,6 +13,9 @@ public:
     // Process JSON command and return JSON response
     std::string processCommand(const std::string& jsonCommand);
 
+    // Get streaming engine for external control
+    std::shared_ptr<StreamingEngine> getStreamingEngine() { return streamingEngine_; }
+
 private:
     // Command handlers
     std::string handleSetFormat(const std::string& width, const std::string& height,
@@ -28,6 +32,7 @@ private:
     std::string handleGetStatus();
     std::string handleStartStream();
     std::string handleStopStream();
+    std::string handleSetPipeline(const std::string& pipeline);
 
     // Helper functions
     VX_IMAGE_FORMAT stringToFormat(const std::string& format);
@@ -36,6 +41,7 @@ private:
     std::string createSuccessResponse(const std::string& message = "");
 
     std::shared_ptr<VxCamera> camera_;
+    std::shared_ptr<StreamingEngine> streamingEngine_;
     std::mutex mutex_;
-    bool streaming_;
+    std::string gstPipeline_;
 };
GStreamerPipeline.cpp (new file)
@@ -0,0 +1,164 @@
#include "GStreamerPipeline.h"
#include <iostream>
#include <cstring>

GStreamerPipeline::GStreamerPipeline(const std::string& pipelineDescription)
    : pipeline_(nullptr), appsrc_(nullptr), bus_(nullptr), running_(false),
      pipelineDescription_(pipelineDescription), width_(0), height_(0) {
    gst_init(nullptr, nullptr);
}

GStreamerPipeline::~GStreamerPipeline() {
    stop();
}

void GStreamerPipeline::setPipelineDescription(const std::string& description) {
    if (!running_) {
        pipelineDescription_ = description;
    }
}

bool GStreamerPipeline::start() {
    if (running_) {
        std::cerr << "GStreamer pipeline already running" << std::endl;
        return false;
    }

    if (pipelineDescription_.empty()) {
        std::cerr << "Pipeline description is empty" << std::endl;
        return false;
    }

    GError* error = nullptr;
    std::string fullPipeline = "appsrc name=source ! " + pipelineDescription_;

    pipeline_ = gst_parse_launch(fullPipeline.c_str(), &error);
    if (error) {
        std::cerr << "Failed to create pipeline: " << error->message << std::endl;
        g_error_free(error);
        return false;
    }

    appsrc_ = gst_bin_get_by_name(GST_BIN(pipeline_), "source");
    if (!appsrc_) {
        std::cerr << "Failed to get appsrc element" << std::endl;
        gst_object_unref(pipeline_);
        return false;
    }

    // Configure appsrc
    g_object_set(G_OBJECT(appsrc_),
                 "stream-type", GST_APP_STREAM_TYPE_STREAM,
                 "format", GST_FORMAT_TIME,
                 "is-live", TRUE,
                 nullptr);

    // Set callbacks
    GstAppSrcCallbacks callbacks;
    callbacks.need_data = onNeedData;
    callbacks.enough_data = onEnoughData;
    callbacks.seek_data = nullptr;
    gst_app_src_set_callbacks(GST_APP_SRC(appsrc_), &callbacks, this, nullptr);

    // Start the pipeline
    GstStateChangeReturn ret = gst_element_set_state(pipeline_, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        std::cerr << "Failed to start pipeline" << std::endl;
        gst_object_unref(appsrc_);
        gst_object_unref(pipeline_);
        return false;
    }

    bus_ = gst_element_get_bus(pipeline_);
    running_ = true;

    std::cout << "GStreamer pipeline started: " << fullPipeline << std::endl;
    return true;
}

void GStreamerPipeline::stop() {
    if (!running_) {
        return;
    }

    running_ = false;

    if (appsrc_) {
        gst_app_src_end_of_stream(GST_APP_SRC(appsrc_));
    }

    if (pipeline_) {
        gst_element_set_state(pipeline_, GST_STATE_NULL);
        gst_object_unref(pipeline_);
        pipeline_ = nullptr;
    }

    if (appsrc_) {
        gst_object_unref(appsrc_);
        appsrc_ = nullptr;
    }

    if (bus_) {
        gst_object_unref(bus_);
        bus_ = nullptr;
    }

    std::cout << "GStreamer pipeline stopped" << std::endl;
}

bool GStreamerPipeline::pushBuffer(uint8_t* data, size_t size, int width, int height, const std::string& format) {
    if (!running_ || !appsrc_) {
        return false;
    }

    // Update format if changed
    if (width != width_ || height != height_ || format != format_) {
        width_ = width;
        height_ = height;
        format_ = format;

        // Set caps based on format
        std::string capsStr;
        if (format == "YUY2" || format == "UYVY") {
            capsStr = "video/x-raw,format=" + format + ",width=" + std::to_string(width) +
                      ",height=" + std::to_string(height) + ",framerate=30/1";
        } else if (format == "MJPG") {
            capsStr = "image/jpeg,width=" + std::to_string(width) +
                      ",height=" + std::to_string(height) + ",framerate=30/1";
        } else if (format == "BGR" || format == "RGB") {
            capsStr = "video/x-raw,format=" + format + ",width=" + std::to_string(width) +
                      ",height=" + std::to_string(height) + ",framerate=30/1";
        } else {
            capsStr = "video/x-raw,width=" + std::to_string(width) +
                      ",height=" + std::to_string(height) + ",framerate=30/1";
        }

        GstCaps* caps = gst_caps_from_string(capsStr.c_str());
        gst_app_src_set_caps(GST_APP_SRC(appsrc_), caps);
        gst_caps_unref(caps);
    }

    // Create buffer and copy data
    GstBuffer* buffer = gst_buffer_new_allocate(nullptr, size, nullptr);
    GstMapInfo map;
    gst_buffer_map(buffer, &map, GST_MAP_WRITE);
    memcpy(map.data, data, size);
    gst_buffer_unmap(buffer, &map);

    // Push buffer to pipeline
    GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc_), buffer);
    if (ret != GST_FLOW_OK) {
        std::cerr << "Failed to push buffer to pipeline: " << ret << std::endl;
        return false;
    }

    return true;
}

void GStreamerPipeline::onNeedData(GstAppSrc* appsrc, guint unused, gpointer user_data) {
    // Called when pipeline needs more data
}

void GStreamerPipeline::onEnoughData(GstAppSrc* appsrc, gpointer user_data) {
    // Called when pipeline has enough data buffered
}
GStreamerPipeline.h (new file)
@@ -0,0 +1,32 @@
#pragma once

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <string>
#include <memory>

class GStreamerPipeline {
public:
    explicit GStreamerPipeline(const std::string& pipelineDescription);
    ~GStreamerPipeline();

    bool start();
    void stop();
    bool pushBuffer(uint8_t* data, size_t size, int width, int height, const std::string& format);
    bool isRunning() const { return running_; }

    void setPipelineDescription(const std::string& description);

private:
    static void onNeedData(GstAppSrc* appsrc, guint unused, gpointer user_data);
    static void onEnoughData(GstAppSrc* appsrc, gpointer user_data);

    GstElement* pipeline_;
    GstElement* appsrc_;
    GstBus* bus_;
    bool running_;
    std::string pipelineDescription_;
    int width_;
    int height_;
    std::string format_;
};
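For orientation, a minimal usage sketch of this class (illustrative only, not part of the commit; StreamingEngine below drives it the same way with real camera frames — the 640x480 YUY2 dummy frame here is an assumption purely for demonstration):

```cpp
#include "GStreamerPipeline.h"
#include <cstdint>
#include <vector>

int main() {
    // Pipeline description downstream of the internal appsrc
    // (same default as main.cpp uses).
    GStreamerPipeline pipeline("videoconvert ! autovideosink");

    if (!pipeline.start()) {
        return 1;
    }

    // Push a single dummy YUY2 frame (2 bytes per pixel); a real caller
    // would push frames captured from the camera in a loop.
    const int width = 640;
    const int height = 480;
    std::vector<uint8_t> frame(static_cast<size_t>(width) * height * 2, 0x80);
    pipeline.pushBuffer(frame.data(), frame.size(), width, height, "YUY2");

    pipeline.stop();
    return 0;
}
```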
SOCKET_API.md
@@ -151,9 +151,67 @@ Stop video streaming.
 
 ---
 
-### 5. Get Status
+### 5. Set GStreamer Pipeline
 
-Get current streaming status.
+Configure the GStreamer pipeline for video output. This determines where and how the video stream is processed and displayed.
+
+**Note:** The pipeline cannot be changed while streaming is active.
+
+**Command:**
+```json
+{
+  "command": "set_pipeline",
+  "params": {
+    "pipeline": "videoconvert ! x264enc ! rtph264pay ! udpsink host=192.168.1.100 port=5000"
+  }
+}
+```
+
+**Response:**
+```json
+{
+  "status": "success",
+  "message": "Pipeline set successfully"
+}
+```
+
+**Common Pipeline Examples:**
+
+1. **Display locally:**
+```
+videoconvert ! autovideosink
+```
+
+2. **Stream over UDP (H.264):**
+```
+videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=192.168.1.100 port=5000
+```
+
+3. **Stream over RTSP (requires gst-rtsp-server):**
+```
+videoconvert ! x264enc ! rtph264pay name=pay0
+```
+
+4. **Save to file:**
+```
+videoconvert ! x264enc ! mp4mux ! filesink location=/tmp/output.mp4
+```
+
+5. **Stream over TCP:**
+```
+videoconvert ! x264enc ! h264parse ! mpegtsmux ! tcpserversink host=0.0.0.0 port=5000
+```
+
+6. **MJPEG over HTTP:**
+```
+videoconvert ! jpegenc ! multipartmux ! tcpserversink host=0.0.0.0 port=8080
+```
+
+---
+
+### 6. Get Status
+
+Get current streaming status and pipeline configuration.
 
 **Command:**
 ```json
@@ -166,13 +224,14 @@ Get current streaming status.
 ```json
 {
   "status": "success",
-  "streaming": true
+  "streaming": true,
+  "pipeline": "videoconvert ! autovideosink"
 }
 ```
 
 ---
 
-### 6. Set Exposure
+### 7. Set Exposure
 
 Configure camera exposure settings.
 
@@ -201,7 +260,7 @@ Configure camera exposure settings.
 
 ---
 
-### 7. Set White Balance
+### 8. Set White Balance
 
 Configure white balance settings.
 
@@ -230,7 +289,7 @@ Configure white balance settings.
 
 ---
 
-### 8. Set Brightness
+### 9. Set Brightness
 
 Adjust camera brightness.
 
@@ -254,7 +313,7 @@ Adjust camera brightness.
 
 ---
 
-### 9. Set Contrast
+### 10. Set Contrast
 
 Adjust camera contrast.
 
@@ -278,7 +337,7 @@ Adjust camera contrast.
 
 ---
 
-### 10. Set Saturation
+### 11. Set Saturation
 
 Adjust color saturation.
 
@@ -302,7 +361,7 @@ Adjust color saturation.
 
 ---
 
-### 11. Set Sharpness
+### 12. Set Sharpness
 
 Adjust image sharpness.
 
@@ -326,7 +385,7 @@ Adjust image sharpness.
 
 ---
 
-### 12. Set Gamma
+### 13. Set Gamma
 
 Adjust gamma correction.
 
@@ -350,7 +409,7 @@ Adjust gamma correction.
 
 ---
 
-### 13. Set Gain
+### 14. Set Gain
 
 Adjust camera gain.
 
@@ -376,6 +435,45 @@ Adjust camera gain.
 
 ## Usage Examples
 
+### Complete Workflow Example
+
+```bash
+# 1. Set GStreamer pipeline for UDP streaming
+echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=192.168.1.100 port=5000"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# 2. Set video format
+echo '{"command":"set_format","params":{"width":"1920","height":"1080","framerate":"30","format":"YUY2"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# 3. Configure camera settings
+echo '{"command":"set_exposure","params":{"mode":"auto"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+echo '{"command":"set_brightness","params":{"value":"50"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# 4. Start streaming
+echo '{"command":"start_stream"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# 5. Check status
+echo '{"command":"get_status"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# 6. Stop streaming when done
+echo '{"command":"stop_stream"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+```
+
+### GStreamer Pipeline Examples
+
+```bash
+# Stream to local display
+echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! autovideosink"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# Stream over UDP (H.264)
+echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=192.168.1.100 port=5000"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# Save to MP4 file
+echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! x264enc ! mp4mux ! filesink location=/tmp/output.mp4"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+
+# MJPEG HTTP server
+echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! jpegenc ! multipartmux ! tcpserversink host=0.0.0.0 port=8080"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
+```
+
 ### Using `socat`
 
 ```bash
@@ -505,10 +603,61 @@ else:
 
 The socket server handles one client connection at a time. Commands are processed sequentially with mutex protection to ensure thread safety with the camera operations.
 
+## GStreamer Integration
+
+VizionStreamer uses GStreamer for video processing and output. The captured frames from the VizionSDK camera are continuously fed into a GStreamer pipeline in a separate acquisition thread.
+
+### How It Works
+
+1. **Continuous Acquisition Loop**: A dedicated thread continuously captures frames from the camera using `VxGetImage()`
+2. **Frame Buffering**: Captured frames are pushed into the GStreamer pipeline via `appsrc`
+3. **Pipeline Processing**: GStreamer processes the frames according to the configured pipeline
+4. **Output**: Frames are displayed, saved, or streamed based on the pipeline configuration
+
+### Performance Monitoring
+
+The acquisition loop prints FPS statistics every second:
+```
+FPS: 30 | Total frames: 1234 | Frame size: 4147200 bytes
+```
+
+### Receiving UDP Stream
+
+If you configured a UDP streaming pipeline, receive it with:
+
+```bash
+# Using GStreamer
+gst-launch-1.0 udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink
+
+# Using FFplay
+ffplay -fflags nobuffer -flags low_delay -framedrop udp://0.0.0.0:5000
+
+# Using VLC
+vlc udp://@:5000
+```
+
+### Receiving MJPEG HTTP Stream
+
+If you configured an MJPEG HTTP server pipeline:
+
+```bash
+# View in browser
+firefox http://192.168.1.100:8080
+
+# Using FFplay
+ffplay http://192.168.1.100:8080
+
+# Using curl to save frames
+curl http://192.168.1.100:8080 > stream.mjpg
+```
+
 ## Notes
 
 - The socket file is automatically created when VizionStreamer starts
 - The socket file is removed when VizionStreamer exits cleanly
-- Format changes require streaming to be stopped first
+- Format and pipeline changes require streaming to be stopped first
+- The acquisition loop runs continuously while streaming is active
 - Some parameters may not be supported on all camera models
 - Invalid parameter values will return an error response
+- GStreamer pipeline errors will be reported when starting the stream
+- Default pipeline: `videoconvert ! autovideosink` (display locally)
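Beyond the socat one-liners documented above, a hypothetical minimal C++ client for the control socket might look like the sketch below. The single write-then-read exchange per connection is an assumption; only the socket path and the JSON command format come from SOCKET_API.md.

```cpp
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>
#include <cstdio>
#include <cstring>
#include <iostream>
#include <string>

int main() {
    const char* socketPath = "/tmp/vizion_control.sock";
    const std::string command = R"({"command":"get_status"})";

    // Connect to the VizionStreamer control socket.
    int fd = socket(AF_UNIX, SOCK_STREAM, 0);
    if (fd < 0) { perror("socket"); return 1; }

    sockaddr_un addr{};
    addr.sun_family = AF_UNIX;
    std::strncpy(addr.sun_path, socketPath, sizeof(addr.sun_path) - 1);

    if (connect(fd, reinterpret_cast<sockaddr*>(&addr), sizeof(addr)) < 0) {
        perror("connect");
        close(fd);
        return 1;
    }

    // Send one JSON command and print the JSON response.
    if (write(fd, command.data(), command.size()) < 0) {
        perror("write");
    }

    char buf[4096];
    ssize_t n = read(fd, buf, sizeof(buf) - 1);
    if (n > 0) {
        buf[n] = '\0';
        std::cout << buf << std::endl;
    }

    close(fd);
    return 0;
}
```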
StreamingEngine.cpp (new file)
@@ -0,0 +1,150 @@
#include "StreamingEngine.h"
#include <iostream>
#include <chrono>

StreamingEngine::StreamingEngine(std::shared_ptr<VxCamera> camera)
    : camera_(camera), running_(false), bufferSize_(0) {
    gstPipeline_ = std::make_unique<GStreamerPipeline>("");
}

StreamingEngine::~StreamingEngine() {
    stop();
}

void StreamingEngine::setPipelineDescription(const std::string& pipeline) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!running_) {
        gstPipeline_->setPipelineDescription(pipeline);
    }
}

bool StreamingEngine::start(const std::string& gstPipeline) {
    std::lock_guard<std::mutex> lock(mutex_);

    if (running_) {
        std::cerr << "Streaming engine already running" << std::endl;
        return false;
    }

    // Set pipeline description
    gstPipeline_->setPipelineDescription(gstPipeline);

    // Start camera streaming
    if (VxStartStreaming(camera_) != 0) {
        std::cerr << "Failed to start camera streaming" << std::endl;
        return false;
    }

    // Get current format to allocate buffer
    std::vector<VxFormat> fmtList;
    if (VxGetFormatList(camera_, fmtList) != 0 || fmtList.empty()) {
        std::cerr << "Failed to get format list" << std::endl;
        VxStopStreaming(camera_);
        return false;
    }
    currentFormat_ = fmtList[0];

    // Allocate buffer (assume worst case: uncompressed)
    bufferSize_ = currentFormat_.width * currentFormat_.height * 4;
    buffer_ = std::make_unique<uint8_t[]>(bufferSize_);

    // Start GStreamer pipeline
    if (!gstPipeline_->start()) {
        std::cerr << "Failed to start GStreamer pipeline" << std::endl;
        VxStopStreaming(camera_);
        return false;
    }

    // Start acquisition thread
    running_ = true;
    acquisitionThread_ = std::make_unique<std::thread>(&StreamingEngine::acquisitionLoop, this);

    std::cout << "Streaming engine started" << std::endl;
    return true;
}

void StreamingEngine::stop() {
    if (!running_) {
        return;
    }

    running_ = false;

    // Wait for acquisition thread to finish
    if (acquisitionThread_ && acquisitionThread_->joinable()) {
        acquisitionThread_->join();
    }

    // Stop GStreamer pipeline
    gstPipeline_->stop();

    // Stop camera streaming
    VxStopStreaming(camera_);

    std::cout << "Streaming engine stopped" << std::endl;
}

void StreamingEngine::setFormat(const VxFormat& format) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!running_) {
        currentFormat_ = format;
        if (VxSetFormat(camera_, format) != 0) {
            std::cerr << "Failed to set format" << std::endl;
        }
    }
}

void StreamingEngine::acquisitionLoop() {
    uint64_t frameCount = 0;
    auto lastStatsTime = std::chrono::steady_clock::now();
    uint64_t framesInLastSecond = 0;

    std::cout << "Acquisition loop started" << std::endl;

    while (running_) {
        int dataSize = 0;
        VX_CAPTURE_RESULT result = VxGetImage(camera_, buffer_.get(), &dataSize, 1000);

        if (result == VX_CAPTURE_RESULT::VX_SUCCESS && dataSize > 0) {
            // Push frame to GStreamer pipeline
            std::string formatStr;
            switch (currentFormat_.format) {
                case VX_IMAGE_FORMAT::YUY2: formatStr = "YUY2"; break;
                case VX_IMAGE_FORMAT::UYVY: formatStr = "UYVY"; break;
                case VX_IMAGE_FORMAT::MJPG: formatStr = "MJPG"; break;
                case VX_IMAGE_FORMAT::BGR: formatStr = "BGR"; break;
                case VX_IMAGE_FORMAT::RGB: formatStr = "RGB"; break;
                default: formatStr = "UNKNOWN"; break;
            }

            if (!gstPipeline_->pushBuffer(buffer_.get(), dataSize,
                                          currentFormat_.width, currentFormat_.height,
                                          formatStr)) {
                std::cerr << "Failed to push frame to GStreamer pipeline" << std::endl;
            }

            frameCount++;
            framesInLastSecond++;

            // Print statistics every second
            auto now = std::chrono::steady_clock::now();
            auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - lastStatsTime);
            if (elapsed.count() >= 1) {
                std::cout << "FPS: " << framesInLastSecond
                          << " | Total frames: " << frameCount
                          << " | Frame size: " << dataSize << " bytes" << std::endl;
                framesInLastSecond = 0;
                lastStatsTime = now;
            }
        } else if (result == VX_CAPTURE_RESULT::VX_TIMEOUT) {
            // Timeout is normal, just continue
            continue;
        } else {
            std::cerr << "Failed to capture frame: " << static_cast<int>(result) << std::endl;
            // Don't break on error, just continue trying
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
        }
    }

    std::cout << "Acquisition loop stopped. Total frames captured: " << frameCount << std::endl;
}
StreamingEngine.h (new file)
@@ -0,0 +1,35 @@
#pragma once

#include <vizionsdk/VizionSDK.h>
#include "GStreamerPipeline.h"
#include <memory>
#include <thread>
#include <atomic>
#include <mutex>

class StreamingEngine {
public:
    explicit StreamingEngine(std::shared_ptr<VxCamera> camera);
    ~StreamingEngine();

    bool start(const std::string& gstPipeline);
    void stop();
    bool isRunning() const { return running_; }

    void setFormat(const VxFormat& format);
    VxFormat getCurrentFormat() const { return currentFormat_; }

    void setPipelineDescription(const std::string& pipeline);

private:
    void acquisitionLoop();

    std::shared_ptr<VxCamera> camera_;
    std::unique_ptr<GStreamerPipeline> gstPipeline_;
    std::unique_ptr<std::thread> acquisitionThread_;
    std::atomic<bool> running_;
    std::mutex mutex_;
    VxFormat currentFormat_;
    std::unique_ptr<uint8_t[]> buffer_;
    size_t bufferSize_;
};
main.cpp
@@ -73,6 +73,8 @@ int main() {
 
     std::cout << "\nVizion Streamer is running." << std::endl;
     std::cout << "Control socket: " << socketPath << std::endl;
+    std::cout << "Default GStreamer pipeline: videoconvert ! autovideosink" << std::endl;
+    std::cout << "Use socket commands to start streaming and configure pipeline.\n" << std::endl;
     std::cout << "Press Ctrl+C to exit.\n" << std::endl;
 
     // Main loop - keep running until signaled to stop
@@ -82,6 +84,12 @@ int main() {
 
     // Cleanup
     std::cout << "Shutting down..." << std::endl;
+
+    // Stop streaming engine if running
+    if (controller->getStreamingEngine()->isRunning()) {
+        controller->getStreamingEngine()->stop();
+    }
+
     server.stop();
     VxClose(cam);
 