re-organize folder structure
This commit is contained in:
412
src/CameraController.cpp
Normal file
412
src/CameraController.cpp
Normal file
@@ -0,0 +1,412 @@
|
||||
/*
|
||||
* VizionStreamer - Camera Control Implementation
|
||||
* Copyright (c) 2025 Maik Jurischka
|
||||
*
|
||||
* Licensed under CC BY-NC-SA 4.0
|
||||
* https://creativecommons.org/licenses/by-nc-sa/4.0/
|
||||
*/
|
||||
|
||||
#include "vizionstreamer/CameraController.h"
|
||||
#include <sstream>
|
||||
#include <iostream>
|
||||
|
||||
// Builds the controller around a camera handle and a default preview
// pipeline; the streaming engine shares ownership of the same camera.
CameraController::CameraController(std::shared_ptr<VxCamera> camera)
    : camera_(std::move(camera)), gstPipeline_("videoconvert ! autovideosink") {
    streamingEngine_ = std::make_shared<StreamingEngine>(camera_);
}
|
||||
|
||||
std::string CameraController::processCommand(const std::string& jsonCommand) {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
|
||||
// Simple JSON parsing (basic implementation)
|
||||
// Format: {"command":"name","params":{...}}
|
||||
|
||||
const size_t cmdPos = jsonCommand.find("\"command\"");
|
||||
if (cmdPos == std::string::npos) {
|
||||
return createErrorResponse("Missing command field");
|
||||
}
|
||||
|
||||
const size_t colonPos = jsonCommand.find(":", cmdPos);
|
||||
const size_t quoteStart = jsonCommand.find("\"", colonPos);
|
||||
const size_t quoteEnd = jsonCommand.find("\"", quoteStart + 1);
|
||||
|
||||
if (quoteStart == std::string::npos || quoteEnd == std::string::npos) {
|
||||
return createErrorResponse("Invalid command format");
|
||||
}
|
||||
|
||||
const std::string command = jsonCommand.substr(quoteStart + 1, quoteEnd - quoteStart - 1);
|
||||
|
||||
// Helper lambda to extract parameter value
|
||||
auto getParam = [&jsonCommand](const std::string& paramName) -> std::string {
|
||||
const size_t pos = jsonCommand.find("\"" + paramName + "\"");
|
||||
if (pos == std::string::npos) return "";
|
||||
|
||||
const size_t colonPos = jsonCommand.find(":", pos);
|
||||
|
||||
if (size_t valueStart = jsonCommand.find_first_not_of(" \t\n\r", colonPos + 1); jsonCommand[valueStart] == '\"') {
|
||||
size_t valueEnd = jsonCommand.find("\"", valueStart + 1);
|
||||
return jsonCommand.substr(valueStart + 1, valueEnd - valueStart - 1);
|
||||
} else {
|
||||
size_t valueEnd = jsonCommand.find_first_of(",}", valueStart);
|
||||
return jsonCommand.substr(valueStart, valueEnd - valueStart);
|
||||
}
|
||||
};
|
||||
|
||||
// Route commands
|
||||
if (command == "set_format") {
|
||||
return handleSetFormat(getParam("width"), getParam("height"),
|
||||
getParam("framerate"), getParam("format"));
|
||||
} else if (command == "get_formats") {
|
||||
return handleGetFormats();
|
||||
} else if (command == "set_exposure") {
|
||||
return handleSetExposure(getParam("mode"), getParam("value"));
|
||||
} else if (command == "set_whitebalance") {
|
||||
return handleSetWhiteBalance(getParam("mode"), getParam("temperature"));
|
||||
} else if (command == "set_brightness") {
|
||||
return handleSetBrightness(getParam("value"));
|
||||
} else if (command == "set_contrast") {
|
||||
return handleSetContrast(getParam("value"));
|
||||
} else if (command == "set_saturation") {
|
||||
return handleSetSaturation(getParam("value"));
|
||||
} else if (command == "set_sharpness") {
|
||||
return handleSetSharpness(getParam("value"));
|
||||
} else if (command == "set_gamma") {
|
||||
return handleSetGamma(getParam("value"));
|
||||
} else if (command == "set_gain") {
|
||||
return handleSetGain(getParam("value"));
|
||||
} else if (command == "get_status") {
|
||||
return handleGetStatus();
|
||||
} else if (command == "start_stream") {
|
||||
return handleStartStream();
|
||||
} else if (command == "stop_stream") {
|
||||
return handleStopStream();
|
||||
} else if (command == "set_pipeline") {
|
||||
return handleSetPipeline(getParam("pipeline"));
|
||||
} else if (command == "set_ehdr_mode") {
|
||||
return handleSetEHDRMode(getParam("mode"));
|
||||
} else if (command == "set_ehdr_exposure_min") {
|
||||
return handleSetEHDRExposureMin(getParam("value"));
|
||||
} else if (command == "set_ehdr_exposure_max") {
|
||||
return handleSetEHDRExposureMax(getParam("value"));
|
||||
} else if (command == "set_ehdr_ratio_min") {
|
||||
return handleSetEHDRRatioMin(getParam("value"));
|
||||
} else if (command == "set_ehdr_ratio_max") {
|
||||
return handleSetEHDRRatioMax(getParam("value"));
|
||||
} else if (command == "get_ehdr_status") {
|
||||
return handleGetEHDRStatus();
|
||||
} else {
|
||||
return createErrorResponse("Unknown command: " + command);
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetFormat(const std::string& width, const std::string& height,
|
||||
const std::string& framerate, const std::string& format) {
|
||||
if (streamingEngine_->isRunning()) {
|
||||
return createErrorResponse("Cannot change format while streaming");
|
||||
}
|
||||
|
||||
try {
|
||||
VxFormat fmt;
|
||||
fmt.width = std::stoi(width);
|
||||
fmt.height = std::stoi(height);
|
||||
fmt.framerate = std::stoi(framerate);
|
||||
fmt.format = stringToFormat(format);
|
||||
fmt.mediatypeIdx = 0;
|
||||
|
||||
if (VxSetFormat(camera_, fmt) != 0) {
|
||||
return createErrorResponse("Failed to set format");
|
||||
}
|
||||
|
||||
streamingEngine_->setFormat(fmt);
|
||||
return createSuccessResponse("Format set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleGetFormats() {
|
||||
std::vector<VxFormat> fmtList;
|
||||
if (VxGetFormatList(camera_, fmtList) != 0) {
|
||||
return createErrorResponse("Failed to get format list");
|
||||
}
|
||||
|
||||
std::ostringstream oss;
|
||||
oss << "{\"status\":\"success\",\"formats\":[";
|
||||
for (size_t i = 0; i < fmtList.size(); i++) {
|
||||
if (i > 0) oss << ",";
|
||||
oss << "{\"width\":" << fmtList[i].width
|
||||
<< ",\"height\":" << fmtList[i].height
|
||||
<< ",\"framerate\":" << fmtList[i].framerate
|
||||
<< ",\"format\":\"" << formatToString(fmtList[i].format) << "\"}";
|
||||
}
|
||||
oss << "]}";
|
||||
return oss.str();
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetExposure(const std::string& mode, const std::string& value) {
|
||||
try {
|
||||
const int flag = (mode == "auto") ? 1 : 0;
|
||||
const long expValue = value.empty() ? 0 : std::stol(value);
|
||||
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_EXPOSURE,
|
||||
expValue, flag) != 0) {
|
||||
return createErrorResponse("Failed to set exposure");
|
||||
}
|
||||
|
||||
return createSuccessResponse("Exposure set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetWhiteBalance(const std::string& mode, const std::string& temperature) {
|
||||
try {
|
||||
const int flag = (mode == "auto") ? 1 : 0;
|
||||
const long tempValue = temperature.empty() ? 0 : std::stol(temperature);
|
||||
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_WHITEBALANCE,
|
||||
tempValue, flag) != 0) {
|
||||
return createErrorResponse("Failed to set white balance");
|
||||
}
|
||||
|
||||
return createSuccessResponse("White balance set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetBrightness(const std::string& value) {
|
||||
try {
|
||||
const long val = std::stol(value);
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_BRIGHTNESS,
|
||||
val, 0) != 0) {
|
||||
return createErrorResponse("Failed to set brightness");
|
||||
}
|
||||
return createSuccessResponse("Brightness set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetContrast(const std::string& value) {
|
||||
try {
|
||||
const long val = std::stol(value);
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_CONTRAST,
|
||||
val, 0) != 0) {
|
||||
return createErrorResponse("Failed to set contrast");
|
||||
}
|
||||
return createSuccessResponse("Contrast set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetSaturation(const std::string& value) {
|
||||
try {
|
||||
const long val = std::stol(value);
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_SATURATION,
|
||||
val, 0) != 0) {
|
||||
return createErrorResponse("Failed to set saturation");
|
||||
}
|
||||
return createSuccessResponse("Saturation set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetSharpness(const std::string& value) {
|
||||
try {
|
||||
const long val = std::stol(value);
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_SHARPNESS,
|
||||
val, 0) != 0) {
|
||||
return createErrorResponse("Failed to set sharpness");
|
||||
}
|
||||
return createSuccessResponse("Sharpness set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetGamma(const std::string& value) {
|
||||
try {
|
||||
const long val = std::stol(value);
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_GAMMA,
|
||||
val, 0) != 0) {
|
||||
return createErrorResponse("Failed to set gamma");
|
||||
}
|
||||
return createSuccessResponse("Gamma set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetGain(const std::string& value) {
|
||||
try {
|
||||
const long val = std::stol(value);
|
||||
if (VxSetUVCImageProcessing(camera_, VX_UVC_IMAGE_PROPERTIES::UVC_IMAGE_GAIN,
|
||||
val, 0) != 0) {
|
||||
return createErrorResponse("Failed to set gain");
|
||||
}
|
||||
return createSuccessResponse("Gain set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetEHDRMode(const std::string& value) {
|
||||
try {
|
||||
const int mode = std::stoi(value);
|
||||
if (VxSetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_IMAGE_EHDR_MODE, mode) != 0) {
|
||||
return createErrorResponse("Failed to set eHDR mode");
|
||||
}
|
||||
return createSuccessResponse("eHDR mode set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetEHDRExposureMin(const std::string& value) {
|
||||
try {
|
||||
const int minExp = std::stoi(value);
|
||||
if (VxSetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_EXPOSURE_MIN_NUMBER, minExp) != 0) {
|
||||
return createErrorResponse("Failed to set eHDR exposure min");
|
||||
}
|
||||
return createSuccessResponse("eHDR exposure min set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetEHDRExposureMax(const std::string& value) {
|
||||
try {
|
||||
const int maxExp = std::stoi(value);
|
||||
if (VxSetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_EXPOSURE_MAX_NUMBER, maxExp) != 0) {
|
||||
return createErrorResponse("Failed to set eHDR exposure max");
|
||||
}
|
||||
return createSuccessResponse("eHDR exposure max set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetEHDRRatioMin(const std::string& value) {
|
||||
try {
|
||||
const int minRatio = std::stoi(value);
|
||||
if (VxSetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_RATIO_MIN, minRatio) != 0) {
|
||||
return createErrorResponse("Failed to set eHDR ratio min");
|
||||
}
|
||||
return createSuccessResponse("eHDR ratio min set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetEHDRRatioMax(const std::string& value) {
|
||||
try {
|
||||
const int maxRatio = std::stoi(value);
|
||||
if (VxSetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_RATIO_MAX, maxRatio) != 0) {
|
||||
return createErrorResponse("Failed to set eHDR ratio max");
|
||||
}
|
||||
return createSuccessResponse("eHDR ratio max set successfully");
|
||||
} catch (const std::exception& e) {
|
||||
return createErrorResponse(std::string("Invalid parameters: ") + e.what());
|
||||
}
|
||||
}
|
||||
|
||||
std::string CameraController::handleGetEHDRStatus() {
|
||||
int mode = 0, flag = 0;
|
||||
int expMin = 0, expMax = 0;
|
||||
int ratioMin = 0, ratioMax = 0;
|
||||
|
||||
if (VxGetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_IMAGE_EHDR_MODE, mode, flag) != 0) {
|
||||
return createErrorResponse("Failed to get eHDR mode");
|
||||
}
|
||||
|
||||
VxGetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_EXPOSURE_MIN_NUMBER, expMin, flag);
|
||||
VxGetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_EXPOSURE_MAX_NUMBER, expMax, flag);
|
||||
VxGetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_RATIO_MIN, ratioMin, flag);
|
||||
VxGetISPImageProcessing(camera_, VX_ISP_IMAGE_PROPERTIES::ISP_EHDR_RATIO_MAX, ratioMax, flag);
|
||||
|
||||
std::ostringstream oss;
|
||||
oss << "{\"status\":\"success\",\"ehdr_mode\":" << mode
|
||||
<< ",\"exposure_min\":" << expMin
|
||||
<< ",\"exposure_max\":" << expMax
|
||||
<< ",\"ratio_min\":" << ratioMin
|
||||
<< ",\"ratio_max\":" << ratioMax << "}";
|
||||
return oss.str();
|
||||
}
|
||||
|
||||
// Reports streaming state and the configured GStreamer pipeline as JSON.
// BUG FIX: the pipeline string is user-supplied (set_pipeline) and may
// contain '"' or '\'; it must be escaped or the hand-built JSON is invalid.
std::string CameraController::handleGetStatus() {
    std::string escaped;
    escaped.reserve(gstPipeline_.size());
    for (const char c : gstPipeline_) {
        if (c == '\\' || c == '\"') escaped += '\\';
        escaped += c;
    }

    std::ostringstream oss;
    oss << "{\"status\":\"success\",\"streaming\":" << (streamingEngine_->isRunning() ? "true" : "false")
        << ",\"pipeline\":\"" << escaped << "\"}";
    return oss.str();
}
|
||||
|
||||
std::string CameraController::handleStartStream() {
|
||||
if (streamingEngine_->isRunning()) {
|
||||
return createErrorResponse("Already streaming");
|
||||
}
|
||||
|
||||
if (!streamingEngine_->start(gstPipeline_)) {
|
||||
return createErrorResponse("Failed to start streaming");
|
||||
}
|
||||
|
||||
return createSuccessResponse("Streaming started");
|
||||
}
|
||||
|
||||
std::string CameraController::handleStopStream() {
|
||||
if (!streamingEngine_->isRunning()) {
|
||||
return createErrorResponse("Not streaming");
|
||||
}
|
||||
|
||||
streamingEngine_->stop();
|
||||
return createSuccessResponse("Streaming stopped");
|
||||
}
|
||||
|
||||
std::string CameraController::handleSetPipeline(const std::string& pipeline) {
|
||||
if (streamingEngine_->isRunning()) {
|
||||
return createErrorResponse("Cannot change pipeline while streaming");
|
||||
}
|
||||
|
||||
if (pipeline.empty()) {
|
||||
return createErrorResponse("Pipeline cannot be empty");
|
||||
}
|
||||
|
||||
gstPipeline_ = pipeline;
|
||||
streamingEngine_->setPipelineDescription(pipeline);
|
||||
return createSuccessResponse("Pipeline set successfully");
|
||||
}
|
||||
|
||||
// Maps a textual pixel-format name onto the SDK enum; unrecognized names
// fall back to NONE.
VX_IMAGE_FORMAT CameraController::stringToFormat(const std::string& format) {
    struct Entry { const char* name; VX_IMAGE_FORMAT fmt; };
    static const Entry kTable[] = {
        {"YUY2", VX_IMAGE_FORMAT::YUY2},
        {"UYVY", VX_IMAGE_FORMAT::UYVY},
        {"NV12", VX_IMAGE_FORMAT::NV12},
        {"MJPG", VX_IMAGE_FORMAT::MJPG},
        {"BGR",  VX_IMAGE_FORMAT::BGR},
        {"RGB",  VX_IMAGE_FORMAT::RGB},
    };
    for (const auto& entry : kTable) {
        if (format == entry.name) {
            return entry.fmt;
        }
    }
    return VX_IMAGE_FORMAT::NONE;
}
|
||||
|
||||
// Inverse of stringToFormat(): renders the SDK pixel-format enum as text;
// unknown values map to "NONE".
std::string CameraController::formatToString(VX_IMAGE_FORMAT format) {
    if (format == VX_IMAGE_FORMAT::YUY2) return "YUY2";
    if (format == VX_IMAGE_FORMAT::UYVY) return "UYVY";
    if (format == VX_IMAGE_FORMAT::NV12) return "NV12";
    if (format == VX_IMAGE_FORMAT::MJPG) return "MJPG";
    if (format == VX_IMAGE_FORMAT::BGR)  return "BGR";
    if (format == VX_IMAGE_FORMAT::RGB)  return "RGB";
    return "NONE";
}
|
||||
|
||||
// Builds a JSON error envelope: {"status":"error","message":"..."}.
// BUG FIX: messages can embed exception text (e.what()) which may contain
// '"' or '\'; escape them so the response stays valid JSON.
std::string CameraController::createErrorResponse(const std::string& error) {
    std::string escaped;
    escaped.reserve(error.size());
    for (const char c : error) {
        if (c == '\\' || c == '\"') escaped += '\\';
        escaped += c;
    }
    return "{\"status\":\"error\",\"message\":\"" + escaped + "\"}";
}
|
||||
|
||||
// Builds a JSON success envelope; the "message" field is omitted when the
// message is empty. BUG FIX: escape '"' and '\' so arbitrary message text
// cannot break the hand-built JSON.
std::string CameraController::createSuccessResponse(const std::string& message) {
    if (message.empty()) {
        return "{\"status\":\"success\"}";
    }
    std::string escaped;
    escaped.reserve(message.size());
    for (const char c : message) {
        if (c == '\\' || c == '\"') escaped += '\\';
        escaped += c;
    }
    return "{\"status\":\"success\",\"message\":\"" + escaped + "\"}";
}
|
||||
222
src/GStreamerPipeline.cpp
Normal file
222
src/GStreamerPipeline.cpp
Normal file
@@ -0,0 +1,222 @@
|
||||
/*
|
||||
* VizionStreamer - GStreamer Pipeline Implementation
|
||||
* Copyright (c) 2025 Maik Jurischka
|
||||
*
|
||||
* Licensed under CC BY-NC-SA 4.0
|
||||
* https://creativecommons.org/licenses/by-nc-sa/4.0/
|
||||
*/
|
||||
|
||||
#include "vizionstreamer/GStreamerPipeline.h"
|
||||
#include <iostream>
|
||||
#include <cstring>
|
||||
#include <utility>
|
||||
|
||||
// Constructs the pipeline wrapper and initializes the GStreamer library.
// Element pointers start null; the actual pipeline is only built in start().
// The description may be empty here and supplied later via
// setPipelineDescription().
GStreamerPipeline::GStreamerPipeline(std::string pipelineDescription)
    : pipeline_(nullptr), appsrc_(nullptr), bus_(nullptr), running_(false),
      pipelineDescription_(std::move(pipelineDescription)), width_(0), height_(0) {
    std::cout << "[DEBUG] Initializing GStreamer..." << std::endl;
    // NOTE(review): gst_init is called per-instance; per GStreamer docs
    // repeated initialization is tolerated, but confirm only one process-wide
    // init is intended.
    gst_init(nullptr, nullptr);
    std::cout << "[DEBUG] GStreamer initialized successfully" << std::endl;
    if (!pipelineDescription_.empty()) {
        std::cout << "[DEBUG] Pipeline description: " << pipelineDescription_ << std::endl;
    }
}
|
||||
|
||||
// Tears the pipeline down via stop(); stop() is a no-op when the pipeline
// was never started, so destruction is always safe.
GStreamerPipeline::~GStreamerPipeline() {
    std::cout << "[DEBUG] Destroying GStreamerPipeline..." << std::endl;
    stop();
    std::cout << "[DEBUG] GStreamerPipeline destroyed" << std::endl;
}
|
||||
|
||||
void GStreamerPipeline::setPipelineDescription(const std::string& description) {
|
||||
if (!running_) {
|
||||
std::cout << "[DEBUG] Setting pipeline description: " << description << std::endl;
|
||||
pipelineDescription_ = description;
|
||||
} else {
|
||||
std::cerr << "[DEBUG] Cannot set pipeline description while running" << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
bool GStreamerPipeline::start() {
|
||||
std::cout << "[DEBUG] Starting GStreamer pipeline..." << std::endl;
|
||||
|
||||
if (running_) {
|
||||
std::cerr << "[ERROR] GStreamer pipeline already running" << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (pipelineDescription_.empty()) {
|
||||
std::cerr << "[ERROR] Pipeline description is empty" << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
GError* error = nullptr;
|
||||
const std::string fullPipeline = "appsrc name=source ! " + pipelineDescription_;
|
||||
std::cout << "[DEBUG] Full pipeline string: " << fullPipeline << std::endl;
|
||||
|
||||
std::cout << "[DEBUG] Parsing pipeline..." << std::endl;
|
||||
pipeline_ = gst_parse_launch(fullPipeline.c_str(), &error);
|
||||
if (error) {
|
||||
std::cerr << "[ERROR] Failed to create pipeline: " << error->message << std::endl;
|
||||
g_error_free(error);
|
||||
return false;
|
||||
}
|
||||
std::cout << "[DEBUG] Pipeline parsed successfully" << std::endl;
|
||||
|
||||
std::cout << "[DEBUG] Getting appsrc element..." << std::endl;
|
||||
appsrc_ = gst_bin_get_by_name(GST_BIN(pipeline_), "source");
|
||||
if (!appsrc_) {
|
||||
std::cerr << "[ERROR] Failed to get appsrc element" << std::endl;
|
||||
gst_object_unref(pipeline_);
|
||||
return false;
|
||||
}
|
||||
std::cout << "[DEBUG] Appsrc element retrieved successfully" << std::endl;
|
||||
|
||||
// Configure appsrc
|
||||
std::cout << "[DEBUG] Configuring appsrc properties..." << std::endl;
|
||||
g_object_set(G_OBJECT(appsrc_),
|
||||
"stream-type", GST_APP_STREAM_TYPE_STREAM,
|
||||
"format", GST_FORMAT_TIME,
|
||||
"is-live", TRUE,
|
||||
nullptr);
|
||||
|
||||
// Set callbacks
|
||||
std::cout << "[DEBUG] Setting appsrc callbacks..." << std::endl;
|
||||
GstAppSrcCallbacks callbacks = {};
|
||||
callbacks.need_data = onNeedData;
|
||||
callbacks.enough_data = onEnoughData;
|
||||
callbacks.seek_data = nullptr;
|
||||
gst_app_src_set_callbacks(GST_APP_SRC(appsrc_), &callbacks, this, nullptr);
|
||||
|
||||
// Start the pipeline
|
||||
std::cout << "[DEBUG] Setting pipeline state to PLAYING..." << std::endl;
|
||||
const GstStateChangeReturn ret = gst_element_set_state(pipeline_, GST_STATE_PLAYING);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||
std::cerr << "[ERROR] Failed to set pipeline state to PLAYING" << std::endl;
|
||||
gst_object_unref(appsrc_);
|
||||
gst_object_unref(pipeline_);
|
||||
return false;
|
||||
}
|
||||
|
||||
std::cout << "[DEBUG] Pipeline state change return: ";
|
||||
switch(ret) {
|
||||
case GST_STATE_CHANGE_SUCCESS:
|
||||
std::cout << "SUCCESS" << std::endl;
|
||||
break;
|
||||
case GST_STATE_CHANGE_ASYNC:
|
||||
std::cout << "ASYNC" << std::endl;
|
||||
break;
|
||||
case GST_STATE_CHANGE_NO_PREROLL:
|
||||
std::cout << "NO_PREROLL" << std::endl;
|
||||
break;
|
||||
default:
|
||||
std::cout << "UNKNOWN" << std::endl;
|
||||
}
|
||||
|
||||
bus_ = gst_element_get_bus(pipeline_);
|
||||
running_ = true;
|
||||
|
||||
std::cout << "[SUCCESS] GStreamer pipeline started successfully" << std::endl;
|
||||
std::cout << "[INFO] Full pipeline: " << fullPipeline << std::endl;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Shuts the pipeline down: signal EOS on the appsrc, drive the pipeline to
// NULL, then drop all element references. Safe to call repeatedly — the
// running_ guard makes later calls no-ops. The ordering below matters:
// EOS must be sent before the state change, and the state change before any
// unref.
void GStreamerPipeline::stop() {
    if (!running_) {
        std::cout << "[DEBUG] Pipeline already stopped, nothing to do" << std::endl;
        return;
    }

    std::cout << "[DEBUG] Stopping GStreamer pipeline..." << std::endl;
    // Flip the flag first so pushBuffer() stops feeding frames immediately.
    running_ = false;

    if (appsrc_) {
        std::cout << "[DEBUG] Sending EOS to appsrc..." << std::endl;
        gst_app_src_end_of_stream(GST_APP_SRC(appsrc_));
    }

    if (pipeline_) {
        std::cout << "[DEBUG] Setting pipeline state to NULL..." << std::endl;
        gst_element_set_state(pipeline_, GST_STATE_NULL);
        std::cout << "[DEBUG] Unreferencing pipeline..." << std::endl;
        gst_object_unref(pipeline_);
        pipeline_ = nullptr;
    }

    // appsrc_ holds its own reference (from gst_bin_get_by_name in start()),
    // so it is released separately from the pipeline.
    if (appsrc_) {
        std::cout << "[DEBUG] Unreferencing appsrc..." << std::endl;
        gst_object_unref(appsrc_);
        appsrc_ = nullptr;
    }

    if (bus_) {
        std::cout << "[DEBUG] Unreferencing bus..." << std::endl;
        gst_object_unref(bus_);
        bus_ = nullptr;
    }

    std::cout << "[SUCCESS] GStreamer pipeline stopped" << std::endl;
}
|
||||
|
||||
bool GStreamerPipeline::pushBuffer(const uint8_t* data, const size_t size, const int width, const int height, const std::string& format) {
|
||||
if (!running_ || !appsrc_) {
|
||||
std::cerr << "[DEBUG] Cannot push buffer: pipeline not running or appsrc is null" << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
// Update format if changed
|
||||
if (width != width_ || height != height_ || format != format_) {
|
||||
std::cout << "[DEBUG] Stream format changed - updating caps" << std::endl;
|
||||
std::cout << "[DEBUG] Old: " << width_ << "x" << height_ << " (" << format_ << ")" << std::endl;
|
||||
std::cout << "[DEBUG] New: " << width << "x" << height << " (" << format << ")" << std::endl;
|
||||
|
||||
width_ = width;
|
||||
height_ = height;
|
||||
format_ = format;
|
||||
|
||||
// Set caps based on format
|
||||
std::string capsStr;
|
||||
if (format == "YUY2" || format == "UYVY" || format == "BGR" || format == "RGB") {
|
||||
capsStr = "video/x-raw,format=" + format + ",width=" + std::to_string(width) +
|
||||
",height=" + std::to_string(height) + ",framerate=30/1";
|
||||
} else if (format == "MJPG") {
|
||||
capsStr = "image/jpeg,width=" + std::to_string(width) +
|
||||
",height=" + std::to_string(height) + ",framerate=30/1";
|
||||
} else {
|
||||
capsStr = "video/x-raw,width=" + std::to_string(width) +
|
||||
",height=" + std::to_string(height) + ",framerate=30/1";
|
||||
}
|
||||
|
||||
std::cout << "[DEBUG] Setting caps: " << capsStr << std::endl;
|
||||
GstCaps* caps = gst_caps_from_string(capsStr.c_str());
|
||||
gst_app_src_set_caps(GST_APP_SRC(appsrc_), caps);
|
||||
gst_caps_unref(caps);
|
||||
std::cout << "[DEBUG] Caps set successfully" << std::endl;
|
||||
}
|
||||
|
||||
// Create buffer and copy data
|
||||
GstBuffer* buffer = gst_buffer_new_allocate(nullptr, size, nullptr);
|
||||
GstMapInfo map;
|
||||
gst_buffer_map(buffer, &map, GST_MAP_WRITE);
|
||||
memcpy(map.data, data, size);
|
||||
gst_buffer_unmap(buffer, &map);
|
||||
|
||||
// Push buffer to pipeline
|
||||
const GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc_), buffer);
|
||||
if (ret != GST_FLOW_OK) {
|
||||
std::cerr << "[ERROR] Failed to push buffer to pipeline, flow return: " << ret << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// appsrc "need-data" callback: the pipeline is ready to accept more buffers.
// Intentionally a no-op — frames are pushed from pushBuffer() without
// backpressure from this signal.
void GStreamerPipeline::onNeedData(GstAppSrc* appsrc, guint unused, gpointer user_data) {
    // Called when pipeline needs more data
    //std::cout << "[DEBUG] Pipeline callback: need_data - pipeline ready for more data" << std::endl;
}
|
||||
|
||||
// appsrc "enough-data" callback: the pipeline's buffer is full.
// Intentionally a no-op — no throttling is applied here, so overruns are
// left to appsrc's own queueing behavior.
void GStreamerPipeline::onEnoughData(GstAppSrc* appsrc, gpointer user_data) {
    // Called when pipeline has enough data buffered
    // std::cout << "[DEBUG] Pipeline callback: enough_data - pipeline buffer full" << std::endl;
}
|
||||
118
src/SocketServer.cpp
Normal file
118
src/SocketServer.cpp
Normal file
@@ -0,0 +1,118 @@
|
||||
/*
|
||||
* VizionStreamer - Unix Socket Server Implementation
|
||||
* Copyright (c) 2025 Maik Jurischka
|
||||
*
|
||||
* Licensed under CC BY-NC-SA 4.0
|
||||
* https://creativecommons.org/licenses/by-nc-sa/4.0/
|
||||
*/
|
||||
|
||||
#include "vizionstreamer/SocketServer.h"

#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>

#include <cerrno>
#include <cstring>
#include <iostream>
|
||||
|
||||
// Stores the socket path only; the listening socket itself is created
// lazily in start(). serverFd_ of -1 means "not listening".
SocketServer::SocketServer(const std::string& socketPath)
    : socketPath_(socketPath), serverFd_(-1), running_(false) {}
|
||||
|
||||
// Ensures the accept thread is joined, the socket closed, and the socket
// file removed; stop() is a no-op when the server never started.
SocketServer::~SocketServer() {
    stop();
}
|
||||
|
||||
// Creates, binds and listens on a Unix domain stream socket, then spawns
// the accept loop in a background thread. `callback` is invoked once per
// client request with the raw command text. Returns false on any setup
// failure (logged to stderr), leaving the server in the not-started state.
bool SocketServer::start(CommandCallback callback) {
    if (running_) {
        return false;
    }

    commandCallback_ = std::move(callback);

    // BUG FIX: sun_path is a fixed-size buffer and strncpy truncates
    // silently, which would bind a different filesystem path. Reject
    // over-long paths up front.
    struct sockaddr_un addr = {};
    if (socketPath_.size() >= sizeof(addr.sun_path)) {
        std::cerr << "Socket path too long: " << socketPath_ << std::endl;
        return false;
    }

    // Remove a stale socket file left over from a previous run.
    unlink(socketPath_.c_str());

    // Create Unix domain socket
    serverFd_ = socket(AF_UNIX, SOCK_STREAM, 0);
    if (serverFd_ < 0) {
        std::cerr << "Failed to create socket" << std::endl;
        return false;
    }

    // Bind socket
    addr.sun_family = AF_UNIX;
    strncpy(addr.sun_path, socketPath_.c_str(), sizeof(addr.sun_path) - 1);

    if (bind(serverFd_, reinterpret_cast<struct sockaddr *>(&addr), sizeof(addr)) < 0) {
        std::cerr << "Failed to bind socket: " << strerror(errno) << std::endl;
        close(serverFd_);
        serverFd_ = -1;  // BUG FIX: don't leave a closed fd recorded
        return false;
    }

    // Listen for connections
    if (listen(serverFd_, 5) < 0) {
        std::cerr << "Failed to listen on socket" << std::endl;
        close(serverFd_);
        serverFd_ = -1;
        unlink(socketPath_.c_str());
        return false;
    }

    running_ = true;
    serverThread_ = std::make_unique<std::thread>(&SocketServer::serverLoop, this);

    std::cout << "Socket server started on " << socketPath_ << std::endl;
    return true;
}
|
||||
|
||||
void SocketServer::stop() {
|
||||
if (!running_) {
|
||||
return;
|
||||
}
|
||||
|
||||
running_ = false;
|
||||
|
||||
// Close server socket to unblock accept()
|
||||
if (serverFd_ >= 0) {
|
||||
shutdown(serverFd_, SHUT_RDWR);
|
||||
close(serverFd_);
|
||||
serverFd_ = -1;
|
||||
}
|
||||
|
||||
// Wait for server thread to finish
|
||||
if (serverThread_ && serverThread_->joinable()) {
|
||||
serverThread_->join();
|
||||
}
|
||||
|
||||
unlink(socketPath_.c_str());
|
||||
std::cout << "Socket server stopped" << std::endl;
|
||||
}
|
||||
|
||||
void SocketServer::serverLoop() {
|
||||
while (running_) {
|
||||
int clientFd = accept(serverFd_, nullptr, nullptr);
|
||||
if (clientFd < 0) {
|
||||
if (running_) {
|
||||
std::cerr << "Accept failed: " << strerror(errno) << std::endl;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
handleClient(clientFd);
|
||||
close(clientFd);
|
||||
}
|
||||
}
|
||||
|
||||
void SocketServer::handleClient(const int clientFd) {
|
||||
char buffer[4096];
|
||||
const ssize_t bytesRead = recv(clientFd, buffer, sizeof(buffer) - 1, 0);
|
||||
|
||||
if (bytesRead > 0) {
|
||||
buffer[bytesRead] = '\0';
|
||||
const std::string command(buffer);
|
||||
|
||||
// Call the command callback
|
||||
const std::string response = commandCallback_(command);
|
||||
|
||||
// Send response back to client
|
||||
send(clientFd, response.c_str(), response.length(), 0);
|
||||
}
|
||||
}
|
||||
160
src/StreamingEngine.cpp
Normal file
160
src/StreamingEngine.cpp
Normal file
@@ -0,0 +1,160 @@
|
||||
/*
|
||||
* VizionStreamer - Streaming Engine Implementation
|
||||
* Copyright (c) 2025 Maik Jurischka
|
||||
*
|
||||
* Licensed under CC BY-NC-SA 4.0
|
||||
* https://creativecommons.org/licenses/by-nc-sa/4.0/
|
||||
*/
|
||||
|
||||
#include "vizionstreamer/StreamingEngine.h"
|
||||
#include <iostream>
|
||||
#include <chrono>
|
||||
#include <utility>
|
||||
|
||||
// Takes shared ownership of the camera and prepares an (empty) GStreamer
// pipeline wrapper; the actual pipeline description is supplied later via
// setPipelineDescription() or start().
StreamingEngine::StreamingEngine(std::shared_ptr<VxCamera> camera)
    : camera_(std::move(camera)), running_(false), currentFormat_(), bufferSize_(0) {
    gstPipeline_ = std::make_unique<GStreamerPipeline>("");
}
|
||||
|
||||
// Stops the acquisition thread, the GStreamer pipeline, and camera
// streaming on destruction; stop() is a no-op when not running.
StreamingEngine::~StreamingEngine() {
    stop();
}
|
||||
|
||||
void StreamingEngine::setPipelineDescription(const std::string& pipeline) {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
if (!running_) {
|
||||
gstPipeline_->setPipelineDescription(pipeline);
|
||||
}
|
||||
}
|
||||
|
||||
// Starts the end-to-end stream: configures the GStreamer pipeline, starts
// camera capture, allocates the frame buffer, starts the pipeline, and
// finally spawns the acquisition thread. On any failure the steps already
// taken are rolled back (camera streaming is stopped) and false is
// returned. Cleanup ordering here is deliberate — do not reorder.
bool StreamingEngine::start(const std::string& gstPipeline) {
    std::lock_guard<std::mutex> lock(mutex_);

    if (running_) {
        std::cerr << "Streaming engine already running" << std::endl;
        return false;
    }

    // Set pipeline description
    gstPipeline_->setPipelineDescription(gstPipeline);

    // Start camera streaming
    if (VxStartStreaming(camera_) != 0) {
        std::cerr << "Failed to start camera streaming" << std::endl;
        return false;
    }

    // Get current format to allocate buffer
    // NOTE(review): this takes the FIRST entry of the advertised format
    // list as "the current format" — confirm the SDK guarantees fmtList[0]
    // reflects the active configuration rather than just the first
    // supported mode.
    std::vector<VxFormat> fmtList;
    if (VxGetFormatList(camera_, fmtList) != 0 || fmtList.empty()) {
        std::cerr << "Failed to get format list" << std::endl;
        VxStopStreaming(camera_);
        return false;
    }
    currentFormat_ = fmtList[0];

    // Allocate buffer (assume worst case: uncompressed)
    // 4 bytes/pixel covers every format this engine handles.
    const size_t calculatedBufferSize = currentFormat_.width * currentFormat_.height * 4;
    bufferSize_ = calculatedBufferSize;
    buffer_ = std::make_unique<uint8_t[]>(bufferSize_);

    // Start GStreamer pipeline
    if (!gstPipeline_->start()) {
        std::cerr << "Failed to start GStreamer pipeline" << std::endl;
        VxStopStreaming(camera_);
        return false;
    }

    // Start acquisition thread — running_ must be set before the thread
    // launches so its loop condition sees the stream as active.
    running_ = true;
    acquisitionThread_ = std::make_unique<std::thread>(&StreamingEngine::acquisitionLoop, this);

    std::cout << "Streaming engine started" << std::endl;
    return true;
}
|
||||
|
||||
/// Stop streaming: signal the acquisition thread, join it, then tear down
/// the GStreamer pipeline and the camera stream (reverse order of start()).
/// Safe to call when not running (returns immediately).
void StreamingEngine::stop() {
    // NOTE(review): unlike start()/setFormat(), this method does not take
    // mutex_ — possibly deliberate to avoid self-deadlock, but it reads and
    // writes running_ unlocked. Confirm running_ is std::atomic<bool> in the
    // header; otherwise this is a data race with the acquisition thread.
    if (!running_) {
        return;
    }

    // Ask the acquisition loop to exit its while(running_) loop.
    running_ = false;

    // Wait for acquisition thread to finish
    if (acquisitionThread_ && acquisitionThread_->joinable()) {
        acquisitionThread_->join();
    }

    // Stop GStreamer pipeline
    gstPipeline_->stop();

    // Stop camera streaming
    VxStopStreaming(camera_);

    std::cout << "Streaming engine stopped" << std::endl;
}
|
||||
|
||||
void StreamingEngine::setFormat(const VxFormat& format) {
|
||||
std::lock_guard<std::mutex> lock(mutex_);
|
||||
if (!running_) {
|
||||
currentFormat_ = format;
|
||||
if (VxSetFormat(camera_, format) != 0) {
|
||||
std::cerr << "Failed to set format" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void StreamingEngine::acquisitionLoop() {
|
||||
uint64_t frameCount = 0;
|
||||
auto lastStatsTime = std::chrono::steady_clock::now();
|
||||
uint64_t framesInLastSecond = 0;
|
||||
|
||||
std::cout << "Acquisition loop started" << std::endl;
|
||||
|
||||
while (running_) {
|
||||
int dataSize = 0;
|
||||
const VX_CAPTURE_RESULT result = VxGetImage(camera_, buffer_.get(), &dataSize, 1000);
|
||||
|
||||
if (result == VX_CAPTURE_RESULT::VX_SUCCESS && dataSize > 0) {
|
||||
// Push frame to GStreamer pipeline
|
||||
std::string formatStr;
|
||||
switch (currentFormat_.format) {
|
||||
case VX_IMAGE_FORMAT::YUY2: formatStr = "YUY2"; break;
|
||||
case VX_IMAGE_FORMAT::UYVY: formatStr = "UYVY"; break;
|
||||
case VX_IMAGE_FORMAT::MJPG: formatStr = "MJPG"; break;
|
||||
case VX_IMAGE_FORMAT::BGR: formatStr = "BGR"; break;
|
||||
case VX_IMAGE_FORMAT::RGB: formatStr = "RGB"; break;
|
||||
default: formatStr = "UNKNOWN"; break;
|
||||
}
|
||||
|
||||
if (!gstPipeline_->pushBuffer(buffer_.get(), dataSize,
|
||||
currentFormat_.width, currentFormat_.height,
|
||||
formatStr)) {
|
||||
std::cerr << "Failed to push frame to GStreamer pipeline" << std::endl;
|
||||
}
|
||||
|
||||
frameCount++;
|
||||
framesInLastSecond++;
|
||||
|
||||
// Print statistics every second
|
||||
const auto now = std::chrono::steady_clock::now();
|
||||
const auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - lastStatsTime);
|
||||
if (elapsed.count() >= 1) {
|
||||
std::cout << "FPS: " << framesInLastSecond
|
||||
<< " | Total frames: " << frameCount
|
||||
<< " | Frame size: " << dataSize << " bytes" << std::endl;
|
||||
framesInLastSecond = 0;
|
||||
lastStatsTime = now;
|
||||
}
|
||||
} else if (result == VX_CAPTURE_RESULT::VX_TIMEOUT) {
|
||||
// Timeout is normal, just continue
|
||||
continue;
|
||||
} else {
|
||||
std::cerr << "Failed to capture frame: " << static_cast<int>(result) << std::endl;
|
||||
// Don't break on error, just continue trying
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
||||
}
|
||||
}
|
||||
|
||||
std::cout << "Acquisition loop stopped. Total frames captured: " << frameCount << std::endl;
|
||||
}
|
||||
123
src/main.cpp
Normal file
123
src/main.cpp
Normal file
@@ -0,0 +1,123 @@
|
||||
/*
|
||||
* VizionStreamer - Main Application
|
||||
* Copyright (c) 2025 Maik Jurischka
|
||||
*
|
||||
* Licensed under CC BY-NC-SA 4.0
|
||||
* https://creativecommons.org/licenses/by-nc-sa/4.0/
|
||||
*/
|
||||
|
||||
#include <atomic>
#include <chrono>
#include <csignal>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <vector>

#include <vizionsdk/VizionSDK.h>

#include "vizionstreamer/CameraController.h"
#include "vizionstreamer/SocketServer.h"
|
||||
|
||||
// Flag flipped by the signal handler to request shutdown. A store to
// std::atomic<bool> is async-signal-safe when the atomic is lock-free
// (true on all mainstream platforms).
std::atomic<bool> g_running(true);

/// Signal handler for SIGINT/SIGTERM: request a clean shutdown.
///
/// Only stores to the atomic flag. The previous version also wrote to
/// std::cout, but iostream operations are not async-signal-safe and can
/// deadlock or corrupt stream state if the signal interrupts another
/// stream operation; the shutdown message is printed by main() instead.
void signalHandler(int signal) {
    (void)signal;  // same action for every handled signal
    g_running = false;
}
|
||||
|
||||
int main() {
|
||||
// Setup signal handlers for clean shutdown
|
||||
signal(SIGINT, signalHandler);
|
||||
signal(SIGTERM, signalHandler);
|
||||
|
||||
// List available cameras
|
||||
std::vector<std::string> devList;
|
||||
const int deviceCount = VxDiscoverCameraDevices(devList);
|
||||
|
||||
if (deviceCount == 0) {
|
||||
std::cout << "No cameras found" << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
std::cout << "Found " << deviceCount << " camera(s):" << std::endl;
|
||||
for (size_t i = 0; i < devList.size(); i++) {
|
||||
std::cout << "[" << i << "] " << devList[i] << std::endl;
|
||||
}
|
||||
|
||||
// Find VCI-AR0234 camera
|
||||
int cameraIndex = -1;
|
||||
for (size_t i = 0; i < devList.size(); i++) {
|
||||
if (devList[i].find("VCI-AR0234") != std::string::npos) {
|
||||
cameraIndex = static_cast<int>(i);
|
||||
std::cout << "Selected camera [" << cameraIndex << "]: " << devList[i] << std::endl;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (cameraIndex == -1) {
|
||||
std::cout << "VCI-AR0234 camera not found, using default (index 0)" << std::endl;
|
||||
cameraIndex = 0;
|
||||
}
|
||||
|
||||
// Open camera
|
||||
auto cam = VxInitialCameraDevice(cameraIndex);
|
||||
if (!cam || VxOpen(cam) != 0) {
|
||||
std::cout << "Failed to initialize/open camera" << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Get and set default format
|
||||
std::vector<VxFormat> fmtList;
|
||||
if (VxGetFormatList(cam, fmtList) != 0) {
|
||||
std::cout << "Failed to get format list" << std::endl;
|
||||
VxClose(cam);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (VxSetFormat(cam, fmtList[0]) != 0) {
|
||||
std::cout << "Failed to set format" << std::endl;
|
||||
VxClose(cam);
|
||||
return -1;
|
||||
}
|
||||
|
||||
std::cout << "Initial format: " << fmtList[0].width << "x" << fmtList[0].height
|
||||
<< " @ " << fmtList[0].framerate << " fps" << std::endl;
|
||||
|
||||
const auto controller = std::make_shared<CameraController>(cam);
|
||||
|
||||
const std::string socketPath = "/tmp/vizion_control.sock";
|
||||
SocketServer server(socketPath);
|
||||
|
||||
if (!server.start([controller](const std::string& cmd) {
|
||||
return controller->processCommand(cmd);
|
||||
})) {
|
||||
std::cout << "Failed to start socket server" << std::endl;
|
||||
VxClose(cam);
|
||||
return -1;
|
||||
}
|
||||
|
||||
std::cout << "\n========================================" << std::endl;
|
||||
std::cout << "VizionStreamer Ready" << std::endl;
|
||||
std::cout << "Author: Maik Jurischka <maik@skadilabs.de>" << std::endl;
|
||||
std::cout << "License: CC BY-NC-SA 4.0 -> https://creativecommons.org/licenses/by-nc-sa/4.0/" << std::endl;
|
||||
std::cout << "========================================" << std::endl << std::endl;
|
||||
std::cout << "Control socket: " << socketPath << std::endl;
|
||||
std::cout << "Default pipeline: videoconvert ! autovideosink" << std::endl;
|
||||
std::cout << "\nQuick start:" << std::endl;
|
||||
std::cout << R"( echo '{"command":"start_stream"}' | socat - UNIX-CONNECT:)" << socketPath << std::endl;
|
||||
std::cout << "\nTo change pipeline before starting:" << std::endl;
|
||||
std::cout << R"( echo '{"command":"set_pipeline","params":{"pipeline":"YOUR_PIPELINE"}}' | socat - UNIX-CONNECT:)" << socketPath << std::endl;
|
||||
std::cout << "\nPress Ctrl+C to exit.\n" << std::endl;
|
||||
|
||||
while (g_running) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(100));
|
||||
}
|
||||
|
||||
std::cout << "Shutting down..." << std::endl;
|
||||
|
||||
if (controller->getStreamingEngine()->isRunning()) {
|
||||
controller->getStreamingEngine()->stop();
|
||||
}
|
||||
|
||||
server.stop();
|
||||
VxClose(cam);
|
||||
|
||||
std::cout << "Shutdown complete." << std::endl;
|
||||
return 0;
|
||||
}
|
||||
Reference in New Issue
Block a user