first commit

This commit is contained in:
Maik Jurischka
2025-12-18 16:10:55 +01:00
commit 69e2f3ae1d
19 changed files with 2707 additions and 0 deletions

53
.gitignore vendored Normal file
View File

@@ -0,0 +1,53 @@
# CMake build files
build/
build-*/
CMakeFiles/
CMakeCache.txt
cmake_install.cmake
Makefile
*.cmake
# Qt Creator files
*.pro.user
*.pro.user.*
.qtcreator/
*.autosave
# Compiled Object files
*.o
*.obj
*.so
*.dylib
*.dll
# Executables
gstreamerViewer
*.exe
*.app
# Qt Meta Object Compiler files
moc_*.cpp
moc_*.h
qrc_*.cpp
ui_*.h
# Qt Resource Compiler
*.qrc.depends
*.qm
# IDE and editor files
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store
# Debug and temporary files
*.log
core
*.core
# Documentation generation
html/
latex/

47
CMakeLists.txt Normal file
View File

@@ -0,0 +1,47 @@
cmake_minimum_required(VERSION 3.19)
project(gstreamerViewer LANGUAGES CXX)

find_package(Qt6 6.5 REQUIRED COMPONENTS Core Widgets)
find_package(PkgConfig REQUIRED)
# IMPORTED_TARGET creates PkgConfig::GSTREAMER, an imported target that
# carries the include directories, compile options, and link libraries as
# usage requirements — no need to wire GSTREAMER_INCLUDE_DIRS /
# GSTREAMER_LIBRARIES variables by hand.
pkg_check_modules(GSTREAMER REQUIRED IMPORTED_TARGET
    gstreamer-1.0
    gstreamer-video-1.0
)

qt_standard_project_setup()

qt_add_executable(gstreamerViewer
    WIN32 MACOSX_BUNDLE
    main.cpp
    mainwindow.cpp
    mainwindow.h
    mainwindow.ui
    socketclient.cpp
    socketclient.h
    gstreamerpipelinewidget.cpp
    gstreamerpipelinewidget.h
    cameracontrolwidget.cpp
    cameracontrolwidget.h
    videoviewerwidget.cpp
    videoviewerwidget.h
)

target_link_libraries(gstreamerViewer
    PRIVATE
        Qt6::Core
        Qt6::Widgets
        PkgConfig::GSTREAMER
)

include(GNUInstallDirs)
install(TARGETS gstreamerViewer
    BUNDLE DESTINATION .
    RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
    LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
)

qt_generate_deploy_app_script(
    TARGET gstreamerViewer
    OUTPUT_SCRIPT deploy_script
    NO_UNSUPPORTED_PLATFORM_ERROR
)
install(SCRIPT ${deploy_script})

347
README.md Normal file
View File

@@ -0,0 +1,347 @@
# GStreamer Viewer
A Qt6-based GUI application for controlling and viewing video streams from cameras via the VizionStreamer backend. This application provides real-time camera control, GStreamer pipeline configuration, and video display capabilities.
## Features
- **Video Streaming Control**: Configure and start/stop camera streaming with GStreamer pipelines
- **Real-time Video Display**: View the streamed video in a separate window
- **Camera Parameter Control**: Adjust exposure, white balance, brightness, contrast, saturation, sharpness, gamma, and gain
- **Pipeline Presets**: Quick access to common pipeline configurations (MJPEG UDP, H.264 UDP, local display, etc.)
- **Format Detection**: Automatically fetch and select supported camera formats
- **Quick Start**: One-click auto-configuration and streaming
- **Unix Socket Communication**: Communicates with VizionStreamer backend via `/tmp/vizion_control.sock`
## System Requirements
- Linux (tested on Arch Linux)
- Qt6
- GStreamer 1.0
- VizionStreamer backend (not included)
## Installation
### Arch Linux
Install the required packages using pacman:
```bash
sudo pacman -S qt6-base gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad cmake base-devel
```
**Package breakdown:**
- `qt6-base`: Qt6 framework (Widgets, Network, Core modules)
- `gstreamer`: GStreamer multimedia framework
- `gst-plugins-base`: Base GStreamer plugins (videoconvert, etc.)
- `gst-plugins-good`: Good quality plugins (JPEG encoding/decoding, RTP, UDP)
- `gst-plugins-bad`: Additional plugins (optional, for more formats)
- `cmake`: Build system
- `base-devel`: C++ compiler and build tools
### Debian/Ubuntu
Install the required packages using apt:
```bash
sudo apt update
sudo apt install qt6-base-dev libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-tools \
cmake build-essential
```
**Package breakdown:**
- `qt6-base-dev`: Qt6 development files
- `libgstreamer1.0-dev`: GStreamer development headers
- `libgstreamer-plugins-base1.0-dev`: Base plugins development files
- `gstreamer1.0-plugins-good`: Good quality plugins runtime
- `gstreamer1.0-plugins-bad`: Additional plugins (optional)
- `gstreamer1.0-tools`: GStreamer command-line tools (for debugging with gst-launch-1.0)
- `cmake`: Build system
- `build-essential`: C++ compiler and build tools
### Optional: H.264 Support
For H.264 streaming (requires additional codec):
**Arch Linux:**
```bash
sudo pacman -S gst-libav
```
**Debian/Ubuntu:**
```bash
sudo apt install gstreamer1.0-libav
```
## Building
### Quick Build (using build script)
The easiest way to build the project:
```bash
cd /path/to/gstreamerViewer
./build.sh
```
### Manual Build
1. Clone or extract the project:
```bash
cd /path/to/gstreamerViewer
```
2. Create a build directory:
```bash
mkdir -p build
cd build
```
3. Configure with CMake:
```bash
cmake ..
```
4. Build the project:
```bash
make -j$(nproc)
```
5. The executable will be located at:
```bash
./gstreamerViewer
```
## Usage
### Prerequisites
1. **VizionStreamer Backend**: Ensure the VizionStreamer backend is running and the Unix domain socket is available at `/tmp/vizion_control.sock`
2. **Camera Connection**: Connect your camera (e.g., VCI-AR0234-C) and ensure VizionStreamer has selected the correct camera device
### Quick Start Workflow
The easiest way to start streaming:
1. Launch the application:
```bash
# Using the run script (checks for VizionStreamer)
./run.sh
# Or directly from build directory
cd build && ./gstreamerViewer
```
2. Navigate to the **"GStreamer Pipeline"** tab
3. Click the **"⚡ Quick Start (Auto Configure & Stream)"** button
- This automatically:
- Sets the camera format (1280x720@30fps UYVY or best available)
- Configures the MJPEG UDP streaming pipeline
- Starts the stream
4. Switch to the **"Video Viewer"** tab
5. Ensure the source type is set to **"UDP MJPEG Stream"**
6. Click **"Start Viewer"** to display the video
### Manual Configuration
For more control over the streaming setup:
#### Step 1: Configure Camera Format
1. Go to the **"Camera Control"** tab
2. Click **"Get Available Formats"** to fetch supported formats from the camera
3. Select your desired format from the dropdown
4. Click **"Set Format"**
#### Step 2: Configure Pipeline
1. Go to the **"GStreamer Pipeline"** tab
2. Select a pipeline preset from the dropdown, or enter a custom pipeline:
- **MJPEG UDP Stream**: Best for raw formats (UYVY, YUY2), no additional plugins needed
- **UDP H.264 Stream**: Requires extra codec plugins (x264enc from gst-plugins-ugly for encoding; gst-libav for decoding), better compression
- **Local Display**: For testing (shows video on server side)
3. Click **"Set Pipeline"**
#### Step 3: Start Streaming
1. Click **"Start Stream"** in the GStreamer Pipeline tab
2. The status should show "Status: Streaming" with a green background
#### Step 4: View the Stream
1. Go to the **"Video Viewer"** tab
2. Select the appropriate source type (matches your pipeline):
- **UDP MJPEG Stream** for MJPEG UDP pipeline
- **UDP H.264 Stream** for H.264 UDP pipeline
3. Verify host/port settings (default: port 5000)
4. Click **"Start Viewer"**
5. Video will appear in a separate window
### Camera Control
The **"Camera Control"** tab provides real-time adjustment of camera parameters:
- **Exposure**: Auto or Manual mode with adjustable value
- **White Balance**: Auto or Manual with temperature control (2800-6500K)
- **Image Adjustments**:
- Brightness (0-255)
- Contrast (0-255)
- Saturation (0-255)
- Sharpness (0-255)
- Gamma (72-500)
- Gain (0-100)
All slider changes are applied immediately to the camera.
## Pipeline Presets Explained
### MJPEG UDP Stream
```
videoconvert ! jpegenc ! rtpjpegpay ! udpsink host=127.0.0.1 port=5000
```
- **Best for**: Raw formats (UYVY, YUY2, RGB)
- **Pros**: No additional plugins needed, reliable
- **Cons**: Lower compression than H.264
### UDP H.264 Stream
```
videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
```
- **Best for**: Higher compression, lower bandwidth
- **Pros**: Better compression
- **Cons**: Requires the x264 encoder (gst-plugins-ugly) on the sender and gst-libav (`avdec_h264`) on the receiver
### Local Display
```
videoconvert ! autovideosink
```
- **Best for**: Testing camera without network streaming
- **Shows**: Video on the server machine
## Troubleshooting
### Issue: "Failed to start streaming"
**Solution**: Ensure the camera format is set before starting the stream:
1. Click "Get Available Formats" in Camera Control tab
2. Select a supported format
3. Click "Set Format"
4. Try starting the stream again
### Issue: Video is black/not displaying
**Possible causes:**
1. **Pipeline mismatch**: Ensure the viewer source type matches the streaming pipeline
2. **No UDP packets**: Verify with tcpdump:
```bash
sudo tcpdump -i lo udp port 5000 -nn
```
3. **Wrong camera selected**: Check VizionStreamer logs to ensure correct camera is active
### Issue: "No element 'avdec_h264'"
**Solution**: Install the gst-libav plugin or use the MJPEG UDP pipeline instead
**Arch Linux:**
```bash
sudo pacman -S gst-libav
```
**Debian/Ubuntu:**
```bash
sudo apt install gstreamer1.0-libav
```
### Issue: Connection error to VizionStreamer
**Solution**: Verify the backend is running:
```bash
ls -la /tmp/vizion_control.sock
```
If the socket doesn't exist, start VizionStreamer.
**Test the connection**:
```bash
./test_connection.sh
```
This script will verify the socket exists and test basic communication with VizionStreamer.
### Issue: X11 BadWindow errors with video display
**Note**: The application uses `autovideosink` which opens a separate video window. This is intentional due to X11 limitations with embedded video overlays in Qt.
### Debug: Test pipeline manually
Test if GStreamer can receive the stream:
```bash
gst-launch-1.0 udpsrc port=5000 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! autovideosink
```
## Supported Cameras
This application works with cameras supported by the VizionStreamer backend, including:
- VCI-AR0234-C (tested: UYVY at 1920x1200@60fps)
- Other V4L2-compatible cameras
## Architecture
```
┌─────────────────────┐
│ gstreamerViewer │
│ (Qt6 GUI) │
└──────────┬──────────┘
│ Unix Socket (/tmp/vizion_control.sock)
│ JSON Commands
┌─────────────────────┐
│ VizionStreamer │
│ Backend │
└──────────┬──────────┘
│ V4L2 / VizionSDK
┌─────────────────────┐
│ Camera Hardware │
│ (VCI-AR0234-C) │
└─────────────────────┘
Video Stream Flow:
Camera → VizionStreamer → GStreamer Pipeline → UDP/Local → VideoViewer
```
## Socket API Commands
The application communicates with VizionStreamer using JSON commands. See `SOCKET_API.md` for full protocol documentation.
Example commands:
- `get_formats`: Retrieve available camera formats
- `set_format`: Set camera resolution, framerate, and pixel format
- `set_pipeline`: Configure GStreamer pipeline
- `start_stream`: Start camera streaming
- `stop_stream`: Stop camera streaming
- `get_status`: Query streaming status
- `set_exposure`, `set_brightness`, etc.: Camera parameter controls
## License
[Specify your license here]
## Credits
Built with:
- Qt6 Framework
- GStreamer Multimedia Framework
- VizionSDK
## Support
For issues or questions:
1. Check the Troubleshooting section above
2. Verify VizionStreamer backend is running correctly
3. Test GStreamer pipelines manually with `gst-launch-1.0`

663
SOCKET_API.md Normal file
View File

@@ -0,0 +1,663 @@
# VizionStreamer Socket Control API
VizionStreamer can be controlled via a Unix Domain Socket interface. This allows external applications to configure camera parameters and stream settings at runtime.
## Socket Connection
- **Socket Path**: `/tmp/vizion_control.sock`
- **Protocol**: Unix Domain Socket (SOCK_STREAM)
- **Message Format**: JSON
## Command Format
All commands follow this JSON structure:
```json
{
"command": "command_name",
"params": {
"param1": "value1",
"param2": "value2"
}
}
```
## Response Format
All responses follow this JSON structure:
**Success Response:**
```json
{
"status": "success",
"message": "Optional success message"
}
```
**Error Response:**
```json
{
"status": "error",
"message": "Error description"
}
```
## Available Commands
### 1. Get Available Formats
Retrieve all supported video formats.
**Command:**
```json
{
"command": "get_formats"
}
```
**Response:**
```json
{
"status": "success",
"formats": [
{
"width": 1920,
"height": 1080,
"framerate": 30,
"format": "YUY2"
},
{
"width": 1280,
"height": 720,
"framerate": 60,
"format": "MJPG"
}
]
}
```
**Supported Formats:** YUY2, UYVY, NV12, MJPG, BGR, RGB
---
### 2. Set Video Format
Change the video format (resolution, framerate, pixel format).
**Note:** Cannot be changed while streaming is active.
**Command:**
```json
{
"command": "set_format",
"params": {
"width": "1920",
"height": "1080",
"framerate": "30",
"format": "YUY2"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Format set successfully"
}
```
---
### 3. Start Streaming
Start video streaming from the camera.
**Command:**
```json
{
"command": "start_stream"
}
```
**Response:**
```json
{
"status": "success",
"message": "Streaming started"
}
```
---
### 4. Stop Streaming
Stop video streaming.
**Command:**
```json
{
"command": "stop_stream"
}
```
**Response:**
```json
{
"status": "success",
"message": "Streaming stopped"
}
```
---
### 5. Set GStreamer Pipeline
Configure the GStreamer pipeline for video output. This determines where and how the video stream is processed/displayed.
**Note:** Cannot be changed while streaming is active.
**Command:**
```json
{
"command": "set_pipeline",
"params": {
"pipeline": "videoconvert ! x264enc ! rtph264pay ! udpsink host=192.168.1.100 port=5000"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Pipeline set successfully"
}
```
**Common Pipeline Examples:**
1. **Display locally:**
```
videoconvert ! autovideosink
```
2. **Stream over UDP (H.264):**
```
videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=192.168.1.100 port=5000
```
3. **Stream over RTSP (requires gst-rtsp-server):**
```
videoconvert ! x264enc ! rtph264pay name=pay0
```
4. **Save to file:**
```
videoconvert ! x264enc ! mp4mux ! filesink location=/tmp/output.mp4
```
5. **Stream over TCP:**
```
videoconvert ! x264enc ! h264parse ! mpegtsmux ! tcpserversink host=0.0.0.0 port=5000
```
6. **MJPEG over HTTP:**
```
videoconvert ! jpegenc ! multipartmux ! tcpserversink host=0.0.0.0 port=8080
```
---
### 6. Get Status
Get current streaming status and pipeline configuration.
**Command:**
```json
{
"command": "get_status"
}
```
**Response:**
```json
{
"status": "success",
"streaming": true,
"pipeline": "videoconvert ! autovideosink"
}
```
---
### 7. Set Exposure
Configure camera exposure settings.
**Command:**
```json
{
"command": "set_exposure",
"params": {
"mode": "manual",
"value": "100"
}
}
```
**Parameters:**
- `mode`: "auto" or "manual"
- `value`: Exposure value (only used in manual mode)
**Response:**
```json
{
"status": "success",
"message": "Exposure set successfully"
}
```
---
### 8. Set White Balance
Configure white balance settings.
**Command:**
```json
{
"command": "set_whitebalance",
"params": {
"mode": "auto",
"temperature": "4500"
}
}
```
**Parameters:**
- `mode`: "auto" or "manual"
- `temperature`: Color temperature in Kelvin (only used in manual mode)
**Response:**
```json
{
"status": "success",
"message": "White balance set successfully"
}
```
---
### 9. Set Brightness
Adjust camera brightness.
**Command:**
```json
{
"command": "set_brightness",
"params": {
"value": "50"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Brightness set successfully"
}
```
---
### 10. Set Contrast
Adjust camera contrast.
**Command:**
```json
{
"command": "set_contrast",
"params": {
"value": "32"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Contrast set successfully"
}
```
---
### 11. Set Saturation
Adjust color saturation.
**Command:**
```json
{
"command": "set_saturation",
"params": {
"value": "64"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Saturation set successfully"
}
```
---
### 12. Set Sharpness
Adjust image sharpness.
**Command:**
```json
{
"command": "set_sharpness",
"params": {
"value": "3"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Sharpness set successfully"
}
```
---
### 13. Set Gamma
Adjust gamma correction.
**Command:**
```json
{
"command": "set_gamma",
"params": {
"value": "100"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Gamma set successfully"
}
```
---
### 14. Set Gain
Adjust camera gain.
**Command:**
```json
{
"command": "set_gain",
"params": {
"value": "0"
}
}
```
**Response:**
```json
{
"status": "success",
"message": "Gain set successfully"
}
```
---
## Usage Examples
### Complete Workflow Example
```bash
# 1. Set GStreamer pipeline for UDP streaming
echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=192.168.1.100 port=5000"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# 2. Set video format
echo '{"command":"set_format","params":{"width":"1920","height":"1080","framerate":"30","format":"YUY2"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# 3. Configure camera settings
echo '{"command":"set_exposure","params":{"mode":"auto"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
echo '{"command":"set_brightness","params":{"value":"50"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# 4. Start streaming
echo '{"command":"start_stream"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# 5. Check status
echo '{"command":"get_status"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# 6. Stop streaming when done
echo '{"command":"stop_stream"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
```
### GStreamer Pipeline Examples
```bash
# Stream to local display
echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! autovideosink"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Stream over UDP (H.264)
echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=192.168.1.100 port=5000"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Save to MP4 file
echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! x264enc ! mp4mux ! filesink location=/tmp/output.mp4"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# MJPEG HTTP server
echo '{"command":"set_pipeline","params":{"pipeline":"videoconvert ! jpegenc ! multipartmux ! tcpserversink host=0.0.0.0 port=8080"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
```
### Using `socat`
```bash
# Get available formats
echo '{"command":"get_formats"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Set video format
echo '{"command":"set_format","params":{"width":"1920","height":"1080","framerate":"30","format":"YUY2"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Start streaming
echo '{"command":"start_stream"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Set exposure to auto
echo '{"command":"set_exposure","params":{"mode":"auto"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Set brightness
echo '{"command":"set_brightness","params":{"value":"50"}}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Get status
echo '{"command":"get_status"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
# Stop streaming
echo '{"command":"stop_stream"}' | socat - UNIX-CONNECT:/tmp/vizion_control.sock
```
### Using `nc` (netcat with Unix socket support)
```bash
echo '{"command":"get_formats"}' | nc -U /tmp/vizion_control.sock
```
### Using Python
```python
import socket
import json
def send_command(command, params=None):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect('/tmp/vizion_control.sock')
cmd = {"command": command}
if params:
cmd["params"] = params
sock.send(json.dumps(cmd).encode())
response = sock.recv(4096).decode()
sock.close()
return json.loads(response)
# Examples
print(send_command("get_formats"))
print(send_command("set_format", {
"width": "1920",
"height": "1080",
"framerate": "30",
"format": "YUY2"
}))
print(send_command("set_exposure", {"mode": "auto"}))
print(send_command("start_stream"))
```
### Using C++
```cpp
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>
#include <cstring>
#include <string>
#include <iostream>
std::string sendCommand(const std::string& command) {
int sock = socket(AF_UNIX, SOCK_STREAM, 0);
struct sockaddr_un addr;
memset(&addr, 0, sizeof(addr));
addr.sun_family = AF_UNIX;
strcpy(addr.sun_path, "/tmp/vizion_control.sock");
connect(sock, (struct sockaddr*)&addr, sizeof(addr));
send(sock, command.c_str(), command.length(), 0);
char buffer[4096];
int bytesRead = recv(sock, buffer, sizeof(buffer) - 1, 0);
buffer[bytesRead] = '\0';
close(sock);
return std::string(buffer);
}
// Example usage
int main() {
std::cout << sendCommand(R"({"command":"get_formats"})") << std::endl;
std::cout << sendCommand(R"({"command":"set_brightness","params":{"value":"50"}})") << std::endl;
return 0;
}
```
## Parameter Value Ranges
The valid ranges for camera parameters depend on the specific camera model. You can query the camera capabilities through the VizionSDK API or experimentally determine valid ranges.
**Typical ranges (camera-dependent):**
- Brightness: 0-255
- Contrast: 0-255
- Saturation: 0-255
- Sharpness: 0-255
- Gamma: 72-500
- Gain: 0-100
- Exposure: 1-10000 (in auto mode, value is ignored)
- White Balance Temperature: 2800-6500 Kelvin
## Error Handling
Always check the `status` field in the response:
```python
response = send_command("set_format", {...})
if response["status"] == "error":
print(f"Command failed: {response['message']}")
else:
print("Command successful")
```
## Thread Safety
The socket server handles one client connection at a time. Commands are processed sequentially with mutex protection to ensure thread safety with the camera operations.
## GStreamer Integration
VizionStreamer uses GStreamer for video processing and output. The captured frames from the VizionSDK camera are continuously fed into a GStreamer pipeline in a separate acquisition thread.
### How It Works
1. **Continuous Acquisition Loop**: A dedicated thread continuously captures frames from the camera using `VxGetImage()`
2. **Frame Buffering**: Captured frames are pushed into the GStreamer pipeline via `appsrc`
3. **Pipeline Processing**: GStreamer processes the frames according to the configured pipeline
4. **Output**: Frames are displayed, saved, or streamed based on the pipeline configuration
### Performance Monitoring
The acquisition loop prints FPS statistics every second:
```
FPS: 30 | Total frames: 1234 | Frame size: 4147200 bytes
```
### Receiving UDP Stream
If you configured a UDP streaming pipeline, receive it with:
```bash
# Using GStreamer
gst-launch-1.0 udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink
# Using FFplay
ffplay -fflags nobuffer -flags low_delay -framedrop udp://0.0.0.0:5000
# Using VLC
vlc udp://@:5000
```
### Receiving MJPEG HTTP Stream
If you configured an MJPEG HTTP server pipeline:
```bash
# View in browser
firefox http://192.168.1.100:8080
# Using FFplay
ffplay http://192.168.1.100:8080
# Using curl to save frames
curl http://192.168.1.100:8080 > stream.mjpg
```
## Notes
- The socket file is automatically created when VizionStreamer starts
- The socket file is removed when VizionStreamer exits cleanly
- Format and pipeline changes require streaming to be stopped first
- The acquisition loop runs continuously while streaming is active
- Some parameters may not be supported on all camera models
- Invalid parameter values will return an error response
- GStreamer pipeline errors will be reported when starting the stream
- Default pipeline: `videoconvert ! autovideosink` (display locally)

28
build.sh Executable file
View File

@@ -0,0 +1,28 @@
#!/bin/bash
# Build script for gstreamerViewer.
#
# Configures and builds the project out-of-source in ./build using CMake's
# portable -S/-B and --build interfaces.
set -e  # Exit on error

echo "=== Building gstreamerViewer ==="

# Configure step; -B creates the build directory if it does not exist yet.
echo "Running CMake..."
cmake -S . -B build

# Build step, parallelised across all available cores.
# "$(nproc)" is quoted so an unexpected value cannot word-split.
echo "Building..."
cmake --build build -j"$(nproc)"

echo ""
echo "=== Build successful! ==="
echo "Executable: $(pwd)/build/gstreamerViewer"
echo ""
echo "To run the application:"
echo " cd build && ./gstreamerViewer"
echo ""

381
cameracontrolwidget.cpp Normal file
View File

@@ -0,0 +1,381 @@
#include "cameracontrolwidget.h"
#include <QVBoxLayout>
#include <QHBoxLayout>
#include <QFormLayout>
#include <QScrollArea>
#include <QButtonGroup>
#include <QJsonArray>
// Constructs the camera control panel.
// socketClient: connection to the VizionStreamer backend used by all
// command handlers below; the pointer is stored, not copied — lifetime is
// presumably managed by the caller (TODO confirm).
CameraControlWidget::CameraControlWidget(SocketClient* socketClient, QWidget *parent)
    : QWidget(parent), m_socketClient(socketClient)
{
    setupUI();
}
void CameraControlWidget::setupUI()
{
QVBoxLayout* mainLayout = new QVBoxLayout(this);
// Create scroll area for all controls
QScrollArea* scrollArea = new QScrollArea(this);
scrollArea->setWidgetResizable(true);
scrollArea->setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
QWidget* scrollWidget = new QWidget();
QVBoxLayout* scrollLayout = new QVBoxLayout(scrollWidget);
// Add all control groups
scrollLayout->addWidget(createFormatGroup());
scrollLayout->addWidget(createExposureGroup());
scrollLayout->addWidget(createWhiteBalanceGroup());
scrollLayout->addWidget(createImageAdjustmentGroup());
scrollLayout->addStretch();
scrollWidget->setLayout(scrollLayout);
scrollArea->setWidget(scrollWidget);
mainLayout->addWidget(scrollArea);
// Status label at bottom
m_statusLabel = new QLabel("Status: Ready", this);
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
mainLayout->addWidget(m_statusLabel);
setLayout(mainLayout);
}
// Builds the "Video Format" group: a format combo box plus buttons to
// fetch the camera's formats and to apply the selected one.
QGroupBox* CameraControlWidget::createFormatGroup()
{
    QGroupBox* box = new QGroupBox("Video Format", this);
    QVBoxLayout* column = new QVBoxLayout();

    m_formatCombo = new QComboBox(this);
    // Default entry; the userData string packs "width,height,fps,fourcc"
    // and is parsed again in onSetFormat().
    m_formatCombo->addItem("1280x720@30fps UYVY (Supported)", "1280,720,30,UYVY");

    m_getFormatsBtn = new QPushButton("Get Available Formats", this);
    m_setFormatBtn = new QPushButton("Set Format", this);
    connect(m_getFormatsBtn, &QPushButton::clicked, this, &CameraControlWidget::onGetFormats);
    connect(m_setFormatBtn, &QPushButton::clicked, this, &CameraControlWidget::onSetFormat);

    column->addWidget(new QLabel("Select Format:", this));
    column->addWidget(m_formatCombo);
    column->addWidget(m_getFormatsBtn);
    column->addWidget(m_setFormatBtn);

    box->setLayout(column);
    return box;
}
// Builds the "Exposure" group: an auto/manual radio pair, a manual value
// spin box, and an apply button.
QGroupBox* CameraControlWidget::createExposureGroup()
{
    QGroupBox* box = new QGroupBox("Exposure", this);
    QVBoxLayout* column = new QVBoxLayout();

    m_exposureAuto = new QRadioButton("Auto", this);
    m_exposureManual = new QRadioButton("Manual", this);
    m_exposureAuto->setChecked(true);

    // Button group keeps the two radios mutually exclusive.
    QButtonGroup* radios = new QButtonGroup(this);
    radios->addButton(m_exposureAuto);
    radios->addButton(m_exposureManual);
    connect(m_exposureAuto, &QRadioButton::toggled, this, &CameraControlWidget::onExposureModeChanged);

    QHBoxLayout* modeRow = new QHBoxLayout();
    modeRow->addWidget(m_exposureAuto);
    modeRow->addWidget(m_exposureManual);

    // Manual exposure value; stays disabled until "Manual" is selected
    // (toggled via onExposureModeChanged).
    m_exposureValue = new QSpinBox(this);
    m_exposureValue->setRange(1, 10000);
    m_exposureValue->setValue(100);
    m_exposureValue->setEnabled(false);

    m_setExposureBtn = new QPushButton("Set Exposure", this);
    connect(m_setExposureBtn, &QPushButton::clicked, this, &CameraControlWidget::onSetExposure);

    QFormLayout* form = new QFormLayout();
    form->addRow("Mode:", modeRow);
    form->addRow("Value:", m_exposureValue);

    column->addLayout(form);
    column->addWidget(m_setExposureBtn);
    box->setLayout(column);
    return box;
}
// Builds the "White Balance" group: an auto/manual radio pair, a colour
// temperature spin box (Kelvin), and an apply button.
QGroupBox* CameraControlWidget::createWhiteBalanceGroup()
{
    QGroupBox* box = new QGroupBox("White Balance", this);
    QVBoxLayout* column = new QVBoxLayout();

    m_whiteBalanceAuto = new QRadioButton("Auto", this);
    m_whiteBalanceManual = new QRadioButton("Manual", this);
    m_whiteBalanceAuto->setChecked(true);

    // Button group keeps the two radios mutually exclusive.
    QButtonGroup* radios = new QButtonGroup(this);
    radios->addButton(m_whiteBalanceAuto);
    radios->addButton(m_whiteBalanceManual);
    connect(m_whiteBalanceAuto, &QRadioButton::toggled, this, &CameraControlWidget::onWhiteBalanceModeChanged);

    QHBoxLayout* modeRow = new QHBoxLayout();
    modeRow->addWidget(m_whiteBalanceAuto);
    modeRow->addWidget(m_whiteBalanceManual);

    // Manual colour temperature; stays disabled until "Manual" is selected
    // (toggled via onWhiteBalanceModeChanged).
    m_whiteBalanceTemp = new QSpinBox(this);
    m_whiteBalanceTemp->setRange(2800, 6500);
    m_whiteBalanceTemp->setValue(4500);
    m_whiteBalanceTemp->setSuffix(" K");
    m_whiteBalanceTemp->setEnabled(false);

    m_setWhiteBalanceBtn = new QPushButton("Set White Balance", this);
    connect(m_setWhiteBalanceBtn, &QPushButton::clicked, this, &CameraControlWidget::onSetWhiteBalance);

    QFormLayout* form = new QFormLayout();
    form->addRow("Mode:", modeRow);
    form->addRow("Temperature:", m_whiteBalanceTemp);

    column->addLayout(form);
    column->addWidget(m_setWhiteBalanceBtn);
    box->setLayout(column);
    return box;
}
// Builds the "Image Adjustments" group: six slider/spin-box pairs
// (brightness, contrast, saturation, sharpness, gamma, gain).
// Each slider's valueChanged is wired to a handler that sends the new
// value to the backend on every change — there is no "apply" button here.
QGroupBox* CameraControlWidget::createImageAdjustmentGroup()
{
    QGroupBox* groupBox = new QGroupBox("Image Adjustments", this);
    QVBoxLayout* layout = new QVBoxLayout();
    // NOTE: createSliderControl() assigns the member slider/spin-box
    // pointers via its out-parameters, so each connect() below must stay
    // after its corresponding createSliderControl() call.
    layout->addWidget(createSliderControl("Brightness (0-255):", 0, 255, 128,
        &m_brightnessSlider, &m_brightnessSpinBox));
    connect(m_brightnessSlider, &QSlider::valueChanged, this, &CameraControlWidget::onBrightnessChanged);
    layout->addWidget(createSliderControl("Contrast (0-255):", 0, 255, 32,
        &m_contrastSlider, &m_contrastSpinBox));
    connect(m_contrastSlider, &QSlider::valueChanged, this, &CameraControlWidget::onContrastChanged);
    layout->addWidget(createSliderControl("Saturation (0-255):", 0, 255, 64,
        &m_saturationSlider, &m_saturationSpinBox));
    connect(m_saturationSlider, &QSlider::valueChanged, this, &CameraControlWidget::onSaturationChanged);
    layout->addWidget(createSliderControl("Sharpness (0-255):", 0, 255, 3,
        &m_sharpnessSlider, &m_sharpnessSpinBox));
    connect(m_sharpnessSlider, &QSlider::valueChanged, this, &CameraControlWidget::onSharpnessChanged);
    layout->addWidget(createSliderControl("Gamma (72-500):", 72, 500, 100,
        &m_gammaSlider, &m_gammaSpinBox));
    connect(m_gammaSlider, &QSlider::valueChanged, this, &CameraControlWidget::onGammaChanged);
    layout->addWidget(createSliderControl("Gain (0-100):", 0, 100, 0,
        &m_gainSlider, &m_gainSpinBox));
    connect(m_gainSlider, &QSlider::valueChanged, this, &CameraControlWidget::onGainChanged);
    groupBox->setLayout(layout);
    return groupBox;
}
// Builds a labelled slider + spin-box pair whose values stay in sync.
// The created widgets are handed back through the slider/spinBox
// out-parameters so callers can connect additional handlers.
QWidget* CameraControlWidget::createSliderControl(const QString& label, int min, int max, int defaultValue,
                                                  QSlider** slider, QSpinBox** spinBox)
{
    QWidget* container = new QWidget(this);
    QVBoxLayout* column = new QVBoxLayout(container);
    column->setContentsMargins(0, 5, 0, 5);

    *slider = new QSlider(Qt::Horizontal, this);
    (*slider)->setRange(min, max);
    (*slider)->setValue(defaultValue);

    *spinBox = new QSpinBox(this);
    (*spinBox)->setRange(min, max);
    (*spinBox)->setValue(defaultValue);

    // Two-way synchronisation between the two editors.
    connect(*slider, &QSlider::valueChanged, *spinBox, &QSpinBox::setValue);
    connect(*spinBox, QOverload<int>::of(&QSpinBox::valueChanged), *slider, &QSlider::setValue);

    QHBoxLayout* row = new QHBoxLayout();
    row->addWidget(*slider, 1);
    row->addWidget(*spinBox);

    column->addWidget(new QLabel(label, this));
    column->addLayout(row);
    return container;
}
// Queries the backend for supported camera formats and repopulates the
// format combo box. Fix: a successful reply without a "formats" array was
// previously ignored silently; it is now reported via the status label.
void CameraControlWidget::onGetFormats()
{
    m_socketClient->sendCommand("get_formats", QJsonObject(),
        [this](const QJsonObject& response) {
            if (response.contains("formats")) {
                QJsonArray formats = response["formats"].toArray();
                m_formatCombo->clear();
                for (const QJsonValue& val : formats) {
                    QJsonObject fmt = val.toObject();
                    int width = fmt["width"].toInt();
                    int height = fmt["height"].toInt();
                    int fps = fmt["framerate"].toInt();
                    QString format = fmt["format"].toString();
                    // Display text for the user; userData packs
                    // "width,height,fps,fourcc" (parsed again in onSetFormat()).
                    QString displayText = QString("%1x%2@%3fps %4")
                        .arg(width).arg(height).arg(fps).arg(format);
                    QString data = QString("%1,%2,%3,%4").arg(width).arg(height).arg(fps).arg(format);
                    m_formatCombo->addItem(displayText, data);
                }
                updateStatus(QString("Found %1 available formats").arg(formats.size()), true);
            } else {
                // Surface the malformed reply instead of doing nothing.
                updateStatus("Error: Response did not contain a format list", false);
            }
        },
        [this](const QString& error) {
            updateStatus("Error: Failed to get formats: " + error, false);
        });
}
void CameraControlWidget::onSetFormat()
{
QString data = m_formatCombo->currentData().toString();
QStringList parts = data.split(',');
if (parts.size() != 4) {
updateStatus("Error: Invalid format selection", false);
return;
}
QJsonObject params;
params["width"] = parts[0];
params["height"] = parts[1];
params["framerate"] = parts[2];
params["format"] = parts[3];
m_socketClient->sendCommand("set_format", params,
[this](const QJsonObject& response) {
updateStatus("Format set successfully", true);
},
[this](const QString& error) {
updateStatus("Error: Failed to set format: " + error, false);
});
}
// Pushes the exposure mode (and, in manual mode, the exposure value) to the
// backend. Values are sent as strings, matching the rest of the protocol.
void CameraControlWidget::onSetExposure()
{
    QJsonObject params;
    params["mode"] = m_exposureAuto->isChecked() ? "auto" : "manual";
    if (m_exposureManual->isChecked())
        params["value"] = QString::number(m_exposureValue->value());

    m_socketClient->sendCommand("set_exposure", params,
        [this](const QJsonObject&) { updateStatus("Exposure set successfully", true); },
        [this](const QString& error) {
            updateStatus("Error: Failed to set exposure: " + error, false);
        });
}
// Pushes the white-balance mode (and, in manual mode, the colour temperature)
// to the backend.
void CameraControlWidget::onSetWhiteBalance()
{
    QJsonObject params;
    params["mode"] = m_whiteBalanceAuto->isChecked() ? "auto" : "manual";
    if (m_whiteBalanceManual->isChecked())
        params["temperature"] = QString::number(m_whiteBalanceTemp->value());

    m_socketClient->sendCommand("set_whitebalance", params,
        [this](const QJsonObject&) { updateStatus("White balance set successfully", true); },
        [this](const QString& error) {
            updateStatus("Error: Failed to set white balance: " + error, false);
        });
}
// Fire-and-forget: pushes the new brightness to the backend; results from
// live slider drags are intentionally ignored to avoid UI noise.
void CameraControlWidget::onBrightnessChanged(int value)
{
    QJsonObject params{{"value", QString::number(value)}};
    m_socketClient->sendCommand("set_brightness", params,
                                [](const QJsonObject&) {}, [](const QString&) {});
}
// Fire-and-forget contrast update; see onBrightnessChanged for rationale.
void CameraControlWidget::onContrastChanged(int value)
{
    QJsonObject params{{"value", QString::number(value)}};
    m_socketClient->sendCommand("set_contrast", params,
                                [](const QJsonObject&) {}, [](const QString&) {});
}
// Fire-and-forget saturation update; see onBrightnessChanged for rationale.
void CameraControlWidget::onSaturationChanged(int value)
{
    QJsonObject params{{"value", QString::number(value)}};
    m_socketClient->sendCommand("set_saturation", params,
                                [](const QJsonObject&) {}, [](const QString&) {});
}
// Fire-and-forget sharpness update; see onBrightnessChanged for rationale.
void CameraControlWidget::onSharpnessChanged(int value)
{
    QJsonObject params{{"value", QString::number(value)}};
    m_socketClient->sendCommand("set_sharpness", params,
                                [](const QJsonObject&) {}, [](const QString&) {});
}
// Fire-and-forget gamma update; see onBrightnessChanged for rationale.
void CameraControlWidget::onGammaChanged(int value)
{
    QJsonObject params{{"value", QString::number(value)}};
    m_socketClient->sendCommand("set_gamma", params,
                                [](const QJsonObject&) {}, [](const QString&) {});
}
// Fire-and-forget gain update; see onBrightnessChanged for rationale.
void CameraControlWidget::onGainChanged(int value)
{
    QJsonObject params{{"value", QString::number(value)}};
    m_socketClient->sendCommand("set_gain", params,
                                [](const QJsonObject&) {}, [](const QString&) {});
}
// The manual exposure spin box is only editable while "manual" is selected.
void CameraControlWidget::onExposureModeChanged()
{
    const bool manual = m_exposureManual->isChecked();
    m_exposureValue->setEnabled(manual);
}
// The colour-temperature spin box is only editable while "manual" is selected.
void CameraControlWidget::onWhiteBalanceModeChanged()
{
    const bool manual = m_whiteBalanceManual->isChecked();
    m_whiteBalanceTemp->setEnabled(manual);
}
void CameraControlWidget::updateStatus(const QString& status, bool isSuccess)
{
m_statusLabel->setText("Status: " + status);
if (isSuccess) {
m_statusLabel->setStyleSheet("QLabel { background-color: #90EE90; padding: 5px; border-radius: 3px; }");
} else if (status.startsWith("Error")) {
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
} else {
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
}
}

88
cameracontrolwidget.h Normal file
View File

@@ -0,0 +1,88 @@
#ifndef CAMERACONTROLWIDGET_H
#define CAMERACONTROLWIDGET_H
#include <QWidget>
#include <QSlider>
#include <QSpinBox>
#include <QComboBox>
#include <QRadioButton>
#include <QPushButton>
#include <QLabel>
#include <QGroupBox>
#include "socketclient.h"
// Camera-control tab: exposes format selection, exposure, white balance and
// image-adjustment sliders, all forwarded to the VizionStreamer backend via
// a shared SocketClient.
class CameraControlWidget : public QWidget
{
Q_OBJECT
public:
// `socketClient` is borrowed, not owned; it must outlive this widget.
explicit CameraControlWidget(SocketClient* socketClient, QWidget *parent = nullptr);
private slots:
// Queries the backend for supported capture formats and fills the combo.
void onGetFormats();
// Applies the format currently selected in the combo.
void onSetFormat();
// Applies exposure mode (and value, when manual).
void onSetExposure();
// Applies white-balance mode (and temperature, when manual).
void onSetWhiteBalance();
// Fire-and-forget image adjustment slots, wired to the sliders below.
void onBrightnessChanged(int value);
void onContrastChanged(int value);
void onSaturationChanged(int value);
void onSharpnessChanged(int value);
void onGammaChanged(int value);
void onGainChanged(int value);
// Enable/disable the manual editors when the radio selection changes.
void onExposureModeChanged();
void onWhiteBalanceModeChanged();
private:
void setupUI();
QGroupBox* createFormatGroup();
QGroupBox* createExposureGroup();
QGroupBox* createWhiteBalanceGroup();
QGroupBox* createImageAdjustmentGroup();
// Builds a labelled, mutually-synced slider + spin box pair; the widgets are
// returned through the out-parameters.
QWidget* createSliderControl(const QString& label, int min, int max, int defaultValue,
QSlider** slider, QSpinBox** spinBox);
// Borrowed backend connection (owned by MainWindow).
SocketClient* m_socketClient;
// Format controls
QComboBox* m_formatCombo;
QPushButton* m_getFormatsBtn;
QPushButton* m_setFormatBtn;
// Exposure controls
QRadioButton* m_exposureAuto;
QRadioButton* m_exposureManual;
QSpinBox* m_exposureValue;
QPushButton* m_setExposureBtn;
// White Balance controls
QRadioButton* m_whiteBalanceAuto;
QRadioButton* m_whiteBalanceManual;
QSpinBox* m_whiteBalanceTemp;
QPushButton* m_setWhiteBalanceBtn;
// Image adjustment controls
QSlider* m_brightnessSlider;
QSpinBox* m_brightnessSpinBox;
QSlider* m_contrastSlider;
QSpinBox* m_contrastSpinBox;
QSlider* m_saturationSlider;
QSpinBox* m_saturationSpinBox;
QSlider* m_sharpnessSlider;
QSpinBox* m_sharpnessSpinBox;
QSlider* m_gammaSlider;
QSpinBox* m_gammaSpinBox;
QSlider* m_gainSlider;
QSpinBox* m_gainSpinBox;
// Status display
QLabel* m_statusLabel;
// Colour-coded status line (green success / red error / grey neutral).
void updateStatus(const QString& status, bool isSuccess);
};

328
gstreamerpipelinewidget.cpp Normal file
View File

@@ -0,0 +1,328 @@
#include "gstreamerpipelinewidget.h"
#include <QVBoxLayout>
#include <QHBoxLayout>
#include <QGroupBox>
#include <QFrame>
#include <QTimer>
#include <QJsonArray>
// Constructs the pipeline-control tab. `socketClient` is borrowed and must
// outlive this widget. Backend status and the camera's format list are
// fetched immediately so the UI starts populated.
GStreamerPipelineWidget::GStreamerPipelineWidget(SocketClient* socketClient, QWidget *parent)
: QWidget(parent), m_socketClient(socketClient)
{
setupUI();
onGetStatus();
fetchAvailableFormats();
}
// Builds the pipeline-control UI: a prominent one-click "Quick Start",
// followed by manual format selection, pipeline presets, a free-form
// pipeline editor, stream control buttons, and a colour-coded status line.
// Order matters: widgets are appended top-to-bottom into groupLayout.
void GStreamerPipelineWidget::setupUI()
{
QVBoxLayout* mainLayout = new QVBoxLayout(this);
QGroupBox* groupBox = new QGroupBox("GStreamer Pipeline", this);
QVBoxLayout* groupLayout = new QVBoxLayout(groupBox);
// Info label with instructions
m_infoLabel = new QLabel(
"<b>Quick Start:</b> Click 'Quick Start' to automatically configure and start streaming.<br>"
"<b>Manual:</b> 1. Set video format → 2. Set pipeline → 3. Start stream", this);
m_infoLabel->setStyleSheet("QLabel { background-color: #e3f2fd; padding: 8px; border-radius: 4px; }");
m_infoLabel->setWordWrap(true);
groupLayout->addWidget(m_infoLabel);
// Quick Start button (prominent)
m_quickStartBtn = new QPushButton("⚡ Quick Start (Auto Configure & Stream)", this);
m_quickStartBtn->setStyleSheet("QPushButton { background-color: #4CAF50; color: white; font-weight: bold; padding: 10px; }");
connect(m_quickStartBtn, &QPushButton::clicked, this, &GStreamerPipelineWidget::onQuickStart);
groupLayout->addWidget(m_quickStartBtn);
// Separator
QFrame* line = new QFrame(this);
line->setFrameShape(QFrame::HLine);
line->setFrameShadow(QFrame::Sunken);
groupLayout->addWidget(line);
// Format selection. A known-good default is pre-seeded; the real list is
// fetched asynchronously by fetchAvailableFormats() and replaces it.
QLabel* formatLabel = new QLabel("Video Format:", this);
m_formatCombo = new QComboBox(this);
m_formatCombo->addItem("1280x720@30fps UYVY (Default/Supported)", "1280,720,30,UYVY");
groupLayout->addWidget(formatLabel);
groupLayout->addWidget(m_formatCombo);
// Pipeline presets. Item data carries the gst-launch fragment; the "Custom"
// entry's empty data means "keep whatever is in the editor".
QLabel* presetsLabel = new QLabel("Pipeline Presets:", this);
m_pipelinePresets = new QComboBox(this);
m_pipelinePresets->addItem("MJPEG UDP Stream (Best for raw formats)", "videoconvert ! jpegenc ! rtpjpegpay ! udpsink host=127.0.0.1 port=5000");
m_pipelinePresets->addItem("UDP H.264 Stream (Requires gst-libav)", "videoconvert ! x264enc tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000");
m_pipelinePresets->addItem("Custom", "");
m_pipelinePresets->addItem("Test - Fake Sink (No Output)", "fakesink");
m_pipelinePresets->addItem("Local Display", "videoconvert ! autovideosink");
m_pipelinePresets->addItem("TCP H.264 Stream", "videoconvert ! x264enc ! h264parse ! mpegtsmux ! tcpserversink host=0.0.0.0 port=5000");
m_pipelinePresets->addItem("MJPEG HTTP Stream", "videoconvert ! jpegenc ! multipartmux ! tcpserversink host=0.0.0.0 port=8080");
m_pipelinePresets->addItem("Save to File", "videoconvert ! x264enc ! mp4mux ! filesink location=/tmp/output.mp4");
connect(m_pipelinePresets, QOverload<int>::of(&QComboBox::currentIndexChanged),
this, &GStreamerPipelineWidget::onPipelinePresetChanged);
groupLayout->addWidget(presetsLabel);
groupLayout->addWidget(m_pipelinePresets);
// Pipeline editor
QLabel* pipelineLabel = new QLabel("Pipeline:", this);
m_pipelineEdit = new QTextEdit(this);
m_pipelineEdit->setMaximumHeight(80);
m_pipelineEdit->setPlaceholderText("Enter GStreamer pipeline here...\nExample: videoconvert ! autovideosink");
groupLayout->addWidget(pipelineLabel);
groupLayout->addWidget(m_pipelineEdit);
// Set pipeline button
m_setPipelineBtn = new QPushButton("Set Pipeline", this);
connect(m_setPipelineBtn, &QPushButton::clicked, this, &GStreamerPipelineWidget::onSetPipeline);
groupLayout->addWidget(m_setPipelineBtn);
// Stream control buttons
QHBoxLayout* buttonLayout = new QHBoxLayout();
m_startStreamBtn = new QPushButton("Start Stream", this);
m_stopStreamBtn = new QPushButton("Stop Stream", this);
m_getStatusBtn = new QPushButton("Get Status", this);
connect(m_startStreamBtn, &QPushButton::clicked, this, &GStreamerPipelineWidget::onStartStream);
connect(m_stopStreamBtn, &QPushButton::clicked, this, &GStreamerPipelineWidget::onStopStream);
connect(m_getStatusBtn, &QPushButton::clicked, this, &GStreamerPipelineWidget::onGetStatus);
buttonLayout->addWidget(m_startStreamBtn);
buttonLayout->addWidget(m_stopStreamBtn);
buttonLayout->addWidget(m_getStatusBtn);
groupLayout->addLayout(buttonLayout);
// Status label
m_statusLabel = new QLabel("Status: Unknown", this);
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
groupLayout->addWidget(m_statusLabel);
mainLayout->addWidget(groupBox);
mainLayout->addStretch();
setLayout(mainLayout);
}
void GStreamerPipelineWidget::onPipelinePresetChanged(int index)
{
QString pipeline = m_pipelinePresets->currentData().toString();
if (!pipeline.isEmpty()) {
m_pipelineEdit->setPlainText(pipeline);
}
}
void GStreamerPipelineWidget::onSetPipeline()
{
QString pipeline = m_pipelineEdit->toPlainText().trimmed();
if (pipeline.isEmpty()) {
updateStatus("Error: Empty pipeline", false);
return;
}
QJsonObject params;
params["pipeline"] = pipeline;
m_socketClient->sendCommand("set_pipeline", params,
[this](const QJsonObject& response) {
updateStatus("Pipeline set successfully", false);
},
[this](const QString& error) {
updateStatus("Error: " + error, false);
});
}
void GStreamerPipelineWidget::onStartStream()
{
// First ensure format is set, then start stream
QString formatData = m_formatCombo->currentData().toString();
QStringList parts = formatData.split(',');
QJsonObject formatParams;
formatParams["width"] = parts[0];
formatParams["height"] = parts[1];
formatParams["framerate"] = parts[2];
formatParams["format"] = parts[3];
m_socketClient->sendCommand("set_format", formatParams,
[this](const QJsonObject& response) {
// Now start stream
m_socketClient->sendCommand("start_stream", QJsonObject(),
[this](const QJsonObject& response) {
updateStatus("Streaming started", true);
},
[this](const QString& error) {
updateStatus("Error: Failed to start stream: " + error, false);
});
},
[this](const QString& error) {
// Format setting failed, but maybe it was already set - try starting anyway
m_socketClient->sendCommand("start_stream", QJsonObject(),
[this](const QJsonObject& response) {
updateStatus("Streaming started", true);
},
[this](const QString& error) {
updateStatus("Error: Failed to start stream: " + error, false);
});
});
}
// Asks the backend to stop the current stream and reports the result.
void GStreamerPipelineWidget::onStopStream()
{
    m_socketClient->sendCommand("stop_stream", QJsonObject(),
        [this](const QJsonObject&) { updateStatus("Streaming stopped", false); },
        [this](const QString& error) {
            updateStatus("Error: Failed to stop stream: " + error, false);
        });
}
void GStreamerPipelineWidget::onGetStatus()
{
m_socketClient->sendCommand("get_status", QJsonObject(),
[this](const QJsonObject& response) {
bool streaming = response["streaming"].toBool();
QString pipeline = response["pipeline"].toString();
updateStatus(streaming ? "Streaming" : "Stopped", streaming);
if (!pipeline.isEmpty() && m_pipelineEdit->toPlainText().isEmpty()) {
m_pipelineEdit->setPlainText(pipeline);
}
},
[this](const QString& error) {
updateStatus("Connection Error", false);
});
}
void GStreamerPipelineWidget::updateStatus(const QString& status, bool streaming)
{
m_statusLabel->setText("Status: " + status);
if (streaming) {
m_statusLabel->setStyleSheet("QLabel { background-color: #90EE90; padding: 5px; border-radius: 3px; }");
} else if (status.contains("Error")) {
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
} else {
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
}
}
void GStreamerPipelineWidget::onQuickStart()
{
// Disable button during process
m_quickStartBtn->setEnabled(false);
m_quickStartBtn->setText("Configuring...");
// Step 1: Set format
QString formatData = m_formatCombo->currentData().toString();
QStringList parts = formatData.split(',');
QJsonObject formatParams;
formatParams["width"] = parts[0];
formatParams["height"] = parts[1];
formatParams["framerate"] = parts[2];
formatParams["format"] = parts[3];
m_socketClient->sendCommand("set_format", formatParams,
[this](const QJsonObject& response) {
// Step 2: Use selected preset pipeline or default to MJPEG
QString pipeline = m_pipelinePresets->currentData().toString();
if (pipeline.isEmpty()) {
pipeline = "videoconvert ! jpegenc ! rtpjpegpay ! udpsink host=127.0.0.1 port=5000";
}
m_pipelineEdit->setPlainText(pipeline);
QJsonObject pipelineParams;
pipelineParams["pipeline"] = pipeline;
m_socketClient->sendCommand("set_pipeline", pipelineParams,
[this](const QJsonObject& response) {
// Step 3: Start stream
m_socketClient->sendCommand("start_stream", QJsonObject(),
[this](const QJsonObject& response) {
updateStatus("Streaming started - Switch to Video Viewer tab and click 'Start Viewer'", true);
m_quickStartBtn->setEnabled(true);
m_quickStartBtn->setText("⚡ Quick Start (Auto Configure & Stream)");
},
[this](const QString& error) {
m_quickStartBtn->setEnabled(true);
m_quickStartBtn->setText("⚡ Quick Start (Auto Configure & Stream)");
updateStatus("Error: Failed to start stream: " + error, false);
});
},
[this](const QString& error) {
m_quickStartBtn->setEnabled(true);
m_quickStartBtn->setText("⚡ Quick Start (Auto Configure & Stream)");
updateStatus("Error: Failed to set pipeline: " + error, false);
});
},
[this](const QString& error) {
m_quickStartBtn->setEnabled(true);
m_quickStartBtn->setText("⚡ Quick Start (Auto Configure & Stream)");
updateStatus("Error: Failed to set format: " + error, false);
});
}
void GStreamerPipelineWidget::setFormatAndPipeline()
{
// Called before manual stream start to ensure format is set
QString formatData = m_formatCombo->currentData().toString();
QStringList parts = formatData.split(',');
QJsonObject formatParams;
formatParams["width"] = parts[0];
formatParams["height"] = parts[1];
formatParams["framerate"] = parts[2];
formatParams["format"] = parts[3];
m_socketClient->sendCommand("set_format", formatParams,
[](const QJsonObject& response) {},
[](const QString& error) {});
}
void GStreamerPipelineWidget::fetchAvailableFormats()
{
m_socketClient->sendCommand("get_formats", QJsonObject(),
[this](const QJsonObject& response) {
onFormatsReceived(response);
},
[this](const QString& error) {
qDebug() << "Failed to fetch formats:" << error;
});
}
void GStreamerPipelineWidget::onFormatsReceived(const QJsonObject& response)
{
if (!response.contains("formats")) {
return;
}
QJsonArray formats = response["formats"].toArray();
if (formats.isEmpty()) {
return;
}
// Clear existing formats
m_formatCombo->clear();
// Add all available formats
for (const QJsonValue& val : formats) {
QJsonObject fmt = val.toObject();
int width = fmt["width"].toInt();
int height = fmt["height"].toInt();
int fps = fmt["framerate"].toInt();
QString format = fmt["format"].toString();
QString displayText = QString("%1x%2@%3fps %4")
.arg(width).arg(height).arg(fps).arg(format);
QString data = QString("%1,%2,%3,%4").arg(width).arg(height).arg(fps).arg(format);
m_formatCombo->addItem(displayText, data);
}
qDebug() << "Loaded" << formats.size() << "available formats from camera";
}

46
gstreamerpipelinewidget.h Normal file
View File

@@ -0,0 +1,46 @@
#ifndef GSTREAMERPIPELINEWIDGET_H
#define GSTREAMERPIPELINEWIDGET_H
#include <QWidget>
#include <QTextEdit>
#include <QPushButton>
#include <QLabel>
#include <QComboBox>
#include "socketclient.h"
// Pipeline-control tab: lets the user pick a capture format and a GStreamer
// pipeline (preset or free-form), then start/stop the backend stream via a
// shared SocketClient.
class GStreamerPipelineWidget : public QWidget
{
Q_OBJECT
public:
// `socketClient` is borrowed, not owned; it must outlive this widget.
explicit GStreamerPipelineWidget(SocketClient* socketClient, QWidget *parent = nullptr);
private slots:
// Sends the editor's pipeline text to the backend.
void onSetPipeline();
// Applies the selected format (best effort), then starts the stream.
void onStartStream();
void onStopStream();
// Polls backend streaming state and current pipeline.
void onGetStatus();
// Copies the chosen preset into the pipeline editor.
void onPipelinePresetChanged(int index);
// One-click: set format -> set pipeline -> start stream.
void onQuickStart();
// Rebuilds the format combo from a get_formats reply.
void onFormatsReceived(const QJsonObject& response);
private:
void setupUI();
// Colour-coded status line (green streaming / red error / grey neutral).
void updateStatus(const QString& status, bool streaming);
void setFormatAndPipeline();
void fetchAvailableFormats();
// Borrowed backend connection (owned by MainWindow).
SocketClient* m_socketClient;
QTextEdit* m_pipelineEdit;
QPushButton* m_setPipelineBtn;
QPushButton* m_startStreamBtn;
QPushButton* m_stopStreamBtn;
QPushButton* m_getStatusBtn;
QPushButton* m_quickStartBtn;
QLabel* m_statusLabel;
QLabel* m_infoLabel;
QComboBox* m_pipelinePresets;
QComboBox* m_formatCombo;
};
#endif // GSTREAMERPIPELINEWIDGET_H

11
main.cpp Normal file
View File

@@ -0,0 +1,11 @@
#include "mainwindow.h"
#include <QApplication>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
MainWindow w;
w.show();
return a.exec();
}

49
mainwindow.cpp Normal file
View File

@@ -0,0 +1,49 @@
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QHBoxLayout>
#include <QVBoxLayout>
#include <QSplitter>
#include <QTabWidget>
// Loads the Designer form (menu/status bar), then replaces the central
// widget with the programmatic layout built in setupUI().
MainWindow::MainWindow(QWidget *parent)
: QMainWindow(parent)
, ui(new Ui::MainWindow)
{
ui->setupUi(this);
setWindowTitle("GStreamer Camera Viewer");
resize(1400, 900);
setupUI();
}
// NOTE(review): m_socketClient is created with `this` as its QObject parent
// in setupUI(), so this explicit delete is redundant — Qt would delete it
// during child cleanup. It is harmless: deleting a child first removes it
// from the parent's child list.
MainWindow::~MainWindow()
{
delete m_socketClient;
delete ui;
}
// Builds the main layout: a vertical splitter with the video viewer on top
// and a tab widget (pipeline / camera controls) below. The SocketClient is
// created here and shared by both control tabs; it is Qt-parented to `this`.
void MainWindow::setupUI()
{
// Create socket client
m_socketClient = new SocketClient("/tmp/vizion_control.sock", this);
// Create widgets
m_videoWidget = new VideoViewerWidget(this);
m_pipelineWidget = new GStreamerPipelineWidget(m_socketClient, this);
m_cameraWidget = new CameraControlWidget(m_socketClient, this);
// Create tab widget for controls
QTabWidget* controlTabs = new QTabWidget(this);
controlTabs->addTab(m_pipelineWidget, "Pipeline Control");
controlTabs->addTab(m_cameraWidget, "Camera Control");
// Create vertical splitter: video on top, controls on bottom
QSplitter* mainSplitter = new QSplitter(Qt::Vertical, this);
mainSplitter->addWidget(m_videoWidget);
mainSplitter->addWidget(controlTabs);
mainSplitter->setStretchFactor(0, 3); // Video gets more space
mainSplitter->setStretchFactor(1, 1); // Controls get less space
// Set as central widget
setCentralWidget(mainSplitter);
}

33
mainwindow.h Normal file
View File

@@ -0,0 +1,33 @@
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include "socketclient.h"
#include "gstreamerpipelinewidget.h"
#include "cameracontrolwidget.h"
#include "videoviewerwidget.h"
QT_BEGIN_NAMESPACE
namespace Ui {
class MainWindow;
}
QT_END_NAMESPACE
// Top-level window: video viewer over a tabbed control area. Owns the
// SocketClient shared by the pipeline and camera tabs.
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
MainWindow(QWidget *parent = nullptr);
~MainWindow();
private:
// Creates the socket client and child widgets; called from the constructor.
void setupUI();
Ui::MainWindow *ui;
// Owned (also Qt-parented to this window); shared with the control tabs.
SocketClient* m_socketClient;
GStreamerPipelineWidget* m_pipelineWidget;
CameraControlWidget* m_cameraWidget;
VideoViewerWidget* m_videoWidget;
};

31
mainwindow.ui Normal file
View File

@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>MainWindow</class>
<widget class="QMainWindow" name="MainWindow">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>800</width>
<height>600</height>
</rect>
</property>
<property name="windowTitle">
<string>MainWindow</string>
</property>
<widget class="QWidget" name="centralwidget"/>
<widget class="QMenuBar" name="menubar">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>800</width>
<height>23</height>
</rect>
</property>
</widget>
<widget class="QStatusBar" name="statusbar"/>
</widget>
<resources/>
<connections/>
</ui>

28
run.sh Executable file
View File

@@ -0,0 +1,28 @@
#!/bin/bash
# Run script for gstreamerViewer: sanity-checks the build output and the
# VizionStreamer control socket before launching the GUI.
SOCKET="/tmp/vizion_control.sock"
EXECUTABLE="./build/gstreamerViewer"
# Check if executable exists
if [ ! -f "$EXECUTABLE" ]; then
    echo "Error: Executable not found at $EXECUTABLE"
    echo "Please run ./build.sh first to build the application."
    exit 1
fi
# Check if VizionStreamer socket exists; the viewer can still start without
# it, so only warn and let the user decide.
if [ ! -S "$SOCKET" ]; then
    echo "Warning: VizionStreamer socket not found at $SOCKET"
    echo "Please ensure VizionStreamer backend is running."
    echo ""
    read -p "Continue anyway? (y/n) " -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        exit 1
    fi
fi
echo "Starting gstreamerViewer..."
# Fix: abort if the build directory is missing instead of running a stray
# ./gstreamerViewer from the current directory after a failed cd.
cd build || exit 1
./gstreamerViewer

111
socketclient.cpp Normal file
View File

@@ -0,0 +1,111 @@
#include "socketclient.h"
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>
#include <cstring>
#include <errno.h>
#include <QJsonDocument>
#include <QJsonObject>
#include <QDebug>
// Stores the UNIX-domain control socket path. No connection is made here;
// each sendCommand() opens, uses, and closes its own connection.
SocketClient::SocketClient(const QString& socketPath, QObject *parent)
: QObject(parent), m_socketPath(socketPath)
{
}
void SocketClient::sendCommand(const QString& command, const QJsonObject& params,
ResponseCallback onSuccess, ErrorCallback onError)
{
qDebug() << "[SocketClient] Sending command:" << command;
qDebug() << "[SocketClient] Parameters:" << params;
QJsonObject response = executeCommand(command, params);
qDebug() << "[SocketClient] Response:" << response;
if (response.isEmpty()) {
QString errorMsg = "Failed to connect to socket: " + m_socketPath;
qDebug() << "[SocketClient] ERROR:" << errorMsg;
emit connectionError(errorMsg);
if (onError) {
onError(errorMsg);
}
return;
}
QString status = response["status"].toString();
if (status == "success") {
qDebug() << "[SocketClient] Command successful";
emit commandSuccess(response);
if (onSuccess) {
onSuccess(response);
}
} else {
QString errorMsg = response["message"].toString("Unknown error");
qDebug() << "[SocketClient] Command error:" << errorMsg;
emit commandError(errorMsg);
if (onError) {
onError(errorMsg);
}
}
}
// Blocking request/response over the UNIX-domain control socket. Opens a
// fresh connection per call; returns an empty object on any connection,
// send, receive, or parse failure.
//
// Fixes: send() may write fewer bytes than requested — the original treated
// any non-negative return as success; the payload is now sent in a loop
// (retrying on EINTR). recv()'s result is stored in ssize_t, not int.
//
// NOTE(review): the reply is still read with a single recv() of up to 4095
// bytes; this assumes the backend sends each JSON response in one piece —
// confirm against the VizionStreamer protocol.
QJsonObject SocketClient::executeCommand(const QString& command, const QJsonObject& params)
{
    int sock = socket(AF_UNIX, SOCK_STREAM, 0);
    if (sock < 0) {
        qDebug() << "[SocketClient] Failed to create socket";
        return QJsonObject();
    }

    struct sockaddr_un addr;
    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    strncpy(addr.sun_path, m_socketPath.toUtf8().constData(), sizeof(addr.sun_path) - 1);
    if (::connect(sock, (struct sockaddr*)&addr, sizeof(addr)) < 0) {
        qDebug() << "[SocketClient] Failed to connect to socket:" << m_socketPath;
        qDebug() << "[SocketClient] Error:" << strerror(errno);
        close(sock);
        return QJsonObject();
    }

    // Build the {"command": ..., "params": {...}} request.
    QJsonObject cmdObj;
    cmdObj["command"] = command;
    if (!params.isEmpty()) {
        cmdObj["params"] = params;
    }
    QJsonDocument cmdDoc(cmdObj);
    QByteArray cmdData = cmdDoc.toJson(QJsonDocument::Compact);
    qDebug() << "[SocketClient] Sending:" << cmdData;

    // Send the full payload; send() may accept fewer bytes than requested.
    const char* data = cmdData.constData();
    qsizetype remaining = cmdData.size();
    while (remaining > 0) {
        ssize_t sent = send(sock, data, static_cast<size_t>(remaining), 0);
        if (sent < 0) {
            if (errno == EINTR)
                continue;  // interrupted by a signal — retry
            qDebug() << "[SocketClient] Failed to send data:" << strerror(errno);
            close(sock);
            return QJsonObject();
        }
        data += sent;
        remaining -= sent;
    }

    char buffer[4096];
    ssize_t bytesRead = recv(sock, buffer, sizeof(buffer) - 1, 0);
    if (bytesRead < 0) {
        qDebug() << "[SocketClient] Failed to receive data:" << strerror(errno);
        close(sock);
        return QJsonObject();
    }
    close(sock);

    if (bytesRead > 0) {
        buffer[bytesRead] = '\0';
        qDebug() << "[SocketClient] Received:" << QByteArray(buffer, bytesRead);
        QJsonDocument responseDoc = QJsonDocument::fromJson(QByteArray(buffer, bytesRead));
        return responseDoc.object();
    }
    // bytesRead == 0: the peer closed the connection without replying.
    qDebug() << "[SocketClient] No data received";
    return QJsonObject();
}

33
socketclient.h Normal file
View File

@@ -0,0 +1,33 @@
#ifndef SOCKETCLIENT_H
#define SOCKETCLIENT_H
#include <QObject>
#include <QString>
#include <QJsonDocument>
#include <QJsonObject>
#include <functional>
// Synchronous JSON-over-UNIX-socket client for the VizionStreamer control
// interface. Each sendCommand() opens a fresh connection, writes one JSON
// request, reads one JSON reply, and closes the connection.
class SocketClient : public QObject
{
Q_OBJECT
public:
explicit SocketClient(const QString& socketPath = "/tmp/vizion_control.sock", QObject *parent = nullptr);
using ResponseCallback = std::function<void(const QJsonObject&)>;
using ErrorCallback = std::function<void(const QString&)>;
// Blocking call. Outcome is reported through BOTH the optional callbacks
// and the signals below (commandSuccess/commandError/connectionError).
void sendCommand(const QString& command, const QJsonObject& params = QJsonObject(),
ResponseCallback onSuccess = nullptr, ErrorCallback onError = nullptr);
signals:
void commandSuccess(const QJsonObject& response);
void commandError(const QString& errorMessage);
void connectionError(const QString& errorMessage);
private:
QString m_socketPath;
// Performs one connect/send/recv round trip; empty object on failure.
QJsonObject executeCommand(const QString& command, const QJsonObject& params);
};
#endif // SOCKETCLIENT_H

52
test_connection.sh Executable file
View File

@@ -0,0 +1,52 @@
#!/bin/bash
# Test script to verify VizionStreamer connection via its UNIX control socket.
SOCKET="/tmp/vizion_control.sock"
echo "=== VizionStreamer Connection Test ==="
echo ""
# socat is required to talk to the UNIX socket; fail early with a clear
# message instead of a confusing "could not connect".
if ! command -v socat &> /dev/null; then
    echo "❌ FAIL: socat is not installed"
    exit 1
fi
# Check if socket exists
if [ ! -S "$SOCKET" ]; then
    echo "❌ FAIL: Socket not found at $SOCKET"
    echo "Please ensure VizionStreamer backend is running."
    exit 1
fi
echo "✓ Socket found at $SOCKET"
echo ""
# Test get_status command. Testing the command directly avoids the fragile
# 'RESPONSE=$(...); if [ $? -eq 0 ]' pattern.
echo "Testing get_status command..."
if RESPONSE=$(echo '{"command":"get_status"}' | socat - UNIX-CONNECT:"$SOCKET" 2>/dev/null); then
    echo "✓ Connection successful"
    echo "Response: $RESPONSE"
    echo ""
else
    echo "❌ FAIL: Could not connect to socket"
    exit 1
fi
# Test get_formats command
echo "Testing get_formats command..."
if FORMATS=$(echo '{"command":"get_formats"}' | socat - UNIX-CONNECT:"$SOCKET" 2>/dev/null); then
    echo "✓ get_formats successful"
    # Pretty print if python3 is available
    if command -v python3 &> /dev/null; then
        echo "$FORMATS" | python3 -m json.tool 2>/dev/null || echo "$FORMATS"
    else
        echo "$FORMATS"
    fi
    echo ""
else
    echo "❌ FAIL: Could not get formats"
    exit 1
fi
echo "=== All tests passed! ==="
echo "VizionStreamer backend is ready for use."

323
videoviewerwidget.cpp Normal file
View File

@@ -0,0 +1,323 @@
#include "videoviewerwidget.h"
#include <QVBoxLayout>
#include <QHBoxLayout>
#include <QFormLayout>
#include <QGroupBox>
#include <QDebug>
#include <QTimer>
#include <gst/video/videooverlay.h>
// Initialises GStreamer, then builds the viewer UI. All GStreamer handles
// start out null; the actual pipeline is created on demand in startPipeline().
VideoViewerWidget::VideoViewerWidget(QWidget *parent)
: QWidget(parent), m_pipeline(nullptr), m_videoSink(nullptr),
m_busWatchId(0), m_windowId(0)
{
initGStreamer();
setupUI();
}
// Stops and releases any running pipeline before the widget is destroyed.
VideoViewerWidget::~VideoViewerWidget()
{
cleanupGStreamer();
}
// Initialises the GStreamer library. Command-line GStreamer options are not
// forwarded (argc/argv are not passed through). Per GStreamer documentation,
// calling gst_init() more than once is safe.
void VideoViewerWidget::initGStreamer()
{
gst_init(nullptr, nullptr);
}
// Builds the viewer UI: a black native-window video container on top and a
// control group (source type, host/port, start/stop, status) below.
// The container gets Qt::WA_NativeWindow so a platform window id exists for
// potential video overlay use (see showEvent), although the current
// pipelines render via autovideosink in a separate window.
void VideoViewerWidget::setupUI()
{
QVBoxLayout* mainLayout = new QVBoxLayout(this);
// Video display container
QGroupBox* videoGroup = new QGroupBox("Video Display", this);
QVBoxLayout* videoLayout = new QVBoxLayout();
m_videoContainer = new QWidget(this);
m_videoContainer->setMinimumSize(640, 480);
m_videoContainer->setStyleSheet("background-color: black;");
m_videoContainer->setAttribute(Qt::WA_NativeWindow);
videoLayout->addWidget(m_videoContainer);
videoGroup->setLayout(videoLayout);
// Controls
QGroupBox* controlGroup = new QGroupBox("Viewer Controls", this);
QVBoxLayout* controlLayout = new QVBoxLayout();
// Source type selection; item data is the key consumed by
// buildPipelineString().
QHBoxLayout* sourceLayout = new QHBoxLayout();
sourceLayout->addWidget(new QLabel("Source Type:", this));
m_sourceType = new QComboBox(this);
m_sourceType->addItem("UDP MJPEG Stream (No plugins needed)", "udp-mjpeg");
m_sourceType->addItem("UDP H.264 Stream (Requires gst-libav)", "udp-h264");
m_sourceType->addItem("TCP H.264 Stream", "tcp");
m_sourceType->addItem("MJPEG HTTP Stream", "http");
m_sourceType->addItem("Test Pattern", "test");
connect(m_sourceType, QOverload<int>::of(&QComboBox::currentIndexChanged),
this, &VideoViewerWidget::onSourceTypeChanged);
sourceLayout->addWidget(m_sourceType);
// Host and port
QFormLayout* formLayout = new QFormLayout();
m_hostEdit = new QLineEdit("127.0.0.1", this);
m_portEdit = new QLineEdit("5000", this);
formLayout->addRow("Host:", m_hostEdit);
formLayout->addRow("Port:", m_portEdit);
// Control buttons
QHBoxLayout* buttonLayout = new QHBoxLayout();
m_startBtn = new QPushButton("Start Viewer", this);
m_stopBtn = new QPushButton("Stop Viewer", this);
m_stopBtn->setEnabled(false);
connect(m_startBtn, &QPushButton::clicked, this, &VideoViewerWidget::onStartViewer);
connect(m_stopBtn, &QPushButton::clicked, this, &VideoViewerWidget::onStopViewer);
buttonLayout->addWidget(m_startBtn);
buttonLayout->addWidget(m_stopBtn);
// Status label
m_statusLabel = new QLabel("Status: Stopped", this);
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
controlLayout->addLayout(sourceLayout);
controlLayout->addLayout(formLayout);
controlLayout->addLayout(buttonLayout);
controlLayout->addWidget(m_statusLabel);
controlGroup->setLayout(controlLayout);
mainLayout->addWidget(videoGroup, 1);
mainLayout->addWidget(controlGroup);
setLayout(mainLayout);
}
// Lazily captures the native window handle of the video container the first
// time the widget becomes visible. winId() is called once to force creation
// of the platform window; the handle is then read again shortly afterwards.
// NOTE(review): the 100 ms single-shot presumably works around the native
// window not being fully realised immediately after creation — confirm this
// is still needed on the target platform.
void VideoViewerWidget::showEvent(QShowEvent* event)
{
QWidget::showEvent(event);
if (!m_windowId) {
m_videoContainer->winId(); // Force window creation
QTimer::singleShot(100, this, [this]() {
m_windowId = m_videoContainer->winId();
qDebug() << "[VideoViewer] Window ID initialized:" << m_windowId;
});
}
}
// Assembles a gst-launch style description for the currently selected source
// type. Every variant terminates in autovideosink, which opens a separate
// window — overlaying into the Qt widget is not used here (see original
// note: VideoOverlay with Qt widgets doesn't work reliably on this system).
// Returns an empty string for an unknown source type.
QString VideoViewerWidget::buildPipelineString()
{
    const QString sourceType = m_sourceType->currentData().toString();
    const QString host = m_hostEdit->text();
    const QString port = m_portEdit->text();

    if (sourceType == "udp-mjpeg") {
        return QString("udpsrc port=%1 ! application/x-rtp,encoding-name=JPEG,payload=26 ! "
                       "rtpjpegdepay ! jpegdec ! autovideosink")
            .arg(port);
    }
    if (sourceType == "udp-h264") {
        return QString("udpsrc port=%1 ! application/x-rtp,encoding-name=H264 ! "
                       "rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink")
            .arg(port);
    }
    if (sourceType == "tcp") {
        return QString("tcpclientsrc host=%1 port=%2 ! tsdemux ! h264parse ! avdec_h264 ! "
                       "videoconvert ! autovideosink")
            .arg(host).arg(port);
    }
    if (sourceType == "http") {
        return QString("souphttpsrc location=http://%1:%2 ! multipartdemux ! jpegdec ! "
                       "videoconvert ! autovideosink")
            .arg(host).arg(port);
    }
    if (sourceType == "test") {
        return QString("videotestsrc ! autovideosink");
    }
    return QString();
}
// Creates and starts the viewing pipeline described by buildPipelineString().
// Any previously running pipeline is stopped first. On failure the status
// label is turned red and all GStreamer state is released.
//
// Fix: per GStreamer docs, gst_parse_launch() can return a partially
// constructed pipeline *and* set a GError; the original returned on error
// without releasing that pipeline, leaking it.
void VideoViewerWidget::startPipeline()
{
    if (m_pipeline) {
        stopPipeline();
    }

    QString pipelineStr = buildPipelineString();
    qDebug() << "[VideoViewer] Starting pipeline:" << pipelineStr;

    GError* error = nullptr;
    m_pipeline = gst_parse_launch(pipelineStr.toUtf8().constData(), &error);
    if (error) {
        m_statusLabel->setText(QString("Status: Pipeline Error - %1").arg(error->message));
        m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        g_error_free(error);
        // gst_parse_launch() may hand back a partially built pipeline even
        // when it reports an error — release it so it is not leaked.
        if (m_pipeline) {
            gst_object_unref(m_pipeline);
            m_pipeline = nullptr;
        }
        return;
    }
    if (!m_pipeline) {
        m_statusLabel->setText("Status: Failed to create pipeline");
        m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        return;
    }

    // Set up bus callback so GStreamer errors/EOS reach busCallback().
    GstBus* bus = gst_element_get_bus(m_pipeline);
    m_busWatchId = gst_bus_add_watch(bus, busCallback, this);
    gst_object_unref(bus);

    // Note: VideoOverlay disabled - using autovideosink with separate window instead
    // Start playing
    GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
    qDebug() << "[VideoViewer] Pipeline state change return:" << ret;
    if (ret == GST_STATE_CHANGE_FAILURE) {
        m_statusLabel->setText("Status: Failed to start pipeline");
        m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        cleanupGStreamer();
        return;
    }

    qDebug() << "[VideoViewer] Pipeline started successfully";
    m_statusLabel->setText("Status: Playing");
    m_statusLabel->setStyleSheet("QLabel { background-color: #90EE90; padding: 5px; border-radius: 3px; }");
    m_startBtn->setEnabled(false);
    m_stopBtn->setEnabled(true);
}
// Halt playback, release every GStreamer reference this widget holds,
// and return the UI to its idle ("Stopped") state.
void VideoViewerWidget::stopPipeline()
{
    if (m_pipeline != nullptr) {
        // Driving the pipeline to NULL releases its resources before unref.
        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        gst_object_unref(m_pipeline);
        m_pipeline = nullptr;
    }
    if (m_videoSink != nullptr) {
        gst_object_unref(m_videoSink);
        m_videoSink = nullptr;
    }
    if (m_busWatchId != 0) {
        // Detach the bus watch so busCallback() stops firing.
        g_source_remove(m_busWatchId);
        m_busWatchId = 0;
    }
    m_statusLabel->setText("Status: Stopped");
    m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
    m_startBtn->setEnabled(true);
    m_stopBtn->setEnabled(false);
}
// Release all GStreamer resources; currently identical to stopPipeline(),
// kept as a separate entry point for shutdown paths (e.g. the destructor).
void VideoViewerWidget::cleanupGStreamer()
{
    stopPipeline();
}
// GStreamer bus watch: dispatches pipeline messages (error, EOS, state
// changes, warnings, info) back onto the Qt main thread where needed.
// `data` is the owning VideoViewerWidget. Always returns TRUE to keep
// the watch installed.
gboolean VideoViewerWidget::busCallback(GstBus* bus, GstMessage* msg, gpointer data)
{
    VideoViewerWidget* viewer = static_cast<VideoViewerWidget*>(data);
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR: {
        GError* err;
        gchar* debug_info;
        gst_message_parse_error(msg, &err, &debug_info);
        QString errorMsg = QString("GStreamer Error: %1\nDebug: %2")
                              .arg(err->message)
                              .arg(debug_info ? debug_info : "none");
        qDebug() << "[VideoViewer] ERROR:" << errorMsg;
        QMetaObject::invokeMethod(viewer, [viewer, errorMsg]() {
            // Stop FIRST, then set the label: stopPipeline() writes
            // "Status: Stopped", which previously clobbered the error text.
            viewer->stopPipeline();
            viewer->m_statusLabel->setText("Status: Stream Error - " + errorMsg);
            viewer->m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        }, Qt::QueuedConnection);
        g_error_free(err);
        g_free(debug_info);
        break;
    }
    case GST_MESSAGE_EOS:
        qDebug() << "[VideoViewer] End of stream";
        QMetaObject::invokeMethod(viewer, [viewer]() {
            // Same ordering as the error case: stop before labelling so the
            // "End of Stream" text is not overwritten by stopPipeline().
            viewer->stopPipeline();
            viewer->m_statusLabel->setText("Status: End of Stream");
        }, Qt::QueuedConnection);
        break;
    case GST_MESSAGE_STATE_CHANGED:
        // Only log transitions of the top-level pipeline, not every element.
        if (GST_MESSAGE_SRC(msg) == GST_OBJECT(viewer->m_pipeline)) {
            GstState oldState, newState, pendingState;
            gst_message_parse_state_changed(msg, &oldState, &newState, &pendingState);
            qDebug() << "[VideoViewer] State changed:"
                     << gst_element_state_get_name(oldState) << "->"
                     << gst_element_state_get_name(newState);
        }
        break;
    case GST_MESSAGE_WARNING: {
        GError* err;
        gchar* debug_info;
        gst_message_parse_warning(msg, &err, &debug_info);
        qDebug() << "[VideoViewer] WARNING:" << err->message;
        g_error_free(err);
        g_free(debug_info);
        break;
    }
    case GST_MESSAGE_INFO: {
        GError* err;
        gchar* debug_info;
        gst_message_parse_info(msg, &err, &debug_info);
        qDebug() << "[VideoViewer] INFO:" << err->message;
        g_error_free(err);
        g_free(debug_info);
        break;
    }
    default:
        break;
    }
    return TRUE;
}
// Slot for the Start button: delegates to startPipeline().
void VideoViewerWidget::onStartViewer()
{
    startPipeline();
}
// Slot for the Stop button: delegates to stopPipeline().
void VideoViewerWidget::onStopViewer()
{
    stopPipeline();
}
void VideoViewerWidget::onSourceTypeChanged(int index)
{
QString sourceType = m_sourceType->currentData().toString();
bool needsNetwork = (sourceType != "test");
bool isUdp = (sourceType == "udp-mjpeg" || sourceType == "udp-h264");
m_hostEdit->setEnabled(needsNetwork && !isUdp);
m_portEdit->setEnabled(needsNetwork);
}
// Sync bus handler: when the video sink asks for a native window handle,
// hand it the widget's stored window ID so output is embedded.
// `data` is the owning VideoViewerWidget.
void VideoViewerWidget::onPrepareWindowHandle(GstBus* bus, GstMessage* msg, gpointer data)
{
    // Ignore everything except the overlay's prepare-window-handle request.
    if (!gst_is_video_overlay_prepare_window_handle_message(msg)) {
        return;
    }
    auto* viewer = static_cast<VideoViewerWidget*>(data);
    if (!viewer->m_windowId) {
        qDebug() << "[VideoViewer] prepare-window-handle: No window ID available yet";
        return;
    }
    GstElement* sink = GST_ELEMENT(GST_MESSAGE_SRC(msg));
    qDebug() << "[VideoViewer] prepare-window-handle: Setting window ID" << viewer->m_windowId;
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), viewer->m_windowId);
}

55
videoviewerwidget.h Normal file
View File

@@ -0,0 +1,55 @@
#ifndef VIDEOVIEWERWIDGET_H
#define VIDEOVIEWERWIDGET_H
#include <QWidget>
#include <QPushButton>
#include <QComboBox>
#include <QLineEdit>
#include <QLabel>
#include <gst/gst.h>
// Widget that builds and controls a GStreamer playback pipeline from
// UI-selected source settings (UDP MJPEG/H.264, TCP, HTTP, or test source).
// Video output currently goes through autovideosink in a separate window;
// Qt-embedded VideoOverlay output is disabled (see the .cpp for why).
class VideoViewerWidget : public QWidget
{
    Q_OBJECT
public:
    explicit VideoViewerWidget(QWidget *parent = nullptr);
    ~VideoViewerWidget();
protected:
    void showEvent(QShowEvent* event) override;
private slots:
    void onStartViewer();              // Start button: launches the pipeline
    void onStopViewer();               // Stop button: tears the pipeline down
    void onSourceTypeChanged(int index); // enables/disables host & port fields
private:
    void setupUI();
    void initGStreamer();
    void cleanupGStreamer();           // releases all GStreamer resources
    void startPipeline();
    void stopPipeline();
    QString buildPipelineString();     // gst-launch description for current UI state
    void setupVideoOverlay();
    // Bus watch: handles error/EOS/state-change/warning/info messages.
    static gboolean busCallback(GstBus* bus, GstMessage* msg, gpointer data);
    // Sync handler for the overlay's prepare-window-handle request.
    static void onPrepareWindowHandle(GstBus* bus, GstMessage* msg, gpointer data);
    // UI elements
    QWidget* m_videoContainer;
    QPushButton* m_startBtn;
    QPushButton* m_stopBtn;
    QComboBox* m_sourceType;   // item data holds the source kind string
    QLineEdit* m_hostEdit;
    QLineEdit* m_portEdit;
    QLabel* m_statusLabel;
    // GStreamer elements
    GstElement* m_pipeline;    // owned; nullptr when stopped
    GstElement* m_videoSink;   // owned when set; released in stopPipeline()
    guint m_busWatchId;        // 0 when no bus watch is installed
    WId m_windowId;            // native handle handed to the video overlay
};
#endif // VIDEOVIEWERWIDGET_H