Files
gstreamerViewer/videoviewerwidget.cpp
Maik Jurischka 5fed3070de add zoom control
2025-12-19 07:01:27 +01:00

496 lines
17 KiB
C++

#include "videoviewerwidget.h"
#include <QVBoxLayout>
#include <QHBoxLayout>
#include <QFormLayout>
#include <QGroupBox>
#include <QDebug>
#include <QPixmap>
#include <gst/video/video.h>
// Constructs the viewer: boots GStreamer, builds the UI, and wires the
// cross-thread frame signal to the GUI-thread display slot.
VideoViewerWidget::VideoViewerWidget(QWidget *parent)
    : QWidget(parent), m_pipeline(nullptr), m_appSink(nullptr),
      m_busWatchId(0), m_zoomFactor(1.0)
{
    // QImage must be known to the meta-type system so it can travel
    // through a queued (cross-thread) signal/slot connection.
    qRegisterMetaType<QImage>("QImage");

    initGStreamer();
    setupUI();

    // Frames are produced on a GStreamer streaming thread; a queued
    // connection marshals them onto the GUI thread before painting.
    connect(this, &VideoViewerWidget::newFrameAvailable,
            this, &VideoViewerWidget::displayFrame, Qt::QueuedConnection);
}
// Tears down any running pipeline (bus watch, appsink reference, pipeline
// element) before the widget is destroyed.
VideoViewerWidget::~VideoViewerWidget()
{
cleanupGStreamer();
}
// Initializes the GStreamer library. gst_init() is safe to call more than
// once; passing null argc/argv skips GStreamer command-line option parsing.
void VideoViewerWidget::initGStreamer()
{
gst_init(nullptr, nullptr);
}
void VideoViewerWidget::setupUI()
{
QVBoxLayout* mainLayout = new QVBoxLayout(this);
mainLayout->setContentsMargins(0, 0, 0, 0);
mainLayout->setSpacing(5);
// Video display in scroll area for zoom support
m_scrollArea = new QScrollArea(this);
m_scrollArea->setWidgetResizable(false);
m_scrollArea->setAlignment(Qt::AlignCenter);
m_scrollArea->setStyleSheet("QScrollArea { background-color: black; border: 1px solid #666; }");
m_videoDisplay = new QLabel();
m_videoDisplay->setMinimumSize(320, 240);
m_videoDisplay->setStyleSheet("background-color: black;");
m_videoDisplay->setAlignment(Qt::AlignCenter);
m_videoDisplay->setScaledContents(false);
m_scrollArea->setWidget(m_videoDisplay);
// Zoom control
QHBoxLayout* zoomLayout = new QHBoxLayout();
zoomLayout->addWidget(new QLabel("Zoom:", this));
m_zoomSlider = new QSlider(Qt::Horizontal, this);
m_zoomSlider->setMinimum(50); // 50%
m_zoomSlider->setMaximum(200); // 200%
m_zoomSlider->setValue(100); // 100% default
m_zoomSlider->setTickPosition(QSlider::TicksBelow);
m_zoomSlider->setTickInterval(25);
connect(m_zoomSlider, &QSlider::valueChanged, this, &VideoViewerWidget::onZoomChanged);
m_zoomLabel = new QLabel("100%", this);
m_zoomLabel->setMinimumWidth(50);
zoomLayout->addWidget(m_zoomSlider);
zoomLayout->addWidget(m_zoomLabel);
// Controls
QGroupBox* controlGroup = new QGroupBox("Viewer Controls", this);
QVBoxLayout* controlLayout = new QVBoxLayout();
// Source type selection
QHBoxLayout* sourceLayout = new QHBoxLayout();
sourceLayout->addWidget(new QLabel("Source Type:", this));
m_sourceType = new QComboBox(this);
m_sourceType->addItem("UDP MJPEG Stream (No plugins needed)", "udp-mjpeg");
m_sourceType->addItem("UDP H.264 Stream (Requires gst-libav)", "udp-h264");
m_sourceType->addItem("TCP H.264 Stream", "tcp");
m_sourceType->addItem("MJPEG HTTP Stream", "http");
m_sourceType->addItem("Test Pattern", "test");
connect(m_sourceType, QOverload<int>::of(&QComboBox::currentIndexChanged),
this, &VideoViewerWidget::onSourceTypeChanged);
sourceLayout->addWidget(m_sourceType);
// Host and port
QFormLayout* formLayout = new QFormLayout();
m_hostEdit = new QLineEdit("127.0.0.1", this);
m_portEdit = new QLineEdit("5000", this);
formLayout->addRow("Host:", m_hostEdit);
formLayout->addRow("Port:", m_portEdit);
// Control buttons
QHBoxLayout* buttonLayout = new QHBoxLayout();
m_startBtn = new QPushButton("Start Viewer", this);
m_stopBtn = new QPushButton("Stop Viewer", this);
m_stopBtn->setEnabled(false);
connect(m_startBtn, &QPushButton::clicked, this, &VideoViewerWidget::onStartViewer);
connect(m_stopBtn, &QPushButton::clicked, this, &VideoViewerWidget::onStopViewer);
buttonLayout->addWidget(m_startBtn);
buttonLayout->addWidget(m_stopBtn);
// Status label
m_statusLabel = new QLabel("Status: Stopped", this);
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
controlLayout->addLayout(sourceLayout);
controlLayout->addLayout(formLayout);
controlLayout->addLayout(buttonLayout);
controlLayout->addWidget(m_statusLabel);
controlGroup->setLayout(controlLayout);
// Add to main layout: video takes most space, zoom control, then viewer controls at bottom
mainLayout->addWidget(m_scrollArea, 1);
mainLayout->addLayout(zoomLayout);
mainLayout->addWidget(controlGroup);
setLayout(mainLayout);
}
// Assembles the gst_parse_launch() description for the currently selected
// source. Every variant ends in the same RGB-converting appsink (named
// "videosink") so frames can be pulled and converted to QImages.
QString VideoViewerWidget::buildPipelineString()
{
    const QString kind = m_sourceType->currentData().toString();
    const QString host = m_hostEdit->text();
    const QString port = m_portEdit->text();

    // Common tail: convert to packed RGB and hand frames to the appsink.
    const QString sinkPipeline = "videoconvert ! video/x-raw,format=RGB ! appsink name=videosink emit-signals=true";

    if (kind == "udp-mjpeg")
        return QString("udpsrc port=%1 ! application/x-rtp,encoding-name=JPEG,payload=26 ! "
                       "rtpjpegdepay ! jpegdec ! %2")
            .arg(port).arg(sinkPipeline);
    if (kind == "udp-h264")
        return QString("udpsrc port=%1 ! application/x-rtp,encoding-name=H264 ! "
                       "rtph264depay ! h264parse ! avdec_h264 ! %2")
            .arg(port).arg(sinkPipeline);
    if (kind == "tcp")
        return QString("tcpclientsrc host=%1 port=%2 ! tsdemux ! h264parse ! avdec_h264 ! %3")
            .arg(host).arg(port).arg(sinkPipeline);
    if (kind == "http")
        return QString("souphttpsrc location=http://%1:%2 ! multipartdemux ! jpegdec ! %3")
            .arg(host).arg(port).arg(sinkPipeline);
    if (kind == "test")
        return QString("videotestsrc ! %1").arg(sinkPipeline);

    // Unknown selection: empty string; the caller reports the parse failure.
    return QString();
}
// Builds and launches the GStreamer pipeline for the selected source.
// On success the appsink callback starts delivering frames; on failure the
// partially-built pipeline is torn down and the status label shows the error.
void VideoViewerWidget::startPipeline()
{
    if (m_pipeline) {
        stopPipeline();
    }

    QString pipelineStr = buildPipelineString();
    qDebug() << "[VideoViewer] Starting pipeline:" << pipelineStr;

    GError* error = nullptr;
    m_pipeline = gst_parse_launch(pipelineStr.toUtf8().constData(), &error);
    if (error) {
        // gst_parse_launch() may return a partially constructed pipeline
        // together with an error; release it so it does not leak.
        if (m_pipeline) {
            gst_object_unref(m_pipeline);
            m_pipeline = nullptr;
        }
        m_statusLabel->setText(QString("Status: Pipeline Error - %1").arg(error->message));
        m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        g_error_free(error);
        return;
    }
    if (!m_pipeline) {
        m_statusLabel->setText("Status: Failed to create pipeline");
        m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        return;
    }

    // Watch the pipeline bus for errors, EOS and state-change messages.
    GstBus* bus = gst_element_get_bus(m_pipeline);
    m_busWatchId = gst_bus_add_watch(bus, busCallback, this);
    gst_object_unref(bus);

    // Locate the appsink declared in the pipeline description.
    m_appSink = gst_bin_get_by_name(GST_BIN(m_pipeline), "videosink");
    if (!m_appSink) {
        // Tear down BEFORE setting the error text: cleanupGStreamer() ->
        // stopPipeline() overwrites the status label with "Status: Stopped",
        // which previously clobbered the error message.
        cleanupGStreamer();
        m_statusLabel->setText("Status: Failed to get appsink element");
        m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        return;
    }

    // Low-latency display: don't sync to the clock, keep only the newest
    // buffer and drop older ones.
    g_object_set(m_appSink, "emit-signals", TRUE, "sync", FALSE, "max-buffers", 1, "drop", TRUE, nullptr);

    // Register the new-sample callback; zero-initialization also covers
    // callback fields added by newer GStreamer versions.
    GstAppSinkCallbacks callbacks = { 0 };
    callbacks.new_sample = newSampleCallback;
    callbacks.eos = nullptr;
    callbacks.new_preroll = nullptr;
#if GST_CHECK_VERSION(1,20,0)
    callbacks.new_event = nullptr;
#endif
    gst_app_sink_set_callbacks(GST_APP_SINK(m_appSink), &callbacks, this, nullptr);

    // Start playback (may complete asynchronously).
    GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
    qDebug() << "[VideoViewer] Pipeline state change return:" << ret;
    if (ret == GST_STATE_CHANGE_FAILURE) {
        // Same ordering as above: clean up first so the error text survives.
        cleanupGStreamer();
        m_statusLabel->setText("Status: Failed to start pipeline");
        m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
        return;
    }

    qDebug() << "[VideoViewer] Pipeline started successfully";
    m_statusLabel->setText("Status: Playing");
    m_statusLabel->setStyleSheet("QLabel { background-color: #90EE90; padding: 5px; border-radius: 3px; }");
    m_startBtn->setEnabled(false);
    m_stopBtn->setEnabled(true);
}
// Stops and releases the pipeline, detaches the bus watch, clears the video
// display and cached frame, and restores the UI to its idle state.
// Safe to call when no pipeline is running.
void VideoViewerWidget::stopPipeline()
{
    // Detach the bus watch FIRST so no further bus messages are dispatched
    // while (or after) the pipeline is being torn down.
    if (m_busWatchId > 0) {
        g_source_remove(m_busWatchId);
        m_busWatchId = 0;
    }
    if (m_pipeline) {
        gst_element_set_state(m_pipeline, GST_STATE_NULL);
        gst_object_unref(m_pipeline);
        m_pipeline = nullptr;
    }
    if (m_appSink) {
        gst_object_unref(m_appSink);
        m_appSink = nullptr;
    }

    // Blank the display (clear() removes both pixmap and text, so no extra
    // setText("") is needed) and drop the cached frame so a stale image is
    // not re-shown by a later zoom change.
    m_videoDisplay->clear();
    m_currentFrame = QImage();

    m_statusLabel->setText("Status: Stopped");
    m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
    m_startBtn->setEnabled(true);
    m_stopBtn->setEnabled(false);
}
// Releases all GStreamer resources; currently equivalent to stopPipeline().
// Kept as a separate entry point for destructor/teardown call sites.
void VideoViewerWidget::cleanupGStreamer()
{
stopPipeline();
}
// Bus watch callback registered via gst_bus_add_watch(); 'data' is the
// VideoViewerWidget instance. Runs on the thread driving the GLib main
// context, so all widget updates are routed through queued invocations
// to execute on the Qt GUI thread. Returns TRUE to keep the watch alive.
gboolean VideoViewerWidget::busCallback(GstBus* bus, GstMessage* msg, gpointer data)
{
VideoViewerWidget* viewer = static_cast<VideoViewerWidget*>(data);
switch (GST_MESSAGE_TYPE(msg)) {
// Fatal stream error: show the message in the status label and tear the
// pipeline down (both deferred to the GUI thread).
case GST_MESSAGE_ERROR: {
GError* err;
gchar* debug_info;
gst_message_parse_error(msg, &err, &debug_info);
// debug_info may be null; substitute "none" for display.
QString errorMsg = QString("GStreamer Error: %1\nDebug: %2")
.arg(err->message)
.arg(debug_info ? debug_info : "none")
qDebug() << "[VideoViewer] ERROR:" << errorMsg;
// Queued lambda: label update and stopPipeline() run on the GUI thread.
QMetaObject::invokeMethod(viewer, [viewer, errorMsg]() {
viewer->m_statusLabel->setText("Status: Stream Error - " + errorMsg);
viewer->m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
viewer->stopPipeline();
}, Qt::QueuedConnection);
g_error_free(err);
g_free(debug_info);
break;
}
// End of stream: report it and stop the pipeline on the GUI thread.
case GST_MESSAGE_EOS:
qDebug() << "[VideoViewer] End of stream";
QMetaObject::invokeMethod(viewer, [viewer]() {
viewer->m_statusLabel->setText("Status: End of Stream");
viewer->stopPipeline();
}, Qt::QueuedConnection);
break;
// State changes are logged only when they come from the top-level
// pipeline (child elements also post these messages).
case GST_MESSAGE_STATE_CHANGED:
if (GST_MESSAGE_SRC(msg) == GST_OBJECT(viewer->m_pipeline)) {
GstState oldState, newState, pendingState;
gst_message_parse_state_changed(msg, &oldState, &newState, &pendingState);
qDebug() << "[VideoViewer] State changed:"
<< gst_element_state_get_name(oldState) << "->"
<< gst_element_state_get_name(newState);
}
break;
// Warnings and infos are logged but do not affect the UI.
case GST_MESSAGE_WARNING: {
GError* err;
gchar* debug_info;
gst_message_parse_warning(msg, &err, &debug_info);
qDebug() << "[VideoViewer] WARNING:" << err->message;
g_error_free(err);
g_free(debug_info);
break;
}
case GST_MESSAGE_INFO: {
GError* err;
gchar* debug_info;
gst_message_parse_info(msg, &err, &debug_info);
qDebug() << "[VideoViewer] INFO:" << err->message;
g_error_free(err);
g_free(debug_info);
break;
}
default:
break;
}
// TRUE keeps this watch installed for subsequent messages.
return TRUE;
}
// Slot for the Start button: launches the pipeline for the current settings.
void VideoViewerWidget::onStartViewer()
{
startPipeline();
}
// Slot for the Stop button: tears the pipeline down and resets the UI.
void VideoViewerWidget::onStopViewer()
{
stopPipeline();
}
void VideoViewerWidget::onSourceTypeChanged(int index)
{
QString sourceType = m_sourceType->currentData().toString();
bool needsNetwork = (sourceType != "test");
bool isUdp = (sourceType == "udp-mjpeg" || sourceType == "udp-h264");
m_hostEdit->setEnabled(needsNetwork && !isUdp);
m_portEdit->setEnabled(needsNetwork);
}
// Slot for the zoom slider: stores the new factor (the slider value is in
// percent), updates the percentage read-out, and immediately re-renders the
// last cached frame so the zoom change is visible without a new frame.
void VideoViewerWidget::onZoomChanged(int value)
{
    m_zoomFactor = value / 100.0;
    m_zoomLabel->setText(QString("%1%").arg(value));

    // Nothing to redraw until at least one frame has been cached.
    const bool haveFrame = !m_currentFrame.isNull();
    if (haveFrame) {
        displayFrame(m_currentFrame);
    }
}
// Appsink callback (runs on a GStreamer streaming thread): pulls the newest
// RGB sample, deep-copies it into a QImage, and forwards it to the GUI
// thread via a queued signal. Must not touch widgets directly.
GstFlowReturn VideoViewerWidget::newSampleCallback(GstAppSink* appsink, gpointer user_data)
{
    // Throttled diagnostics: first sample, then every 30th.
    static int callbackCount = 0;
    callbackCount++;
    if (callbackCount == 1) {
        qDebug() << "[VideoViewer] Callback: First sample callback!";
    } else if (callbackCount % 30 == 0) {
        qDebug() << "[VideoViewer] Callback: Samples received:" << callbackCount;
    }

    VideoViewerWidget* viewer = static_cast<VideoViewerWidget*>(user_data);
    if (!viewer) {
        qDebug() << "[VideoViewer] Callback: viewer is null";
        return GST_FLOW_ERROR;
    }

    // Pull the sample from appsink.
    GstSample* sample = gst_app_sink_pull_sample(appsink);
    if (!sample) {
        qDebug() << "[VideoViewer] Callback: Failed to pull sample";
        return GST_FLOW_ERROR;
    }

    GstBuffer* buffer = gst_sample_get_buffer(sample);
    if (!buffer) {
        qDebug() << "[VideoViewer] Callback: No buffer in sample";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    GstCaps* caps = gst_sample_get_caps(sample);
    if (!caps) {
        qDebug() << "[VideoViewer] Callback: No caps in sample";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Parse the full video info rather than just width/height: GStreamer
    // pads RGB rows to 4-byte boundaries, so the real row stride may be
    // larger than width*3. Using width*3 as the QImage bytes-per-line
    // shears the picture for widths that are not a multiple of 4.
    GstVideoInfo info;
    if (!gst_video_info_from_caps(&info, caps)) {
        qDebug() << "[VideoViewer] Callback: Failed to parse video info from caps";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }
    const int width = GST_VIDEO_INFO_WIDTH(&info);
    const int height = GST_VIDEO_INFO_HEIGHT(&info);
    const int stride = GST_VIDEO_INFO_PLANE_STRIDE(&info, 0);
    if (width <= 0 || height <= 0 || stride < width * 3) {
        qDebug() << "[VideoViewer] Callback: Invalid dimensions" << width << "x" << height << "stride" << stride;
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Map the buffer to access the raw pixel data.
    GstMapInfo map;
    if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        qDebug() << "[VideoViewer] Callback: Failed to map buffer";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // The mapped data must cover every row at the advertised stride; the
    // last row only needs width*3 bytes (its padding may be absent).
    const gsize expected_size = (gsize)stride * (height - 1) + (gsize)width * 3;
    if (map.size < expected_size) {
        qDebug() << "[VideoViewer] Callback: Buffer too small. Expected:" << expected_size << "Got:" << map.size;
        gst_buffer_unmap(buffer, &map);
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Wrap the mapped data with the correct stride, then deep-copy: the
    // buffer is unmapped and unreffed before the GUI thread sees the frame.
    QImage frame(map.data, width, height, stride, QImage::Format_RGB888);
    QImage frameCopy = frame.copy();

    gst_buffer_unmap(buffer, &map);
    gst_sample_unref(sample);

    // Queued invocation marshals the frame onto the GUI thread.
    QMetaObject::invokeMethod(viewer, "newFrameAvailable", Qt::QueuedConnection,
                              Q_ARG(QImage, frameCopy));
    return GST_FLOW_OK;
}
void VideoViewerWidget::displayFrame(const QImage& frame)
{
if (frame.isNull()) {
qDebug() << "[VideoViewer] displayFrame: Frame is null";
return;
}
static bool firstFrame = true;
static int frameCount = 0;
frameCount++;
bool debugThisFrame = firstFrame || (frameCount % 30 == 0);
if (firstFrame) {
qDebug() << "[VideoViewer] First frame received! Size:" << frame.width() << "x" << frame.height()
<< "Format:" << frame.format();
firstFrame = false;
} else if (frameCount % 30 == 0) {
qDebug() << "[VideoViewer] Frames received:" << frameCount;
}
// Store current frame for zoom changes
m_currentFrame = frame;
// Convert QImage to QPixmap
QPixmap pixmap = QPixmap::fromImage(frame);
if (pixmap.isNull()) {
qDebug() << "[VideoViewer] ERROR: Pixmap conversion failed!";
return;
}
// Calculate target size with zoom factor
QSize targetSize = frame.size() * m_zoomFactor;
// Scale pixmap with zoom factor
QPixmap scaledPixmap = pixmap.scaled(targetSize,
Qt::KeepAspectRatio,
Qt::SmoothTransformation);
if (debugThisFrame) {
qDebug() << "[VideoViewer] Target size:" << targetSize << "Zoom:" << m_zoomFactor
<< "Scaled pixmap:" << scaledPixmap.size();
}
// Update the label size to match the scaled pixmap
m_videoDisplay->setPixmap(scaledPixmap);
m_videoDisplay->resize(scaledPixmap.size());
m_videoDisplay->update();
}