Use appsink instead of autovideosink for embedded frame display in the Qt widget

This commit is contained in:
Maik Jurischka
2025-12-19 06:40:58 +01:00
parent 69e2f3ae1d
commit 34148a592a
3 changed files with 153 additions and 56 deletions

View File

@@ -3,7 +3,7 @@ project(gstreamerViewer LANGUAGES CXX)
find_package(Qt6 6.5 REQUIRED COMPONENTS Core Widgets) find_package(Qt6 6.5 REQUIRED COMPONENTS Core Widgets)
find_package(PkgConfig REQUIRED) find_package(PkgConfig REQUIRED)
pkg_check_modules(GSTREAMER REQUIRED gstreamer-1.0 gstreamer-video-1.0) pkg_check_modules(GSTREAMER REQUIRED gstreamer-1.0 gstreamer-video-1.0 gstreamer-app-1.0)
qt_standard_project_setup() qt_standard_project_setup()

View File

@@ -4,15 +4,22 @@
#include <QFormLayout> #include <QFormLayout>
#include <QGroupBox> #include <QGroupBox>
#include <QDebug> #include <QDebug>
#include <QTimer> #include <QPixmap>
#include <gst/video/videooverlay.h> #include <gst/video/video.h>
VideoViewerWidget::VideoViewerWidget(QWidget *parent) VideoViewerWidget::VideoViewerWidget(QWidget *parent)
: QWidget(parent), m_pipeline(nullptr), m_videoSink(nullptr), : QWidget(parent), m_pipeline(nullptr), m_appSink(nullptr),
m_busWatchId(0), m_windowId(0) m_busWatchId(0)
{ {
// Register QImage as meta type for signal/slot across threads
qRegisterMetaType<QImage>("QImage");
initGStreamer(); initGStreamer();
setupUI(); setupUI();
// Connect signal for frame display
connect(this, &VideoViewerWidget::newFrameAvailable,
this, &VideoViewerWidget::displayFrame, Qt::QueuedConnection);
} }
VideoViewerWidget::~VideoViewerWidget() VideoViewerWidget::~VideoViewerWidget()
@@ -33,12 +40,13 @@ void VideoViewerWidget::setupUI()
QGroupBox* videoGroup = new QGroupBox("Video Display", this); QGroupBox* videoGroup = new QGroupBox("Video Display", this);
QVBoxLayout* videoLayout = new QVBoxLayout(); QVBoxLayout* videoLayout = new QVBoxLayout();
m_videoContainer = new QWidget(this); m_videoDisplay = new QLabel(this);
m_videoContainer->setMinimumSize(640, 480); m_videoDisplay->setMinimumSize(640, 480);
m_videoContainer->setStyleSheet("background-color: black;"); m_videoDisplay->setStyleSheet("background-color: black;");
m_videoContainer->setAttribute(Qt::WA_NativeWindow); m_videoDisplay->setAlignment(Qt::AlignCenter);
m_videoDisplay->setScaledContents(true); // Enable scaling for zoom later
videoLayout->addWidget(m_videoContainer); videoLayout->addWidget(m_videoDisplay);
videoGroup->setLayout(videoLayout); videoGroup->setLayout(videoLayout);
// Controls // Controls
@@ -93,18 +101,6 @@ void VideoViewerWidget::setupUI()
setLayout(mainLayout); setLayout(mainLayout);
} }
void VideoViewerWidget::showEvent(QShowEvent* event)
{
QWidget::showEvent(event);
if (!m_windowId) {
m_videoContainer->winId(); // Force window creation
QTimer::singleShot(100, this, [this]() {
m_windowId = m_videoContainer->winId();
qDebug() << "[VideoViewer] Window ID initialized:" << m_windowId;
});
}
}
QString VideoViewerWidget::buildPipelineString() QString VideoViewerWidget::buildPipelineString()
{ {
QString sourceType = m_sourceType->currentData().toString(); QString sourceType = m_sourceType->currentData().toString();
@@ -112,26 +108,26 @@ QString VideoViewerWidget::buildPipelineString()
QString port = m_portEdit->text(); QString port = m_portEdit->text();
QString pipeline; QString pipeline;
// Note: Using autovideosink which opens a separate window // Using appsink to get frames for embedded display in Qt widget
// VideoOverlay with Qt widgets doesn't work reliably on this system // All pipelines convert to RGB format for easy QImage conversion
QString sinkPipeline = "videoconvert ! video/x-raw,format=RGB ! appsink name=videosink emit-signals=true";
if (sourceType == "udp-mjpeg") { if (sourceType == "udp-mjpeg") {
pipeline = QString("udpsrc port=%1 ! application/x-rtp,encoding-name=JPEG,payload=26 ! " pipeline = QString("udpsrc port=%1 ! application/x-rtp,encoding-name=JPEG,payload=26 ! "
"rtpjpegdepay ! jpegdec ! autovideosink") "rtpjpegdepay ! jpegdec ! %2")
.arg(port); .arg(port).arg(sinkPipeline);
} else if (sourceType == "udp-h264") { } else if (sourceType == "udp-h264") {
pipeline = QString("udpsrc port=%1 ! application/x-rtp,encoding-name=H264 ! " pipeline = QString("udpsrc port=%1 ! application/x-rtp,encoding-name=H264 ! "
"rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink") "rtph264depay ! h264parse ! avdec_h264 ! %2")
.arg(port); .arg(port).arg(sinkPipeline);
} else if (sourceType == "tcp") { } else if (sourceType == "tcp") {
pipeline = QString("tcpclientsrc host=%1 port=%2 ! tsdemux ! h264parse ! avdec_h264 ! " pipeline = QString("tcpclientsrc host=%1 port=%2 ! tsdemux ! h264parse ! avdec_h264 ! %3")
"videoconvert ! autovideosink") .arg(host).arg(port).arg(sinkPipeline);
.arg(host).arg(port);
} else if (sourceType == "http") { } else if (sourceType == "http") {
pipeline = QString("souphttpsrc location=http://%1:%2 ! multipartdemux ! jpegdec ! " pipeline = QString("souphttpsrc location=http://%1:%2 ! multipartdemux ! jpegdec ! %3")
"videoconvert ! autovideosink") .arg(host).arg(port).arg(sinkPipeline);
.arg(host).arg(port);
} else if (sourceType == "test") { } else if (sourceType == "test") {
pipeline = "videotestsrc ! autovideosink"; pipeline = QString("videotestsrc ! %1").arg(sinkPipeline);
} }
return pipeline; return pipeline;
@@ -168,7 +164,27 @@ void VideoViewerWidget::startPipeline()
m_busWatchId = gst_bus_add_watch(bus, busCallback, this); m_busWatchId = gst_bus_add_watch(bus, busCallback, this);
gst_object_unref(bus); gst_object_unref(bus);
// Note: VideoOverlay disabled - using autovideosink with separate window instead // Get appsink element and configure it
m_appSink = gst_bin_get_by_name(GST_BIN(m_pipeline), "videosink");
if (!m_appSink) {
m_statusLabel->setText("Status: Failed to get appsink element");
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
cleanupGStreamer();
return;
}
// Configure appsink
g_object_set(m_appSink, "emit-signals", TRUE, "sync", FALSE, "max-buffers", 1, "drop", TRUE, nullptr);
// Set callback for new samples - properly initialize all fields
GstAppSinkCallbacks callbacks = { 0 };
callbacks.new_sample = newSampleCallback;
callbacks.eos = nullptr;
callbacks.new_preroll = nullptr;
#if GST_CHECK_VERSION(1,20,0)
callbacks.new_event = nullptr;
#endif
gst_app_sink_set_callbacks(GST_APP_SINK(m_appSink), &callbacks, this, nullptr);
// Start playing // Start playing
GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING); GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
@@ -197,9 +213,9 @@ void VideoViewerWidget::stopPipeline()
m_pipeline = nullptr; m_pipeline = nullptr;
} }
if (m_videoSink) { if (m_appSink) {
gst_object_unref(m_videoSink); gst_object_unref(m_appSink);
m_videoSink = nullptr; m_appSink = nullptr;
} }
if (m_busWatchId > 0) { if (m_busWatchId > 0) {
@@ -207,6 +223,10 @@ void VideoViewerWidget::stopPipeline()
m_busWatchId = 0; m_busWatchId = 0;
} }
// Clear video display
m_videoDisplay->clear();
m_videoDisplay->setText("");
m_statusLabel->setText("Status: Stopped"); m_statusLabel->setText("Status: Stopped");
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }"); m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
m_startBtn->setEnabled(true); m_startBtn->setEnabled(true);
@@ -305,19 +325,95 @@ void VideoViewerWidget::onSourceTypeChanged(int index)
m_portEdit->setEnabled(needsNetwork); m_portEdit->setEnabled(needsNetwork);
} }
void VideoViewerWidget::onPrepareWindowHandle(GstBus* bus, GstMessage* msg, gpointer data) GstFlowReturn VideoViewerWidget::newSampleCallback(GstAppSink* appsink, gpointer user_data)
{ {
if (!gst_is_video_overlay_prepare_window_handle_message(msg)) { VideoViewerWidget* viewer = static_cast<VideoViewerWidget*>(user_data);
if (!viewer) {
qDebug() << "[VideoViewer] Callback: viewer is null";
return GST_FLOW_ERROR;
}
// Pull the sample from appsink
GstSample* sample = gst_app_sink_pull_sample(appsink);
if (!sample) {
qDebug() << "[VideoViewer] Callback: Failed to pull sample";
return GST_FLOW_ERROR;
}
// Get the buffer from the sample
GstBuffer* buffer = gst_sample_get_buffer(sample);
if (!buffer) {
qDebug() << "[VideoViewer] Callback: No buffer in sample";
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
// Get the caps to extract width and height
GstCaps* caps = gst_sample_get_caps(sample);
if (!caps) {
qDebug() << "[VideoViewer] Callback: No caps in sample";
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
GstStructure* structure = gst_caps_get_structure(caps, 0);
int width = 0, height = 0;
if (!gst_structure_get_int(structure, "width", &width) ||
!gst_structure_get_int(structure, "height", &height)) {
qDebug() << "[VideoViewer] Callback: Failed to get dimensions";
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
// Map the buffer to access the raw data
GstMapInfo map;
if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
qDebug() << "[VideoViewer] Callback: Failed to map buffer";
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
// Calculate expected size for RGB888 format
gsize expected_size = width * height * 3;
if (map.size < expected_size) {
qDebug() << "[VideoViewer] Callback: Buffer too small. Expected:" << expected_size << "Got:" << map.size;
gst_buffer_unmap(buffer, &map);
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
// Create QImage from the RGB data with proper stride
// QImage::Format_RGB888 expects RGB data
QImage frame(map.data, width, height, width * 3, QImage::Format_RGB888);
// Make a deep copy since the buffer will be unmapped
QImage frameCopy = frame.copy();
// Unmap and cleanup
gst_buffer_unmap(buffer, &map);
gst_sample_unref(sample);
// Use QMetaObject::invokeMethod for thread-safe signal emission
QMetaObject::invokeMethod(viewer, "newFrameAvailable", Qt::QueuedConnection,
Q_ARG(QImage, frameCopy));
return GST_FLOW_OK;
}
void VideoViewerWidget::displayFrame(const QImage& frame)
{
if (frame.isNull()) {
qDebug() << "[VideoViewer] displayFrame: Frame is null";
return; return;
} }
VideoViewerWidget* viewer = static_cast<VideoViewerWidget*>(data); static bool firstFrame = true;
if (firstFrame) {
if (viewer->m_windowId) { qDebug() << "[VideoViewer] First frame received! Size:" << frame.width() << "x" << frame.height();
GstElement* sink = GST_ELEMENT(GST_MESSAGE_SRC(msg)); firstFrame = false;
qDebug() << "[VideoViewer] prepare-window-handle: Setting window ID" << viewer->m_windowId;
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(sink), viewer->m_windowId);
} else {
qDebug() << "[VideoViewer] prepare-window-handle: No window ID available yet";
} }
// Convert QImage to QPixmap and display in label
QPixmap pixmap = QPixmap::fromImage(frame);
m_videoDisplay->setPixmap(pixmap.scaled(m_videoDisplay->size(), Qt::KeepAspectRatio, Qt::SmoothTransformation));
} }

View File

@@ -6,7 +6,9 @@
#include <QComboBox> #include <QComboBox>
#include <QLineEdit> #include <QLineEdit>
#include <QLabel> #include <QLabel>
#include <QImage>
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/app/gstappsink.h>
class VideoViewerWidget : public QWidget class VideoViewerWidget : public QWidget
{ {
@@ -16,13 +18,14 @@ public:
explicit VideoViewerWidget(QWidget *parent = nullptr); explicit VideoViewerWidget(QWidget *parent = nullptr);
~VideoViewerWidget(); ~VideoViewerWidget();
protected: signals:
void showEvent(QShowEvent* event) override; void newFrameAvailable(const QImage& frame);
private slots: private slots:
void onStartViewer(); void onStartViewer();
void onStopViewer(); void onStopViewer();
void onSourceTypeChanged(int index); void onSourceTypeChanged(int index);
void displayFrame(const QImage& frame);
private: private:
void setupUI(); void setupUI();
@@ -31,13 +34,12 @@ private:
void startPipeline(); void startPipeline();
void stopPipeline(); void stopPipeline();
QString buildPipelineString(); QString buildPipelineString();
void setupVideoOverlay();
static gboolean busCallback(GstBus* bus, GstMessage* msg, gpointer data); static gboolean busCallback(GstBus* bus, GstMessage* msg, gpointer data);
static void onPrepareWindowHandle(GstBus* bus, GstMessage* msg, gpointer data); static GstFlowReturn newSampleCallback(GstAppSink* appsink, gpointer user_data);
// UI elements // UI elements
QWidget* m_videoContainer; QLabel* m_videoDisplay;
QPushButton* m_startBtn; QPushButton* m_startBtn;
QPushButton* m_stopBtn; QPushButton* m_stopBtn;
QComboBox* m_sourceType; QComboBox* m_sourceType;
@@ -47,9 +49,8 @@ private:
// GStreamer elements // GStreamer elements
GstElement* m_pipeline; GstElement* m_pipeline;
GstElement* m_videoSink; GstElement* m_appSink;
guint m_busWatchId; guint m_busWatchId;
WId m_windowId;
}; };
#endif // VIDEOVIEWERWIDGET_H #endif // VIDEOVIEWERWIDGET_H