use appsink instead of autovideosink for embedded Qt display

This commit is contained in:
Maik Jurischka
2025-12-19 06:40:58 +01:00
parent 69e2f3ae1d
commit 34148a592a
3 changed files with 153 additions and 56 deletions

View File

@@ -4,15 +4,22 @@
#include <QFormLayout>
#include <QGroupBox>
#include <QDebug>
#include <QTimer>
#include <gst/video/videooverlay.h>
#include <QPixmap>
#include <gst/video/video.h>
/**
 * Constructs the video viewer widget.
 *
 * Initializes GStreamer before building the UI so pipeline elements are
 * available, registers QImage with Qt's meta-type system (required for
 * queued signal/slot delivery across the GStreamer streaming-thread
 * boundary), and wires the frame-ready signal to the GUI-thread display
 * slot.
 *
 * Note: the diff residue showing a second initializer list with the old
 * m_videoSink/m_windowId members has been dropped; this is the
 * post-commit (appsink-based) constructor.
 */
VideoViewerWidget::VideoViewerWidget(QWidget *parent)
    : QWidget(parent), m_pipeline(nullptr), m_appSink(nullptr),
      m_busWatchId(0)
{
    // QImage must be a registered meta type before it can travel through a
    // Qt::QueuedConnection (frames are produced on a streaming thread).
    qRegisterMetaType<QImage>("QImage");

    initGStreamer();
    setupUI();

    // Queued connection so displayFrame() always runs on the GUI thread,
    // no matter which thread emitted newFrameAvailable.
    connect(this, &VideoViewerWidget::newFrameAvailable,
            this, &VideoViewerWidget::displayFrame, Qt::QueuedConnection);
}
VideoViewerWidget::~VideoViewerWidget()
@@ -33,12 +40,13 @@ void VideoViewerWidget::setupUI()
QGroupBox* videoGroup = new QGroupBox("Video Display", this);
QVBoxLayout* videoLayout = new QVBoxLayout();
m_videoContainer = new QWidget(this);
m_videoContainer->setMinimumSize(640, 480);
m_videoContainer->setStyleSheet("background-color: black;");
m_videoContainer->setAttribute(Qt::WA_NativeWindow);
m_videoDisplay = new QLabel(this);
m_videoDisplay->setMinimumSize(640, 480);
m_videoDisplay->setStyleSheet("background-color: black;");
m_videoDisplay->setAlignment(Qt::AlignCenter);
m_videoDisplay->setScaledContents(true); // Enable scaling for zoom later
videoLayout->addWidget(m_videoContainer);
videoLayout->addWidget(m_videoDisplay);
videoGroup->setLayout(videoLayout);
// Controls
@@ -93,18 +101,6 @@ void VideoViewerWidget::setupUI()
setLayout(mainLayout);
}
// Caches the native window handle of the video container the first time the
// widget is shown; the handle is later handed to the video sink via
// gst_video_overlay_set_window_handle in the prepare-window-handle handler.
// NOTE(review): the 100 ms single-shot delay presumably gives the platform
// window time to be fully created before the handle is re-queried — confirm
// whether the delay is actually needed on the target system.
void VideoViewerWidget::showEvent(QShowEvent* event)
{
QWidget::showEvent(event);
if (!m_windowId) {
m_videoContainer->winId(); // Force window creation
// Re-query the (now created) window's ID after a short delay and cache it.
QTimer::singleShot(100, this, [this]() {
m_windowId = m_videoContainer->winId();
qDebug() << "[VideoViewer] Window ID initialized:" << m_windowId;
});
}
}
QString VideoViewerWidget::buildPipelineString()
{
QString sourceType = m_sourceType->currentData().toString();
@@ -112,26 +108,26 @@ QString VideoViewerWidget::buildPipelineString()
QString port = m_portEdit->text();
QString pipeline;
// Note: Using autovideosink which opens a separate window
// VideoOverlay with Qt widgets doesn't work reliably on this system
// Using appsink to get frames for embedded display in Qt widget
// All pipelines convert to RGB format for easy QImage conversion
QString sinkPipeline = "videoconvert ! video/x-raw,format=RGB ! appsink name=videosink emit-signals=true";
if (sourceType == "udp-mjpeg") {
pipeline = QString("udpsrc port=%1 ! application/x-rtp,encoding-name=JPEG,payload=26 ! "
"rtpjpegdepay ! jpegdec ! autovideosink")
.arg(port);
"rtpjpegdepay ! jpegdec ! %2")
.arg(port).arg(sinkPipeline);
} else if (sourceType == "udp-h264") {
pipeline = QString("udpsrc port=%1 ! application/x-rtp,encoding-name=H264 ! "
"rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink")
.arg(port);
"rtph264depay ! h264parse ! avdec_h264 ! %2")
.arg(port).arg(sinkPipeline);
} else if (sourceType == "tcp") {
pipeline = QString("tcpclientsrc host=%1 port=%2 ! tsdemux ! h264parse ! avdec_h264 ! "
"videoconvert ! autovideosink")
.arg(host).arg(port);
pipeline = QString("tcpclientsrc host=%1 port=%2 ! tsdemux ! h264parse ! avdec_h264 ! %3")
.arg(host).arg(port).arg(sinkPipeline);
} else if (sourceType == "http") {
pipeline = QString("souphttpsrc location=http://%1:%2 ! multipartdemux ! jpegdec ! "
"videoconvert ! autovideosink")
.arg(host).arg(port);
pipeline = QString("souphttpsrc location=http://%1:%2 ! multipartdemux ! jpegdec ! %3")
.arg(host).arg(port).arg(sinkPipeline);
} else if (sourceType == "test") {
pipeline = "videotestsrc ! autovideosink";
pipeline = QString("videotestsrc ! %1").arg(sinkPipeline);
}
return pipeline;
@@ -168,7 +164,27 @@ void VideoViewerWidget::startPipeline()
m_busWatchId = gst_bus_add_watch(bus, busCallback, this);
gst_object_unref(bus);
// Note: VideoOverlay disabled - using autovideosink with separate window instead
// Get appsink element and configure it
m_appSink = gst_bin_get_by_name(GST_BIN(m_pipeline), "videosink");
if (!m_appSink) {
m_statusLabel->setText("Status: Failed to get appsink element");
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
cleanupGStreamer();
return;
}
// Configure appsink
g_object_set(m_appSink, "emit-signals", TRUE, "sync", FALSE, "max-buffers", 1, "drop", TRUE, nullptr);
// Set callback for new samples - properly initialize all fields
GstAppSinkCallbacks callbacks = { 0 };
callbacks.new_sample = newSampleCallback;
callbacks.eos = nullptr;
callbacks.new_preroll = nullptr;
#if GST_CHECK_VERSION(1,20,0)
callbacks.new_event = nullptr;
#endif
gst_app_sink_set_callbacks(GST_APP_SINK(m_appSink), &callbacks, this, nullptr);
// Start playing
GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
@@ -197,9 +213,9 @@ void VideoViewerWidget::stopPipeline()
m_pipeline = nullptr;
}
if (m_videoSink) {
gst_object_unref(m_videoSink);
m_videoSink = nullptr;
if (m_appSink) {
gst_object_unref(m_appSink);
m_appSink = nullptr;
}
if (m_busWatchId > 0) {
@@ -207,6 +223,10 @@ void VideoViewerWidget::stopPipeline()
m_busWatchId = 0;
}
// Clear video display
m_videoDisplay->clear();
m_videoDisplay->setText("");
m_statusLabel->setText("Status: Stopped");
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
m_startBtn->setEnabled(true);
@@ -305,19 +325,95 @@ void VideoViewerWidget::onSourceTypeChanged(int index)
m_portEdit->setEnabled(needsNetwork);
}
void VideoViewerWidget::onPrepareWindowHandle(GstBus* bus, GstMessage* msg, gpointer data)
/**
 * appsink "new-sample" callback (runs on a GStreamer streaming thread).
 *
 * Pulls the next RGB sample, deep-copies it into a QImage, and forwards it
 * to the GUI thread via a queued invocation of the newFrameAvailable
 * signal (connected to displayFrame in the constructor).
 *
 * Fix over the previous version: the row stride is read from the caps via
 * GstVideoInfo instead of being assumed to be width * 3. GStreamer pads
 * RGB rows to 4-byte boundaries, so for widths where width * 3 is not a
 * multiple of 4 a tight-stride assumption skews/shears the image.
 *
 * @param appsink   the appsink element that produced the sample
 * @param user_data the owning VideoViewerWidget (set at callback install)
 * @return GST_FLOW_OK on success, GST_FLOW_ERROR if the sample could not
 *         be processed.
 */
GstFlowReturn VideoViewerWidget::newSampleCallback(GstAppSink* appsink, gpointer user_data)
{
    VideoViewerWidget* viewer = static_cast<VideoViewerWidget*>(user_data);
    if (!viewer) {
        qDebug() << "[VideoViewer] Callback: viewer is null";
        return GST_FLOW_ERROR;
    }

    // Pull the sample from appsink
    GstSample* sample = gst_app_sink_pull_sample(appsink);
    if (!sample) {
        qDebug() << "[VideoViewer] Callback: Failed to pull sample";
        return GST_FLOW_ERROR;
    }

    // Get the buffer from the sample
    GstBuffer* buffer = gst_sample_get_buffer(sample);
    if (!buffer) {
        qDebug() << "[VideoViewer] Callback: No buffer in sample";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Parse the caps into a GstVideoInfo so we get the real row stride,
    // not a guessed width * 3 (RGB rows are padded to 4-byte boundaries).
    GstCaps* caps = gst_sample_get_caps(sample);
    GstVideoInfo vinfo;
    if (!caps || !gst_video_info_from_caps(&vinfo, caps)) {
        qDebug() << "[VideoViewer] Callback: No usable caps in sample";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    const int width  = GST_VIDEO_INFO_WIDTH(&vinfo);
    const int height = GST_VIDEO_INFO_HEIGHT(&vinfo);
    const int stride = GST_VIDEO_INFO_PLANE_STRIDE(&vinfo, 0);
    if (width <= 0 || height <= 0 || stride <= 0) {
        qDebug() << "[VideoViewer] Callback: Failed to get dimensions";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Map the buffer to access the raw data
    GstMapInfo map;
    if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        qDebug() << "[VideoViewer] Callback: Failed to map buffer";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Sanity check against the stride-derived frame size from the caps.
    const gsize expected_size = GST_VIDEO_INFO_SIZE(&vinfo);
    if (map.size < expected_size) {
        qDebug() << "[VideoViewer] Callback: Buffer too small. Expected:" << expected_size << "Got:" << map.size;
        gst_buffer_unmap(buffer, &map);
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Wrap the mapped data without copying, then deep-copy before unmapping:
    // the wrapped QImage only borrows the buffer memory.
    QImage frame(map.data, width, height, stride, QImage::Format_RGB888);
    QImage frameCopy = frame.copy();

    // Unmap and cleanup
    gst_buffer_unmap(buffer, &map);
    gst_sample_unref(sample);

    // Thread-safe hand-off: queued invocation marshals the frame to the
    // GUI thread where the connected displayFrame slot runs.
    QMetaObject::invokeMethod(viewer, "newFrameAvailable", Qt::QueuedConnection,
                              Q_ARG(QImage, frameCopy));

    return GST_FLOW_OK;
}
/**
 * GUI-thread slot: renders a decoded frame into the preview label.
 *
 * Connected to newFrameAvailable via a queued connection in the
 * constructor, so it always runs on the GUI thread regardless of which
 * thread produced the frame.
 *
 * Fix: removed the leftover lines of the deleted onPrepareWindowHandle
 * overlay handler that were interleaved inside this body (diff residue),
 * which made the function non-compilable.
 *
 * @param frame deep-copied RGB frame produced by newSampleCallback
 */
void VideoViewerWidget::displayFrame(const QImage& frame)
{
    if (frame.isNull()) {
        qDebug() << "[VideoViewer] displayFrame: Frame is null";
        return;
    }

    // One-time diagnostic so it is easy to confirm frames are arriving.
    // NOTE(review): this static persists across pipeline restarts, so the
    // message is printed once per process, not once per stream.
    static bool firstFrame = true;
    if (firstFrame) {
        qDebug() << "[VideoViewer] First frame received! Size:" << frame.width() << "x" << frame.height();
        firstFrame = false;
    }

    // Convert to QPixmap and display, scaled to the label while keeping
    // the aspect ratio.
    QPixmap pixmap = QPixmap::fromImage(frame);
    m_videoDisplay->setPixmap(pixmap.scaled(m_videoDisplay->size(), Qt::KeepAspectRatio, Qt::SmoothTransformation));
}