// videoviewerwidget.cpp — Qt widget that embeds and displays a GStreamer video stream.
#include "videoviewerwidget.h"

#include <QDebug>
#include <QFormLayout>
#include <QGroupBox>
#include <QHBoxLayout>
#include <QPixmap>
#include <QTransform>
#include <QVBoxLayout>

#include <gst/video/video.h>
// Constructs the viewer: initializes GStreamer, builds the UI, and wires the
// frame signal so that frames arriving off the GUI thread are displayed safely.
VideoViewerWidget::VideoViewerWidget(QWidget *parent)
    : QWidget(parent),
      // UI widgets — created later in setupUI()
      m_scrollArea(nullptr),
      m_videoDisplay(nullptr),
      m_startBtn(nullptr),
      m_stopBtn(nullptr),
      m_sourceType(nullptr),
      m_hostEdit(nullptr),
      m_portEdit(nullptr),
      m_statusLabel(nullptr),
      m_zoomSlider(nullptr),
      m_zoomLabel(nullptr),
      m_rotationCombo(nullptr),
      m_flipHorizontal(nullptr),
      m_flipVertical(nullptr),
      // GStreamer state — created later in startPipeline()
      m_pipeline(nullptr),
      m_appSink(nullptr),
      m_zoomFactor(1.0),               // 1.0 == 100% zoom
      m_busWatchId(0),                 // 0 == no bus watch installed
      m_rotationAngle(0),              // degrees: 0 / 90 / 180 / 270
      m_flipHorizontalEnabled(false),
      m_flipVerticalEnabled(false)
{
    initGStreamer();
    setupUI();

    // Queued connection: newFrameAvailable is emitted from the GStreamer
    // sample callback (see newSampleCallback), which runs outside the GUI
    // thread, so the frame must hop to the GUI thread before painting.
    connect(this, &VideoViewerWidget::newFrameAvailable,
            this, &VideoViewerWidget::displayFrame, Qt::QueuedConnection);
}
// Stops and releases the GStreamer pipeline before the widget is destroyed.
VideoViewerWidget::~VideoViewerWidget()
{
    cleanupGStreamer();
}
void VideoViewerWidget::initGStreamer()
|
|
{
|
|
gst_init(nullptr, nullptr);
|
|
}
|
|
|
|
// Builds the widget's UI: a scrollable video surface on top, followed by
// zoom and transform (rotation/flip) controls, and a "Viewer Controls" group
// with source selection, host/port fields, start/stop buttons and a status
// label. Also wires every control's signal to the corresponding slot.
void VideoViewerWidget::setupUI()
{
    auto* mainLayout = new QVBoxLayout(this);
    mainLayout->setContentsMargins(0, 0, 0, 0);
    mainLayout->setSpacing(5);

    // Video surface: a QLabel inside a scroll area. The scroll area is not
    // widget-resizable and the label is resized manually in displayFrame(),
    // so zoomed frames larger than the viewport become scrollable.
    m_scrollArea = new QScrollArea(this);
    m_scrollArea->setWidgetResizable(false);
    m_scrollArea->setAlignment(Qt::AlignCenter);
    m_scrollArea->setStyleSheet("QScrollArea { background-color: black; border: 1px solid #666; }");

    m_videoDisplay = new QLabel();
    m_videoDisplay->setMinimumSize(320, 240);
    m_videoDisplay->setStyleSheet("background-color: black;");
    m_videoDisplay->setAlignment(Qt::AlignCenter);
    // Scaling is done explicitly in displayFrame() (keeps aspect ratio), so
    // the label must not stretch its contents itself.
    m_videoDisplay->setScaledContents(false);

    m_scrollArea->setWidget(m_videoDisplay);

    // Zoom row: slider (50%..200%) plus a percentage readout label.
    auto* zoomLayout = new QHBoxLayout();
    zoomLayout->addWidget(new QLabel("Zoom:", this));

    m_zoomSlider = new QSlider(Qt::Horizontal, this);
    m_zoomSlider->setMinimum(50); // 50%
    m_zoomSlider->setMaximum(200); // 200%
    m_zoomSlider->setValue(100); // 100% default
    m_zoomSlider->setTickPosition(QSlider::TicksBelow);
    m_zoomSlider->setTickInterval(25);
    connect(m_zoomSlider, &QSlider::valueChanged, this, &VideoViewerWidget::onZoomChanged);

    m_zoomLabel = new QLabel("100%", this);
    m_zoomLabel->setMinimumWidth(50);

    zoomLayout->addWidget(m_zoomSlider);
    zoomLayout->addWidget(m_zoomLabel);

    // Transform row: rotation combo (user data = angle in degrees) and
    // horizontal/vertical flip checkboxes.
    auto* transformLayout = new QHBoxLayout();
    transformLayout->addWidget(new QLabel("Rotation:", this));

    m_rotationCombo = new QComboBox(this);
    m_rotationCombo->addItem("0°", 0);
    m_rotationCombo->addItem("90°", 90);
    m_rotationCombo->addItem("180°", 180);
    m_rotationCombo->addItem("270°", 270);
    m_rotationCombo->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
    m_rotationCombo->setMinimumWidth(70);
    connect(m_rotationCombo, QOverload<int>::of(&QComboBox::currentIndexChanged),
            this, &VideoViewerWidget::onRotationChanged);
    transformLayout->addWidget(m_rotationCombo);

    m_flipHorizontal = new QCheckBox("Flip H", this);
    connect(m_flipHorizontal, &QCheckBox::checkStateChanged, this, &VideoViewerWidget::onFlipHorizontalChanged);
    transformLayout->addWidget(m_flipHorizontal);

    m_flipVertical = new QCheckBox("Flip V", this);
    connect(m_flipVertical, &QCheckBox::checkStateChanged, this, &VideoViewerWidget::onFlipVerticalChanged);
    transformLayout->addWidget(m_flipVertical);

    transformLayout->addStretch();

    // Viewer controls group: source type, host/port, start/stop, status.
    auto* controlGroup = new QGroupBox("Viewer Controls", this);
    auto* controlLayout = new QVBoxLayout();

    // Source selection — the item user data is the key consumed by
    // buildPipelineString() and onSourceTypeChanged().
    auto* sourceLayout = new QHBoxLayout();
    sourceLayout->addWidget(new QLabel("Source Type:", this));
    m_sourceType = new QComboBox(this);
    m_sourceType->addItem("UDP MJPEG Stream (No plugins needed)", "udp-mjpeg");
    m_sourceType->addItem("UDP H.264 Stream (Requires gst-libav)", "udp-h264");
    m_sourceType->addItem("TCP H.264 Stream", "tcp");
    m_sourceType->addItem("MJPEG HTTP Stream", "http");
    m_sourceType->addItem("Test Pattern", "test");
    m_sourceType->setSizeAdjustPolicy(QComboBox::AdjustToMinimumContentsLengthWithIcon);
    m_sourceType->setMinimumContentsLength(15);
    connect(m_sourceType, QOverload<int>::of(&QComboBox::currentIndexChanged),
            this, &VideoViewerWidget::onSourceTypeChanged);
    sourceLayout->addWidget(m_sourceType, 1);

    // Network parameters consumed by buildPipelineString().
    auto* formLayout = new QFormLayout();
    m_hostEdit = new QLineEdit("127.0.0.1", this);
    m_portEdit = new QLineEdit("5000", this);
    formLayout->addRow("Host:", m_hostEdit);
    formLayout->addRow("Port:", m_portEdit);

    // Start/stop buttons — Stop starts disabled; startPipeline()/stopPipeline()
    // toggle the pair.
    auto* buttonLayout = new QHBoxLayout();
    m_startBtn = new QPushButton("Start Viewer", this);
    m_stopBtn = new QPushButton("Stop Viewer", this);
    m_stopBtn->setEnabled(false);

    connect(m_startBtn, &QPushButton::clicked, this, &VideoViewerWidget::onStartViewer);
    connect(m_stopBtn, &QPushButton::clicked, this, &VideoViewerWidget::onStopViewer);

    buttonLayout->addWidget(m_startBtn);
    buttonLayout->addWidget(m_stopBtn);

    m_statusLabel = new QLabel("Status: Stopped", this);
    m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");

    controlLayout->addLayout(sourceLayout);
    controlLayout->addLayout(formLayout);
    controlLayout->addLayout(buttonLayout);
    controlLayout->addWidget(m_statusLabel);
    controlGroup->setLayout(controlLayout);
    controlGroup->setMaximumWidth(500);

    // Assemble: video surface gets all extra vertical space (stretch 1).
    mainLayout->addWidget(m_scrollArea, 1);
    mainLayout->addLayout(zoomLayout);
    mainLayout->addLayout(transformLayout);
    mainLayout->addWidget(controlGroup);

    setLayout(mainLayout);
}
// Builds the gst_parse_launch() description for the currently selected source.
//
// Every branch terminates in the same appsink tail: frames are converted to
// packed RGB so newSampleCallback() can wrap them in a QImage. Returns an
// empty string for an unrecognized source key.
QString VideoViewerWidget::buildPipelineString() const {
    const QString sink = "videoconvert ! video/x-raw,format=RGB ! appsink name=videosink emit-signals=true";
    const QString kind = m_sourceType->currentData().toString();
    const QString host = m_hostEdit->text();
    const QString port = m_portEdit->text();

    if (kind == "udp-mjpeg") {
        return QString("udpsrc port=%1 ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! %2")
            .arg(port, sink);
    }
    if (kind == "udp-h264") {
        return QString("udpsrc port=%1 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! %2")
            .arg(port, sink);
    }
    if (kind == "tcp") {
        return QString("tcpclientsrc host=%1 port=%2 ! tsdemux ! h264parse ! avdec_h264 ! %3")
            .arg(host, port, sink);
    }
    if (kind == "http") {
        return QString("souphttpsrc location=http://%1:%2 ! multipartdemux ! jpegdec ! %3")
            .arg(host, port, sink);
    }
    if (kind == "test") {
        return QString("videotestsrc ! %1").arg(sink);
    }

    return QString();
}
void VideoViewerWidget::startPipeline()
|
|
{
|
|
if (m_pipeline) {
|
|
stopPipeline();
|
|
}
|
|
|
|
QString pipelineStr = buildPipelineString();
|
|
qDebug() << "[VideoViewer] Starting pipeline:" << pipelineStr;
|
|
|
|
GError* error = nullptr;
|
|
|
|
m_pipeline = gst_parse_launch(pipelineStr.toUtf8().constData(), &error);
|
|
|
|
if (error) {
|
|
m_statusLabel->setText(QString("Status: Pipeline Error - %1").arg(error->message));
|
|
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
|
|
g_error_free(error);
|
|
return;
|
|
}
|
|
|
|
if (!m_pipeline) {
|
|
m_statusLabel->setText("Status: Failed to create pipeline");
|
|
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
|
|
return;
|
|
}
|
|
|
|
GstBus* bus = gst_element_get_bus(m_pipeline);
|
|
m_busWatchId = gst_bus_add_watch(bus, busCallback, this);
|
|
gst_object_unref(bus);
|
|
|
|
m_appSink = gst_bin_get_by_name(GST_BIN(m_pipeline), "videosink");
|
|
if (!m_appSink) {
|
|
m_statusLabel->setText("Status: Failed to get appsink element");
|
|
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
|
|
cleanupGStreamer();
|
|
return;
|
|
}
|
|
|
|
g_object_set(m_appSink, "emit-signals", TRUE, "sync", FALSE, "max-buffers", 1, "drop", TRUE, nullptr);
|
|
|
|
// Properly initialize all callback fields
|
|
GstAppSinkCallbacks callbacks = { 0 };
|
|
callbacks.new_sample = newSampleCallback;
|
|
callbacks.eos = nullptr;
|
|
callbacks.new_preroll = nullptr;
|
|
#if GST_CHECK_VERSION(1,20,0)
|
|
callbacks.new_event = nullptr;
|
|
#endif
|
|
gst_app_sink_set_callbacks(GST_APP_SINK(m_appSink), &callbacks, this, nullptr);
|
|
|
|
GstStateChangeReturn ret = gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
|
|
|
|
qDebug() << "[VideoViewer] Pipeline state change return:" << ret;
|
|
|
|
if (ret == GST_STATE_CHANGE_FAILURE) {
|
|
m_statusLabel->setText("Status: Failed to start pipeline");
|
|
m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
|
|
cleanupGStreamer();
|
|
return;
|
|
}
|
|
|
|
qDebug() << "[VideoViewer] Pipeline started successfully";
|
|
m_statusLabel->setText("Status: Playing");
|
|
m_statusLabel->setStyleSheet("QLabel { background-color: #90EE90; padding: 5px; border-radius: 3px; }");
|
|
m_startBtn->setEnabled(false);
|
|
m_stopBtn->setEnabled(true);
|
|
}
|
|
|
|
void VideoViewerWidget::stopPipeline()
|
|
{
|
|
if (m_pipeline) {
|
|
gst_element_set_state(m_pipeline, GST_STATE_NULL);
|
|
gst_object_unref(m_pipeline);
|
|
m_pipeline = nullptr;
|
|
}
|
|
|
|
if (m_appSink) {
|
|
gst_object_unref(m_appSink);
|
|
m_appSink = nullptr;
|
|
}
|
|
|
|
if (m_busWatchId > 0) {
|
|
g_source_remove(m_busWatchId);
|
|
m_busWatchId = 0;
|
|
}
|
|
|
|
m_videoDisplay->clear();
|
|
m_videoDisplay->setText("");
|
|
m_currentFrame = QImage();
|
|
|
|
m_statusLabel->setText("Status: Stopped");
|
|
m_statusLabel->setStyleSheet("QLabel { background-color: #f0f0f0; padding: 5px; border-radius: 3px; }");
|
|
m_startBtn->setEnabled(true);
|
|
m_stopBtn->setEnabled(false);
|
|
}
|
|
|
|
// Releases all GStreamer resources held by the widget; currently identical
// to stopping the pipeline (stopPipeline() handles pipeline, appsink and
// bus watch, and is safe to call when nothing is running).
void VideoViewerWidget::cleanupGStreamer()
{
    stopPipeline();
}
// GStreamer bus watch handler (runs on the GLib main context).
//
// Logs warnings/info/state changes, and on error or end-of-stream updates the
// status label and stops the pipeline. UI work is marshalled to the widget's
// thread with a queued QMetaObject::invokeMethod. Always returns TRUE so the
// watch stays installed.
gboolean VideoViewerWidget::busCallback(GstBus* bus, GstMessage* msg, gpointer data)
{
    Q_UNUSED(bus);
    auto* self = static_cast<VideoViewerWidget*>(data);

    const GstMessageType msgType = GST_MESSAGE_TYPE(msg);

    if (msgType == GST_MESSAGE_ERROR) {
        GError* err = nullptr;
        gchar* dbg = nullptr;
        gst_message_parse_error(msg, &err, &dbg);

        const QString errorMsg = QString("GStreamer Error: %1\nDebug: %2")
                                     .arg(err->message)
                                     .arg(dbg ? dbg : "none");
        qDebug() << "[VideoViewer] ERROR:" << errorMsg;

        // Hop to the GUI thread before touching widgets or the pipeline.
        QMetaObject::invokeMethod(self, [self, errorMsg]() {
            self->m_statusLabel->setText("Status: Stream Error - " + errorMsg);
            self->m_statusLabel->setStyleSheet("QLabel { background-color: #FFB6C1; padding: 5px; border-radius: 3px; }");
            self->stopPipeline();
        }, Qt::QueuedConnection);

        g_error_free(err);
        g_free(dbg);
    } else if (msgType == GST_MESSAGE_EOS) {
        qDebug() << "[VideoViewer] End of stream";
        QMetaObject::invokeMethod(self, [self]() {
            self->m_statusLabel->setText("Status: End of Stream");
            self->stopPipeline();
        }, Qt::QueuedConnection);
    } else if (msgType == GST_MESSAGE_STATE_CHANGED) {
        // Only log state changes of the top-level pipeline, not every element.
        if (GST_MESSAGE_SRC(msg) == GST_OBJECT(self->m_pipeline)) {
            GstState before, after, pending;
            gst_message_parse_state_changed(msg, &before, &after, &pending);
            qDebug() << "[VideoViewer] State changed:"
                     << gst_element_state_get_name(before) << "->"
                     << gst_element_state_get_name(after);
        }
    } else if (msgType == GST_MESSAGE_WARNING) {
        GError* err = nullptr;
        gchar* dbg = nullptr;
        gst_message_parse_warning(msg, &err, &dbg);
        qDebug() << "[VideoViewer] WARNING:" << err->message;
        g_error_free(err);
        g_free(dbg);
    } else if (msgType == GST_MESSAGE_INFO) {
        GError* err = nullptr;
        gchar* dbg = nullptr;
        gst_message_parse_info(msg, &err, &dbg);
        qDebug() << "[VideoViewer] INFO:" << err->message;
        g_error_free(err);
        g_free(dbg);
    }

    return TRUE;
}
// Slot for the "Start Viewer" button: builds and starts the pipeline.
void VideoViewerWidget::onStartViewer()
{
    startPipeline();
}
// Slot for the "Stop Viewer" button: stops and releases the pipeline.
void VideoViewerWidget::onStopViewer()
{
    stopPipeline();
}
void VideoViewerWidget::onSourceTypeChanged(int index)
|
|
{
|
|
QString sourceType = m_sourceType->currentData().toString();
|
|
|
|
bool needsNetwork = (sourceType != "test");
|
|
bool isUdp = (sourceType == "udp-mjpeg" || sourceType == "udp-h264");
|
|
m_hostEdit->setEnabled(needsNetwork && !isUdp);
|
|
m_portEdit->setEnabled(needsNetwork);
|
|
}
|
|
|
|
void VideoViewerWidget::onZoomChanged(int value)
|
|
{
|
|
m_zoomFactor = value / 100.0;
|
|
m_zoomLabel->setText(QString("%1%").arg(value));
|
|
|
|
if (!m_currentFrame.isNull()) {
|
|
displayFrame(m_currentFrame);
|
|
}
|
|
}
|
|
|
|
void VideoViewerWidget::onRotationChanged(int index)
|
|
{
|
|
m_rotationAngle = m_rotationCombo->currentData().toInt();
|
|
|
|
if (!m_currentFrame.isNull()) {
|
|
displayFrame(m_currentFrame);
|
|
}
|
|
}
|
|
|
|
// Slot for the "Flip H" checkbox: records the flag and re-renders the last
// frame, if any, so the mirroring applies immediately.
void VideoViewerWidget::onFlipHorizontalChanged(Qt::CheckState state)
{
    m_flipHorizontalEnabled = (state == Qt::Checked);

    if (m_currentFrame.isNull())
        return;
    displayFrame(m_currentFrame);
}
// Slot for the "Flip V" checkbox: records the flag and re-renders the last
// frame, if any, so the mirroring applies immediately.
void VideoViewerWidget::onFlipVerticalChanged(Qt::CheckState state)
{
    m_flipVerticalEnabled = (state == Qt::Checked);

    if (m_currentFrame.isNull())
        return;
    displayFrame(m_currentFrame);
}
// appsink "new-sample" callback (runs on a GStreamer streaming thread).
//
// Pulls the RGB sample, deep-copies it into a QImage, and forwards it to the
// GUI thread via a queued newFrameAvailable emission.
//
// Fix: the previous version assumed a tightly packed stride of width * 3
// bytes, but GStreamer RGB buffers pad each row to a 4-byte boundary, so any
// width not divisible by 4 produced a skewed image. We now read the real
// plane stride from GstVideoInfo and hand it to QImage's bytesPerLine.
GstFlowReturn VideoViewerWidget::newSampleCallback(GstAppSink* appsink, gpointer user_data)
{
    // Lightweight throttled logging (GUI thread only ever reads the output).
    static int callbackCount = 0;
    callbackCount++;

    if (callbackCount == 1) {
        qDebug() << "[VideoViewer] Callback: First sample callback!";
    } else if (callbackCount % 30 == 0) {
        qDebug() << "[VideoViewer] Callback: Samples received:" << callbackCount;
    }

    VideoViewerWidget* viewer = static_cast<VideoViewerWidget*>(user_data);
    if (!viewer) {
        qDebug() << "[VideoViewer] Callback: viewer is null";
        return GST_FLOW_ERROR;
    }

    GstSample* sample = gst_app_sink_pull_sample(appsink);
    if (!sample) {
        qDebug() << "[VideoViewer] Callback: Failed to pull sample";
        return GST_FLOW_ERROR;
    }

    GstBuffer* buffer = gst_sample_get_buffer(sample);
    if (!buffer) {
        qDebug() << "[VideoViewer] Callback: No buffer in sample";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    GstCaps* caps = gst_sample_get_caps(sample);
    if (!caps) {
        qDebug() << "[VideoViewer] Callback: No caps in sample";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Parse the full video info (width, height, per-plane stride) instead of
    // reading width/height by hand and guessing the stride.
    GstVideoInfo vinfo;
    if (!gst_video_info_from_caps(&vinfo, caps)) {
        qDebug() << "[VideoViewer] Callback: Failed to parse video caps";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }
    const int width = GST_VIDEO_INFO_WIDTH(&vinfo);
    const int height = GST_VIDEO_INFO_HEIGHT(&vinfo);
    const int stride = GST_VIDEO_INFO_PLANE_STRIDE(&vinfo, 0);

    GstMapInfo map;
    if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        qDebug() << "[VideoViewer] Callback: Failed to map buffer";
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Size check in gsize to avoid int overflow for very large frames.
    const gsize expected_size = static_cast<gsize>(stride) * static_cast<gsize>(height);
    if (map.size < expected_size) {
        qDebug() << "[VideoViewer] Callback: Buffer too small. Expected:" << expected_size << "Got:" << map.size;
        gst_buffer_unmap(buffer, &map);
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }

    // Wrap the mapped data (no copy yet), using the buffer's real row stride.
    QImage frame(map.data, width, height, stride, QImage::Format_RGB888);

    // Deep copy before unmapping — the QImage above only references map.data.
    QImage frameCopy = frame.copy();

    gst_buffer_unmap(buffer, &map);
    gst_sample_unref(sample);

    // Thread-safe hand-off to the GUI thread.
    QMetaObject::invokeMethod(viewer, "newFrameAvailable", Qt::QueuedConnection,
                              Q_ARG(QImage, frameCopy));

    return GST_FLOW_OK;
}
void VideoViewerWidget::displayFrame(const QImage& frame)
|
|
{
|
|
if (frame.isNull()) {
|
|
qDebug() << "[VideoViewer] displayFrame: Frame is null";
|
|
return;
|
|
}
|
|
|
|
static bool firstFrame = true;
|
|
static int frameCount = 0;
|
|
frameCount++;
|
|
|
|
bool debugThisFrame = firstFrame || (frameCount % 30 == 0);
|
|
|
|
if (firstFrame) {
|
|
qDebug() << "[VideoViewer] First frame received! Size:" << frame.width() << "x" << frame.height()
|
|
<< "Format:" << frame.format();
|
|
firstFrame = false;
|
|
} else if (frameCount % 30 == 0) {
|
|
qDebug() << "[VideoViewer] Frames received:" << frameCount;
|
|
}
|
|
|
|
m_currentFrame = frame;
|
|
|
|
QImage transformedFrame = frame;
|
|
|
|
// Apply rotation
|
|
if (m_rotationAngle != 0) {
|
|
QTransform rotation;
|
|
rotation.rotate(m_rotationAngle);
|
|
transformedFrame = transformedFrame.transformed(rotation, Qt::SmoothTransformation);
|
|
}
|
|
|
|
// Apply flipping
|
|
if (m_flipHorizontalEnabled || m_flipVerticalEnabled) {
|
|
Qt::Orientations orientations;
|
|
if (m_flipHorizontalEnabled) {
|
|
orientations |= Qt::Horizontal;
|
|
}
|
|
if (m_flipVerticalEnabled) {
|
|
orientations |= Qt::Vertical;
|
|
}
|
|
transformedFrame = transformedFrame.flipped(orientations);
|
|
}
|
|
|
|
QPixmap pixmap = QPixmap::fromImage(transformedFrame);
|
|
if (pixmap.isNull()) {
|
|
qDebug() << "[VideoViewer] ERROR: Pixmap conversion failed!";
|
|
return;
|
|
}
|
|
|
|
QSize targetSize = transformedFrame.size() * m_zoomFactor;
|
|
|
|
QPixmap scaledPixmap = pixmap.scaled(targetSize,
|
|
Qt::KeepAspectRatio,
|
|
Qt::SmoothTransformation);
|
|
|
|
if (debugThisFrame) {
|
|
qDebug() << "[VideoViewer] Target size:" << targetSize << "Zoom:" << m_zoomFactor
|
|
<< "Scaled pixmap:" << scaledPixmap.size();
|
|
}
|
|
|
|
m_videoDisplay->setPixmap(scaledPixmap);
|
|
m_videoDisplay->resize(scaledPixmap.size());
|
|
m_videoDisplay->update();
|
|
}
|