SVG5/qgstreamerplayer_old.cpp
2025-10-12 13:55:56 +09:00

2047 lines
60 KiB
C++

#include "qgstreamerplayer.h"
#include "qrtspthread.h"
#include "qvideowidget.h"
#include "mainwindow.h"
//#define __720P
// Global "send" flag read by the appsink callbacks. NOTE(review): every use
// visible in this file is inside disabled/empty branches — confirm whether
// this flag is still needed before removing.
bool g_bSend = false;
// Constructs the player: initializes GStreamer, wires the capture signals to
// the main window, builds a videotestsrc -> capsfilter -> queue -> tee ->
// appsink preview pipeline, and installs the appsink callbacks that deliver
// frames to the video widget.
QGstreamerPlayer::QGstreamerPlayer(QObject *parent) : QObject(parent)
{
    m_bCaptureImage = false;
    m_bWorkingRTSP = false;
    m_bSeeking = false;
    m_nFrameWidth = 0;
    m_nFrameHeight = 0;
    m_DataGST.m_bShow = false;
    m_DataGST.terminate = false;
    m_bPipeStart = false;

    // Initialize GStreamer without command-line arguments and disable its
    // SIGSEGV trap so plugin crashes are not intercepted by GStreamer.
    int argc = 0;
    gst_init (&argc, NULL);
    gst_segtrap_set_enabled(FALSE);

    MainWindow* pMainWindow = MainWindow::GetMainWindow();
    connect(this, SIGNAL(ExitCapture()), pMainWindow, SLOT(ExitCapture()));
    connect(this, SIGNAL(ErrorCapture()), pMainWindow, SLOT(ErrorCapture()));

    if (m_DataGST.source == NULL)
    {
        m_DataGST.source = gst_element_factory_make("videotestsrc", "source");
    }
    gst_element_set_state (m_DataGST.source, GST_STATE_READY);

    if (m_DataGST.m_pCaps == NULL)
    {
        m_DataGST.m_pCaps = gst_element_factory_make("capsfilter", "caps");
    }
    gst_element_set_state (m_DataGST.m_pCaps, GST_STATE_READY);

    // The queue is always (re)created; the original NULL guard was disabled.
    m_DataGST.queue_src = gst_element_factory_make ("queue", NULL);
    gst_element_set_state (m_DataGST.queue_src, GST_STATE_READY);

    if (m_DataGST.tee == NULL)
    {
        m_DataGST.tee = gst_element_factory_make ("tee", "teename");
    }
    gst_element_set_state (m_DataGST.tee, GST_STATE_READY);

    if (m_DataGST.sink == NULL)
    {
        m_DataGST.sink = gst_element_factory_make("appsink", "test_sink");
        if (m_DataGST.sink != NULL)
        {
            // gst_app_sink_set_callbacks() copies the callback struct, so a
            // stack instance suffices; the original malloc()'d copy leaked.
            GstAppSinkCallbacks appsink_callbacks = {};
            appsink_callbacks.eos = app_sink_eos;
            appsink_callbacks.new_preroll = app_sink_new_preroll;
#ifdef _GSTREAMER_1_0
            appsink_callbacks.new_sample = app_sink_new_sample;
#else
            appsink_callbacks.new_buffer = app_sink_new_sample;
#endif
            gst_app_sink_set_callbacks(GST_APP_SINK(m_DataGST.sink), &appsink_callbacks, (gpointer)&m_DataGST, NULL);
            // Render in sync with the clock and drop frames the app is too
            // slow to consume.
            g_object_set (m_DataGST.sink, "sync", TRUE, NULL);
            gst_app_sink_set_drop(GST_APP_SINK(m_DataGST.sink), TRUE);
        }
    }
    gst_element_set_state (m_DataGST.sink, GST_STATE_READY);

    m_DataGST.pipeline = gst_pipeline_new("test-pipeline");
    gst_bin_add_many(GST_BIN(m_DataGST.pipeline), m_DataGST.source, m_DataGST.m_pCaps, m_DataGST.queue_src, m_DataGST.tee, m_DataGST.sink, NULL);
    gboolean link_ok = gst_element_link_many (m_DataGST.source, m_DataGST.m_pCaps, m_DataGST.queue_src, m_DataGST.tee, m_DataGST.sink, NULL);
    if (!link_ok)
    {
        // Was a silent failure (`int a=0;`) — report it instead.
        g_printerr ("QGstreamerPlayer: failed to link preview pipeline elements.\n");
    }

    // Both branches of the original #ifdef __x86_64 built identical caps, so
    // the split was collapsed: 1920x1080 UYVY at 60 fps.
    GstCaps *caps = gst_caps_new_simple ("video/x-raw",
                                         "width", G_TYPE_INT, 1920,
                                         "height", G_TYPE_INT, 1080,
                                         "framerate", GST_TYPE_FRACTION, 60, 1,
                                         "format", G_TYPE_STRING, "UYVY",
                                         NULL);
    g_object_set(G_OBJECT(m_DataGST.m_pCaps), "caps", caps, NULL);
    // The capsfilter takes its own reference; drop ours (original leaked it).
    gst_caps_unref(caps);
}
// Tears down the Qt signal connections made in the constructor. GStreamer
// itself is deliberately left initialized (gst_deinit() stays disabled).
QGstreamerPlayer::~QGstreamerPlayer()
{
    MainWindow* const pWindow = MainWindow::GetMainWindow();
    disconnect(this, SIGNAL(ExitCapture()), pWindow, SLOT(ExitCapture()));
    disconnect(this, SIGNAL(ErrorCapture()), pWindow, SLOT(ErrorCapture()));
    //gst_deinit();
}
bool QGstreamerPlayer::IsPipeStart()
{
return m_bPipeStart;
}
// Plays a media file to completion on the CALLER'S thread: builds a
// filesrc ! decodebin ! ... ! appsink pipeline via gst_parse_launch(), sets it
// PLAYING, then blocks in a bus-message loop until an error or end-of-stream
// flips m_bTerminate, and finally emits SendFinished().
//
// @param strFilename  path of the movie file; assigned to the filesrc
//                     "location" property below.
//
// NOTE(review): several references are never released on the way out — `bus`
// is not unreffed, the malloc()'d appsink_callbacks struct is never freed
// (gst_app_sink_set_callbacks copies it, so the heap copy is unnecessary),
// and the pad from gst_element_get_static_pad() is unused and leaked.
// Confirm before cleaning up.
void QGstreamerPlayer::OpenMovieFile(QString strFilename)
{
// Reset cached frame geometry so the first sample re-reads it from the caps.
m_nFrameWidth = 0;
m_nFrameHeight = 0;
m_DataGST.m_strFilename = strFilename;
// pid_current is unused; presumably left over from debugging.
pid_t pid_current = getpid();
GstBus *bus;
GstStateChangeReturn ret;
GstMessage *msg;
GError *error = NULL;
gboolean bTrapEnabled = FALSE;
GstPad *queue_app_pad;
GstCaps *video_caps;
m_bTerminate = false;
/* init */
// Re-initializes GStreamer (harmless if already initialized) and disables the
// SIGSEGV trap, same as the constructor.
int argc = 0;
gst_init (&argc, NULL);
gst_segtrap_set_enabled(FALSE);
bTrapEnabled = gst_segtrap_is_enabled();
/*
m_DataGST.source = gst_element_factory_make ("videotestsrc", "source");
m_DataGST.bin = gst_element_factory_make ("decodebin", "bin");
m_DataGST.queue_src = gst_element_factory_make ("queue", "queue");
m_DataGST.sink = gst_element_factory_make ("appsink", "sink");
m_DataGST.pipeline = gst_pipeline_new ("test-pipeline");
gst_bin_add_many (GST_BIN (m_DataGST.pipeline), m_DataGST.source, m_DataGST.bin, m_DataGST.sink, NULL);
if (!gst_element_link (m_DataGST.source, m_DataGST.sink))
{
g_printerr ("Elements could not be linked.\n");
gst_object_unref (m_DataGST.pipeline);
return;
}
*/
// GStreamer 1.0 path decodes the file and scales to 1080p I420; the legacy
// path pulls an RTSP stream through the i.MX vpudec decoder instead.
#ifdef _GSTREAMER_1_0
//m_DataGST.pipeline = gst_parse_launch("filesrc name=test_src ! queue name=queue_src ! decodebin ! nvvidconv ! video/x-raw,format=I420,width=1920,height=1080 ! queue name=queue_sink ! appsink name=test_sink", &error);
m_DataGST.pipeline = gst_parse_launch("filesrc name=test_src ! queue name=queue_src ! decodebin ! tee name=teename teename. ! videoconvert ! videoscale ! video/x-raw,format=I420,width=1920,height=1080 ! queue name=queue_sink ! appsink name=test_sink", &error);
#else
m_DataGST.pipeline = gst_parse_launch("rtspsrc name=test_src location=rtsp://192.168.11.110:8554/test123 latency=0 ! rtph264depay ! queue name=queue_src ! vpudec ! queue name=queue_sink ! appsink name=test_sink max-lateness=-1", &error);
#endif
if(error!=NULL)
{
g_printerr ("Can not Create Pipeline.\n");
return;
}
// Look up the named elements created by gst_parse_launch above.
m_DataGST.source = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_src");
m_DataGST.sink = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_sink");
m_DataGST.queue_src = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_src");
m_DataGST.queue_sink = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_sink");
m_DataGST.tee = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "teename");
m_DataGST.pWidget = m_pVideoWidget;
m_DataGST.pPlayer = this;
//test!!!
//gst_base_sink_set_last_sample_enabled(GST_BASE_SINK(m_DataGST.sink), true);
// NOTE(review): "pad-added" only fires on elements with dynamic pads (e.g.
// rtspsrc); on the filesrc path this connect is presumably a no-op — confirm.
g_signal_connect (m_DataGST.source, "pad-added", G_CALLBACK (QGstreamerPlayer::pad_added_handler), &m_DataGST);
GstPad* pad = gst_element_get_static_pad (m_DataGST.sink, "sink");
if (!m_DataGST.pipeline || !m_DataGST.source || !m_DataGST.queue_src || !m_DataGST.sink) {
g_printerr ("Not all elements could be created.\n");
return;
}
if(m_DataGST.queue_src!=NULL)
{
// Cap the source queue to 10 buffers to limit latency/memory.
g_object_set (m_DataGST.queue_src, "max-size-buffers", 10, NULL);
}
if(m_DataGST.source!=NULL)
{
g_object_set (m_DataGST.source, "location", strFilename.toStdString().c_str(), NULL);
//g_object_set (m_DataGST.source, "location", "rtsp://192.168.11.110:8554/test123", NULL);
}
if(m_DataGST.sink!=NULL)
{
// Install the frame-delivery callbacks; &m_DataGST is the user_data passed
// to each callback.
GstAppSinkCallbacks* appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
appsink_callbacks->eos = app_sink_eos;
appsink_callbacks->new_preroll = app_sink_new_preroll;
#ifdef _GSTREAMER_1_0
appsink_callbacks->new_sample = app_sink_new_sample;
#else
appsink_callbacks->new_buffer = app_sink_new_sample;
#endif
//test
gst_app_sink_set_callbacks(GST_APP_SINK(m_DataGST.sink), appsink_callbacks, (gpointer)&m_DataGST, NULL);
//g_signal_connect (m_DataGST.source, "new-buffer", G_CALLBACK (new_buffer), &m_DataGST);
// Clock-synchronized rendering; drop frames the app can't keep up with.
g_object_set (m_DataGST.sink, "sync", TRUE, NULL);
gst_app_sink_set_drop(GST_APP_SINK(m_DataGST.sink), TRUE);
}
//queue_app_pad = gst_element_get_static_pad (m_DataGST.app_queue, "sink");
//sigint_setup();
ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING);
//ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (m_DataGST.pipeline);
return;
}
bus = gst_element_get_bus (m_DataGST.pipeline);
int64_t tEnd=0;
// Blocking bus loop: waits forever for the filtered message set, logs the
// stream duration once it becomes queryable, and exits when m_bTerminate is
// set by an ERROR or EOS message.
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
(GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_STEP_DONE | GST_MESSAGE_STEP_START | GST_MESSAGE_ASYNC_DONE));
usleep(100);
if(tEnd==0)
{
// Query the total duration once (0 means "not known yet").
gst_element_query_duration (m_DataGST.pipeline, GST_FORMAT_TIME, &tEnd);
if(tEnd!=0)
{
// Convert the nanosecond duration to h:m:s for the debug log.
int nHour = 0;
int nMinute = 0;
int nSec = 0;
int nMilliSec = tEnd/(1000*1000);
nSec = nMilliSec/1000;
nMinute = nSec/60;
nHour = nMinute/60;
nSec = nSec%60;
nMinute = nMinute%60;
nHour = nHour;
// NOTE(review): (nMilliSec/1000)%1000 yields seconds%1000, not the
// millisecond remainder — the logged last field looks wrong; confirm.
nMilliSec = (nMilliSec/1000)%1000;
qDebug() << "Duration: " << nHour << ":" << nMinute << ":" << nSec << ":" << nMilliSec ;
//m_bTerminate = true;
// Blocks until any pending state change completes.
GstState state;
GstState pending;
gst_element_get_state(m_DataGST.pipeline, &state, &pending, GST_CLOCK_TIME_NONE);
}
}
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
// Fatal: log and leave the loop.
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
m_bTerminate = true;
break;
case GST_MESSAGE_EOS:
// Normal end of playback.
g_print ("End-Of-Stream reached.\n");
{
m_bTerminate = true;
}
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (m_DataGST.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
if(new_state==GST_STATE_PAUSED && old_state==GST_STATE_PLAYING)
{
//m_bTerminate = true;
}
}
break;
case GST_MESSAGE_STEP_DONE:
{
// A frame-step finished; pull and display the stepped frame manually.
GstFormat format;
guint64 amount;
gdouble rate;
gboolean flush, intermediate;
guint64 duration;
gboolean eos;
gst_message_parse_step_done (msg, &format, &amount, &rate,
&flush, &intermediate, &duration, &eos);
app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST);
}
break;
case GST_MESSAGE_STEP_START:
{
int a=0;
}
break;
case GST_MESSAGE_ASYNC_DONE:
{
// Async state change (e.g. a seek) completed; clear the seeking flag.
qDebug() << "Async Done";
//app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST);
m_bSeeking = false;
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (m_bTerminate==false);
// Playback finished (error or EOS): notify listeners.
SendFinished();
}
bool QGstreamerPlayer::IsWorkingRTSP()
{
return m_bWorkingRTSP;
}
// Opens the capture device by delegating to OpenCamera2().
//
// The original body below the first statement (an inline pipeline setup and
// bus loop, ~225 lines) was unreachable dead code: the function returned on
// its first line. It has been removed; OpenCamera2() holds the live
// implementation.
//
// @param strVideoDevice  device identifier (e.g. "/dev/video0"), forwarded
//                        unchanged to OpenCamera2().
// @return OpenCamera2()'s result: 0 on success, negative on failure.
int QGstreamerPlayer::OpenCamera(QString strVideoDevice)
{
    return OpenCamera2(strVideoDevice);
}
// Runs a live capture session on the CALLER'S thread: builds a capture
// pipeline via gst_parse_launch() (videotestsrc on x86_64 builds, v4l2src or
// rtspsrc otherwise), pauses it, installs appsink callbacks, then blocks in a
// bus-message loop until an error/EOS (or m_DataGST.terminate) stops it, and
// finally tears the pipeline down and emits ExitCapture().
//
// @param strVideoDevice  device identifier; stored in m_DataGST.m_strFilename.
//        NOTE(review): the "device" g_object_set is commented out, so the
//        pipeline string's hard-coded device is what is actually used —
//        confirm this is intended.
// @return 0 on success; -1 parse failure, -2 missing elements, -3 state
//         change failure.
//
// NOTE(review): teardown below removes each element from the bin BEFORE
// setting it to NULL state and unrefs elements already owned by the bin; the
// ordering/ref accounting looks suspect but is behavior-sensitive — confirm
// before changing. `bus` IS unreffed here (unlike OpenMovieFile).
int QGstreamerPlayer::OpenCamera2(QString strVideoDevice)
{
m_bWorkingRTSP = true;
// Reset cached frame geometry so the first sample re-reads it from the caps.
m_nFrameWidth = 0;
m_nFrameHeight = 0;
m_DataGST.m_strFilename = strVideoDevice;
// pid_current is unused; presumably left over from debugging.
pid_t pid_current = getpid();
GstBus *bus;
GstStateChangeReturn ret;
GstMessage *msg;
GError *error = NULL;
gboolean bTrapEnabled = FALSE;
GstPad *queue_app_pad;
GstCaps *video_caps;
m_bTerminate = false;
/* init */
//int argc = 0;
//gst_init (&argc, NULL);
//gst_segtrap_set_enabled(FALSE);
bTrapEnabled = gst_segtrap_is_enabled();
// nSize is unused; the pipeline dimensions below are hard-coded.
QSize nSize = m_pVideoWidget->size();
QString strTest = QString("v4l2src device=/dev/video0 name=test_src ! video/x-raw,format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1 ! queue name=queue_src ! videorate name=test_videorate ! queue name=queue_name1 ! timeoverlay name=test_timeoverlay ! tee name=teename ! queue name=queue_name2 ! appsink name=test_sink");
// Pipeline selection: x86_64 dev builds use videotestsrc (or 720p v4l2src
// under __720P); device builds use v4l2src; legacy GStreamer 0.10 builds
// pull RTSP through vpudec.
#ifdef _GSTREAMER_1_0
#ifdef __x86_64
strTest = QString("videotestsrc name=test_src ! video/x-raw,format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1 ! queue name=queue_src ! videorate name=test_videorate ! queue name=queue_name1 ! timeoverlay name=test_timeoverlay ! tee name=teename ! queue name=queue_name2 ! appsink name=test_sink");
#ifdef __720P
m_DataGST.pipeline = gst_parse_launch("v4l2src name=test_src ! video/x-raw,format=(string)YUY2,width=1280,height=720,framerate=(fraction)60/1 ! queue name=queue_src ! tee name=teename teename. ! queue ! appsink name=test_sink", &error);
#else
//m_DataGST.pipeline = gst_parse_launch("v4l2src name=test_src ! 'video/x-raw,format=(string)YUY2,width=1920,height=1080,framerate=(fraction)120/1' ! queue name=queue_src ! tee name=teename teename. ! queue ! appsink name=test_sink", &error);
//m_DataGST.pipeline = gst_parse_launch("videotestsrc name=test_src ! video/x-raw,format=(string)YUY2,width=1920,height=1080,framerate=(fraction)60/1 ! queue name=queue_src ! videorate name=test_videorate ! clockoverlay ! appsink name=test_sink", &error);
m_DataGST.pipeline = gst_parse_launch(strTest.toStdString().c_str(), &error);
#endif
#else
//strTest = QString("videotestsrc name=test_src ! video/x-raw,format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1 ! queue name=queue_src ! videorate name=test_videorate ! queue name=queue_name1 ! timeoverlay name=test_timeoverlay ! tee name=teename ! queue name=queue_name2 ! appsink name=test_sink");
strTest = QString("v4l2src name=test_src ! video/x-raw,format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1 ! queue name=queue_src ! tee name=teename ! queue name=queue_sink ! appsink name=test_sink");
m_DataGST.pipeline = gst_parse_launch(strTest.toStdString().c_str(), &error);
/*
m_DataGST.pipeline = gst_parse_launch("v4l2src name=test_src ! video/x-raw,width=1920,height=1080,framerate=(fraction)60/1,format=(string)YUY2 ! queue name=queue_src ! tee name=teename teename. ! queue ! videoconvert ! queue ! appsink name=test_sink ", &error);
//m_DataGST.pipeline = gst_parse_launch("videotestsrc name=test_src ! video/x-raw,format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1 ! queue name=queue_src ! appsink name=test_sink", &error);
*/
#endif
#else
m_DataGST.pipeline = gst_parse_launch("rtspsrc name=test_src location=rtsp://192.168.11.110:8554/test123 latency=0 ! rtph264depay ! queue name=queue_src ! vpudec ! queue name=queue_sink ! appsink name=test_sink max-lateness=-1", &error);
#endif
if(error!=NULL)
{
g_printerr ("Can not Create Pipeline.\n");
m_bWorkingRTSP = false;
ErrorCapture();
return -1;
}
/*
GstElement* pVideoRate = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_videorate");
GstElement* pTimeOverlay = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_timeoverlay");
GstElement* pQueue1 = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_name1");
GstElement* pQueue2 = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_name2");
*/
// Look up the named elements created by gst_parse_launch above.
m_DataGST.source = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_src");
m_DataGST.sink = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_sink");
m_DataGST.queue_src = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_src");
m_DataGST.queue_sink = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_sink");
m_DataGST.tee = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "teename");
m_DataGST.pWidget = m_pVideoWidget;
m_DataGST.pPlayer = this;
// Enable frame display in the appsink callbacks.
m_DataGST.m_bShow = true;
//test!!!
//gst_base_sink_set_last_sample_enabled(GST_BASE_SINK(m_DataGST.sink), true);
// NOTE(review): "pad-added" only fires on dynamic-pad sources (rtspsrc);
// presumably a no-op for v4l2src/videotestsrc — confirm.
g_signal_connect (m_DataGST.source, "pad-added", G_CALLBACK (QGstreamerPlayer::pad_added_handler), &m_DataGST);
GstPad* pad = gst_element_get_static_pad (m_DataGST.sink, "sink");
//if (!m_DataGST.pipeline || !m_DataGST.source || !m_DataGST.queue_src || !m_DataGST.sink) {
if (!m_DataGST.pipeline || !m_DataGST.source || !m_DataGST.sink) {
g_printerr ("Not all elements could be created.\n");
m_bWorkingRTSP = false;
ErrorCapture();
return -2;
}
if(m_DataGST.queue_src!=NULL)
{
// Cap the source queue to 10 buffers to limit latency/memory.
g_object_set (m_DataGST.queue_src, "max-size-buffers", 10, NULL);
}
if(m_DataGST.source!=NULL)
{
//g_object_set (m_DataGST.source, "device", strVideoDevice.toStdString().c_str(), NULL);
}
// Install the frame-delivery callbacks; &m_DataGST is the user_data.
GstAppSinkCallbacks* appsink_callbacks = NULL;
if(m_DataGST.sink!=NULL)
{
appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
appsink_callbacks->eos = app_sink_eos;
appsink_callbacks->new_preroll = app_sink_new_preroll;
#ifdef _GSTREAMER_1_0
appsink_callbacks->new_sample = app_sink_new_sample;
#else
appsink_callbacks->new_buffer = app_sink_new_sample;
#endif
//test
gst_app_sink_set_callbacks(GST_APP_SINK(m_DataGST.sink), appsink_callbacks, (gpointer)&m_DataGST, NULL);
//g_signal_connect (m_DataGST.source, "new-buffer", G_CALLBACK (new_buffer), &m_DataGST);
//g_object_set (m_DataGST.sink, "sync", FALSE, NULL);
// Clock-synchronized rendering; drop frames the app can't keep up with.
g_object_set (m_DataGST.sink, "sync", TRUE, NULL);
gst_app_sink_set_drop(GST_APP_SINK(m_DataGST.sink), TRUE);
//gst_app_sink_set_drop(GST_APP_SINK(m_DataGST.sink), FALSE);
}
//queue_app_pad = gst_element_get_static_pad (m_DataGST.app_queue, "sink");
//sigint_setup();
//ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING);
// Start PAUSED; something else (preroll/seek/step logic) drives PLAYING.
ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (m_DataGST.pipeline);
m_bPipeStart = false;
m_bWorkingRTSP = false;
//ErrorCapture();
ExitCapture();
return -3;
}
bus = gst_element_get_bus (m_DataGST.pipeline);
m_bPipeStart = true;
int64_t tEnd=0;
// Blocking bus loop; exits when an ERROR/EOS message or the callbacks
// (via m_DataGST.terminate) request termination.
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
(GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_STEP_DONE | GST_MESSAGE_STEP_START | GST_MESSAGE_ASYNC_DONE));
if(tEnd==0)
{
// Live sources normally have no duration; logged only if one appears.
gst_element_query_duration (m_DataGST.pipeline, GST_FORMAT_TIME, &tEnd);
if(tEnd!=0)
{
// Convert the nanosecond duration to h:m:s for the debug log.
int nHour = 0;
int nMinute = 0;
int nSec = 0;
int nMilliSec = tEnd/(1000*1000);
nSec = nMilliSec/1000;
nMinute = nSec/60;
nHour = nMinute/60;
nSec = nSec%60;
nMinute = nMinute%60;
nHour = nHour;
// NOTE(review): (nMilliSec/1000)%1000 is seconds%1000, not milliseconds.
nMilliSec = (nMilliSec/1000)%1000;
qDebug() << "Duration: " << nHour << ":" << nMinute << ":" << nSec << ":" << nMilliSec ;
//m_bTerminate = true;
// Blocks until any pending state change completes.
GstState state;
GstState pending;
gst_element_get_state(m_DataGST.pipeline, &state, &pending, GST_CLOCK_TIME_NONE);
}
}
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
// Fatal: log and leave the loop.
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
m_bTerminate = true;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
{
//gst_message_unref (msg);
m_bTerminate = true;
}
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (m_DataGST.pipeline)) {
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
if(new_state==GST_STATE_PAUSED && old_state==GST_STATE_PLAYING)
{
//m_bTerminate = true;
}
}
break;
case GST_MESSAGE_STEP_DONE:
{
// A frame-step finished; pull and display the stepped frame manually.
GstFormat format;
guint64 amount;
gdouble rate;
gboolean flush, intermediate;
guint64 duration;
gboolean eos;
gst_message_parse_step_done (msg, &format, &amount, &rate,
&flush, &intermediate, &duration, &eos);
app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST);
}
break;
case GST_MESSAGE_STEP_START:
{
int a=0;
}
break;
case GST_MESSAGE_ASYNC_DONE:
{
// Async state change (e.g. a seek) completed; clear the seeking flag.
qDebug() << "Async Done";
//app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST);
m_bSeeking = false;
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
// Callbacks (app_sink_eos / app_sink_new_sample) signal shutdown through
// m_DataGST.terminate; mirror it into the loop condition.
if(m_DataGST.terminate==true)
{
m_bTerminate = true;
}
}
} while (m_bTerminate==false);
/*
if(pVideoRate!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), pVideoRate);
gst_element_set_state (pVideoRate, GST_STATE_NULL);
gst_object_unref (pVideoRate);
}
if(pTimeOverlay!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), pTimeOverlay);
gst_element_set_state (pTimeOverlay, GST_STATE_NULL);
gst_object_unref (pTimeOverlay);
}
if(pQueue1!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), pQueue1);
gst_element_set_state (pQueue1, GST_STATE_NULL);
gst_object_unref (pQueue1);
}
if(pQueue2!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), pQueue2);
gst_element_set_state (pQueue2, GST_STATE_NULL);
gst_object_unref (pQueue2);
}
*/
// Teardown. NOTE(review): elements are removed from the bin first, then set
// to NULL state and unreffed; gst_bin_get_by_name() returned an extra ref
// for each, which this releases, but removing before the NULL-state
// transition is unconventional — verify against GStreamer docs.
if(m_DataGST.source!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.source);
gst_element_set_state (m_DataGST.source, GST_STATE_NULL);
gst_object_unref (m_DataGST.source);
}
if(m_DataGST.tee!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.tee);
gst_element_set_state (m_DataGST.tee, GST_STATE_NULL);
gst_object_unref (m_DataGST.tee);
}
if(m_DataGST.queue_src!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.queue_src);
gst_element_set_state (m_DataGST.queue_src, GST_STATE_NULL);
gst_object_unref (m_DataGST.queue_src);
}
if(m_DataGST.queue_sink!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.queue_sink);
gst_element_set_state (m_DataGST.queue_sink, GST_STATE_NULL);
gst_object_unref (m_DataGST.queue_sink);
}
if(m_DataGST.sink!=NULL)
{
gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.sink);
gst_element_set_state (m_DataGST.sink, GST_STATE_NULL);
gst_object_unref (m_DataGST.sink);
}
if(m_DataGST.pipeline!=NULL)
{
gst_element_set_state (m_DataGST.pipeline, GST_STATE_NULL);
gst_object_unref (m_DataGST.pipeline);
}
gst_object_unref (bus);
// Safe if the appsink branch never ran: free(NULL) is a no-op.
free(appsink_callbacks);
m_bPipeStart = false;
m_bTerminate = false;
m_DataGST.terminate = false;
//gst_deinit();
ExitCapture();
return 0;
}
// appsink "eos" callback: flags the owning bus loop to terminate and, if a
// recording branch (pData->m_pSink) is marked for removal, dismantles that
// branch — removing its elements from the pipeline, nulling their states,
// releasing the requested tee pad, and freeing the branch record.
//
// NOTE(review): elements are removed from the bin BEFORE being set to NULL
// state, and gst_bin_remove itself drops the bin's reference — the explicit
// unrefs after it assume an extra reference is held elsewhere. The ordering
// looks fragile; confirm against GStreamer's dynamic-pipeline docs before
// changing.
void QGstreamerPlayer::app_sink_eos(GstAppSink *sink, gpointer user_data)
{
GST_CustomData* pData = (GST_CustomData*)user_data;
// Tell the blocking bus loop (OpenCamera2/OpenMovieFile) to exit.
pData->terminate = true;
qDebug() << "Enter EOS";
if(pData->m_pSink!=NULL)
{
if(pData->m_pSink->removing==TRUE)
{
// Detach every element of the recording branch from the pipeline.
gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueue);
gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pRate);
gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pCaps);
gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pConvert);
gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pSink);
gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pEncoder);
gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pParser);
// Shut the detached elements down.
gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_NULL);
gst_element_set_state (pData->m_pSink->m_pRate, GST_STATE_NULL);
gst_element_set_state (pData->m_pSink->m_pCaps, GST_STATE_NULL);
gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL);
gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL);
gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL);
gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL);
// Release the references held by the branch record.
gst_object_unref (pData->m_pSink->m_pQueue);
gst_object_unref (pData->m_pSink->m_pRate);
gst_object_unref (pData->m_pSink->m_pCaps);
gst_object_unref (pData->m_pSink->m_pConvert);
gst_object_unref (pData->m_pSink->m_pSink);
gst_object_unref (pData->m_pSink->m_pEncoder);
gst_object_unref (pData->m_pSink->m_pParser);
// Return the request pad obtained from the tee, then drop our ref to it.
gst_element_release_request_pad (pData->tee, pData->m_pSink->m_pTeePad);
gst_object_unref (pData->m_pSink->m_pTeePad);
g_free(pData->m_pSink);
pData->m_pSink = NULL;
}
//else
{
}
}
}
// appsink "new-preroll" callback: pulls the preroll sample, caches the
// negotiated frame size on first use, hands the raw frame bytes to the video
// widget (or dumps them to disk when no widget is attached), and releases the
// GStreamer resources it acquired.
//
// Fixes vs. the original:
//  - width/height were used even when gst_structure_get_int() failed,
//    leaving them uninitialized; now only applied when both reads succeed.
//  - the debug fwrite wrote a fixed 1280*720 bytes regardless of buffer
//    size (potential over-read) and never checked fopen(); both fixed.
//  - removed the dead NULL-check that ran after `sample` was already
//    dereferenced, plus unused locals and empty disabled branches.
GstFlowReturn QGstreamerPlayer::app_sink_new_preroll(GstAppSink *sink, gpointer user_data)
{
    qDebug() << "preroll" ;
    GST_CustomData* pd = (GST_CustomData*)user_data;

    // NOTE(review): the thread-state query had no live effect in the original
    // (its branch was disabled); kept in case GetState() has side effects.
    int nState = pd->pThread->GetState();
    (void)nState;

    GstSample* sample = gst_app_sink_pull_preroll(sink);
    if (sample == NULL)
    {
        qDebug() << "app_sink_new_proroll ERROR";
        return GST_FLOW_OK;
    }

    // Cache the negotiated frame geometry the first time a sample arrives.
    QGstreamerPlayer* pPlayer = (QGstreamerPlayer*)pd->pPlayer;
    if (pPlayer->GetFrameWidth() == 0 || pPlayer->GetFrameHeight() == 0)
    {
        GstCaps* pCaps = gst_sample_get_caps(sample);
        GstStructure* s = gst_caps_get_structure (pCaps, 0);
        gint width = 0, height = 0;
        if (gst_structure_get_int (s, "width", &width) && gst_structure_get_int (s, "height", &height))
        {
            pPlayer->SetFrameWidth(width);
            pPlayer->SetFrameHeight(height);
            pd->pWidget->SetTextureSize(width, height);
        }
    }

    GstBuffer* buffer = gst_sample_get_buffer(sample);
    GstMemory* memory = gst_buffer_get_all_memory(buffer);
    GstMapInfo map_info;
    if (!gst_memory_map(memory, &map_info, GST_MAP_READ))
    {
        gst_memory_unref(memory);
        gst_sample_unref(sample);
        usleep(10*1000);
        return GST_FLOW_ERROR;
    }

    guint8* pData = map_info.data;
    if (pd->pWidget != NULL)
    {
        // Ignore implausibly small buffers (e.g. stub/empty frames).
        if (map_info.size > 4096)
        {
            pd->pWidget->SetTextureData(pData, map_info.size);
        }
    }
    else
    {
        // Debug fallback when no widget is attached: dump the frame to disk,
        // clamped to the mapped size so we never read past the buffer.
        FILE* pFile = fopen("test.data", "wb");
        if (pFile != NULL)
        {
            size_t nWrite = map_info.size < (size_t)(1280*720) ? map_info.size : (size_t)(1280*720);
            fwrite(pData, nWrite, 1, pFile);
            fclose(pFile);
        }
    }

    gst_memory_unmap(memory, &map_info);
    gst_memory_unref(memory);
    gst_sample_unref(sample);
    usleep(1*1000);
    return GST_FLOW_OK;
}
GstFlowReturn QGstreamerPlayer::app_sink_new_sample(GstAppSink *sink, gpointer user_data)
{
    // appsink "new-sample" callback (runs on the GStreamer streaming thread):
    // pulls the next decoded frame and hands the raw bytes to the video widget.
    // When the owner has requested shutdown (m_bShow == false) it instead tears
    // the pipeline down and stops the appsink by returning GST_FLOW_EOS.
    //
    // Cleanup vs. the original: removed an unreachable `if (sample == NULL)`
    // re-check that followed the already-validated (and dereferenced) sample,
    // unused probes of pThread->GetState() / g_bSend / last-sample state, and
    // dead `int a = 0;` placeholder branches. Behavior is otherwise unchanged.
    GST_CustomData* pd = (GST_CustomData*)user_data;
    if(pd->m_bShow==false)
    {
        // Shutdown path: detach the source, drop the pipeline to NULL and
        // release our references, then report EOS so no more samples arrive.
        pd->terminate = true;
        gst_bin_remove (GST_BIN (pd->pipeline), pd->source);
        gst_element_set_state (pd->source, GST_STATE_NULL);
        gst_object_unref (pd->source);
        gst_element_set_state (pd->pipeline, GST_STATE_NULL);
        gst_object_unref (pd->pipeline);
        return GST_FLOW_EOS;
    }
    GstSample* sample = gst_app_sink_pull_sample(sink);
    if(sample==NULL)
    {
        qDebug() << "app_sink_new_sample ERROR";
        return GST_FLOW_CUSTOM_ERROR;
    }
    QGstreamerPlayer* pPlayer = (QGstreamerPlayer*)pd->pPlayer;
    if(pPlayer->GetFrameWidth()==0 || pPlayer->GetFrameHeight()==0)
    {
        // First frame only: read the negotiated caps and size the texture.
        GstCaps* pCaps = gst_sample_get_caps(sample);
        GstStructure* s = gst_caps_get_structure (pCaps, 0);
        gint width = 0, height = 0;
        gst_structure_get_int (s, "width", &width);
        gst_structure_get_int (s, "height", &height);
        pPlayer->SetFrameWidth(width);
        pPlayer->SetFrameHeight(height);
        pd->pWidget->SetTextureSize(width, height);
    }
    GstBuffer* buffer = gst_sample_get_buffer(sample);
    GstMemory* memory = gst_buffer_get_all_memory(buffer);
    GstMapInfo map_info;
    if(! gst_memory_map(memory, &map_info, GST_MAP_READ)) {
        // Mapping failed: release everything and back off briefly.
        gst_memory_unref(memory);
        gst_sample_unref(sample);
        usleep(10*1000);
        return GST_FLOW_ERROR;
    }
    // Frames smaller than 4 KiB are treated as garbage and dropped.
    if(pd->pWidget!=NULL && pd->m_bShow==true && map_info.size>4096)
    {
        pd->pWidget->SetTextureData(map_info.data, map_info.size);
    }
    gst_memory_unmap(memory, &map_info);
    gst_memory_unref(memory);
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}
void QGstreamerPlayer::pad_added_handler (GstElement *src, GstPad *new_pad, GST_CustomData *data) {
    // "pad-added" handler: when the source exposes a new pad, link it to the
    // sink's static "sink" pad — but only if we are not already linked and the
    // pad carries raw audio. Caps and the sink pad are released on every path.
    GstPad *target_pad = gst_element_get_static_pad (data->sink, "sink");
    GstCaps *pad_caps = NULL;

    g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

    if (gst_pad_is_linked (target_pad)) {
        g_print (" We are already linked. Ignoring.\n");
    } else {
        // Inspect the new pad's type before attempting the link.
        pad_caps = gst_pad_get_allowed_caps (new_pad);
        GstStructure *pad_struct = gst_caps_get_structure (pad_caps, 0);
        const gchar *pad_type = gst_structure_get_name (pad_struct);
        if (!g_str_has_prefix (pad_type, "audio/x-raw")) {
            g_print (" It has type '%s' which is not raw audio. Ignoring.\n", pad_type);
        } else if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, target_pad))) {
            g_print (" Type is '%s' but link failed.\n", pad_type);
        } else {
            g_print (" Link succeeded (type '%s').\n", pad_type);
        }
    }

    // Release the caps (if fetched) and the sink pad reference.
    if (pad_caps != NULL)
        gst_caps_unref (pad_caps);
    gst_object_unref (target_pad);
}
// Stores the RTSP worker thread inside the shared callback data so the static
// appsink callbacks can query it (they call pThread->GetState()).
void QGstreamerPlayer::SetThread(QRTSPThread *pThread)
{
    m_DataGST.pThread = pThread;
}
// Registers the render target. Kept both as a member and inside m_DataGST so
// the static appsink callbacks can reach the widget through their user_data.
void QGstreamerPlayer::SetVideoWidget(QVideoWidget *pWidget)
{
    m_pVideoWidget = pWidget;
    m_DataGST.pWidget = pWidget;
}
void QGstreamerPlayer::Seek(gint64 nPos)
{
    // Issues a flushing, accurate seek to nPos (GST_FORMAT_TIME, nanoseconds).
    // While a previous seek is still in flight (m_bSeeking), new targets are
    // queued in m_ListSeeking to be drained once the current seek completes.
    //
    // Cleanup vs. the original: removed an unused `GstEvent *seek_event;`
    // declaration and the commented-out gst_event_new_seek experiments.
    m_DataGST.rate = 1.0;
    if(m_bSeeking==false)
    {
        m_nSeekPos = nPos;
        qDebug() << nPos;
        m_bSeeking = true;
        gst_element_seek_simple(m_DataGST.pipeline, GST_FORMAT_TIME,
                                (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE), nPos);
        g_bSend = true;  // global flag observed by the appsink callbacks
    }
    else
    {
        m_ListSeeking.push_back(nPos);
    }
}
// Pauses playback; Resume() puts the pipeline back to PLAYING.
void QGstreamerPlayer::Pause()
{
    gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED);
}
// Starts (or resumes) streaming and brings the video widget to the front.
// NOTE(review): m_bShow is set AFTER the pipeline goes PLAYING; the appsink
// callback tears the pipeline down when it sees m_bShow == false, so a frame
// arriving in between could trigger teardown — confirm intended ordering.
void QGstreamerPlayer::StartRTSP()
{
    gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING);
    m_DataGST.m_bShow = true;
    m_pVideoWidget->setVisible(true);
    m_pVideoWidget->activateWindow();
    m_pVideoWidget->setFocus(Qt::ActiveWindowFocusReason);
    m_pVideoWidget->update();
    return;
}
void QGstreamerPlayer::StopRTSP()
{
    // "Stop" currently only pauses the pipeline so playback can be resumed.
    // The EOS/terminate sequence that used to follow was unreachable (it sat
    // after an unconditional `return;`) and has been removed; full teardown
    // lives in exitRTSP().
    gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED);
}
// Begins full teardown: disables rendering, then pushes EOS through a PLAYING
// pipeline so downstream elements can finalize. The appsink callback completes
// the destruction when it next runs and sees m_bShow == false.
void QGstreamerPlayer::exitRTSP()
{
    m_DataGST.m_bShow = false;
    m_bTerminate = true;
    // Forced to PLAYING first — presumably so the EOS event propagates;
    // the fixed 100 ms sleep gives the state change time to take effect.
    gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING);
    usleep(100*1000);
    gst_element_send_event (m_DataGST.pipeline, gst_event_new_eos ());
    return;
    //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED);
}
// Resumes playback after Pause().
void QGstreamerPlayer::Resume()
{
    gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING);
}
// True while a seek issued by Seek() has not yet been marked complete.
bool QGstreamerPlayer::IsSeeking()
{
    return m_bSeeking;
}
// Target position of the seek currently in flight (GST_FORMAT_TIME units,
// as passed to Seek()).
gint64 QGstreamerPlayer::GetSeekPos()
{
    return m_nSeekPos;
}
// Records the negotiated frame width (set once from caps by the appsink callbacks).
void QGstreamerPlayer::SetFrameWidth(int nWidth)
{
    m_nFrameWidth = nWidth;
}
// Records the negotiated frame height (set once from caps by the appsink callbacks).
void QGstreamerPlayer::SetFrameHeight(int nHeight)
{
    m_nFrameHeight = nHeight;
}
// Negotiated frame width in pixels; 0 until the first sample's caps are read.
int QGstreamerPlayer::GetFrameWidth()
{
    return m_nFrameWidth;
}
// Negotiated frame height in pixels; 0 until the first sample's caps are read.
int QGstreamerPlayer::GetFrameHeight()
{
    return m_nFrameHeight;
}
void QGstreamerPlayer::SaveFile(QString strFilename)
{
    // Attaches a recording branch to the live pipeline through a requested tee
    // src pad and brings it up to the pipeline's state:
    //   x86_64 : queue ! videoconvert ! x264enc ! matroskamux ! filesink
    //   other  : queue ! videoconvert ! nvv4l2h265enc ! capsfilter ! h265parse
    //            ! matroskamux ! filesink
    // The branch is detached and finalized by SaveEnd().
    //
    // Fixes vs. the original: the x86 `g_object_set(..., "bitrate", 10000)`
    // call was missing its NULL varargs terminator (undefined behavior), the
    // caps created for the capsfilter were leaked, and an unused `link_ok`
    // declaration was removed / the assigned one now reports link failure.
#ifdef __x86_64
    CustomDataSink *sink = g_new0 (CustomDataSink, 1);
    GstPad *sinkpad;
    GstPadTemplate *templ;
    templ = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (m_DataGST.tee), "src_%u");
    g_print ("add\n");
    sink->m_pTeePad = gst_element_request_pad (m_DataGST.tee, templ, NULL, NULL);
    sink->m_pQueue = gst_element_factory_make ("queue", NULL);
    sink->m_pConvert = gst_element_factory_make ("videoconvert", NULL);
    sink->m_pSink = gst_element_factory_make ("filesink", NULL);
    sink->m_pEncoder = gst_element_factory_make("x264enc", NULL);
    // NOTE(review): the member is named "parser" but holds the matroskamux
    // muxer on this platform.
    sink->m_pParser = gst_element_factory_make("matroskamux", NULL);
    sink->removing = FALSE;
    // Extra refs are taken because SaveEnd() removes the elements from the bin
    // (which drops the bin's ref) and then unrefs them itself.
    gst_bin_add_many (GST_BIN (m_DataGST.pipeline), (GstElement*)gst_object_ref (sink->m_pQueue),
                      gst_object_ref (sink->m_pConvert),
                      gst_object_ref (sink->m_pEncoder), gst_object_ref (sink->m_pParser),
                      gst_object_ref (sink->m_pSink), NULL);
    gst_element_link_many (sink->m_pQueue, sink->m_pConvert, sink->m_pEncoder, sink->m_pParser,
                           sink->m_pSink, NULL);
    g_object_set (sink->m_pSink, "location", strFilename.toStdString().c_str(), NULL);
    g_object_set (sink->m_pEncoder, "bitrate", 10240, NULL);
    g_object_set (sink->m_pEncoder, "byte-stream", true, NULL);
    g_object_set (sink->m_pEncoder, "speed-preset", 1, NULL);
    g_object_set (sink->m_pEncoder, "tune", 4, NULL);
    g_object_set (sink->m_pSink, "sync", TRUE, NULL);
    // Match the already-running pipeline's state so data starts flowing.
    gst_element_sync_state_with_parent (sink->m_pQueue);
    gst_element_sync_state_with_parent (sink->m_pConvert);
    gst_element_sync_state_with_parent (sink->m_pEncoder);
    gst_element_sync_state_with_parent (sink->m_pParser);
    gst_element_sync_state_with_parent (sink->m_pSink);
    sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink");
    gst_pad_link (sink->m_pTeePad, sinkpad);
    gst_object_unref (sinkpad);
    g_print ("added\n");
    m_DataGST.m_pSink = sink;
#else
    CustomDataSink *sink = g_new0 (CustomDataSink, 1);
    GstPad *sinkpad;
    GstPadTemplate *templ;
    templ = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (m_DataGST.tee), "src_%u");
    g_print ("add\n");
    sink->m_pTeePad = gst_element_request_pad (m_DataGST.tee, templ, NULL, NULL);
    // sink comes from g_new0, so these NULL checks always pass on first use;
    // kept for safety should a pre-populated CustomDataSink ever be reused.
    if(sink->m_pQueue==NULL)
    {
        sink->m_pQueue = gst_element_factory_make ("queue", NULL);
    }
    if(sink->m_pConvert==NULL)
    {
        sink->m_pConvert = gst_element_factory_make ("videoconvert", NULL);
    }
    sink->m_pSink = gst_element_factory_make ("filesink", NULL);
    // NOTE(review): this nested __x86_64 test can never be true inside the
    // outer #else branch; the x264enc arm is dead and kept only for history.
#ifdef __x86_64
    sink->m_pEncoder = gst_element_factory_make("x264enc", NULL);
#else
    if(sink->m_pEncoder==NULL)
    {
        sink->m_pEncoder = gst_element_factory_make("nvv4l2h265enc", NULL);
        usleep(100*1000);
    }
#endif
    if(sink->m_pParser==NULL)
    {
        sink->m_pParser = gst_element_factory_make("h265parse", NULL);
    }
    if(sink->m_pMuxer==NULL)
    {
        sink->m_pMuxer = gst_element_factory_make("matroskamux", NULL);
    }
    if(sink->m_pCaps==NULL)
    {
        sink->m_pCaps = gst_element_factory_make("capsfilter", "caps");
    }
    sink->removing = FALSE;
    // Extra refs: see the note in the x86 branch — SaveEnd() owns the cleanup.
    gst_bin_add_many (GST_BIN (m_DataGST.pipeline), (GstElement*)gst_object_ref (sink->m_pQueue), gst_object_ref (sink->m_pConvert),
                      gst_object_ref (sink->m_pEncoder), gst_object_ref (sink->m_pCaps), gst_object_ref (sink->m_pParser), gst_object_ref (sink->m_pMuxer), gst_object_ref (sink->m_pSink), NULL);
    gboolean link_ok;
    link_ok = gst_element_link_many (sink->m_pQueue, sink->m_pConvert, sink->m_pEncoder, sink->m_pCaps, sink->m_pParser, sink->m_pMuxer, sink->m_pSink, NULL);
    if (!link_ok)
    {
        g_print ("SaveFile: linking the recording branch failed\n");
    }
    GstCaps *caps;
#ifdef __x86_64
    caps = gst_caps_new_simple ("video/x-raw",
                                "format", G_TYPE_STRING, "I420",
                                "framerate", GST_TYPE_FRACTION, 60, 1,
                                NULL);
#else
    caps = gst_caps_new_simple ("video/x-h265",
                                "stream-format", G_TYPE_STRING, "byte-stream",
                                NULL);
#endif
    g_object_set(G_OBJECT(sink->m_pCaps), "caps", caps, NULL);
    gst_caps_unref (caps);  // capsfilter keeps its own reference; ours would leak
    g_object_set (sink->m_pSink, "location", strFilename.toStdString().c_str(), NULL);
#ifdef __x86_64
    // BUGFIX: g_object_set() varargs must be NULL-terminated; the original
    // call omitted the terminator here.
    g_object_set (sink->m_pEncoder, "bitrate", 10000, NULL);
#else
    g_object_set (sink->m_pEncoder, "bitrate", 10000000, NULL);
    g_object_set (sink->m_pEncoder, "peak-bitrate", 16000000, NULL);
    g_object_set (sink->m_pEncoder, "control-rate", 0, NULL);
    g_object_set (sink->m_pEncoder, "bit-packetization", true, NULL);
    g_object_set (sink->m_pEncoder, "EnableTwopassCBR", true, NULL);
    g_object_set (sink->m_pEncoder, "insert-aud", true, NULL);
    g_object_set (sink->m_pEncoder, "iframeinterval", 10, NULL);
    g_object_set (sink->m_pEncoder, "maxperf-enable", true, NULL);
    g_object_set (sink->m_pEncoder, "profile", 1, NULL);
    g_object_set (sink->m_pEncoder, "vbv-size", 10000000, NULL);
    g_object_set (sink->m_pEncoder, "preset-level", 1, NULL);
    g_object_set (sink->m_pEncoder, "insert-sps-pps", true, NULL);
#endif
    g_object_set (sink->m_pSink, "sync", TRUE, NULL);
    // Match the already-running pipeline's state so data starts flowing.
    gst_element_sync_state_with_parent (sink->m_pQueue);
    gst_element_sync_state_with_parent (sink->m_pConvert);
    gst_element_sync_state_with_parent (sink->m_pEncoder);
    gst_element_sync_state_with_parent (sink->m_pCaps);
    gst_element_sync_state_with_parent (sink->m_pParser);
    gst_element_sync_state_with_parent (sink->m_pMuxer);
    gst_element_sync_state_with_parent (sink->m_pSink);
    sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink");
    gst_pad_link (sink->m_pTeePad, sinkpad);
    gst_object_unref (sinkpad);
    g_print ("added\n");
    m_DataGST.m_pSink = sink;
#endif
}
// Tears down the recording branch created by SaveFile(): sends EOS into the
// branch so the encoder/muxer flush and the output file is finalized, unlinks
// the branch from the tee, removes its elements from the pipeline, drops them
// to NULL state, and releases the requested tee pad.
void QGstreamerPlayer::SaveEnd()
{
    CustomDataSink* sink =m_DataGST.m_pSink;
    if(sink==NULL)
    {
        // No recording branch is attached.
        return;
    }
    GST_CustomData* pData = (GST_CustomData*)&m_DataGST;
    GstPad *sinkpad;
    sink->removing = TRUE;
    sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink");
    GstElement* pPipeLine = m_DataGST.pipeline;
    /*
    GstStateChangeReturn ret = gst_element_set_state (pPipeLine, GST_STATE_PAUSED);
    ret = gst_element_set_state (pPipeLine, GST_STATE_READY);
    ret = gst_element_set_state (pPipeLine, GST_STATE_PLAYING);
    return;
    */
    // EOS is sent into the branch's queue pad (not the whole pipeline) so only
    // the recording branch finalizes; main playback keeps running.
    gst_pad_send_event (sinkpad, gst_event_new_eos ());
    //gst_element_send_event (pPipeLine, gst_event_new_eos ());
    gst_pad_unlink (sink->m_pTeePad, sinkpad);
    gst_object_unref (sinkpad);
    //return;
    // NOTE(review): fixed 100 ms wait assumes the EOS has reached the filesink
    // by then; waiting for the EOS message on the bus would be deterministic.
    usleep(100*1000);
#ifdef __x86_64
    // Remove the branch elements from the bin, stop them, and release the
    // extra references taken by SaveFile().
    //return;
    //gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pRate);
    //gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pCaps);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pConvert);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pSink);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pEncoder);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pParser);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueue);
    gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_NULL);
    //gst_element_set_state (pData->m_pSink->m_pRate, GST_STATE_NULL);
    //gst_element_set_state (pData->m_pSink->m_pCaps, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL);
    gst_object_unref (pData->m_pSink->m_pQueue);
    //gst_object_unref (pData->m_pSink->m_pRate);
    //gst_object_unref (pData->m_pSink->m_pCaps);
    gst_object_unref (pData->m_pSink->m_pConvert);
    gst_object_unref (pData->m_pSink->m_pSink);
    gst_object_unref (pData->m_pSink->m_pEncoder);
    gst_object_unref (pData->m_pSink->m_pParser);
#else
    //gst_pad_send_event (sinkpad, gst_event_new_eos ());
    //gst_element_send_event (pPipeLine, gst_event_new_eos ());
    //gst_pad_unlink (sink->m_pTeePad, sinkpad);
    //gst_object_unref (sinkpad);
    //return;
    //usleep(1000*1000);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueue);
    //gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pRate);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pMuxer);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pCaps);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pConvert);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pSink);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pEncoder);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pParser);
    gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_NULL);
    //gst_element_set_state (pData->m_pSink->m_pRate, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pCaps, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL);
    // NOTE(review): on this platform only m_pSink is unref'd; the remaining
    // branch elements keep the extra ref taken in SaveFile() (elements get
    // re-used via the NULL checks there, but this looks leak-prone — verify).
    //gst_object_unref (pData->m_pSink->m_pQueue);
    //gst_object_unref (pData->m_pSink->m_pRate);
    //gst_object_unref (pData->m_pSink->m_pCaps);
    //gst_object_unref (pData->m_pSink->m_pMuxer);
    //gst_object_unref (pData->m_pSink->m_pConvert);
    gst_object_unref (pData->m_pSink->m_pSink);
    //gst_object_unref (pData->m_pSink->m_pEncoder);
    //gst_object_unref (pData->m_pSink->m_pParser);
#endif
    gst_element_release_request_pad (pData->tee, pData->m_pSink->m_pTeePad);
    gst_object_unref (pData->m_pSink->m_pTeePad);
    pData->m_pSink = NULL;
    usleep(100*1000);
}
// Requests a single-frame capture by raising a flag; the flag is presumably
// consumed by code outside this view — verify against the frame callbacks.
void QGstreamerPlayer::CaptureImage()
{
    m_bCaptureImage = true;
}