// SVG5/qgstreamerplayer2.cpp
#include "qgstreamerplayer2.h"
#include "qrtspthread.h"
#include "qvideowidget.h"
#include "mainwindow.h"

QGstreamerPlayer2::QGstreamerPlayer2(QObject *parent) : QGstreamerPlayer(parent)
{
}

QGstreamerPlayer2::~QGstreamerPlayer2()
{
}
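
/* OpenCamera: build a v4l2src capture pipeline
 *   v4l2src ! YUY2 1920x1080@60 caps ! queue ! tee ! queue ! appsink
 * with gst_parse_launch(), hook appsink callbacks for frame delivery, set the
 * pipeline to PAUSED and run a blocking bus loop until an error, EOS, or a
 * terminate request stops capture. Returns 0 on normal termination, -1 on EOS
 * or a pipeline parse error, -2 if elements are missing, -3 if the state
 * change fails. strVideoDevice is stored in m_DataGST, but the device path in
 * the launch string is currently hard-coded to /dev/video0. */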
int QGstreamerPlayer2::OpenCamera(QString strVideoDevice)
{
    m_bWorkingRTSP = true;
    m_nFrameWidth = 0;
    m_nFrameHeight = 0;
    m_DataGST.m_strFilename = strVideoDevice;
    pid_t pid_current = getpid();
    GstBus *bus;
    GstStateChangeReturn ret;
    GstMessage *msg;
    GError *error = NULL;
    gboolean bTrapEnabled = FALSE;
    GstPad *queue_app_pad;
    GstCaps *video_caps;
    m_bTerminate = false;
    /* init */
    //int argc = 0;
    //gst_init (&argc, NULL);
    //gst_segtrap_set_enabled(FALSE);
    bTrapEnabled = gst_segtrap_is_enabled();
    QString strTest = QString("v4l2src device=/dev/video0 name=test_src ! video/x-raw,format=(string)YUY2,width=1920,height=1080,framerate=(fraction)60/1 ! queue name=queue_src ! tee name=teename ! queue name=queue_sink ! appsink name=test_sink");
#ifdef _GSTREAMER_1_0
#ifdef __x86_64
    m_DataGST.pipeline = gst_parse_launch(strTest.toStdString().c_str(), &error);
#else
#endif
#else
#endif
    if(error!=NULL)
    {
        g_printerr ("Cannot create pipeline: %s\n", error->message);
        g_clear_error (&error);
        return -1;
    }
    GstElement* pVideoRate = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_videorate");
    GstElement* pTimeOverlay = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_timeoverlay");
    GstElement* pQueue1 = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_name1");
    GstElement* pQueue2 = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_name2");
    m_DataGST.source = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_src");
    m_DataGST.sink = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "test_sink");
    m_DataGST.queue_src = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_src");
    m_DataGST.queue_sink = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "queue_sink");
    m_DataGST.tee = gst_bin_get_by_name(GST_BIN(m_DataGST.pipeline), "teename");
    m_DataGST.pWidget = m_pVideoWidget;
    m_DataGST.pPlayer = this;
    m_DataGST.m_bShow = true;
    g_signal_connect (m_DataGST.source, "pad-added", G_CALLBACK (QGstreamerPlayer::pad_added_handler), &m_DataGST);
    GstPad* pad = gst_element_get_static_pad (m_DataGST.sink, "sink");
    if (!m_DataGST.pipeline || !m_DataGST.source || !m_DataGST.queue_src || !m_DataGST.sink) {
        g_printerr ("Not all elements could be created.\n");
        return -2;
    }
    if(m_DataGST.queue_src!=NULL)
    {
        g_object_set (m_DataGST.queue_src, "max-size-buffers", 10, NULL);
    }
    if(m_DataGST.source!=NULL)
    {
        //g_object_set (m_DataGST.source, "device", strVideoDevice.toStdString().c_str(), NULL);
    }
    GstAppSinkCallbacks* appsink_callbacks = NULL;
    if(m_DataGST.sink!=NULL)
    {
        /* Zero-initialize the callback struct so any callback slots we do not
           fill in stay NULL instead of pointing at uninitialized memory. */
        appsink_callbacks = (GstAppSinkCallbacks*)calloc(1, sizeof(GstAppSinkCallbacks));
        appsink_callbacks->eos = app_sink_eos;
        appsink_callbacks->new_preroll = app_sink_new_preroll;
#ifdef _GSTREAMER_1_0
        appsink_callbacks->new_sample = app_sink_new_sample;
#else
        appsink_callbacks->new_buffer = app_sink_new_sample;
#endif
        gst_app_sink_set_callbacks(GST_APP_SINK(m_DataGST.sink), appsink_callbacks, (gpointer)&m_DataGST, NULL);
        g_object_set (m_DataGST.sink, "sync", FALSE, NULL);
        gst_app_sink_set_drop(GST_APP_SINK(m_DataGST.sink), TRUE);
    }
    //ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING);
    ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the paused state.\n");
        gst_object_unref (m_DataGST.pipeline);
        return -3;
    }
    bus = gst_element_get_bus (m_DataGST.pipeline);
    m_bPipeStart = true;
    bool bSignalOut = false;
    gint64 tEnd = 0;
    do {
        msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
            (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_STEP_DONE | GST_MESSAGE_STEP_START | GST_MESSAGE_ASYNC_DONE));
        if(tEnd==0)
        {
            gst_element_query_duration (m_DataGST.pipeline, GST_FORMAT_TIME, &tEnd);
            if(tEnd!=0)
            {
                int nHour = 0;
                int nMinute = 0;
                int nSec = 0;
                int nMilliSec = tEnd/(1000*1000);   // nanoseconds -> milliseconds
                nSec = nMilliSec/1000;
                nMinute = nSec/60;
                nHour = nMinute/60;
                nSec = nSec%60;
                nMinute = nMinute%60;
                nMilliSec = nMilliSec%1000;
                qDebug() << "Duration: " << nHour << ":" << nMinute << ":" << nSec << ":" << nMilliSec;
                GstState state;
                GstState pending;
                gst_element_get_state(m_DataGST.pipeline, &state, &pending, GST_CLOCK_TIME_NONE);
            }
        }
        /* Parse message */
        if (msg != NULL) {
            GError *err;
            gchar *debug_info;
            switch (GST_MESSAGE_TYPE (msg)) {
            case GST_MESSAGE_ERROR:
                gst_message_parse_error (msg, &err, &debug_info);
                g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
                g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
                g_clear_error (&err);
                g_free (debug_info);
                m_bTerminate = true;
                break;
            case GST_MESSAGE_EOS:
                g_print ("End-Of-Stream reached.\n");
                {
                    //gst_message_unref (msg);
                    bSignalOut = true;
                    m_bTerminate = true;
                }
                break;
            case GST_MESSAGE_STATE_CHANGED:
            {
                /* We are only interested in state-changed messages from the pipeline */
                GstState old_state, new_state, pending_state;
                gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
                if (GST_MESSAGE_SRC (msg) == GST_OBJECT (m_DataGST.pipeline)) {
                    g_print ("Pipeline state changed from %s to %s:\n",
                        gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
                    if(new_state==GST_STATE_PAUSED && old_state==GST_STATE_PLAYING)
                    {
                        //m_bTerminate = true;
                    }
                }
                break;
            }
            case GST_MESSAGE_STEP_DONE:
            {
                GstFormat format;
                guint64 amount;
                gdouble rate;
                gboolean flush, intermediate;
                guint64 duration;
                gboolean eos;
                gst_message_parse_step_done (msg, &format, &amount, &rate,
                    &flush, &intermediate, &duration, &eos);
                app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST);
            }
                break;
            case GST_MESSAGE_STEP_START:
            {
                break;
            }
            case GST_MESSAGE_ASYNC_DONE:
            {
                qDebug() << "Async Done";
                //app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST);
                m_bSeeking = false;
                break;
            }
            default:
                /* We should not reach here */
                g_printerr ("Unexpected message received.\n");
                break;
            }
            gst_message_unref (msg);
            if(m_DataGST.terminate==true)
            {
                m_bTerminate = true;
            }
        }
    } while (m_bTerminate==false);
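
    /* Capture finished: tear the pipeline down. Elements obtained with
       gst_bin_get_by_name() carry an extra reference, so each one is removed
       from the bin, set to NULL and unreffed individually before the pipeline
       itself is released. */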
    if(pVideoRate!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), pVideoRate);
        gst_element_set_state (pVideoRate, GST_STATE_NULL);
        gst_object_unref (pVideoRate);
    }
    if(pTimeOverlay!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), pTimeOverlay);
        gst_element_set_state (pTimeOverlay, GST_STATE_NULL);
        gst_object_unref (pTimeOverlay);
    }
    if(pQueue1!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), pQueue1);
        gst_element_set_state (pQueue1, GST_STATE_NULL);
        gst_object_unref (pQueue1);
    }
    if(pQueue2!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), pQueue2);
        gst_element_set_state (pQueue2, GST_STATE_NULL);
        gst_object_unref (pQueue2);
    }
    if(m_DataGST.source!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.source);
        gst_element_set_state (m_DataGST.source, GST_STATE_NULL);
        gst_object_unref (m_DataGST.source);
    }
    if(m_DataGST.tee!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.tee);
        gst_element_set_state (m_DataGST.tee, GST_STATE_NULL);
        gst_object_unref (m_DataGST.tee);
    }
    if(m_DataGST.queue_src!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.queue_src);
        gst_element_set_state (m_DataGST.queue_src, GST_STATE_NULL);
        gst_object_unref (m_DataGST.queue_src);
    }
    if(m_DataGST.queue_sink!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.queue_sink);
        gst_element_set_state (m_DataGST.queue_sink, GST_STATE_NULL);
        gst_object_unref (m_DataGST.queue_sink);
    }
    if(m_DataGST.sink!=NULL)
    {
        gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.sink);
        gst_element_set_state (m_DataGST.sink, GST_STATE_NULL);
        gst_object_unref (m_DataGST.sink);
    }
    if(m_DataGST.pipeline!=NULL)
    {
        gst_element_set_state (m_DataGST.pipeline, GST_STATE_NULL);
        gst_object_unref (m_DataGST.pipeline);
    }
    gst_object_unref (bus);
    free(appsink_callbacks);
    m_bPipeStart = false;
    m_bTerminate = false;
    m_DataGST.terminate = false;
    //gst_deinit();
    m_bWorkingRTSP = false;
    ExitCapture();
    if(bSignalOut==true)
    {
        return -1;
    }
    return 0;
}
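
/* SaveFile: start recording by attaching a branch
 *   queue ! videoconvert ! x264enc ! matroskamux ! filesink
 * to the running pipeline through a request pad on the tee, then bring the
 * new elements up to the pipeline's state. Only built on x86_64. */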
void QGstreamerPlayer2::SaveFile(QString strFilename)
{
#ifdef __x86_64
    CustomDataSink *sink = NULL;
    GstPad *sinkpad;
    GstPadTemplate *templ;
    templ = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (m_DataGST.tee), "src_%u");
    g_print ("add\n");
    if(m_DataGST.m_pSink==NULL)
    {
        sink = g_new0 (CustomDataSink, 1);
    }
    else
    {
        sink = m_DataGST.m_pSink;
    }
    sink->m_pTeePad = gst_element_request_pad (m_DataGST.tee, templ, NULL, NULL);
    if(sink->m_pQueue==NULL)
    {
        sink->m_pQueue = gst_element_factory_make ("queue", NULL);
    }
    if(sink->m_pConvert==NULL)
    {
        sink->m_pConvert = gst_element_factory_make ("videoconvert", NULL);
    }
    if(sink->m_pEncoder==NULL)
    {
        sink->m_pEncoder = gst_element_factory_make ("x264enc", NULL);
    }
    if(sink->m_pParser==NULL)
    {
        sink->m_pParser = gst_element_factory_make ("matroskamux", NULL);
    }
    /* A fresh filesink is always created for the new recording. */
    sink->m_pSink = gst_element_factory_make ("filesink", NULL);
    sink->removing = FALSE;
    gst_bin_add_many (GST_BIN (m_DataGST.pipeline), (GstElement*)gst_object_ref (sink->m_pQueue),
        gst_object_ref (sink->m_pConvert),
        gst_object_ref (sink->m_pEncoder), gst_object_ref (sink->m_pParser), //gst_object_ref (pQueue2),
        gst_object_ref (sink->m_pSink), NULL);
    gboolean link_ok = gst_element_link_many (sink->m_pQueue, sink->m_pConvert, sink->m_pEncoder, sink->m_pParser, sink->m_pSink, NULL);
    if(link_ok==FALSE)
    {
        g_printerr ("Failed to link the recording branch.\n");
    }
    g_object_set (sink->m_pSink, "location", strFilename.toStdString().c_str(), NULL);
    g_object_set (sink->m_pEncoder, "bitrate", 10240, NULL);        // kbit/s
    g_object_set (sink->m_pEncoder, "byte-stream", true, NULL);
    g_object_set (sink->m_pEncoder, "speed-preset", 1, NULL);       // 1 = ultrafast
    g_object_set (sink->m_pEncoder, "tune", 4, NULL);               // 4 = zerolatency
    g_object_set (sink->m_pSink, "sync", TRUE, NULL);
    gst_element_sync_state_with_parent (sink->m_pQueue);
    gst_element_sync_state_with_parent (sink->m_pConvert);
    gst_element_sync_state_with_parent (sink->m_pEncoder);
    gst_element_sync_state_with_parent (sink->m_pParser);
    gst_element_sync_state_with_parent (sink->m_pSink);
    sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink");
    gst_pad_link (sink->m_pTeePad, sinkpad);
    gst_object_unref (sinkpad);
    g_print ("added\n");
    /* The pad template returned by gst_element_class_get_pad_template() is
       owned by the element class and must not be unreffed here. */
    m_DataGST.m_pSink = sink;
#else
#endif
}
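
/* SaveEnd: stop recording. The tee branch added in SaveFile() is unlinked,
 * an EOS event is pushed into the encoder so the muxer can finish the file,
 * and the branch elements are removed from the pipeline and released. Note
 * that the elements are torn down right after the EOS is sent, so the muxer
 * may still be finalizing the file at that point. */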
void QGstreamerPlayer2::SaveEnd()
{
    CustomDataSink* sink = m_DataGST.m_pSink;
    if(sink==NULL)
    {
        return;
    }
    GST_CustomData* pData = (GST_CustomData*)&m_DataGST;
    GstPad *sinkpad;
    sink->removing = TRUE;
    sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink");
    GstElement* pPipeLine = m_DataGST.pipeline;
    gst_pad_unlink (sink->m_pTeePad, sinkpad);
    gst_object_unref (sinkpad);
    //gst_pad_send_event (sinkpad, gst_event_new_eos ());
    gst_element_send_event(pData->m_pSink->m_pEncoder, gst_event_new_eos());
    //usleep(100*1000);
#ifdef __x86_64
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueue);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pConvert);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pEncoder);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pParser);
    gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pSink);
    gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL);
    gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL);
    gst_object_unref (pData->m_pSink->m_pQueue);
    gst_object_unref (pData->m_pSink->m_pConvert);
    gst_object_unref (pData->m_pSink->m_pEncoder);
    gst_object_unref (pData->m_pSink->m_pParser);
    gst_object_unref (pData->m_pSink->m_pSink);
#else
#endif
    /* gst_element_release_request_pad() already removes the pad from the tee,
       so no explicit gst_element_remove_pad() is needed; only the reference
       returned by gst_element_request_pad() has to be dropped. */
    gst_element_release_request_pad (pData->tee, pData->m_pSink->m_pTeePad);
    gst_object_unref (pData->m_pSink->m_pTeePad);
    g_free(pData->m_pSink);
    pData->m_pSink = NULL;
    usleep(10*1000);
}