#include "qgstreamerplayer.h" #include "qrtspthread.h" #include "qvideowidget.h" #include "mainwindow.h" #include #include #include #include #include #include #include #include static void errno_exit(const char *s) { fprintf(stderr, "%s error %d, %s\n",s, errno, strerror(errno)); exit(EXIT_FAILURE); } static int xioctl(int fd,int request,void *arg) { int r; do{ r = ioctl(fd, request, arg); } while(-1 == r && EINTR == errno); return r; } //#define __720P bool g_bSend = false; QGstreamerPlayer::QGstreamerPlayer(QObject *parent) : QObject(parent) { m_bCaptureImage = false; m_bWorkingRTSP = false; m_bSeeking = false; m_nFrameWidth = 0; m_nFrameHeight = 0; m_DataGST.m_bShow = false; m_DataGST.terminate = false; m_nCaptureMode = 2; //Running background capture m_bPipeStart = false; //int argc = 0; //gst_init (&argc, NULL); //gst_segtrap_set_enabled(FALSE); MainWindow* pMainWindow = MainWindow::GetMainWindow(); connect(this, SIGNAL(ExitCapture()), pMainWindow, SLOT(ExitCapture())); connect(this, SIGNAL(ErrorCapture()), pMainWindow, SLOT(ErrorCapture())); } QGstreamerPlayer::~QGstreamerPlayer() { MainWindow* pMainWindow = MainWindow::GetMainWindow(); disconnect(this, SIGNAL(ExitCapture()), pMainWindow, SLOT(ExitCapture())); disconnect(this, SIGNAL(ErrorCapture()), pMainWindow, SLOT(ErrorCapture())); gst_deinit(); } bool QGstreamerPlayer::IsPipeStart() { return m_bPipeStart; } void QGstreamerPlayer::OpenMovieFile(QString strFilename) { } bool QGstreamerPlayer::IsWorkingRTSP() { return m_bWorkingRTSP; } bool QGstreamerPlayer::IsVideoLink() { return m_DataGST.m_bVideoLink; } void QGstreamerPlayer::CreatePipeLineJetson() { //m_DataGST.queue_src = NULL; //m_DataGST.queue_sink = NULL; m_DataGST.vidconv1 = NULL; m_DataGST.vidconv2 = NULL; m_DataGST.source = gst_element_factory_make ("nvv4l2camerasrc", NULL); //m_DataGST.source = gst_element_factory_make ("v4l2src", NULL); m_DataGST.sink = gst_element_factory_make ("appsink", NULL); if(m_DataGST.queue_src==NULL) { m_DataGST.queue_src = gst_element_factory_make ("queue", NULL); } if(m_DataGST.queue_sink==NULL) { m_DataGST.queue_sink = gst_element_factory_make ("queue", NULL); } m_DataGST.vidconv1 = gst_element_factory_make ("nvvidconv", NULL); //m_DataGST.vidconv2 = gst_element_factory_make ("nvvidconv", NULL); m_DataGST.caps_videosrc = gst_element_factory_make("capsfilter", "caps_videosrc"); m_DataGST.caps_vidconv1 = gst_element_factory_make("capsfilter", "caps_vidconv1"); m_DataGST.videoBalance = NULL; m_DataGST.tee = gst_element_factory_make ("tee", NULL); //GstElement* pQueue2 = gst_element_factory_make ("queue", NULL); m_DataGST.m_pSink = g_new0 (CustomDataSink, 1); if(m_DataGST.sink!=NULL) { m_DataGST.appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks)); m_DataGST.appsink_callbacks->eos = app_sink_eos; m_DataGST.appsink_callbacks->new_preroll = app_sink_new_preroll; #ifdef _GSTREAMER_1_0 m_DataGST.appsink_callbacks->new_sample = app_sink_new_sample; #else m_DataGST.appsink_callbacks->new_buffer = app_sink_new_sample; #endif //test gst_app_sink_set_callbacks(GST_APP_SINK(m_DataGST.sink), m_DataGST.appsink_callbacks, (gpointer)&m_DataGST, NULL); g_object_set (m_DataGST.sink, "sync", FALSE, NULL); gst_app_sink_set_drop(GST_APP_SINK(m_DataGST.sink), TRUE); } GstCaps *cap_source; GstCaps *cap_vidconv1; #ifdef _4K cap_source = gst_caps_from_string("video/x-raw(memory:NVMM),format=(string)UYVY,width=3840,height=2160,framerate=(fraction)60/1"); cap_vidconv1 = 
gst_caps_from_string("video/x-raw,format=(string)UYVY,width=3840,height=2160,framerate=(fraction)60/1"); #else cap_source = gst_caps_from_string("video/x-raw(memory:NVMM),format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1"); cap_vidconv1 = gst_caps_from_string("video/x-raw,format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1"); #endif g_object_set(G_OBJECT(m_DataGST.caps_videosrc), "caps", cap_source, NULL); g_object_set(G_OBJECT(m_DataGST.caps_vidconv1), "caps", cap_vidconv1, NULL); } void QGstreamerPlayer::CreatePipeLinePC() { m_DataGST.vidconv1 = NULL; m_DataGST.vidconv2 = NULL; //m_DataGST.source = gst_element_factory_make ("v4l2src", NULL); m_DataGST.source = gst_element_factory_make ("videotestsrc", NULL); m_DataGST.sink = gst_element_factory_make ("appsink", NULL); if(m_DataGST.queue_src==NULL) { m_DataGST.queue_src = gst_element_factory_make ("queue", NULL); } if(m_DataGST.queue_sink==NULL) { m_DataGST.queue_sink = gst_element_factory_make ("queue", NULL); } m_DataGST.vidconv1 = gst_element_factory_make ("videoconvert", NULL); m_DataGST.vidrate1 = gst_element_factory_make ("videorate", NULL); m_DataGST.vidscale1 = gst_element_factory_make ("videoscale", NULL); m_DataGST.vidClockDisplay = gst_element_factory_make ("clockoverlay", NULL); m_DataGST.caps_videosrc = gst_element_factory_make("capsfilter", "caps_videosrc"); m_DataGST.caps_vidconv1 = gst_element_factory_make("capsfilter", "caps_vidconv1"); m_DataGST.caps_vidrate1 = gst_element_factory_make("capsfilter", "caps_vidrate1"); m_DataGST.caps_vidscale1 = gst_element_factory_make("capsfilter", "caps_vidscale1"); m_DataGST.videoBalance = NULL; m_DataGST.tee = gst_element_factory_make ("tee", NULL); m_DataGST.m_pSink = g_new0 (CustomDataSink, 1); if(m_DataGST.sink!=NULL) { m_DataGST.appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks)); m_DataGST.appsink_callbacks->eos = app_sink_eos; m_DataGST.appsink_callbacks->new_preroll = app_sink_new_preroll; #ifdef _GSTREAMER_1_0 m_DataGST.appsink_callbacks->new_sample = app_sink_new_sample; #else m_DataGST.appsink_callbacks->new_buffer = app_sink_new_sample; #endif //test gst_app_sink_set_callbacks(GST_APP_SINK(m_DataGST.sink), m_DataGST.appsink_callbacks, (gpointer)&m_DataGST, NULL); g_object_set (m_DataGST.sink, "sync", FALSE, NULL); gst_app_sink_set_drop(GST_APP_SINK(m_DataGST.sink), TRUE); } GstCaps *cap_source; cap_source = gst_caps_from_string("video/x-raw,format=(string)YUY2,width=1280,height=720,framerate=(fraction)10/1"); g_object_set(G_OBJECT(m_DataGST.caps_videosrc), "caps", cap_source, NULL); GstCaps *cap_vidscale1; cap_vidscale1 = gst_caps_from_string("video/x-raw,width=1920,height=1080"); g_object_set(G_OBJECT(m_DataGST.caps_vidscale1), "caps", cap_vidscale1, NULL); GstCaps *cap_vidrate1; cap_vidrate1 = gst_caps_from_string("video/x-raw,framerate=(fraction)60/1"); g_object_set(G_OBJECT(m_DataGST.caps_vidrate1), "caps", cap_vidrate1, NULL); GstCaps *cap_vidconv1; cap_vidconv1 = gst_caps_from_string("video/x-raw,format=(string)UYVY,width=1920,height=1080,framerate=(fraction)60/1"); g_object_set(G_OBJECT(m_DataGST.caps_vidconv1), "caps", cap_vidconv1, NULL); } int QGstreamerPlayer::OpenCamera4(QString strParameter) { CommonData* pCommonData = MainWindow::GetCommonData(); QRTSPThread* pRTSPThread = NULL; m_bError = false; m_DataGST.m_TimerStart.restart(); m_DataGST.m_bVideoLink = false; while(pRTSPThread==NULL) { usleep(1000); pRTSPThread = pCommonData->GetThreadRTSP(); } int nCaptureIndex = pRTSPThread->GetCaptureIndex(); 
QString strSource = QString("test_src%1").arg(QString::number(nCaptureIndex)); QString strPipeLine = QString("jetson video%1").arg(QString::number(nCaptureIndex)); pid_t pid_current = getpid(); GstStateChangeReturn ret; GstMessage *msg; gboolean bTrapEnabled = FALSE; GstPad *queue_app_pad; GstCaps *video_caps; if(gst_is_initialized()==false) { int argc = 0; gst_init (&argc, NULL); gst_segtrap_set_enabled(FALSE); bTrapEnabled = gst_segtrap_is_enabled(); } m_DataGST.pipeline = gst_pipeline_new(strPipeLine.toStdString().c_str()); m_DataGST.m_bEncoding = false; #ifdef _PC CreatePipeLinePC(); gst_bin_add_many (GST_BIN (m_DataGST.pipeline), (GstElement*)gst_object_ref (m_DataGST.source), gst_object_ref (m_DataGST.caps_videosrc), gst_object_ref (m_DataGST.queue_src), gst_object_ref (m_DataGST.vidClockDisplay), gst_object_ref (m_DataGST.vidrate1), gst_object_ref (m_DataGST.caps_vidrate1), gst_object_ref (m_DataGST.vidscale1), gst_object_ref (m_DataGST.caps_vidscale1), gst_object_ref (m_DataGST.tee), gst_object_ref (m_DataGST.vidconv1), gst_object_ref (m_DataGST.caps_vidconv1), gst_object_ref (m_DataGST.queue_sink), gst_object_ref (m_DataGST.sink), NULL); #else CreatePipeLineJetson(); gst_bin_add_many (GST_BIN (m_DataGST.pipeline), (GstElement*)gst_object_ref (m_DataGST.source), gst_object_ref (m_DataGST.caps_videosrc), gst_object_ref (m_DataGST.tee), gst_object_ref (m_DataGST.queue_src), gst_object_ref (m_DataGST.vidconv1), gst_object_ref (m_DataGST.caps_vidconv1), gst_object_ref (m_DataGST.queue_sink), gst_object_ref (m_DataGST.sink), NULL); #endif g_object_set (m_DataGST.sink, "sync", FALSE, NULL); gst_base_sink_set_last_sample_enabled(GST_BASE_SINK(m_DataGST.sink), true); g_signal_connect (m_DataGST.source, "pad-added", G_CALLBACK (QGstreamerPlayer::pad_added_handler), &m_DataGST); GstPad* pad = gst_element_get_static_pad (m_DataGST.sink, "sink"); //if (!m_DataGST.pipeline || !m_DataGST.source || !m_DataGST.queue_src || !m_DataGST.sink) { if (!m_DataGST.pipeline || !m_DataGST.source || !m_DataGST.sink) { qDebug() << strParameter << endl; g_printerr ("Not all elements could be created.\n"); m_bWorkingRTSP = false; m_bError = true; ErrorCapture(); return -2; } m_pError = NULL; m_bWorkingRTSP = true; m_nFrameWidth = 0; m_nFrameHeight = 0; m_DataGST.m_strFilename = strParameter; m_DataGST.m_bError = false; m_DataGST.terminate = false; m_bTerminate = false; m_DataGST.pWidget = m_pVideoWidget; m_DataGST.pPlayer = this; QSize nSize = m_pVideoWidget->size(); #ifdef _PC if(m_DataGST.pipeline!=NULL) { gst_element_set_state (m_DataGST.source, GST_STATE_NULL); gst_element_set_state (m_DataGST.caps_videosrc, GST_STATE_NULL); gst_element_set_state (m_DataGST.queue_src, GST_STATE_NULL); gst_element_set_state (m_DataGST.vidClockDisplay, GST_STATE_NULL); gst_element_set_state (m_DataGST.vidscale1, GST_STATE_NULL); gst_element_set_state (m_DataGST.caps_vidscale1, GST_STATE_NULL); gst_element_set_state (m_DataGST.vidrate1, GST_STATE_NULL); gst_element_set_state (m_DataGST.caps_vidrate1, GST_STATE_NULL); gst_element_set_state (m_DataGST.vidconv1, GST_STATE_NULL); gst_element_set_state (m_DataGST.caps_vidconv1, GST_STATE_NULL); gst_element_set_state (m_DataGST.queue_sink, GST_STATE_NULL); gst_element_set_state (m_DataGST.sink, GST_STATE_NULL); } if( gst_element_link_many (m_DataGST.source, m_DataGST.caps_videosrc, m_DataGST.queue_src, m_DataGST.vidClockDisplay, m_DataGST.vidscale1, m_DataGST.caps_vidscale1, m_DataGST.vidrate1, m_DataGST.caps_vidrate1, m_DataGST.tee, m_DataGST.vidconv1, m_DataGST.caps_vidconv1, 
m_DataGST.queue_sink, m_DataGST.sink, NULL) != TRUE) { int a=0; } #else if(m_DataGST.pipeline!=NULL) { gst_element_set_state (m_DataGST.source, GST_STATE_NULL); gst_element_set_state (m_DataGST.caps_videosrc, GST_STATE_NULL); gst_element_set_state (m_DataGST.queue_src, GST_STATE_NULL); gst_element_set_state (m_DataGST.vidconv1, GST_STATE_NULL); gst_element_set_state (m_DataGST.caps_vidconv1, GST_STATE_NULL); gst_element_set_state (m_DataGST.queue_sink, GST_STATE_NULL); gst_element_set_state (m_DataGST.sink, GST_STATE_NULL); } if( gst_element_link_many (m_DataGST.source, m_DataGST.caps_videosrc, m_DataGST.tee, m_DataGST.queue_src, m_DataGST.vidconv1, m_DataGST.caps_vidconv1, m_DataGST.queue_sink, m_DataGST.sink, NULL) != TRUE) { int a=0; } #endif m_DataGST.m_bShow = true; if(m_DataGST.queue_src!=NULL) { g_object_set (m_DataGST.queue_src, "max-size-buffers", 10, NULL); } if(m_DataGST.source!=NULL) { //g_object_set (m_DataGST.source, "device", strVideoDevice.toStdString().c_str(), NULL); } //queue_app_pad = gst_element_get_static_pad (m_DataGST.app_queue, "sink"); //sigint_setup(); //ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); ret = GST_STATE_CHANGE_SUCCESS; ret = gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED); if (ret == GST_STATE_CHANGE_FAILURE) { g_printerr ("Unable to set the pipeline to the playing state.\n"); gst_object_unref (m_DataGST.pipeline); m_bPipeStart = false; m_bWorkingRTSP = false; //ErrorCapture(); ExitCapture(); return -3; } m_DataGST.bus = gst_element_get_bus (m_DataGST.pipeline); //if(kk>0) { //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); } // m_bPipeStart = true; bool bSignalOut = false; m_pVideoWidget->Reset(); int64_t tEnd=0; do { //msg = gst_bus_timed_pop_filtered (m_DataGST.bus, GST_CLOCK_TIME_NONE, msg = gst_bus_timed_pop_filtered (m_DataGST.bus, 1, (GstMessageType)(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_STEP_DONE | GST_MESSAGE_STEP_START | GST_MESSAGE_ASYNC_DONE));// | GST_MESSAGE_DEVICE_REMOVED | GST_MESSAGE_ANY)); if(tEnd==0) { gst_element_query_duration (m_DataGST.pipeline, GST_FORMAT_TIME, &tEnd); if(tEnd!=0) { int nHour = 0; int nMinute = 0; int nSec = 0; int nMilliSec = tEnd/(1000*1000); nSec = nMilliSec/1000; nMinute = nSec/60; nHour = nMinute/60; nSec = nSec%60; nMinute = nMinute%60; nHour = nHour; nMilliSec = (nMilliSec/1000)%1000; qDebug() << "Duration: " << nHour << ":" << nMinute << ":" << nSec << ":" << nMilliSec ; //m_bTerminate = true; GstState state; GstState pending; gst_element_get_state(m_DataGST.pipeline, &state, &pending, GST_CLOCK_TIME_NONE); gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); } } /* Parse message */ if (msg != NULL) { GError *err; gchar *debug_info; switch (GST_MESSAGE_TYPE (msg)) { case GST_MESSAGE_ERROR: gst_message_parse_error (msg, &err, &debug_info); if(err!=NULL) { g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); } g_printerr ("Debugging information: %s\n", debug_info ? 
debug_info : "none"); g_clear_error (&err); g_free (debug_info); m_bError = true; m_DataGST.terminate = true; break; case GST_MESSAGE_EOS: g_print ("End-Of-Stream reached.\n"); { //gst_message_unref (msg); //if(m_nCaptureMode==0) { bSignalOut = true; m_DataGST.terminate = true; } } break; case GST_MESSAGE_STATE_CHANGED: /* We are only interested in state-changed messages from the pipeline */ GstState old_state, new_state, pending_state; gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state); if (GST_MESSAGE_SRC (msg) == GST_OBJECT (m_DataGST.pipeline)) { g_print ("Pipeline state changed from %s to %s:\n", gst_element_state_get_name (old_state), gst_element_state_get_name (new_state)); if(new_state==GST_STATE_PAUSED && old_state==GST_STATE_PLAYING) { //m_bTerminate = true; } } break; case GST_MESSAGE_STEP_DONE: { GstFormat format; guint64 amount; gdouble rate; gboolean flush, intermediate; guint64 duration; gboolean eos; gst_message_parse_step_done (msg, &format, &amount, &rate, &flush, &intermediate, &duration, &eos); app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST); } break; case GST_MESSAGE_STEP_START: { } break; case GST_MESSAGE_ASYNC_DONE: { //qDebug() << "Async Done"; //app_sink_new_sample((GstAppSink *)m_DataGST.sink, &m_DataGST); //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); m_bSeeking = false; usleep(5*1000); } break; default: /* We should not reach here */ g_printerr ("Unexpected message received.\n"); break; } gst_message_unref (msg); } if(m_DataGST.terminate==true) { m_bTerminate = true; } } while (m_bTerminate==false); if(m_nCaptureMode==1) { } GstMessage * pMessage = NULL; while(pMessage=gst_bus_pop(m_DataGST.bus)) { if(pMessage==NULL) { break; } } if(m_DataGST.sink!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.sink); gst_element_set_state (m_DataGST.sink, GST_STATE_NULL); gst_object_unref (m_DataGST.sink); } if(m_DataGST.queue_src!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.queue_src); //gst_element_set_state (m_DataGST.queue_src, GST_STATE_NULL); //gst_object_unref (m_DataGST.queue_src); } if(m_DataGST.queue_sink!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.queue_sink); //gst_element_set_state (m_DataGST.queue_sink, GST_STATE_NULL); //gst_object_unref (m_DataGST.queue_sink); } if(m_DataGST.caps_videosrc!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.caps_videosrc); gst_element_set_state (m_DataGST.caps_videosrc, GST_STATE_NULL); gst_object_unref (m_DataGST.caps_videosrc); } if(m_DataGST.vidconv1!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.vidconv1); gst_element_set_state (m_DataGST.vidconv1, GST_STATE_NULL); gst_object_unref (m_DataGST.vidconv1); } if(m_DataGST.caps_vidconv1!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.caps_vidconv1); gst_element_set_state (m_DataGST.caps_vidconv1, GST_STATE_NULL); gst_object_unref (m_DataGST.caps_vidconv1); } if(m_DataGST.vidconv2!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.vidconv2); gst_element_set_state (m_DataGST.vidconv2, GST_STATE_NULL); gst_object_unref (m_DataGST.vidconv2); } if(m_DataGST.source!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.source); gst_element_set_state (m_DataGST.source, GST_STATE_NULL); gst_object_unref (m_DataGST.source); } if(m_DataGST.tee!=NULL) { gst_bin_remove (GST_BIN (m_DataGST.pipeline), m_DataGST.tee); gst_element_set_state (m_DataGST.tee, GST_STATE_NULL); gst_object_unref (m_DataGST.tee); } 
if(m_DataGST.m_pSink!=NULL) { g_free(m_DataGST.m_pSink); m_DataGST.m_pSink = NULL; } if(m_DataGST.appsink_callbacks!=NULL) { free(m_DataGST.appsink_callbacks); m_DataGST.appsink_callbacks = NULL; } if(m_DataGST.pipeline!=NULL) { gst_element_set_state (m_DataGST.pipeline, GST_STATE_NULL); gst_object_unref (m_DataGST.pipeline); m_DataGST.pipeline = NULL; } if(m_DataGST.bus!=NULL) { gst_object_unref (m_DataGST.bus); m_DataGST.bus = NULL; } m_bPipeStart = false; m_bTerminate = false; m_DataGST.terminate = false; m_bWorkingRTSP = false; m_DataGST.m_bVideoLink = false; m_DataGST.m_TimerStart.restart(); qDebug() << "OpenCamera4 Exit!!!"; if(m_pVideoWidget!=NULL) { m_pVideoWidget->Reset(); if(m_pVideoWidget->Lock()==true) { m_pVideoWidget->ClearQueueData(); m_pVideoWidget->SetChangeTexture(true); m_pVideoWidget->UpdateTexture(); m_pVideoWidget->Unlock(); m_pVideoWidget->update(); } } if(bSignalOut==true) { return -1; } if(m_bError==true) { int a=0; } return 0; } void QGstreamerPlayer::app_sink_eos(GstAppSink *sink, gpointer user_data) { GST_CustomData* pData = (GST_CustomData*)user_data; pData->m_bVideoLink = false; pData->m_TimerStart.restart(); qDebug() << "Enter EOS"; if(pData->m_pSink!=NULL) { //if(pData->m_pSink->removing==TRUE) { #ifdef _PC #else pData->m_pSink->removing = TRUE; if(pData->m_pSink->m_pQueue!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueue); gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pQueue); pData->m_pSink->m_pQueue = NULL; } if(pData->m_pSink->m_pConvert!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pConvert); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pConvert); pData->m_pSink->m_pConvert = NULL; } if(pData->m_pSink->m_pQueueFileData!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueueFileData); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pQueueFileData); pData->m_pSink->m_pQueueFileData = NULL; } if(pData->m_pSink->m_pEncoder!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pEncoder); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pEncoder); pData->m_pSink->m_pEncoder = NULL; } if(pData->m_pSink->m_pParser!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pParser); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pParser); pData->m_pSink->m_pParser = NULL; } if(pData->m_pSink->m_pMuxer!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pMuxer); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pMuxer); pData->m_pSink->m_pMuxer = NULL; } if(pData->m_pSink->m_pSink!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pSink); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL); gst_object_unref 
(pData->m_pSink->m_pSink); pData->m_pSink->m_pSink = NULL; } /* gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pRate); gst_object_unref (pData->m_pSink->m_pCaps); gst_object_unref (pData->m_pSink->m_pMuxer); gst_object_unref (pData->m_pSink->m_pConvert); gst_object_unref (pData->m_pSink->m_pSink); gst_object_unref (pData->m_pSink->m_pEncoder); gst_object_unref (pData->m_pSink->m_pParser); pData->m_pSink->m_pRate = NULL; pData->m_pSink->m_pCaps = NULL; pData->m_pSink->m_pMuxer = NULL; pData->m_pSink->m_pConvert = NULL; pData->m_pSink->m_pSink = NULL; pData->m_pSink->m_pEncoder = NULL; pData->m_pSink->m_pParser = NULL; */ //sink->m_pSink = gst_element_factory_make ("filesink", NULL); //sink->m_pQueue = gst_element_factory_make ("queue", NULL); //sink->m_pQueueFileData = gst_element_factory_make ("queue", NULL); //sink->m_pConvert = gst_element_factory_make ("nvvidconv", NULL); //sink->m_pSink = gst_element_factory_make ("filesink", NULL); //sink->m_pEncoder = gst_element_factory_make("nvv4l2h265enc", NULL); //sink->m_pParser = gst_element_factory_make("h265parse", NULL); //sink->m_pMuxer = gst_element_factory_make("matroskamux", NULL); #endif if(pData->m_pSink->m_pTeePad!=NULL) { gst_element_release_request_pad (pData->tee, pData->m_pSink->m_pTeePad); gst_object_unref (pData->m_pSink->m_pTeePad); } } //else { } } } GstFlowReturn QGstreamerPlayer::app_sink_new_preroll(GstAppSink *sink, gpointer user_data) { qDebug() << "preroll" ; GST_CustomData* pd = (GST_CustomData*)user_data; GstSample* sample = NULL; int nState = pd->pThread->GetState(); if(nState>0) { //return GST_FLOW_OK; } if(g_bSend==true) { //qDebug() << "track"; //gst_element_set_state (pd->pipeline, GST_STATE_PAUSED); } //sample = gst_app_sink_pull_sample(sink); sample = gst_app_sink_pull_preroll(sink); if(sample==NULL) { qDebug() << "app_sink_new_proroll ERROR"; return GST_FLOW_OK; } QGstreamerPlayer* pPlayer = (QGstreamerPlayer*)pd->pPlayer; if(pPlayer->GetFrameWidth()==0 || pPlayer->GetFrameHeight()==0) { GstCaps* pCaps = NULL; pCaps = gst_sample_get_caps(sample); GstStructure* s = gst_caps_get_structure (pCaps, 0); gint width, height; int res = gst_structure_get_int (s, "width", &width) | gst_structure_get_int (s, "height", &height); pPlayer->SetFrameWidth(width); pPlayer->SetFrameHeight(height); pd->pWidget->SetTextureSize(width, height); } gboolean bEnabledLast = gst_base_sink_is_last_sample_enabled(GST_BASE_SINK(sink)); GstBuffer* buffer = gst_sample_get_buffer(sample); GstMemory* 
memory = gst_buffer_get_all_memory(buffer); GstMapInfo map_info; if(! gst_memory_map(memory, &map_info, GST_MAP_READ)) { gst_memory_unref(memory); gst_sample_unref(sample); usleep(10*1000); return GST_FLOW_ERROR; } guint8* pData = map_info.data; if(pd->pWidget!=NULL) { if(map_info.size>4096) { //g_pTestDT-> pd->pWidget->SetTextureData(pData, map_info.size); //m_bVideoLink = true; /* FILE* pFile = NULL; pFile = fopen("test.data", "wb"); fwrite(pData, 1280*720*1.5, 1, pFile); fclose(pFile); */ } else { } } else { FILE* pFile = NULL; pFile = fopen("test.data", "wb"); fwrite(pData, 1280*720, 1, pFile); fclose(pFile); } if(sample == NULL) { return GST_FLOW_ERROR; } gst_memory_unmap(memory, &map_info); gst_memory_unref(memory); gst_sample_unref(sample); usleep(1*1000); //gst_element_set_state (pd->pipeline, GST_STATE_PLAYING); return GST_FLOW_OK; } GstFlowReturn QGstreamerPlayer::app_sink_new_sample(GstAppSink *sink, gpointer user_data) { GST_CustomData* pd = (GST_CustomData*)user_data; GstSample* sample = NULL; if(pd->m_bShow==false) { pd->terminate = true; //gst_element_set_state (pd->pipeline, GST_STATE_PAUSED); gst_bin_remove (GST_BIN (pd->pipeline), pd->source); gst_element_set_state (pd->source, GST_STATE_NULL); gst_object_unref (pd->source); gst_element_set_state (pd->pipeline, GST_STATE_NULL); gst_object_unref (pd->pipeline); //gst_object_unref (pd->bus); //m_bTerminate = false; //m_DataGST.terminate = false; return GST_FLOW_EOS; } //qDebug() << "app_sink_new_sample"; int nState = pd->pThread->GetState(); if(nState>0) { //return GST_FLOW_OK; } if(g_bSend==true) { //qDebug() << "track"; //gst_element_set_state (pd->pipeline, GST_STATE_PAUSED); } sample = gst_app_sink_pull_sample(sink); if(sample==NULL) { qDebug() << "app_sink_new_sample ERROR"; return GST_FLOW_CUSTOM_ERROR; } QGstreamerPlayer* pPlayer = (QGstreamerPlayer*)pd->pPlayer; if(pPlayer->GetFrameWidth()==0 || pPlayer->GetFrameHeight()==0) { GstCaps* pCaps = NULL; pCaps = gst_sample_get_caps(sample); GstStructure* s = gst_caps_get_structure (pCaps, 0); gint width, height; int res = gst_structure_get_int (s, "width", &width) | gst_structure_get_int (s, "height", &height); pPlayer->SetFrameWidth(width); pPlayer->SetFrameHeight(height); pd->pWidget->SetTextureSize(width, height); } //sample = gst_app_sink_pull_preroll(sink); gboolean bEnabledLast = gst_base_sink_is_last_sample_enabled(GST_BASE_SINK(sink)); guint8* pData = NULL; GstBuffer* buffer = gst_sample_get_buffer(sample); GstMemory* memory = gst_buffer_get_all_memory(buffer); GstMapInfo map_info; if(! 
gst_memory_map(memory, &map_info, GST_MAP_READ)) { gst_memory_unref(memory); gst_sample_unref(sample); usleep(10*1000); return GST_FLOW_ERROR; } pData = map_info.data; if(pd->pWidget!=NULL && pd->m_bShow==true) { if(map_info.size>4096 && pData!=NULL) { //g_pTestDT-> if(pd->pWidget->isVisible()==false) { } if(pd->pWidget->isActiveWindow()==false) { } pd->pWidget->SetTextureData(pData, map_info.size); if(pd->m_bVideoLink==false) { pd->m_bVideoLink = true; pd->m_TimerStart.restart(); } /* FILE* pFile = NULL; pFile = fopen("test.data", "wb"); //fwrite(pData, 1280*720*1.5, 1, pFile); fwrite(pData, 1920*1080*2, 1, pFile); fclose(pFile); */ } else { } } else { if(pd->m_bShow==false) { } /* FILE* pFile = NULL; pFile = fopen("test.data", "wb"); fwrite(pData, 1280*720, 1, pFile); fclose(pFile); */ } if(sample == NULL) { return GST_FLOW_ERROR; } gst_memory_unmap(memory, &map_info); gst_memory_unref(memory); gst_sample_unref(sample); usleep(10); return GST_FLOW_OK; } void QGstreamerPlayer::pad_added_handler (GstElement *src, GstPad *new_pad, GST_CustomData *data) { GstPad *sink_pad = gst_element_get_static_pad (data->sink, "sink"); GstPadLinkReturn ret; GstCaps *new_pad_caps = NULL; GstStructure *new_pad_struct = NULL; const gchar *new_pad_type = NULL; g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src)); /* If our converter is already linked, we have nothing to do here */ if (gst_pad_is_linked (sink_pad)) { g_print (" We are already linked. Ignoring.\n"); goto exit; } /* Check the new pad's type */ new_pad_caps = gst_pad_get_allowed_caps (new_pad); new_pad_struct = gst_caps_get_structure (new_pad_caps, 0); new_pad_type = gst_structure_get_name (new_pad_struct); if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) { g_print (" It has type '%s' which is not raw audio. 
Ignoring.\n", new_pad_type); goto exit; } /* Attempt the link */ ret = gst_pad_link (new_pad, sink_pad); if (GST_PAD_LINK_FAILED (ret)) { g_print (" Type is '%s' but link failed.\n", new_pad_type); } else { g_print (" Link succeeded (type '%s').\n", new_pad_type); } exit: /* Unreference the new pad's caps, if we got them */ if (new_pad_caps != NULL) gst_caps_unref (new_pad_caps); /* Unreference the sink pad */ gst_object_unref (sink_pad); } void QGstreamerPlayer::SetThread(QRTSPThread *pThread) { m_DataGST.pThread = pThread; } void QGstreamerPlayer::SetVideoWidget(QVideoWidget *pWidget) { m_pVideoWidget = pWidget; m_DataGST.pWidget = pWidget; } void QGstreamerPlayer::Seek(gint64 nPos) { GstEvent *seek_event; m_DataGST.rate = 1.0; if(m_bSeeking==false) { m_nSeekPos = nPos; qDebug() << nPos; m_bSeeking = true; //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); gst_element_seek_simple(m_DataGST.pipeline, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE), nPos); //seek_event = gst_event_new_seek (m_DataGST.rate, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_KEY_UNIT), //seek_event = gst_event_new_seek (m_DataGST.rate, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE), // GST_SEEK_TYPE_SET, nPos, GST_SEEK_TYPE_END, nPos); //gboolean bSend = gst_element_send_event (m_DataGST.pipeline, seek_event); //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED); //gst_element_send_event (m_DataGST.pipeline, gst_event_new_step (GST_FORMAT_BUFFERS, 1, m_DataGST.rate, TRUE, FALSE)); g_bSend = true; } else { m_ListSeeking.push_back(nPos); } } void QGstreamerPlayer::Pause() { gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED); } void QGstreamerPlayer::StartRTSP() { gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); m_DataGST.m_bShow = true; m_pVideoWidget->setVisible(true); m_pVideoWidget->activateWindow(); m_pVideoWidget->setFocus(Qt::ActiveWindowFocusReason); m_pVideoWidget->update(); return; } void QGstreamerPlayer::StopRTSP() { m_DataGST.terminate = true; //usleep(1000*10); gst_element_send_event (m_DataGST.pipeline, gst_event_new_eos ()); //usleep(1000*10); return; } void QGstreamerPlayer::exitRTSP() { m_DataGST.m_bShow = false; m_DataGST.terminate = true; //m_bTerminate = true; /* gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); usleep(100*1000); gst_element_send_event (m_DataGST.pipeline, gst_event_new_eos ()); */ return; //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED); } void QGstreamerPlayer::Resume() { gst_element_set_state (m_DataGST.pipeline, GST_STATE_PLAYING); } bool QGstreamerPlayer::IsSeeking() { return m_bSeeking; } gint64 QGstreamerPlayer::GetSeekPos() { return m_nSeekPos; } void QGstreamerPlayer::SetFrameWidth(int nWidth) { m_nFrameWidth = nWidth; } void QGstreamerPlayer::SetFrameHeight(int nHeight) { m_nFrameHeight = nHeight; } int QGstreamerPlayer::GetFrameWidth() { return m_nFrameWidth; } int QGstreamerPlayer::GetFrameHeight() { return m_nFrameHeight; } void QGstreamerPlayer::SaveFile(QString strFilename) { #ifdef __x86_64 CustomDataSink *sink = g_new0 (CustomDataSink, 1); GstPad *sinkpad; GstPadTemplate *templ; //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED); templ = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (m_DataGST.tee), "src_%u"); g_print ("add\n"); sink->m_pTeePad = gst_element_request_pad (m_DataGST.tee, templ, NULL, NULL); sink->m_pQueue = gst_element_factory_make ("queue", 
NULL); sink->m_pConvert = gst_element_factory_make ("videoconvert", NULL); sink->m_pSink = gst_element_factory_make ("filesink", NULL); sink->m_pEncoder = gst_element_factory_make("x264enc", NULL); sink->m_pParser = gst_element_factory_make("h264parse", NULL); sink->m_pMuxer = gst_element_factory_make("qtmux", NULL); sink->removing = FALSE; gst_bin_add_many (GST_BIN (m_DataGST.pipeline), (GstElement*)gst_object_ref (sink->m_pQueue), gst_object_ref (sink->m_pConvert), gst_object_ref (sink->m_pEncoder), gst_object_ref (sink->m_pParser), //gst_object_ref (pQueue2), gst_object_ref (sink->m_pMuxer), gst_object_ref (sink->m_pSink), NULL); gboolean link_ok; //link_ok = gst_element_link_filtered (sink->m_pRate, sink->m_pConvert, caps); //gst_caps_unref (caps); // gst_element_link_many (sink->m_pQueue, sink->m_pRate, sink->m_pCaps, sink->m_pConvert, sink->m_pEncoder, sink->m_pParser, sink->m_pSink, NULL); gst_element_link_many (sink->m_pQueue, sink->m_pConvert, sink->m_pEncoder, sink->m_pParser, sink->m_pMuxer, sink->m_pSink, NULL); /* GstCaps *caps; caps = gst_caps_new_simple ("video/x-raw", //"format", G_TYPE_STRING, "I420", //"width", G_TYPE_INT, 1920, //"height", G_TYPE_INT, 1080, "framerate", GST_TYPE_FRACTION, 60, 1, NULL); */ //g_object_set(G_OBJECT(sink->m_pCaps), "caps", caps, NULL); g_object_set (sink->m_pSink, "location", strFilename.toStdString().c_str(), NULL); g_object_set (sink->m_pEncoder, "bitrate", 10240, NULL); g_object_set (sink->m_pEncoder, "byte-stream", true, NULL); g_object_set (sink->m_pEncoder, "speed-preset", 1, NULL); g_object_set (sink->m_pEncoder, "tune", 4, NULL); g_object_set (sink->m_pSink, "sync", TRUE, NULL); //g_object_set (sink->m_pSink, "sync", FALSE, NULL); //g_object_set (sink->m_pSink, "-e", NULL); gst_element_sync_state_with_parent (sink->m_pQueue); gst_element_sync_state_with_parent (sink->m_pConvert); gst_element_sync_state_with_parent (sink->m_pEncoder); gst_element_sync_state_with_parent (sink->m_pParser); gst_element_sync_state_with_parent (sink->m_pMuxer); gst_element_sync_state_with_parent (sink->m_pSink); sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink"); gst_pad_link (sink->m_pTeePad, sinkpad); gst_object_unref (sinkpad); g_print ("added\n"); m_DataGST.m_pSink = sink; #else CustomDataSink *sink = m_DataGST.m_pSink; GstPad *sinkpad; GstPadTemplate *templ; QString strFilename1 = strFilename.remove(MOVIE_FILE_EXTENSION); sink->removing = FALSE; //gst_element_set_state (m_DataGST.pipeline, GST_STATE_PAUSED); templ = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (m_DataGST.tee), "src_%u"); g_print ("add\n"); sink->m_pTeePad = gst_element_request_pad (m_DataGST.tee, templ, NULL, NULL); sink->m_pQueue = gst_element_factory_make ("queue", NULL); sink->m_pQueueFileData = gst_element_factory_make ("queue", NULL); sink->m_pConvert = gst_element_factory_make ("nvvidconv", NULL); sink->m_pSink = gst_element_factory_make ("splitmuxsink", NULL); //sink->m_pSink = gst_element_factory_make ("filesink", NULL); bool bUseH265 = false; #ifdef __x86_64 sink->m_pEncoder = gst_element_factory_make("x264enc", NULL); #else if(sink->m_pEncoder==NULL) { if(bUseH265==true) { sink->m_pEncoder = gst_element_factory_make("nvv4l2h265enc", NULL); } else { sink->m_pEncoder = gst_element_factory_make("nvv4l2h264enc", NULL); } } #endif if(sink->m_pParser==NULL) { //sink->m_pParser = NULL; if(bUseH265==true) { sink->m_pParser = gst_element_factory_make("h265parse", NULL); } else { sink->m_pParser = gst_element_factory_make("h264parse", NULL); } } 
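    /* Note: this branch records through splitmuxsink, which muxes and writes the
     * file itself, so no standalone muxer element is created or linked here. */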
if(sink->m_pMuxer==NULL) { sink->m_pMuxer = NULL; //sink->m_pMuxer = gst_element_factory_make("matroskamux", NULL); } gst_bin_add_many (GST_BIN (m_DataGST.pipeline), (GstElement*)gst_object_ref (sink->m_pQueue), gst_object_ref (sink->m_pConvert), gst_object_ref (sink->m_pQueueFileData), gst_object_ref (sink->m_pEncoder), gst_object_ref (sink->m_pParser), //gst_object_ref (sink->m_pMuxer), gst_object_ref (sink->m_pSink), NULL); QString strFileExtension = MOVIE_FILE_EXTENSION; QString strSaveFilename = QString("%1_%05d%2").arg(strFilename1).arg(strFileExtension); qDebug() << strSaveFilename; g_object_set (sink->m_pSink, "location", strSaveFilename.toStdString().c_str(), "max-size-bytes", 2400000000, "async-handling", true, NULL); //g_object_set (sink->m_pSink, "location", strSaveFilename.toStdString().c_str(), "max-size-bytes", 200000000, "async-handling", true, NULL); //g_object_set (sink->m_pSink, "location", strFilename.toStdString().c_str(), NULL); //g_object_set (sink->m_pSink, "sync", FALSE, NULL); usleep(100); gboolean link_ok = 0; link_ok = gst_element_link_many (sink->m_pQueue, sink->m_pConvert, sink->m_pQueueFileData, sink->m_pEncoder, sink->m_pParser, //sink->m_pMuxer, sink->m_pSink, NULL); usleep(100); if(link_ok==0) { } #ifdef __x86_64 g_object_set (sink->m_pEncoder, "bitrate", 10000); #else if(bUseH265==true) { g_object_set (G_OBJECT(sink->m_pEncoder), "bitrate", 8000000, "profile", 1, "preset-level", 1, NULL); } else { //Main Profile MainTier: 20Mbps, HighTier: 50Mbps g_object_set (G_OBJECT(sink->m_pEncoder), "bitrate", 8000000, "profile", 4, "preset-level", 1, NULL); } //g_object_set (G_OBJECT(sink->m_pEncoder), "bitrate", 60000000, "peak-bitrate", 150000000); //g_object_set (sink->m_pEncoder, "bitrate", 1000000, NULL); //g_object_set (sink->m_pEncoder, "peak-bitrate", 1600000, NULL); //g_object_set (sink->m_pEncoder, "control-rate", 1, NULL); //g_object_set (sink->m_pEncoder, "bit-packetization", true, NULL); //g_object_set (sink->m_pEncoder, "EnableTwopassCBR", true, NULL); //g_object_set (sink->m_pEncoder, "insert-aud", true, NULL); //g_object_set (sink->m_pEncoder, "iframeinterval", 30, NULL); //g_object_set (sink->m_pEncoder, "maxperf-enable", false, NULL); //g_object_set (sink->m_pEncoder, "profile", 1, NULL); //g_object_set (sink->m_pEncoder, "vbv-size", 10000000, NULL); //g_object_set (sink->m_pEncoder, "preset-level", 1, NULL); //g_object_set (sink->m_pEncoder, "insert-sps-pps", true, NULL); #endif //m_DataGST.m_pSink = sink; gst_element_sync_state_with_parent (sink->m_pQueue); gst_element_sync_state_with_parent (sink->m_pConvert); gst_element_sync_state_with_parent (sink->m_pQueueFileData); gst_element_sync_state_with_parent (sink->m_pEncoder); gst_element_sync_state_with_parent (sink->m_pParser); //gst_element_sync_state_with_parent (sink->m_pMuxer); gst_element_sync_state_with_parent (sink->m_pSink); sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink"); gst_pad_link (sink->m_pTeePad, sinkpad); gst_object_unref (sinkpad); m_DataGST.m_bEncoding = true; g_print ("added\n"); //usleep(10*1000); //gst_element_set_state (sink->m_pSink, GST_STATE_PLAYING); /* GstPad *pPadSinkSink = NULL; pPadSinkSink = gst_element_get_static_pad (sink->m_pSink, "sink"); ret = gst_element_set_state (sink->m_pSink, GST_STATE_PAUSED); gst_pad_send_event (pPadSinkSink, gst_event_new_stream_start ("test")); gst_element_set_state(m_DataGST.pipeline, GST_STATE_PLAYING); ret = gst_element_set_state (sink->m_pSink, GST_STATE_PLAYING); */ #endif } void QGstreamerPlayer::SaveEnd() { 
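    /* Stop recording: unlink the recording branch from the tee, send EOS down
     * the branch so the muxer/sink can finalize the file, then remove and
     * release the branch elements and the requested tee pad. */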
CustomDataSink* sink =m_DataGST.m_pSink; SetCaptureMode(0); if(m_DataGST.m_bEncoding==false) { return; } if(sink==NULL) { return; } if(sink->removing==TRUE) { return; } GST_CustomData* pData = (GST_CustomData*)&m_DataGST; GstPad *sinkpad; sink->removing = TRUE; sinkpad = gst_element_get_static_pad (sink->m_pQueue, "sink"); GstElement* pPipeLine = m_DataGST.pipeline; /* GstStateChangeReturn ret = gst_element_set_state (pPipeLine, GST_STATE_PAUSED); ret = gst_element_set_state (pPipeLine, GST_STATE_READY); ret = gst_element_set_state (pPipeLine, GST_STATE_PLAYING); return; */ gst_pad_unlink (sink->m_pTeePad, sinkpad); gst_element_send_event(sink->m_pConvert, gst_event_new_eos()); //gst_pad_send_event (sinkpad, gst_event_new_eos ()); //gst_element_send_event (pPipeLine, gst_event_new_eos ()); gst_object_unref (sinkpad); //return; usleep(100*1000); #ifdef __x86_64 //return; //gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pRate); //gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pCaps); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pConvert); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pSink); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pEncoder); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pParser); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueue); gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_NULL); //gst_element_set_state (pData->m_pSink->m_pRate, GST_STATE_NULL); //gst_element_set_state (pData->m_pSink->m_pCaps, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pQueue); //gst_object_unref (pData->m_pSink->m_pRate); //gst_object_unref (pData->m_pSink->m_pCaps); gst_object_unref (pData->m_pSink->m_pConvert); gst_object_unref (pData->m_pSink->m_pSink); gst_object_unref (pData->m_pSink->m_pEncoder); gst_object_unref (pData->m_pSink->m_pParser); #else //gst_pad_send_event (sinkpad, gst_event_new_eos ()); //gst_element_send_event (pPipeLine, gst_event_new_eos ()); //gst_pad_unlink (sink->m_pTeePad, sinkpad); //gst_object_unref (sinkpad); //return; if(pData->m_pSink!=NULL) { if(pData->m_pSink->m_pQueue!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueue); gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pQueue, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pQueue); pData->m_pSink->m_pQueue = NULL; } if(pData->m_pSink->m_pConvert!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pConvert); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pConvert); pData->m_pSink->m_pConvert = NULL; } if(pData->m_pSink->m_pQueueFileData!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pQueueFileData); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pQueueFileData); pData->m_pSink->m_pQueueFileData = NULL; } if(pData->m_pSink->m_pEncoder!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pEncoder); gst_element_set_state 
(pData->m_pSink->m_pEncoder, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pEncoder); pData->m_pSink->m_pEncoder = NULL; } if(pData->m_pSink->m_pParser!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pParser); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pParser); pData->m_pSink->m_pParser = NULL; } if(pData->m_pSink->m_pMuxer!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pMuxer); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pMuxer); pData->m_pSink->m_pMuxer = NULL; } if(pData->m_pSink->m_pSink!=NULL) { gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->m_pSink); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pSink); pData->m_pSink->m_pSink = NULL; } /* gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_bin_remove (GST_BIN (pData->pipeline), pData->m_pSink->); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_READY); gst_element_set_state (pData->m_pSink->m_pConvert, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pQueueFileData, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pEncoder, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pParser, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pMuxer, GST_STATE_NULL); gst_element_set_state (pData->m_pSink->m_pSink, GST_STATE_NULL); gst_object_unref (pData->m_pSink->m_pRate); gst_object_unref (pData->m_pSink->m_pCaps); gst_object_unref (pData->m_pSink->m_pMuxer); gst_object_unref (pData->m_pSink->m_pConvert); gst_object_unref (pData->m_pSink->m_pSink); gst_object_unref (pData->m_pSink->m_pEncoder); gst_object_unref (pData->m_pSink->m_pParser); pData->m_pSink->m_pRate = NULL; pData->m_pSink->m_pCaps = NULL; pData->m_pSink->m_pMuxer = NULL; pData->m_pSink->m_pConvert = NULL; pData->m_pSink->m_pSink = NULL; pData->m_pSink->m_pEncoder = NULL; pData->m_pSink->m_pParser = NULL; */ //sink->m_pSink = gst_element_factory_make ("filesink", NULL); //sink->m_pQueue = gst_element_factory_make ("queue", NULL); //sink->m_pQueueFileData = gst_element_factory_make ("queue", NULL); //sink->m_pConvert = gst_element_factory_make ("nvvidconv", NULL); //sink->m_pSink = gst_element_factory_make ("filesink", NULL); //sink->m_pEncoder = gst_element_factory_make("nvv4l2h265enc", NULL); //sink->m_pParser = gst_element_factory_make("h265parse", NULL); //sink->m_pMuxer = gst_element_factory_make("matroskamux", NULL); gst_element_release_request_pad (pData->tee, pData->m_pSink->m_pTeePad); gst_object_unref (pData->m_pSink->m_pTeePad); } #endif //pData->m_pSink = NULL; m_DataGST.m_bEncoding = false; m_DataGST.m_TimerStart.restart(); usleep(100*1000); } void 
QGstreamerPlayer::CaptureImage()
{
    m_bCaptureImage = true;
}

void QGstreamerPlayer::ChangeHue(float fValue)
{
    m_nTestCount++;
    //g_object_set (m_DataGST.videoBalance, "hue", fValue, NULL);
}

void QGstreamerPlayer::ChangeSaturation(float fValue)
{
    m_nTestCount++;
    //g_object_set (m_DataGST.videoBalance, "saturation", fValue, NULL);
}

void QGstreamerPlayer::Alram()
{
    /*
    gchar* msg;
    msg = gst_error_get_message(GST_CORE_ERROR, GST_CORE_ERROR_DISABLED);
    g_set_error(&m_pError, GST_CORE_ERROR, GST_CORE_ERROR_DISABLED, "%s", msg);
    g_free(msg);
    */
    GstMessage *m;
    GstStructure *s;
    m_DataGST.m_bError = true;
    m = gst_message_new_error (NULL, NULL, "debug string");
    GST_LOG ("posting error message");
    gst_bus_post (m_DataGST.bus, m);
}

void QGstreamerPlayer::SetCaptureMode(int nMode)
{
    m_nCaptureMode = nMode;
}

int QGstreamerPlayer::GetCaptuerMode()
{
    return m_nCaptureMode;
}

qint64 QGstreamerPlayer::GetVideoLinkTime()
{
    return m_DataGST.m_TimerStart.elapsed();
}

bool QGstreamerPlayer::IsError()
{
    return m_bError;
}