I want to be able to start and stop recording by adding a valve element to the recording branch of the pipeline and writing the necessary helper functions. I initially set the valve's drop property to TRUE so that recording does not start right away, but when I do this the video output in QML freezes on the first frame. How can I fix this bug?
void GStreamerQmlSink::startStream(const QString &url, QQuickItem *videoItem)
{
    if (pipeline) {
        qWarning() << "Stream already running.";
        return;
    }

    videoItemRef = videoItem;

    // "watchdog timeout=10000 ! "
    QString pipelineDescription = QString(
        "rtspsrc location=%1 latency=0 udp-reconnect=1 ! "
        "rtpjitterbuffer latency=50 ! "
        "rtph264depay ! "
        "h264parse ! "
        "avdec_h264 ! "
        "videoconvert ! "
        "tee name=t "
        // Display branch
        "t. ! queue ! "
        "glupload ! "
        "glcolorconvert ! "
        "qmlglsink name=video_sink "
        // Recording branch
        "t. ! queue name=recording_queue ! "
        "valve name=recording_valve ! "
        "videoconvert ! "
        "video/x-raw,format=I420 ! "
        "x264enc tune=zerolatency ! "
        "mp4mux ! "
        "filesink name=file_sink location=/home/user/stream_record.mp4"
    ).arg(url);

    GError *error = nullptr;
    pipeline = gst_parse_launch(qPrintable(pipelineDescription), &error);
    if (!pipeline || error) {
        qCritical() << "Failed to create pipeline:" << (error ? error->message : "unknown error");
        if (error) g_error_free(error);
        return;
    }

    // Attach the qmlglsink to the QML video item
    GstElement *videoSink = gst_bin_get_by_name(GST_BIN(pipeline), "video_sink");
    if (!videoSink) {
        qCritical() << "Failed to find qmlglsink in pipeline.";
        gst_object_unref(pipeline);
        pipeline = nullptr;
        return;
    }
    g_object_set(G_OBJECT(videoSink), "widget", videoItem, nullptr);
    gst_object_unref(videoSink);

    // Close the valve so the recording branch gets no buffers until startRecording()
    GstElement *recordingValve = gst_bin_get_by_name(GST_BIN(pipeline), "recording_valve");
    if (recordingValve) {
        g_object_set(G_OBJECT(recordingValve), "drop", TRUE, nullptr);
        gst_object_unref(recordingValve);
    } else {
        qCritical() << "Failed to find recording valve in pipeline.";
    }

    GstBus *bus = gst_element_get_bus(pipeline);
    gst_bus_add_watch(bus, (GstBusFunc)GStreamerQmlSink::busCall, this);
    gst_object_unref(bus);

    GstStateChangeReturn ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        qCritical() << "Failed to set pipeline to PLAYING.";
        gst_object_unref(pipeline);
        pipeline = nullptr;
        return;
    }

    qInfo() << "Pipeline started successfully.";
}
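For context, the class members this code relies on look roughly like the sketch below. This is an assumption reconstructed from the identifiers used above (pipeline, videoItemRef, busCall); the actual header may differ, and exposing the recording methods as Q_INVOKABLE is also my assumption.

#include <QObject>
#include <QQuickItem>
#include <gst/gst.h>

// Sketch of the relevant parts of GStreamerQmlSink (assumed, not the actual header)
class GStreamerQmlSink : public QObject
{
    Q_OBJECT
public:
    explicit GStreamerQmlSink(QObject *parent = nullptr) : QObject(parent) {}

    void startStream(const QString &url, QQuickItem *videoItem);
    Q_INVOKABLE void startRecording();   // assumption: callable from QML
    Q_INVOKABLE void stopRecording();    // assumption: callable from QML

private:
    static gboolean busCall(GstBus *bus, GstMessage *msg, gpointer data);

    GstElement *pipeline = nullptr;      // created by gst_parse_launch()
    QQuickItem *videoItemRef = nullptr;  // QML item passed to the qmlglsink "widget" property
};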
void GStreamerQmlSink::startRecording()
{
    if (!pipeline) {
        qWarning() << "No active pipeline.";
        return;
    }

    GstElement *recordingValve = gst_bin_get_by_name(GST_BIN(pipeline), "recording_valve");
    if (!recordingValve) {
        qWarning() << "No active recording valve found.";
        return;
    }

    qInfo() << "Starting recording...";
    // Open the valve so buffers start flowing into the encoder/muxer branch
    g_object_set(G_OBJECT(recordingValve), "drop", FALSE, nullptr);
    gst_object_unref(recordingValve);
    qInfo() << "Recording started successfully.";
}
void GStreamerQmlSink::stopRecording()
{
    if (!pipeline) {
        qWarning() << "No active pipeline.";
        return;
    }

    GstElement *recordingQueue = gst_bin_get_by_name(GST_BIN(pipeline), "recording_queue");
    if (!recordingQueue) {
        qWarning() << "No active recording queue found.";
        return;
    }

    qInfo() << "Stopping recording...";
    // Send EOS only into the recording branch so mp4mux can finalize the file
    GstEvent *eos_event = gst_event_new_eos();
    gst_element_send_event(recordingQueue, eos_event);
    gst_object_unref(recordingQueue);
    qInfo() << "Recording stopped successfully. MP4 file should be finalized.";
}
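The bus watch installed in startStream() (busCall) is not shown above. A minimal sketch of what it is assumed to do, just logging errors and EOS, follows; the real handler may do more.

// Minimal sketch of the bus callback (assumed behavior: log errors and EOS only)
gboolean GStreamerQmlSink::busCall(GstBus *bus, GstMessage *msg, gpointer data)
{
    Q_UNUSED(bus);
    Q_UNUSED(data);

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR: {
        GError *err = nullptr;
        gchar *debugInfo = nullptr;
        gst_message_parse_error(msg, &err, &debugInfo);
        qCritical() << "GStreamer error:" << (err ? err->message : "unknown")
                    << "debug:" << (debugInfo ? debugInfo : "none");
        if (err) g_error_free(err);
        g_free(debugInfo);
        break;
    }
    case GST_MESSAGE_EOS:
        qInfo() << "EOS reached the bus.";
        break;
    default:
        break;
    }
    return TRUE; // keep the watch installed
}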