添加靜態圖片顯示的pipeline,編譯ok,可以運行,但是還沒驗證

This commit is contained in:
2025-05-03 17:00:48 +08:00
parent cfb9d12f3c
commit e3180b5ad1
3 changed files with 65 additions and 44 deletions

View File

@ -192,8 +192,8 @@ GstElement *make_video_sink(const char *videosink, const char *videosink_options
}
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri) {
const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri, const char *default_image) {
GError *error = NULL;
GstCaps *caps = NULL;
hls_video = (uri != NULL);
@ -204,10 +204,18 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
logger_debug = (logger_get_level(logger) >= LOGGER_DEBUG);
video_terminate = false;
/* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
/* (instead of the program name uxplay taken from (argv[0]). It is only set one time. */
const gchar *appname = g_get_application_name();
if (!appname || strcmp(appname,server_name)) g_set_application_name(server_name);
appname = NULL;
/* the renderer for hls video will only be built if a HLS uri is provided in
* the call to video_renderer_init, in which case the h264 and 265 mirror-mode
* renderers will not be built. This is because it appears that we cannot
* put playbin into GST_STATE_READY before knowing the uri (?), so cannot use a
* unified renderer structure with h264, h265 and hls */
if (hls_video) {
n_renderers = 1;
} else {
@ -219,15 +227,18 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
renderer_type[i] = (video_renderer_t *) calloc(1, sizeof(video_renderer_t));
g_assert(renderer_type[i]);
renderer_type[i]->autovideo = auto_videosink;
renderer_type[i]->id = i;
renderer_type[i]->bus = NULL;
renderer_type[i]->id = i;
renderer_type[i]->bus = NULL;
if (hls_video) {
/* use playbin3 to play HLS video: replace "playbin3" by "playbin" to use playbin2 */
renderer_type[i]->pipeline = gst_element_factory_make("playbin3", "hls-playbin3");
g_assert(renderer_type[i]->pipeline);
renderer_type[i]->appsrc = NULL;
renderer_type[i]->codec = hls;
renderer_type[i]->codec = hls;
/* if we are not using autovideosink, build a videosink based on the string "videosink" */
if(strcmp(videosink, "autovideosink")) {
GstElement *playbin_videosink = make_video_sink(videosink, videosink_options);
GstElement *playbin_videosink = make_video_sink(videosink, videosink_options);
if (!playbin_videosink) {
logger_log(logger, LOGGER_ERR, "video_renderer_init: failed to create playbin_videosink");
} else {
@ -235,6 +246,7 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
g_object_set(G_OBJECT (renderer_type[i]->pipeline), "video-sink", playbin_videosink, NULL);
}
}
g_object_set (G_OBJECT (renderer_type[i]->pipeline), "uri", uri, NULL);
} else {
switch (i) {
@ -251,35 +263,38 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
}
GString *launch = g_string_new("");
// Add static image source
g_string_append(launch, "filesrc location=/path/to/placeholder.jpg ! jpegdec ! imagefreeze ! videoconvert ! ");
g_string_append(launch, "queue ! ");
g_string_append(launch, "input-selector name=selector ! ");
// Create input-selector and video sink
g_string_append(launch, "input-selector name=selector ! ");
g_string_append(launch, videosink);
g_string_append(launch, " name=");
g_string_append(launch, videosink);
g_string_append(launch, "_");
g_string_append(launch, renderer_type[i]->codec);
g_string_append(launch, videosink_options);
// Add video source
g_string_append(launch, "appsrc name=video_source ! ");
g_string_append(launch, "queue ! ");
g_string_append(launch, parser);
g_string_append(launch, " ! ");
g_string_append(launch, decoder);
g_string_append(launch, " ! ");
append_videoflip(launch, &videoflip[0], &videoflip[1]);
g_string_append(launch, converter);
g_string_append(launch, " ! ");
g_string_append(launch, "videoscale ! ");
g_string_append(launch, videosink);
g_string_append(launch, " name=");
g_string_append(launch, videosink);
g_string_append(launch, "_");
g_string_append(launch, renderer_type[i]->codec);
g_string_append(launch, videosink_options);
if (video_sync) {
g_string_append(launch, " sync=true");
sync = true;
} else {
g_string_append(launch, " sync=false");
sync = false;
}
if (video_sync) {
g_string_append(launch, " sync=true");
sync = true;
} else {
g_string_append(launch, " sync=false");
sync = false;
}
// Add static image source branch
g_string_append_printf(launch, " filesrc location=%s ! jpegdec ! imagefreeze ! videoconvert ! videoscale ! queue ! selector.sink_0 ", default_image);
// Add video source branch
g_string_append(launch, " appsrc name=video_source ! ");
g_string_append(launch, "queue ! ");
g_string_append(launch, parser);
g_string_append(launch, " ! ");
g_string_append(launch, decoder);
g_string_append(launch, " ! ");
append_videoflip(launch, &videoflip[0], &videoflip[1]);
g_string_append(launch, converter);
g_string_append(launch, " ! ");
g_string_append(launch, "videoscale ! ");
g_string_append(launch, "queue ! ");
g_string_append(launch, "selector.sink_1 ");
if (!strcmp(renderer_type[i]->codec, h264)) {
char *pos = launch->str;
@ -429,6 +444,7 @@ bool waiting_for_x11_window() {
void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time) {
GstBuffer *buffer;
GstClockTime pts = (GstClockTime) *ntp_time; /*now in nsecs */
//GstClockTimeDiff latency = GST_CLOCK_DIFF(gst_element_get_current_clock_time (renderer->appsrc), pts);
if (sync) {
if (pts >= gst_video_pipeline_base_time) {
pts -= gst_video_pipeline_base_time;
@ -439,6 +455,10 @@ void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_c
}
}
g_assert(data_len != 0);
/* first four bytes of valid h264 video data are 0x00, 0x00, 0x00, 0x01. *
* nal_count is the number of NAL units in the data: short SPS, PPS, SEI NALs *
* may precede a VCL NAL. Each NAL starts with 0x00 0x00 0x00 0x01 and is *
* byte-aligned: the first byte of invalid data (decryption failed) is 0x01 */
if (data[0]) {
logger_log(logger, LOGGER_ERR, "*** ERROR decryption of video packet failed ");
} else {
@ -450,6 +470,7 @@ void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_c
}
buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
g_assert(buffer != NULL);
//g_print("video latency %8.6f\n", (double) latency / SECOND_IN_NSECS);
if (sync) {
GST_BUFFER_PTS(buffer) = pts;
}
@ -482,7 +503,8 @@ void video_renderer_stop() {
video_renderer_switch_source(false);
}
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
}
//gst_element_set_state (renderer->playbin, GST_STATE_NULL);
}
}
static void video_renderer_destroy_h26x(video_renderer_t *renderer) {