Add a pipeline for static image display; it compiles and runs, but has not been verified yet

2025-05-03 17:00:48 +08:00
parent cfb9d12f3c
commit e3180b5ad1
3 changed files with 65 additions and 44 deletions
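
The core of the change is a GStreamer input-selector with two branches: a frozen still image (filesrc ! jpegdec ! imagefreeze) on selector.sink_0 and the live mirror stream (appsrc ! parser ! decoder) on selector.sink_1, so the renderer can show a placeholder picture whenever no video is arriving. The stand-alone sketch below is not part of the commit; it only illustrates the selector technique with assumed stand-ins (videotestsrc instead of the appsrc branch, a hypothetical placeholder.jpg, autovideosink) and switches the active pad after a few seconds.

    #include <gst/gst.h>

    int main(int argc, char *argv[]) {
        gst_init(&argc, &argv);

        /* Two branches feed an input-selector; whichever pad is "active" reaches the sink. */
        GError *error = NULL;
        GstElement *pipeline = gst_parse_launch(
            "input-selector name=selector ! videoconvert ! autovideosink "
            "filesrc location=placeholder.jpg ! jpegdec ! imagefreeze ! videoconvert ! videoscale ! queue ! selector.sink_0 "
            "videotestsrc is-live=true ! videoconvert ! queue ! selector.sink_1",
            &error);
        if (!pipeline) {
            g_printerr("parse error: %s\n", error ? error->message : "unknown");
            return 1;
        }

        gst_element_set_state(pipeline, GST_STATE_PLAYING);
        g_usleep(3 * G_USEC_PER_SEC);                 /* show the frozen image for ~3 s */

        /* Switch to the "live" branch by pointing active-pad at the other request pad. */
        GstElement *selector = gst_bin_get_by_name(GST_BIN(pipeline), "selector");
        GstPad *live_pad = gst_element_get_static_pad(selector, "sink_1");
        g_object_set(selector, "active-pad", live_pad, NULL);
        g_usleep(3 * G_USEC_PER_SEC);

        gst_object_unref(live_pad);
        gst_object_unref(selector);
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
        return 0;
    }

Build and run with something like: gcc example.c $(pkg-config --cflags --libs gstreamer-1.0).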


@@ -192,8 +192,8 @@ GstElement *make_video_sink(const char *videosink, const char *videosink_options
 }
 
 void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
                          const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
-                         bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri) {
+                         bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri, const char *default_image) {
     GError *error = NULL;
     GstCaps *caps = NULL;
     hls_video = (uri != NULL);
@@ -204,10 +204,18 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
     logger_debug = (logger_get_level(logger) >= LOGGER_DEBUG);
     video_terminate = false;
 
+    /* this call to g_set_application_name makes server_name appear in the X11 display window title bar, */
+    /* (instead of the program name uxplay taken from argv[0]). It is only set one time. */
     const gchar *appname = g_get_application_name();
     if (!appname || strcmp(appname,server_name)) g_set_application_name(server_name);
     appname = NULL;
 
+    /* the renderer for hls video will only be built if an HLS uri is provided in
+     * the call to video_renderer_init, in which case the h264 and h265 mirror-mode
+     * renderers will not be built. This is because it appears that we cannot
+     * put playbin into GST_STATE_READY before knowing the uri (?), so cannot use a
+     * unified renderer structure with h264, h265 and hls */
     if (hls_video) {
         n_renderers = 1;
     } else {
@@ -219,15 +227,18 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
         renderer_type[i] = (video_renderer_t *) calloc(1, sizeof(video_renderer_t));
         g_assert(renderer_type[i]);
         renderer_type[i]->autovideo = auto_videosink;
         renderer_type[i]->id = i;
         renderer_type[i]->bus = NULL;
         if (hls_video) {
+            /* use playbin3 to play HLS video: replace "playbin3" by "playbin" to use playbin2 */
             renderer_type[i]->pipeline = gst_element_factory_make("playbin3", "hls-playbin3");
             g_assert(renderer_type[i]->pipeline);
             renderer_type[i]->appsrc = NULL;
             renderer_type[i]->codec = hls;
+            /* if we are not using autovideosink, build a videosink based on the string "videosink" */
             if(strcmp(videosink, "autovideosink")) {
                 GstElement *playbin_videosink = make_video_sink(videosink, videosink_options);
                 if (!playbin_videosink) {
                     logger_log(logger, LOGGER_ERR, "video_renderer_init: failed to create playbin_videosink");
                 } else {
@@ -235,6 +246,7 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
                     g_object_set(G_OBJECT (renderer_type[i]->pipeline), "video-sink", playbin_videosink, NULL);
                 }
             }
             g_object_set (G_OBJECT (renderer_type[i]->pipeline), "uri", uri, NULL);
         } else {
             switch (i) {
@@ -251,35 +263,38 @@ void video_renderer_init(logger_t *render_logger, const char *server_name, video
             }
             GString *launch = g_string_new("");
-            // Add static image source
-            g_string_append(launch, "filesrc location=/path/to/placeholder.jpg ! jpegdec ! imagefreeze ! videoconvert ! ");
-            g_string_append(launch, "queue ! ");
-            g_string_append(launch, "input-selector name=selector ! ");
-            // Add video source
-            g_string_append(launch, "appsrc name=video_source ! ");
-            g_string_append(launch, "queue ! ");
-            g_string_append(launch, parser);
-            g_string_append(launch, " ! ");
-            g_string_append(launch, decoder);
-            g_string_append(launch, " ! ");
-            append_videoflip(launch, &videoflip[0], &videoflip[1]);
-            g_string_append(launch, converter);
-            g_string_append(launch, " ! ");
-            g_string_append(launch, "videoscale ! ");
-            g_string_append(launch, videosink);
-            g_string_append(launch, " name=");
-            g_string_append(launch, videosink);
-            g_string_append(launch, "_");
-            g_string_append(launch, renderer_type[i]->codec);
-            g_string_append(launch, videosink_options);
-            if (video_sync) {
-                g_string_append(launch, " sync=true");
-                sync = true;
-            } else {
-                g_string_append(launch, " sync=false");
-                sync = false;
-            }
+            // Create input-selector and video sink
+            g_string_append(launch, "input-selector name=selector ! ");
+            g_string_append(launch, videosink);
+            g_string_append(launch, " name=");
+            g_string_append(launch, videosink);
+            g_string_append(launch, "_");
+            g_string_append(launch, renderer_type[i]->codec);
+            g_string_append(launch, videosink_options);
+            if (video_sync) {
+                g_string_append(launch, " sync=true");
+                sync = true;
+            } else {
+                g_string_append(launch, " sync=false");
+                sync = false;
+            }
+            // Add static image source branch
+            g_string_append_printf(launch, " filesrc location=%s ! jpegdec ! imagefreeze ! videoconvert ! videoscale ! queue ! selector.sink_0 ", default_image);
+            // Add video source branch
+            g_string_append(launch, " appsrc name=video_source ! ");
+            g_string_append(launch, "queue ! ");
+            g_string_append(launch, parser);
+            g_string_append(launch, " ! ");
+            g_string_append(launch, decoder);
+            g_string_append(launch, " ! ");
+            append_videoflip(launch, &videoflip[0], &videoflip[1]);
+            g_string_append(launch, converter);
+            g_string_append(launch, " ! ");
+            g_string_append(launch, "videoscale ! ");
+            g_string_append(launch, "queue ! ");
+            g_string_append(launch, "selector.sink_1 ");
             if (!strcmp(renderer_type[i]->codec, h264)) {
                 char *pos = launch->str;
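
For reference, with illustrative values substituted (parser=h264parse, decoder=avdec_h264, converter=videoconvert, videosink=xvimagesink, no videoflip, sync disabled, -di /path/to/image.jpg — these are assumptions, not values fixed by the commit), the new code assembles a launch string along these lines (wrapped here for readability; the code builds it as one string):

    input-selector name=selector ! xvimagesink name=xvimagesink_h264 sync=false
      filesrc location=/path/to/image.jpg ! jpegdec ! imagefreeze ! videoconvert ! videoscale ! queue ! selector.sink_0
      appsrc name=video_source ! queue ! h264parse ! avdec_h264 ! videoconvert ! videoscale ! queue ! selector.sink_1

The selector's src pad feeds the named video sink, sink_0 carries the frozen placeholder frame, and sink_1 carries the decoded mirror stream.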
@@ -429,6 +444,7 @@ bool waiting_for_x11_window() {
 void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time) {
     GstBuffer *buffer;
     GstClockTime pts = (GstClockTime) *ntp_time; /*now in nsecs */
+    //GstClockTimeDiff latency = GST_CLOCK_DIFF(gst_element_get_current_clock_time (renderer->appsrc), pts);
     if (sync) {
         if (pts >= gst_video_pipeline_base_time) {
             pts -= gst_video_pipeline_base_time;
@@ -439,6 +455,10 @@ void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_c
         }
     }
     g_assert(data_len != 0);
+    /* first four bytes of valid h264 video data are 0x00, 0x00, 0x00, 0x01.       *
+     * nal_count is the number of NAL units in the data: short SPS, PPS, SEI NALs  *
+     * may precede a VCL NAL. Each NAL starts with 0x00 0x00 0x00 0x01 and is      *
+     * byte-aligned: the first byte of invalid data (decryption failed) is 0x01    */
    if (data[0]) {
        logger_log(logger, LOGGER_ERR, "*** ERROR decryption of video packet failed ");
    } else {
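
As a side note to the comment above, counting Annex-B NAL units amounts to scanning for the 4-byte 00 00 00 01 start code. The helper below is illustrative only (hypothetical name, not code from this repository):

    /* Hypothetical helper: count NAL units in an Annex-B buffer by locating
     * 4-byte 0x00 0x00 0x00 0x01 start codes. */
    static int count_nal_units(const unsigned char *data, int len) {
        int count = 0;
        for (int i = 0; i + 3 < len; i++) {
            if (data[i] == 0x00 && data[i + 1] == 0x00 &&
                data[i + 2] == 0x00 && data[i + 3] == 0x01) {
                count++;
                i += 3;   /* jump past this start code */
            }
        }
        return count;
    }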
@@ -450,6 +470,7 @@ void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_c
     }
     buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
     g_assert(buffer != NULL);
+    //g_print("video latency %8.6f\n", (double) latency / SECOND_IN_NSECS);
     if (sync) {
         GST_BUFFER_PTS(buffer) = pts;
     }
@@ -482,7 +503,8 @@ void video_renderer_stop() {
             video_renderer_switch_source(false);
         }
         gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
+        //gst_element_set_state (renderer->playbin, GST_STATE_NULL);
     }
 }
 
 static void video_renderer_destroy_h26x(video_renderer_t *renderer) {
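
video_renderer_stop() above calls video_renderer_switch_source(false), but that function's body is not part of this diff. A minimal sketch of what such a switch could look like, assuming nothing beyond the element and pad names the launch string creates (selector, sink_0 for the placeholder, sink_1 for live video):

    /* Sketch only (hypothetical helper, not the commit's implementation). */
    static void switch_selector_source(GstElement *pipeline, gboolean live_video) {
        GstElement *selector = gst_bin_get_by_name(GST_BIN(pipeline), "selector");
        if (!selector) return;
        /* sink_1 = decoded mirror stream, sink_0 = frozen placeholder image */
        GstPad *pad = gst_element_get_static_pad(selector, live_video ? "sink_1" : "sink_0");
        if (pad) {
            g_object_set(selector, "active-pad", pad, NULL);
            gst_object_unref(pad);
        }
        gst_object_unref(selector);
    }

Called with FALSE, as video_renderer_stop() does with video_renderer_switch_source(false), such a helper would route the placeholder branch back to the sink before the pipeline is shut down.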

test.jpg: new executable binary file (96 KiB image); binary content not shown.

@@ -86,7 +86,6 @@ static bool reset_loop = false;
 static unsigned int open_connections= 0;
 static std::string videosink = "autovideosink";
 static std::string videosink_options = "";
-static std::string default_image = ""; // Add this line
 static videoflip_t videoflip[2] = { NONE , NONE };
 static bool use_video = true;
 static unsigned char compression_type = 0;
@@ -150,6 +149,7 @@ static std::string url = "";
 static guint gst_x11_window_id = 0;
 static guint gst_hls_position_id = 0;
 static bool preserve_connections = false;
+static std::string default_image_path = "";
 
 /* logging */
@@ -609,7 +609,7 @@ static void print_info (char *name) {
     printf("-h265     Support h265 (4K) video (with h265 versions of h264 plugins)\n");
     printf("-hls      Support HTTP Live Streaming (currently Youtube video only) \n");
     printf("-pin[xxxx]Use a 4-digit pin code to control client access (default: no)\n");
-    printf("-di <fn>  Set default image file to display when video is not available\n");
+    printf("-di <path> Set default image path for video placeholder\n");
     printf("          default pin is random: optionally use fixed pin xxxx\n");
     printf("-reg [fn] Keep a register in $HOME/.uxplay.register to verify returning\n");
     printf("          client pin-registration; (option: use file \"fn\" for this)\n");
@@ -819,11 +819,7 @@ static void parse_arguments (int argc, char *argv[]) {
     // Parse arguments
     for (int i = 1; i < argc; i++) {
         std::string arg(argv[i]);
-        if (arg == "-di") {
-            if (!option_has_value(i, argc, arg, argv[i+1])) exit(1);
-            default_image = std::string(argv[++i]);
-            LOGI("Using default image: %s", default_image.c_str());
-        } else if (arg == "-allow") {
+        if (arg == "-allow") {
             if (!option_has_value(i, argc, arg, argv[i+1])) exit(1);
             i++;
             allowed_clients.push_back(argv[i]);
@@ -1183,6 +1179,9 @@ static void parse_arguments (int argc, char *argv[]) {
             h265_support = true;
         } else if (arg == "-nofreeze") {
             nofreeze = true;
+        } else if (arg == "-di") {
+            if (!option_has_value(i, argc, arg, argv[i+1])) exit(1);
+            default_image_path = std::string(argv[++i]);
         } else {
             fprintf(stderr, "unknown option %s, stopping (for help use option \"-h\")\n",argv[i]);
             exit(1);
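
With the -di option parsed here, the placeholder image is chosen at launch time, for example with the test.jpg that this commit adds (invocation shown for illustration):

    uxplay -di test.jpg

Any JPEG that jpegdec can read should work, since the placeholder branch of the launch string is built around filesrc ! jpegdec.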
@@ -2244,7 +2243,7 @@ int main (int argc, char *argv[]) {
     if (use_video) {
         video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
                             video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
-                            videosink_options.c_str(), fullscreen, video_sync, h265_support, NULL, default_image.c_str());
+                            videosink_options.c_str(), fullscreen, video_sync, h265_support, NULL, default_image_path.c_str());
         video_renderer_start();
     }
@@ -2319,7 +2318,7 @@ int main (int argc, char *argv[]) {
             const char *uri = (url.empty() ? NULL : url.c_str());
             video_renderer_init(render_logger, server_name.c_str(), videoflip, video_parser.c_str(),
                                 video_decoder.c_str(), video_converter.c_str(), videosink.c_str(),
-                                videosink_options.c_str(), fullscreen, video_sync, h265_support, uri);
+                                videosink_options.c_str(), fullscreen, video_sync, h265_support, uri, default_image_path.c_str());
             video_renderer_start();
         }
         if (relaunch_video) {