初始提交
This commit is contained in:
86
renderers/CMakeLists.txt
Normal file
86
renderers/CMakeLists.txt
Normal file
@ -0,0 +1,86 @@
|
||||
# renderers/CMakeLists.txt: build the static "renderers" library (GStreamer-based
# audio and video renderers) and locate GStreamer via pkg-config on each platform.
cmake_minimum_required(VERSION 3.5)

if( APPLE )
    # pkg-config search order on macOS (first match wins for each .pc file):
    set( ENV{PKG_CONFIG_PATH} "/Library/FrameWorks/GStreamer.framework/Libraries/pkgconfig" ) # GStreamer.framework, preferred
    set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/usr/local/lib/pkgconfig" )              # Brew or self-installed gstreamer
    set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/opt/homebrew/lib/pkgconfig" )           # Brew, M1/M2 macs
    set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:$ENV{HOMEBREW_PREFIX}/lib/pkgconfig" )   # Brew, using prefix
    set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/opt/local/lib/pkgconfig/" )             # MacPorts
    message( "PKG_CONFIG_PATH (Apple, renderers) = " $ENV{PKG_CONFIG_PATH} )
    # prefer the pkg-config shipped inside GStreamer.framework; --define-prefix makes
    # it report paths relative to the framework's actual install location
    find_program( PKG_CONFIG_EXECUTABLE pkg-config PATHS /Library/FrameWorks/GStreamer.framework/Commands )
    set( PKG_CONFIG_EXECUTABLE ${PKG_CONFIG_EXECUTABLE} --define-prefix )
else()
    # honor a user-specified GStreamer installation, if present
    if( DEFINED ENV{GSTREAMER_ROOT_DIR} )
        if( EXISTS "$ENV{GSTREAMER_ROOT_DIR}/pkgconfig" )
            message( STATUS "*** Using GSTREAMER_ROOT_DIR = " $ENV{GSTREAMER_ROOT_DIR} )
            set( ENV{PKG_CONFIG_PATH} "$ENV{GSTREAMER_ROOT_DIR}/pkgconfig:$ENV{PKG_CONFIG_PATH}" )
        endif()
    endif()
    set( ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:/usr/local/lib/pkgconfig" ) # standard location for self-installed gstreamer
endif()

find_package( PkgConfig REQUIRED )

# X_DISPLAY_FIX / ZOOMFIX: compile-time workarounds for X11 window handling.
# ZOOMFIX (ZOOM_WINDOW_NAME_FIX) is only needed with GStreamer < 1.20.
# Collected in a list and applied to the target below (replaces the legacy
# directory-scoped add_definitions()).
set( renderers_compile_defs "" )
if ( X11_FOUND )
    message( STATUS "Will use X_DISPLAY_FIX" )
    list( APPEND renderers_compile_defs X_DISPLAY_FIX )
    pkg_check_modules( GST120 gstreamer-1.0>=1.20 )
    if ( GST120_FOUND )
        message( "-- ZOOMFIX will NOT be applied as Gstreamer version is >= 1.20" )
    else()
        message( "-- Failure to find Gstreamer >= 1.20 is NOT an error!" )
        message( "-- ZOOMFIX will be applied as Gstreamer version is < 1.20" )
        list( APPEND renderers_compile_defs ZOOM_WINDOW_NAME_FIX )
    endif()
endif()

pkg_check_modules( GST REQUIRED gstreamer-1.0>=1.4
                                gstreamer-sdp-1.0>=1.4
                                gstreamer-video-1.0>=1.4
                                gstreamer-app-1.0>=1.4
)

add_library( renderers
             STATIC
             audio_renderer.c
             video_renderer.c )

# target-scoped compile definitions (PRIVATE: only renderers sources test these macros)
if( renderers_compile_defs )
    target_compile_definitions( renderers PRIVATE ${renderers_compile_defs} )
endif()

target_link_libraries ( renderers PUBLIC airplay )

# hacks to fix cmake confusion due to links in path with macOS FrameWorks

if( GST_INCLUDE_DIRS MATCHES "/Library/FrameWorks/GStreamer.framework/include" )
    set( GST_INCLUDE_DIRS "/Library/FrameWorks/GStreamer.framework/Headers")
    message( STATUS "GST_INCLUDE_DIRS" ${GST_INCLUDE_DIRS} )
    # fix to use -DGST_MACOS for "Official" GStreamer >= 1.22 packages
    pkg_check_modules ( GST122 gstreamer-1.0>=1.22 )
    if ( GST122_FOUND )
        set( GST_MACOS "1" CACHE STRING "define GST_MACOS in uxplay.cpp" )
    endif()
endif()

# set GST_MACOS for all Apple when GStreamer >= 1.24
if ( APPLE AND NOT GST_MACOS )
    pkg_check_modules ( GST124 gstreamer-1.0>=1.24 )
    if ( GST124_FOUND )
        set( GST_MACOS "1" CACHE STRING "define GST_MACOS in uxplay.cpp" )
    endif()
endif()

target_include_directories ( renderers PUBLIC ${GST_INCLUDE_DIRS} )

if( GST_LIBRARY_DIRS MATCHES "/Library/FrameWorks/GStreamer.framework/lib" )
    set( GST_LIBRARY_DIRS "/Library/FrameWorks/GStreamer.framework/Libraries")
    message( STATUS "GST_LIBRARY_DIRS" ${GST_LIBRARY_DIRS} )
    target_link_libraries( renderers PUBLIC ${GST_LIBRARIES} )
    # target_link_directories requires CMake >= 3.13
    if( CMAKE_VERSION VERSION_LESS "3.13" )
        message( FATAL_ERROR "This macOS build needs cmake >= 3.13" )
    endif()
    target_link_directories ( renderers PUBLIC ${GST_LIBRARY_DIRS} )
elseif( CMAKE_VERSION VERSION_LESS "3.12" )
    # pre-3.12 pkg-config modules lack GST_LINK_LIBRARIES
    target_link_libraries ( renderers PUBLIC ${GST_LIBRARIES} )
else()
    target_link_libraries( renderers PUBLIC ${GST_LINK_LIBRARIES} )
endif()
|
||||
|
||||
|
||||
|
378
renderers/audio_renderer.c
Normal file
378
renderers/audio_renderer.c
Normal file
@ -0,0 +1,378 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
#include <math.h>
|
||||
#include <gst/gst.h>
|
||||
#include <gst/app/gstappsrc.h>
|
||||
#include "audio_renderer.h"
|
||||
#define SECOND_IN_NSECS 1000000000UL

#define NFORMATS 2 /* set to 4 to enable AAC_LD and PCM: allowed, but never seen in real-world use */

/* base time of the playing pipeline; buffer PTS are made relative to this when sync is on */
static GstClockTime gst_audio_pipeline_base_time = GST_CLOCK_TIME_NONE;
static logger_t *logger = NULL;
/* human-readable name of each supported audio format, filled in by audio_renderer_init() */
const char * format[NFORMATS];

/* names of the libav decoder features that must be present in the GStreamer registry */
static const gchar *avdec_aac = "avdec_aac";
static const gchar *avdec_alac = "avdec_alac";
static gboolean aac = FALSE;          /* TRUE when avdec_aac is available */
static gboolean alac = FALSE;         /* TRUE when avdec_alac is available */
static gboolean render_audio = FALSE; /* gate for audio_renderer_render_buffer() */
static gboolean async = FALSE;        /* sink "sync" flag chosen for the ALAC pipeline */
static gboolean vsync = FALSE;        /* sink "sync" flag chosen for the other pipelines */
static gboolean sync = FALSE;         /* sync flag of the currently selected format */

/* one appsrc + pipeline + volume element per supported compression type (ct) */
typedef struct audio_renderer_s {
    GstElement *appsrc;
    GstElement *pipeline;
    GstElement *volume;
    unsigned char ct;   /* AirPlay compression-type code (1, 2, 4 or 8) */
} audio_renderer_t ;
static audio_renderer_t *renderer_type[NFORMATS];
static audio_renderer_t *renderer = NULL;   /* the renderer currently playing, or NULL */

/* GStreamer Caps strings for Airplay-defined audio compression types (ct) */

/* ct = 1; linear PCM (uncompressed): 44100/16/2, S16LE */
static const char lpcm_caps[]="audio/x-raw,rate=(int)44100,channels=(int)2,format=S16LE,layout=interleaved";

/* ct = 2; codec_data is ALAC magic cookie: 44100/16/2 spf = 352 */
/* NOTE(review): "channnels" below looks like a typo for "channels"; GStreamer would
 * treat it as an extra caps field -- confirm decoding is unaffected before changing */
static const char alac_caps[] = "audio/x-alac,mpegversion=(int)4,channnels=(int)2,rate=(int)44100,stream-format=raw,codec_data=(buffer)"
"00000024""616c6163""00000000""00000160""0010280a""0e0200ff""00000000""00000000""0000ac44";

/* ct = 4; codec_data from MPEG v4 ISO 14996-3 Section 1.6.2.1: AAC-LC 44100/2 spf = 1024 */
static const char aac_lc_caps[] ="audio/mpeg,mpegversion=(int)4,channnels=(int)2,rate=(int)44100,stream-format=raw,codec_data=(buffer)1210";

/* ct = 8; codec_data from MPEG v4 ISO 14996-3 Section 1.6.2.1: AAC_ELD 44100/2 spf = 480 */
static const char aac_eld_caps[] ="audio/mpeg,mpegversion=(int)4,channnels=(int)2,rate=(int)44100,stream-format=raw,codec_data=(buffer)f8e85000";
|
||||
|
||||
static gboolean check_plugins (void)
|
||||
{
|
||||
gboolean ret;
|
||||
GstRegistry *registry;
|
||||
const gchar *needed[] = { "app", "libav", "playback", "autodetect", "videoparsersbad", NULL};
|
||||
const gchar *gst[] = {"plugins-base", "libav", "plugins-base", "plugins-good", "plugins-bad", NULL};
|
||||
registry = gst_registry_get ();
|
||||
ret = TRUE;
|
||||
for (int i = 0; i < g_strv_length ((gchar **) needed); i++) {
|
||||
GstPlugin *plugin;
|
||||
plugin = gst_registry_find_plugin (registry, needed[i]);
|
||||
if (!plugin) {
|
||||
g_print ("Required gstreamer plugin '%s' not found\n"
|
||||
"Missing plugin is contained in '[GStreamer 1.x]-%s'\n",needed[i], gst[i]);
|
||||
ret = FALSE;
|
||||
continue;
|
||||
}
|
||||
gst_object_unref (plugin);
|
||||
plugin = NULL;
|
||||
}
|
||||
if (ret == FALSE) {
|
||||
g_print ("\nif the plugin is installed, but not found, your gstreamer registry may have been corrupted.\n"
|
||||
"to rebuild it when gstreamer next starts, clear your gstreamer cache with:\n"
|
||||
"\"rm -rf ~/.cache/gstreamer-1.0\"\n\n");
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
static gboolean check_plugin_feature (const gchar *needed_feature)
|
||||
{
|
||||
gboolean ret;
|
||||
GstPluginFeature *plugin_feature;
|
||||
GstRegistry *registry = gst_registry_get ();
|
||||
ret = TRUE;
|
||||
|
||||
plugin_feature = gst_registry_find_feature (registry, needed_feature, GST_TYPE_ELEMENT_FACTORY);
|
||||
if (!plugin_feature) {
|
||||
g_print ("Required gstreamer libav plugin feature '%s' not found:\n\n"
|
||||
"This may be missing because the FFmpeg package used by GStreamer-1.x-libav is incomplete.\n"
|
||||
"(Some distributions provide an incomplete FFmpeg due to License or Patent issues:\n"
|
||||
"in such cases a complete version for that distribution is usually made available elsewhere)\n",
|
||||
needed_feature);
|
||||
ret = FALSE;
|
||||
} else {
|
||||
gst_object_unref (plugin_feature);
|
||||
plugin_feature = NULL;
|
||||
}
|
||||
if (ret == FALSE) {
|
||||
g_print ("\nif the plugin feature is installed, but not found, your gstreamer registry may have been corrupted.\n"
|
||||
"to rebuild it when gstreamer next starts, clear your gstreamer cache with:\n"
|
||||
"\"rm -rf ~/.cache/gstreamer-1.0\"\n\n");
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
/* Initialize the GStreamer library (no command-line options) and check that
 * all required plugins are registered; returns false if any are missing. */
bool gstreamer_init(){
    gst_init(NULL,NULL);
    return (bool) check_plugins ();
}
|
||||
|
||||
/*
 * Build one GStreamer audio pipeline per supported AirPlay compression type
 * (index 0 = AAC-ELD, 1 = ALAC; 2 = AAC-LC and 3 = PCM when NFORMATS = 4).
 *
 * render_logger: logger used by all audio-renderer functions.
 * audiosink:     name (plus any options) of the GStreamer audio sink element.
 * audio_sync:    whether the ALAC pipeline's sink syncs to the clock.
 * video_sync:    whether the other pipelines' sinks sync to the clock.
 *
 * All pipelines share one realtime system clock.
 */
void audio_renderer_init(logger_t *render_logger, const char* audiosink, const bool* audio_sync, const bool* video_sync) {
    GError *error = NULL;
    GstCaps *caps = NULL;
    GstClock *clock = gst_system_clock_obtain();
    g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);

    logger = render_logger;

    /* probe for the libav decoder features once, up front */
    aac = check_plugin_feature (avdec_aac);
    alac = check_plugin_feature (avdec_alac);

    for (int i = 0; i < NFORMATS ; i++) {
        renderer_type[i] = (audio_renderer_t *) calloc(1,sizeof(audio_renderer_t));
        g_assert(renderer_type[i]);
        GString *launch = g_string_new("appsrc name=audio_source ! ");
        g_string_append(launch, "queue ! ");
        /* choose a decoder matching the compression type (none needed for PCM) */
        switch (i) {
        case 0: /* AAC-ELD */
        case 2: /* AAC-LC */
            if (aac) g_string_append(launch, "avdec_aac ! ");
            break;
        case 1: /* ALAC */
            if (alac) g_string_append(launch, "avdec_alac ! ");
            break;
        case 3: /*PCM*/
            break;
        default:
            break;
        }
        g_string_append (launch, "audioconvert ! ");
        g_string_append (launch, "audioresample ! "); /* wasapisink must resample from 44.1 kHz to 48 kHz */
        g_string_append (launch, "volume name=volume ! level ! ");
        g_string_append (launch, audiosink);
        /* ALAC follows the audio_sync flag; all other formats follow video_sync */
        switch(i) {
        case 1: /*ALAC*/
            if (*audio_sync) {
                g_string_append (launch, " sync=true");
                async = TRUE;
            } else {
                g_string_append (launch, " sync=false");
                async = FALSE;
            }
            break;
        default:
            if (*video_sync) {
                g_string_append (launch, " sync=true");
                vsync = TRUE;
            } else {
                g_string_append (launch, " sync=false");
                vsync = FALSE;
            }
            break;
        }
        renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
        if (error) {
            g_error ("gst_parse_launch error (audio %d):\n %s\n", i+1, error->message);
            g_clear_error (&error);
        }

        g_assert (renderer_type[i]->pipeline);
        gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);

        renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "audio_source");
        renderer_type[i]->volume = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "volume");
        /* record the caps and AirPlay ct code for this format */
        switch (i) {
        case 0:
            caps = gst_caps_from_string(aac_eld_caps);
            renderer_type[i]->ct = 8;
            format[i] = "AAC-ELD 44100/2";
            break;
        case 1:
            caps = gst_caps_from_string(alac_caps);
            renderer_type[i]->ct = 2;
            format[i] = "ALAC 44100/16/2";
            break;
        case 2:
            caps = gst_caps_from_string(aac_lc_caps);
            renderer_type[i]->ct = 4;
            format[i] = "AAC-LC 44100/2";
            break;
        case 3:
            caps = gst_caps_from_string(lpcm_caps);
            renderer_type[i]->ct = 1;
            format[i] = "PCM 44100/16/2 S16LE";
            break;
        default:
            break;
        }
        logger_log(logger, LOGGER_DEBUG, "Audio format %d: %s",i+1,format[i]);
        logger_log(logger, LOGGER_DEBUG, "GStreamer audio pipeline %d: \"%s\"", i+1, launch->str);
        g_string_free(launch, TRUE);
        g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
        gst_caps_unref(caps);
    }
    /* bug fix: the clock unref was inside the loop, releasing the single
     * reference obtained from gst_system_clock_obtain() once per format
     * (over-unref); each pipeline holds its own ref via gst_pipeline_use_clock() */
    g_object_unref(clock);
}
|
||||
|
||||
void audio_renderer_stop() {
|
||||
if (renderer) {
|
||||
gst_app_src_end_of_stream(GST_APP_SRC(renderer->appsrc));
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
renderer = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
static void get_renderer_type(unsigned char *ct, int *id) {
|
||||
render_audio = FALSE;
|
||||
*id = -1;
|
||||
for (int i = 0; i < NFORMATS; i++) {
|
||||
if (renderer_type[i]->ct == *ct) {
|
||||
*id = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
switch (*id) {
|
||||
case 2:
|
||||
case 0:
|
||||
if (aac) {
|
||||
render_audio = TRUE;
|
||||
} else {
|
||||
logger_log(logger, LOGGER_INFO, "*** GStreamer libav plugin feature avdec_aac is missing, cannot decode AAC audio");
|
||||
}
|
||||
sync = vsync;
|
||||
break;
|
||||
case 1:
|
||||
if (alac) {
|
||||
render_audio = TRUE;
|
||||
} else {
|
||||
logger_log(logger, LOGGER_INFO, "*** GStreamer libav plugin feature avdec_alac is missing, cannot decode ALAC audio");
|
||||
}
|
||||
sync = async;
|
||||
break;
|
||||
case 3:
|
||||
render_audio = TRUE;
|
||||
sync = FALSE;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
void audio_renderer_start(unsigned char *ct) {
|
||||
int id = -1;
|
||||
get_renderer_type(ct, &id);
|
||||
if (id >= 0 && renderer) {
|
||||
if(*ct != renderer->ct) {
|
||||
gst_app_src_end_of_stream(GST_APP_SRC(renderer->appsrc));
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
logger_log(logger, LOGGER_INFO, "changed audio connection, format %s", format[id]);
|
||||
renderer = renderer_type[id];
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
gst_audio_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
}
|
||||
} else if (id >= 0) {
|
||||
logger_log(logger, LOGGER_INFO, "start audio connection, format %s", format[id]);
|
||||
renderer = renderer_type[id];
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
gst_audio_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "unknown audio compression type ct = %d", *ct);
|
||||
}
|
||||
}
|
||||
|
||||
/*
 * Push one received audio frame into the active pipeline's appsrc.
 * data/data_len: the raw frame; seqnum is currently unused here;
 * *ntp_time is the frame timestamp in nsecs, made relative to the pipeline
 * base time when clock sync is enabled.  Frames whose first byte does not
 * match the expected magic for the current compression type are dropped.
 */
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *ntp_time) {
    GstBuffer *buffer;
    bool valid;

    if (!render_audio) return; /* do nothing unless render_audio == TRUE */
    /* bug fix: original tested the pointer (data_len == 0), not the length,
     * so zero-length frames were still allocated and pushed; also bail out
     * before any timestamp work when no pipeline is active */
    if (renderer == NULL || data_len == NULL || *data_len == 0) return;

    GstClockTime pts = (GstClockTime) *ntp_time ; /* now in nsecs */
    //GstClockTimeDiff latency = GST_CLOCK_DIFF(gst_element_get_current_clock_time (renderer->appsrc), pts);
    if (sync) {
        if (pts >= gst_audio_pipeline_base_time) {
            pts -= gst_audio_pipeline_base_time;
        } else {
            logger_log(logger, LOGGER_ERR, "*** invalid ntp_time < gst_audio_pipeline_base_time\n%8.6f ntp_time\n%8.6f base_time",
                       ((double) *ntp_time) / SECOND_IN_NSECS, ((double) gst_audio_pipeline_base_time) / SECOND_IN_NSECS);
            return;
        }
    }

    /* all audio received seems to be either ct = 8 (AAC_ELD 44100/2 spf 480 ) AirPlay Mirror protocol *
     * or ct = 2 (ALAC 44100/16/2 spf 352) AirPlay protocol.                                           *
     * first byte data[0] of ALAC frame is 0x20,                                                       *
     * first byte of AAC_ELD is 0x8c, 0x8d or 0x8e: 0x100011(00,01,10) in modern devices               *
     * but is 0x80, 0x81 or 0x82: 0x100000(00,01,10) in ios9, ios10 devices                            *
     * first byte of AAC_LC should be 0xff (ADTS) (but has never been seen). */

    buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
    g_assert(buffer != NULL);
    //g_print("audio latency %8.6f\n", (double) latency / SECOND_IN_NSECS);
    if (sync) {
        GST_BUFFER_PTS(buffer) = pts;
    }
    gst_buffer_fill(buffer, 0, data, *data_len);
    /* sanity-check the first byte against the expected magic for this ct */
    switch (renderer->ct){
    case 8: /*AAC-ELD*/
        switch (data[0]){
        case 0x8c:
        case 0x8d:
        case 0x8e:
        case 0x80:
        case 0x81:
        case 0x82:
            valid = true;
            break;
        default:
            valid = false;
            break;
        }
        break;
    case 2: /*ALAC*/
        valid = (data[0] == 0x20);
        break;
    case 4: /*AAC_LC */
        valid = (data[0] == 0xff );
        break;
    default:
        valid = true;
        break;
    }
    if (valid) {
        /* push_buffer takes ownership of buffer */
        gst_app_src_push_buffer(GST_APP_SRC(renderer->appsrc), buffer);
    } else {
        logger_log(logger, LOGGER_ERR, "*** ERROR invalid audio frame (compression_type %d) skipped ", renderer->ct);
        logger_log(logger, LOGGER_ERR, "*** first byte of invalid frame was 0x%2.2x ", (unsigned int) data[0]);
        /* bug fix: dropped frames previously leaked their GstBuffer */
        gst_buffer_unref(buffer);
    }
}
|
||||
|
||||
void audio_renderer_set_volume(double volume) {
|
||||
volume = (volume > 10.0) ? 10.0 : volume;
|
||||
volume = (volume < 0.0) ? 0.0 : volume;
|
||||
g_object_set(renderer->volume, "volume", volume, NULL);
|
||||
}
|
||||
|
||||
/* Flush is a no-op for the GStreamer audio pipelines; kept to satisfy the
 * renderer interface declared in audio_renderer.h. */
void audio_renderer_flush() {
}
|
||||
|
||||
void audio_renderer_destroy() {
|
||||
audio_renderer_stop();
|
||||
for (int i = 0; i < NFORMATS ; i++ ) {
|
||||
gst_object_unref (renderer_type[i]->volume);
|
||||
renderer_type[i]->volume = NULL;
|
||||
gst_object_unref (renderer_type[i]->appsrc);
|
||||
renderer_type[i]->appsrc = NULL;
|
||||
gst_object_unref (renderer_type[i]->pipeline);
|
||||
renderer_type[i]->pipeline = NULL;
|
||||
free(renderer_type[i]);
|
||||
}
|
||||
}
|
48
renderers/audio_renderer.h
Normal file
48
renderers/audio_renderer.h
Normal file
@ -0,0 +1,48 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
#ifndef AUDIO_RENDERER_H
#define AUDIO_RENDERER_H

#ifdef __cplusplus
extern "C" {
#endif

#include <stdlib.h>
#include <stdint.h>
#include <stdbool.h>
#include "../lib/logger.h"

/* Initialize GStreamer; returns false if a required plugin is missing. */
bool gstreamer_init();
/* Build one pipeline per supported audio format; audiosink names the
 * GStreamer sink element; the sync flags select clock-synced playback. */
void audio_renderer_init(logger_t *logger, const char* audiosink, const bool *audio_sync, const bool *video_sync);
/* Select and start the pipeline matching the AirPlay compression type. */
void audio_renderer_start(unsigned char* compression_type);
/* Send EOS and stop the active pipeline, if any. */
void audio_renderer_stop();
/* Push one audio frame (with its NTP timestamp in nsecs) into the active pipeline. */
void audio_renderer_render_buffer(unsigned char* data, int *data_len, unsigned short *seqnum, uint64_t *ntp_time);
/* Set playback volume (clamped by the implementation). */
void audio_renderer_set_volume(double volume);
/* Currently a no-op. */
void audio_renderer_flush();
/* Release all renderer pipelines and resources. */
void audio_renderer_destroy();

#ifdef __cplusplus
}
#endif

#endif //AUDIO_RENDERER_H
|
800
renderers/video_renderer.c
Normal file
800
renderers/video_renderer.c
Normal file
@ -0,0 +1,800 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-24 F. Duncanh
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/app/gstappsrc.h>
|
||||
#include "video_renderer.h"
|
||||
|
||||
#define SECOND_IN_NSECS 1000000000UL
#ifdef X_DISPLAY_FIX
#include <gst/video/navigation.h>
#include "x_display_fix.h"
static bool fullscreen = false;
static bool alt_keypress = false;   /* NOTE(review): presumably tracks Alt-key state for shortcuts -- used beyond this view, confirm */
static unsigned char X11_search_attempts;
#endif

/* base time of the playing pipeline; buffer PTS are made relative to this when sync is on */
static GstClockTime gst_video_pipeline_base_time = GST_CLOCK_TIME_NONE;
static logger_t *logger = NULL;
static unsigned short width, height, width_source, height_source; /* not currently used */
static bool first_packet = false;
static bool sync = false;             /* whether the videosink syncs to the clock */
static bool auto_videosink = true;    /* videosink is autovideosink or fpsdisplaysink */
static bool hls_video = false;        /* HLS (playbin3) mode instead of appsrc pipelines */
#ifdef X_DISPLAY_FIX
static bool use_x11 = false;          /* videosink is an X11 sink (or auto) */
#endif
static bool logger_debug = false;     /* logger level is at least LOGGER_DEBUG */
static bool video_terminate = false;

#define NCODECS 2 /* renderers for h264 and h265 */

/* one renderer per codec; in HLS mode a single playbin3-based renderer is used */
struct video_renderer_s {
    GstElement *appsrc, *pipeline;    /* appsrc is NULL in HLS (playbin3) mode */
    GstBus *bus;
    const char *codec;                /* "h264", "h265" or "hls" */
    bool autovideo, state_pending;
    int id;
    gboolean terminate;
    gint64 duration;
    gint buffering_level;
#ifdef X_DISPLAY_FIX
    bool use_x11;
    const char * server_name;
    X11_Window_t * gst_window;
#endif
};

static video_renderer_t *renderer = NULL;              /* currently active renderer */
static video_renderer_t *renderer_type[NCODECS] = {0};
static int n_renderers = NCODECS;                      /* 1 in HLS mode or without h265 support */
static char h264[] = "h264";
static char h265[] = "h265";
static char hls[] = "hls";
|
||||
/*
 * Append the GStreamer "videoflip" element realizing the combination of the
 * user-requested mirror transform (*flip: INVERT/HFLIP/VFLIP/none) and
 * rotation (*rot: LEFT/RIGHT/none) to the pipeline launch string.
 * When no transform is needed (no flip, no rotation) nothing is appended.
 */
static void append_videoflip (GString *launch, const videoflip_t *flip, const videoflip_t *rot) {
    /* videoflip image transform */
    const char *direction = NULL;
    switch (*flip) {
    case INVERT:
        if (*rot == LEFT)       direction = "GST_VIDEO_ORIENTATION_90R";
        else if (*rot == RIGHT) direction = "GST_VIDEO_ORIENTATION_90L";
        else                    direction = "GST_VIDEO_ORIENTATION_180";
        break;
    case HFLIP:
        if (*rot == LEFT)       direction = "GST_VIDEO_ORIENTATION_UL_LR";
        else if (*rot == RIGHT) direction = "GST_VIDEO_ORIENTATION_UR_LL";
        else                    direction = "GST_VIDEO_ORIENTATION_HORIZ";
        break;
    case VFLIP:
        if (*rot == LEFT)       direction = "GST_VIDEO_ORIENTATION_UR_LL";
        else if (*rot == RIGHT) direction = "GST_VIDEO_ORIENTATION_UL_LR";
        else                    direction = "GST_VIDEO_ORIENTATION_VERT";
        break;
    default:
        if (*rot == LEFT)       direction = "GST_VIDEO_ORIENTATION_90L";
        else if (*rot == RIGHT) direction = "GST_VIDEO_ORIENTATION_90R";
        /* no flip and no rotation: append nothing */
        break;
    }
    if (direction) {
        g_string_append(launch, "videoflip video-direction=");
        g_string_append(launch, direction);
        g_string_append(launch, " ! ");
    }
}
|
||||
|
||||
/* apple uses colorimetry=1:3:5:1                                 *
 * (not recognized by v4l2 plugin in Gstreamer < 1.20.4)          *
 * See .../gst-libs/gst/video/video-color.h in gst-plugins-base   *
 * range = 1 -> GST_VIDEO_COLOR_RANGE_0_255 ("full RGB")          *
 * matrix = 3 -> GST_VIDEO_COLOR_MATRIX_BT709                     *
 * transfer = 5 -> GST_VIDEO_TRANSFER_BT709                       *
 * primaries = 1 -> GST_VIDEO_COLOR_PRIMARIES_BT709               *
 * closest used by GStreamer < 1.20.4 is BT709, 2:3:5:1 with      *
 * range = 2 -> GST_VIDEO_COLOR_RANGE_16_235 ("limited RGB")      */

/* appsrc caps: byte-stream H.264/H.265 with access-unit alignment */
static const char h264_caps[]="video/x-h264,stream-format=(string)byte-stream,alignment=(string)au";
static const char h265_caps[]="video/x-h265,stream-format=(string)byte-stream,alignment=(string)au";
|
||||
|
||||
/* Record the display size and the source size reported by the client in the
 * module-level globals (currently informational: logged but "not currently
 * used" elsewhere, per the globals' comment). */
void video_renderer_size(float *f_width_source, float *f_height_source, float *f_width, float *f_height) {
    width_source = (unsigned short) *f_width_source;
    height_source = (unsigned short) *f_height_source;
    width = (unsigned short) *f_width;
    height = (unsigned short) *f_height;
    logger_log(logger, LOGGER_DEBUG, "begin video stream wxh = %dx%d; source %dx%d", width, height, width_source, height_source);
}
|
||||
|
||||
GstElement *make_video_sink(const char *videosink, const char *videosink_options) {
|
||||
/* used to build a videosink for playbin, using the user-specified string "videosink" */
|
||||
GstElement *video_sink = gst_element_factory_make(videosink, "videosink");
|
||||
if (!video_sink) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* process the video_sink_optons */
|
||||
size_t len = strlen(videosink_options);
|
||||
if (!len) {
|
||||
return video_sink;
|
||||
}
|
||||
|
||||
char *options = (char *) malloc(len + 1);
|
||||
strncpy(options, videosink_options, len + 1);
|
||||
|
||||
/* remove any extension begining with "!" */
|
||||
char *end = strchr(options, '!');
|
||||
if (end) {
|
||||
*end = '\0';
|
||||
}
|
||||
|
||||
/* add any fullscreen options "property=pval" included in string videosink_options*/
|
||||
/* OK to use strtok_r in Windows with MSYS2 (POSIX); use strtok_s for MSVC */
|
||||
char *token;
|
||||
char *text = options;
|
||||
|
||||
while((token = strtok_r(text, " ", &text))) {
|
||||
char *pval = strchr(token, '=');
|
||||
if (pval) {
|
||||
*pval = '\0';
|
||||
pval++;
|
||||
const gchar *property_name = (const gchar *) token;
|
||||
const gchar *value = (const gchar *) pval;
|
||||
g_print("playbin_videosink property: \"%s\" \"%s\"\n", property_name, value);
|
||||
gst_util_set_object_arg(G_OBJECT (video_sink), property_name, value);
|
||||
}
|
||||
}
|
||||
free(options);
|
||||
return video_sink;
|
||||
}
|
||||
|
||||
void video_renderer_init(logger_t *render_logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
|
||||
const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
|
||||
bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri) {
|
||||
GError *error = NULL;
|
||||
GstCaps *caps = NULL;
|
||||
hls_video = (uri != NULL);
|
||||
/* videosink choices that are auto */
|
||||
auto_videosink = (strstr(videosink, "autovideosink") || strstr(videosink, "fpsdisplaysink"));
|
||||
|
||||
logger = render_logger;
|
||||
logger_debug = (logger_get_level(logger) >= LOGGER_DEBUG);
|
||||
video_terminate = false;
|
||||
|
||||
const gchar *appname = g_get_application_name();
|
||||
if (!appname || strcmp(appname,server_name)) g_set_application_name(server_name);
|
||||
appname = NULL;
|
||||
|
||||
if (hls_video) {
|
||||
n_renderers = 1;
|
||||
} else {
|
||||
n_renderers = h265_support ? 2 : 1;
|
||||
}
|
||||
g_assert (n_renderers <= NCODECS);
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
g_assert (i < 2);
|
||||
renderer_type[i] = (video_renderer_t *) calloc(1, sizeof(video_renderer_t));
|
||||
g_assert(renderer_type[i]);
|
||||
renderer_type[i]->autovideo = auto_videosink;
|
||||
renderer_type[i]->id = i;
|
||||
renderer_type[i]->bus = NULL;
|
||||
if (hls_video) {
|
||||
renderer_type[i]->pipeline = gst_element_factory_make("playbin3", "hls-playbin3");
|
||||
g_assert(renderer_type[i]->pipeline);
|
||||
renderer_type[i]->appsrc = NULL;
|
||||
renderer_type[i]->codec = hls;
|
||||
if(strcmp(videosink, "autovideosink")) {
|
||||
GstElement *playbin_videosink = make_video_sink(videosink, videosink_options);
|
||||
if (!playbin_videosink) {
|
||||
logger_log(logger, LOGGER_ERR, "video_renderer_init: failed to create playbin_videosink");
|
||||
} else {
|
||||
logger_log(logger, LOGGER_DEBUG, "video_renderer_init: create playbin_videosink at %p", playbin_videosink);
|
||||
g_object_set(G_OBJECT (renderer_type[i]->pipeline), "video-sink", playbin_videosink, NULL);
|
||||
}
|
||||
}
|
||||
g_object_set (G_OBJECT (renderer_type[i]->pipeline), "uri", uri, NULL);
|
||||
} else {
|
||||
switch (i) {
|
||||
case 0:
|
||||
renderer_type[i]->codec = h264;
|
||||
caps = gst_caps_from_string(h264_caps);
|
||||
break;
|
||||
case 1:
|
||||
renderer_type[i]->codec = h265;
|
||||
caps = gst_caps_from_string(h265_caps);
|
||||
break;
|
||||
default:
|
||||
g_assert(0);
|
||||
}
|
||||
GString *launch = g_string_new("");
|
||||
|
||||
// Add static image source
|
||||
g_string_append(launch, "filesrc location=/path/to/placeholder.jpg ! jpegdec ! imagefreeze ! videoconvert ! ");
|
||||
g_string_append(launch, "queue ! ");
|
||||
g_string_append(launch, "input-selector name=selector ! ");
|
||||
|
||||
// Add video source
|
||||
g_string_append(launch, "appsrc name=video_source ! ");
|
||||
g_string_append(launch, "queue ! ");
|
||||
g_string_append(launch, parser);
|
||||
g_string_append(launch, " ! ");
|
||||
g_string_append(launch, decoder);
|
||||
g_string_append(launch, " ! ");
|
||||
append_videoflip(launch, &videoflip[0], &videoflip[1]);
|
||||
g_string_append(launch, converter);
|
||||
g_string_append(launch, " ! ");
|
||||
g_string_append(launch, "videoscale ! ");
|
||||
g_string_append(launch, videosink);
|
||||
g_string_append(launch, " name=");
|
||||
g_string_append(launch, videosink);
|
||||
g_string_append(launch, "_");
|
||||
g_string_append(launch, renderer_type[i]->codec);
|
||||
g_string_append(launch, videosink_options);
|
||||
if (video_sync) {
|
||||
g_string_append(launch, " sync=true");
|
||||
sync = true;
|
||||
} else {
|
||||
g_string_append(launch, " sync=false");
|
||||
sync = false;
|
||||
}
|
||||
|
||||
if (!strcmp(renderer_type[i]->codec, h264)) {
|
||||
char *pos = launch->str;
|
||||
while ((pos = strstr(pos,h265))){
|
||||
pos +=3;
|
||||
*pos = '4';
|
||||
}
|
||||
} else if (!strcmp(renderer_type[i]->codec, h265)) {
|
||||
char *pos = launch->str;
|
||||
while ((pos = strstr(pos,h264))){
|
||||
pos +=3;
|
||||
*pos = '5';
|
||||
}
|
||||
}
|
||||
|
||||
logger_log(logger, LOGGER_DEBUG, "GStreamer video pipeline %d:\n\"%s\"", i + 1, launch->str);
|
||||
renderer_type[i]->pipeline = gst_parse_launch(launch->str, &error);
|
||||
if (error) {
|
||||
g_error ("get_parse_launch error (video) :\n %s\n",error->message);
|
||||
g_clear_error (&error);
|
||||
}
|
||||
g_assert (renderer_type[i]->pipeline);
|
||||
|
||||
GstClock *clock = gst_system_clock_obtain();
|
||||
g_object_set(clock, "clock-type", GST_CLOCK_TYPE_REALTIME, NULL);
|
||||
gst_pipeline_use_clock(GST_PIPELINE_CAST(renderer_type[i]->pipeline), clock);
|
||||
renderer_type[i]->appsrc = gst_bin_get_by_name (GST_BIN (renderer_type[i]->pipeline), "video_source");
|
||||
g_assert(renderer_type[i]->appsrc);
|
||||
|
||||
// Get the input-selector element
|
||||
GstElement *selector = gst_bin_get_by_name(GST_BIN(renderer_type[i]->pipeline), "selector");
|
||||
g_assert(selector);
|
||||
|
||||
// Set initial input to static image
|
||||
g_object_set(selector, "active-pad", gst_element_get_static_pad(selector, "sink_0"), NULL);
|
||||
gst_object_unref(selector);
|
||||
|
||||
g_object_set(renderer_type[i]->appsrc, "caps", caps, "stream-type", 0, "is-live", TRUE, "format", GST_FORMAT_TIME, NULL);
|
||||
g_string_free(launch, TRUE);
|
||||
gst_caps_unref(caps);
|
||||
gst_object_unref(clock);
|
||||
}
|
||||
#ifdef X_DISPLAY_FIX
|
||||
use_x11 = (strstr(videosink, "xvimagesink") || strstr(videosink, "ximagesink") || auto_videosink);
|
||||
fullscreen = initial_fullscreen;
|
||||
renderer_type[i]->server_name = server_name;
|
||||
renderer_type[i]->gst_window = NULL;
|
||||
renderer_type[i]->use_x11 = false;
|
||||
X11_search_attempts = 0;
|
||||
if (use_x11) {
|
||||
if (i == 0) {
|
||||
renderer_type[0]->gst_window = (X11_Window_t *) calloc(1, sizeof(X11_Window_t));
|
||||
g_assert(renderer_type[0]->gst_window);
|
||||
get_X11_Display(renderer_type[0]->gst_window);
|
||||
if (renderer_type[0]->gst_window->display) {
|
||||
renderer_type[i]->use_x11 = true;
|
||||
} else {
|
||||
free(renderer_type[0]->gst_window);
|
||||
renderer_type[0]->gst_window = NULL;
|
||||
}
|
||||
} else if (renderer_type[0]->use_x11) {
|
||||
renderer_type[i]->gst_window = (X11_Window_t *) calloc(1, sizeof(X11_Window_t));
|
||||
g_assert(renderer_type[i]->gst_window);
|
||||
memcpy(renderer_type[i]->gst_window, renderer_type[0]->gst_window, sizeof(X11_Window_t));
|
||||
renderer_type[i]->use_x11 = true;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
gst_element_set_state (renderer_type[i]->pipeline, GST_STATE_READY);
|
||||
GstState state;
|
||||
if (gst_element_get_state (renderer_type[i]->pipeline, &state, NULL, 100 * GST_MSECOND)) {
|
||||
if (state == GST_STATE_READY) {
|
||||
logger_log(logger, LOGGER_DEBUG, "Initialized GStreamer video renderer %d", i + 1);
|
||||
if (hls_video && i == 0) {
|
||||
renderer = renderer_type[i];
|
||||
}
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
|
||||
}
|
||||
} else {
|
||||
logger_log(logger, LOGGER_ERR, "Failed to initialize GStreamer video renderer %d", i + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void video_renderer_pause() {
|
||||
if (!renderer) {
|
||||
return;
|
||||
}
|
||||
logger_log(logger, LOGGER_DEBUG, "video renderer paused");
|
||||
gst_element_set_state(renderer->pipeline, GST_STATE_PAUSED);
|
||||
}
|
||||
|
||||
void video_renderer_resume() {
|
||||
if (!renderer) {
|
||||
return;
|
||||
}
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
GstState state;
|
||||
/* wait with timeout 100 msec for pipeline to change state from PAUSED to PLAYING */
|
||||
gst_element_get_state(renderer->pipeline, &state, NULL, 100 * GST_MSECOND);
|
||||
const gchar *state_name = gst_element_state_get_name(state);
|
||||
logger_log(logger, LOGGER_DEBUG, "video renderer resumed: state %s", state_name);
|
||||
if (renderer->appsrc) {
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
|
||||
}
|
||||
}
|
||||
|
||||
void video_renderer_start() {
|
||||
if (hls_video) {
|
||||
renderer->bus = gst_element_get_bus(renderer->pipeline);
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
return;
|
||||
}
|
||||
/* when not hls, start both h264 and h265 pipelines; will shut down the "wrong" one when we know the codec */
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
gst_element_set_state (renderer_type[i]->pipeline, GST_STATE_PLAYING);
|
||||
if (renderer_type[i]->appsrc) {
|
||||
gst_video_pipeline_base_time = gst_element_get_base_time(renderer_type[i]->appsrc);
|
||||
}
|
||||
renderer_type[i]->bus = gst_element_get_bus(renderer_type[i]->pipeline);
|
||||
}
|
||||
renderer = NULL;
|
||||
first_packet = true;
|
||||
#ifdef X_DISPLAY_FIX
|
||||
X11_search_attempts = 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
/* used to find any X11 Window used by the playbin (HLS) pipeline after it starts playing.
|
||||
* if use_x11 is true, called every 100 ms after playbin state is READY until the x11 window is found*/
|
||||
bool waiting_for_x11_window() {
|
||||
if (!hls_video) {
|
||||
return false;
|
||||
}
|
||||
#ifdef X_DISPLAY_FIX
|
||||
if (use_x11 && renderer->gst_window) {
|
||||
get_x_window(renderer->gst_window, renderer->server_name);
|
||||
if (!renderer->gst_window->window) {
|
||||
return true; /* window still not found */
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Push one decrypted h264/h265 video packet into the active pipeline's appsrc.
 *   data      packet payload; data[0] nonzero marks a failed upstream decryption
 *   data_len  pointer to the payload length in bytes
 *   nal_count unused in this implementation (kept for interface compatibility)
 *   ntp_time  packet timestamp in nanoseconds; becomes the buffer PTS when sync is on
 * NOTE(review): dereferences the global `renderer` without a NULL check —
 * assumes a codec has been selected before the first packet arrives; confirm
 * against the raop_rtp_mirror caller. */
void video_renderer_render_buffer(unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time) {
    GstBuffer *buffer;
    GstClockTime pts = (GstClockTime) *ntp_time; /*now in nsecs */
    if (sync) {
        /* convert the absolute NTP timestamp into pipeline running time */
        if (pts >= gst_video_pipeline_base_time) {
            pts -= gst_video_pipeline_base_time;
        } else {
            logger_log(logger, LOGGER_ERR, "*** invalid ntp_time < gst_video_pipeline_base_time\n%8.6f ntp_time\n%8.6f base_time",
                       ((double) *ntp_time) / SECOND_IN_NSECS, ((double) gst_video_pipeline_base_time) / SECOND_IN_NSECS);
            return;
        }
    }
    /* NOTE(review): this only asserts the *pointer* is non-NULL; if the intent
     * was to reject empty packets it should test *data_len != 0 — confirm */
    g_assert(data_len != 0);
    if (data[0]) {
        /* a nonzero first byte signals that decryption failed upstream */
        logger_log(logger, LOGGER_ERR, "*** ERROR decryption of video packet failed ");
    } else {
        if (first_packet) {
            logger_log(logger, LOGGER_INFO, "Begin streaming to GStreamer video pipeline");
            first_packet = false;
            // Switch to video source when first valid frame is received
            video_renderer_switch_source(true);
        }
        buffer = gst_buffer_new_allocate(NULL, *data_len, NULL);
        g_assert(buffer != NULL);
        if (sync) {
            GST_BUFFER_PTS(buffer) = pts;
        }
        gst_buffer_fill(buffer, 0, data, *data_len);
        /* ownership of the buffer transfers to the appsrc */
        gst_app_src_push_buffer (GST_APP_SRC(renderer->appsrc), buffer);
#ifdef X_DISPLAY_FIX
        /* keep searching for the videosink's X11 window until it is found,
         * then announce the fullscreen toggle keys and apply -fs if requested */
        if (renderer->gst_window && !(renderer->gst_window->window) && renderer->use_x11) {
            X11_search_attempts++;
            logger_log(logger, LOGGER_DEBUG, "Looking for X11 UxPlay Window, attempt %d", (int) X11_search_attempts);
            get_x_window(renderer->gst_window, renderer->server_name);
            if (renderer->gst_window->window) {
                logger_log(logger, LOGGER_INFO, "\n*** X11 Windows: Use key F11 or (left Alt)+Enter to toggle full-screen mode\n");
                if (fullscreen) {
                    set_fullscreen(renderer->gst_window, &fullscreen);
                }
            }
        }
#endif
    }
}
|
||||
|
||||
/* Intentionally a no-op: no data is buffered outside the GStreamer
 * pipeline, so there is nothing to flush here. Kept to satisfy the
 * renderer interface declared in video_renderer.h. */
void video_renderer_flush() {
}
|
||||
|
||||
void video_renderer_stop() {
|
||||
if (renderer) {
|
||||
if (renderer->appsrc) {
|
||||
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
|
||||
// Switch back to static image when video is stopped
|
||||
video_renderer_switch_source(false);
|
||||
}
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
}
|
||||
}
|
||||
|
||||
/* Tear down one h264/h265 renderer: send EOS (mirror mode only), drop the
 * pipeline to NULL, unref its GStreamer objects, and free the struct.
 * NOTE(review): the parameter shadows the file-scope `renderer` global, so
 * the trailing `renderer = NULL` clears only the local pointer; the caller's
 * renderer_type[] entry is NOT cleared here.
 * NOTE(review): renderer->bus is unref'd unconditionally, but the bus is
 * only assigned in video_renderer_start()/listen() — confirm destroy cannot
 * run before then. */
static void video_renderer_destroy_h26x(video_renderer_t *renderer) {
    if (renderer) {
        GstState state;
        /* wait up to 100 ms for the current state before deciding on EOS */
        gst_element_get_state(renderer->pipeline, &state, NULL, 100 * GST_MSECOND);
        if (state != GST_STATE_NULL) {
            if (!hls_video) {
                /* mirror-mode pipelines are fed via appsrc: drain with end-of-stream */
                gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
            }
            gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
        }
        gst_object_unref(renderer->bus);
        if (renderer->appsrc) {
            gst_object_unref (renderer->appsrc);
        }
        gst_object_unref (renderer->pipeline);
#ifdef X_DISPLAY_FIX
        if (renderer->gst_window) {
            free(renderer->gst_window);
            renderer->gst_window = NULL;
        }
#endif
        free (renderer);
        renderer = NULL;   /* clears only the local (shadowing) pointer */
    }
}
|
||||
|
||||
void video_renderer_destroy() {
|
||||
for (int i = 0; i < n_renderers; i++) {
|
||||
if (renderer_type[i]) {
|
||||
video_renderer_destroy_h26x(renderer_type[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* GStreamer bus watch shared by all video pipelines.
 * Identifies which pipeline owns `bus`, then dispatches duration, buffering,
 * error, EOS, state-change and (with X_DISPLAY_FIX) navigation messages.
 * `loop` is the GMainLoop to quit on fatal error or end-of-stream (HLS).
 * Returns TRUE so the watch stays installed. */
gboolean gstreamer_pipeline_bus_callback(GstBus *bus, GstMessage *message, void *loop) {

    /* identify which pipeline sent the message */
    int type = -1;
    for (int i = 0 ; i < n_renderers ; i ++ ) {
        if (renderer_type[i]->bus == bus) {
            type = i;
            break;
        }
    }
    g_assert(type != -1);

    if (logger_debug) {
        g_print("GStreamer %s bus message: %s %s\n", renderer_type[type]->codec, GST_MESSAGE_SRC_NAME(message), GST_MESSAGE_TYPE_NAME(message));
    }

    /* in HLS mode, additionally report the current playback position */
    if (logger_debug && hls_video) {
        gint64 pos;
        gst_element_query_position (renderer_type[type]->pipeline, GST_FORMAT_TIME, &pos);
        if (GST_CLOCK_TIME_IS_VALID(pos)) {
            g_print("GStreamer bus message %s %s; position: %" GST_TIME_FORMAT "\n", GST_MESSAGE_SRC_NAME(message),
                    GST_MESSAGE_TYPE_NAME(message), GST_TIME_ARGS(pos));
        } else {
            g_print("GStreamer bus message %s %s; position: none\n", GST_MESSAGE_SRC_NAME(message),
                    GST_MESSAGE_TYPE_NAME(message));
        }
    }

    switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_DURATION:
            /* invalidate the cached duration; video_get_playback_info re-queries it */
            renderer_type[type]->duration = GST_CLOCK_TIME_NONE;
            break;
        case GST_MESSAGE_BUFFERING:
            if (hls_video) {
                gint percent = -1;
                gst_message_parse_buffering(message, &percent);
                if (percent >= 0) {
                    renderer_type[type]->buffering_level = percent;
                    logger_log(logger, LOGGER_DEBUG, "Buffering :%u percent done", percent);
                    /* pause while buffering, resume once the buffer is full */
                    if (percent < 100) {
                        gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_PAUSED);
                    } else {
                        gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_PLAYING);
                    }
                }
            }
            break;
        case GST_MESSAGE_ERROR: {
            GError *err;
            gchar *debug;
            gboolean flushing;   /* NOTE(review): declared but never used */
            gst_message_parse_error (message, &err, &debug);
            logger_log(logger, LOGGER_INFO, "GStreamer error: %s %s", GST_MESSAGE_SRC_NAME(message),err->message);
            /* give mirror-mode users a hint for the most common pipeline failure */
            if (!hls_video && strstr(err->message,"Internal data stream error")) {
                logger_log(logger, LOGGER_INFO,
                           "*** This is a generic GStreamer error that usually means that GStreamer\n"
                           "*** was unable to construct a working video pipeline.\n\n"
                           "*** If you are letting the default autovideosink select the videosink,\n"
                           "*** GStreamer may be trying to use non-functional hardware h264 video decoding.\n"
                           "*** Try using option -avdec to force software decoding or use -vs <videosink>\n"
                           "*** to select a videosink of your choice (see \"man uxplay\").\n\n"
                           "*** Raspberry Pi models 4B and earlier using Video4Linux2 may need \"-bt709\" uxplay option");
            }
            g_error_free (err);
            g_free (debug);
            /* drain and shut down this pipeline, then leave the main loop */
            if (renderer_type[type]->appsrc) {
                gst_app_src_end_of_stream (GST_APP_SRC(renderer_type[type]->appsrc));
            }
            gst_bus_set_flushing(bus, TRUE);
            gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_READY);
            renderer_type[type]->terminate = TRUE;
            g_main_loop_quit( (GMainLoop *) loop);
            break;
        }
        case GST_MESSAGE_EOS:
            /* end-of-stream */
            logger_log(logger, LOGGER_INFO, "GStreamer: End-Of-Stream");
            if (hls_video) {
                gst_bus_set_flushing(bus, TRUE);
                gst_element_set_state (renderer_type[type]->pipeline, GST_STATE_READY);
                renderer_type[type]->terminate = TRUE;
                g_main_loop_quit( (GMainLoop *) loop);
            }
            break;
        case GST_MESSAGE_STATE_CHANGED:
            /* after a codec switch the previous pipeline is dropped to NULL by
             * video_renderer_choose_codec(); once that completes, restart it
             * so it can be reused if the codec changes again */
            if (renderer_type[type]->state_pending && strstr(GST_MESSAGE_SRC_NAME(message), "pipeline")) {
                GstState state;
                gst_element_get_state(renderer_type[type]->pipeline, &state, NULL, 100 * GST_MSECOND);
                if (state == GST_STATE_NULL) {
                    gst_element_set_state(renderer_type[type]->pipeline, GST_STATE_PLAYING);
                } else if (state == GST_STATE_PLAYING) {
                    renderer_type[type]->state_pending = false;
                }
            }
            /* with autovideosink: discover which real sink was selected */
            if (renderer_type[type]->autovideo) {
                char *sink = strstr(GST_MESSAGE_SRC_NAME(message), "-actual-sink-");
                if (sink) {
                    sink += strlen("-actual-sink-");
                    if (strstr(GST_MESSAGE_SRC_NAME(message), renderer_type[type]->codec)) {
                        logger_log(logger, LOGGER_DEBUG, "GStreamer: automatically-selected videosink"
                                   " (renderer %d: %s) is \"%ssink\"", renderer_type[type]->id + 1,
                                   renderer_type[type]->codec, sink);
#ifdef X_DISPLAY_FIX
                        renderer_type[type]->use_x11 = (strstr(sink, "ximage") || strstr(sink, "xvimage"));
#endif
                        renderer_type[type]->autovideo = false;
                    }
                }
            }
            break;
#ifdef X_DISPLAY_FIX
        case GST_MESSAGE_ELEMENT:
            /* navigation key events from the videosink's X11 window:
             * implements the fullscreen toggle (F11 / left-Alt+Enter) */
            if (renderer_type[type]->gst_window && renderer_type[type]->gst_window->window) {
                GstNavigationMessageType message_type = gst_navigation_message_get_type (message);
                if (message_type == GST_NAVIGATION_MESSAGE_EVENT) {
                    GstEvent *event = NULL;
                    if (gst_navigation_message_parse_event (message, &event)) {
                        GstNavigationEventType event_type = gst_navigation_event_get_type (event);
                        const gchar *key;
                        switch (event_type) {
                            case GST_NAVIGATION_EVENT_KEY_PRESS:
                                if (gst_navigation_event_parse_key_event (event, &key)) {
                                    if ((strcmp (key, "F11") == 0) || (alt_keypress && strcmp (key, "Return") == 0)) {
                                        fullscreen = !(fullscreen);
                                        set_fullscreen(renderer_type[type]->gst_window, &fullscreen);
                                    } else if (strcmp (key, "Alt_L") == 0) {
                                        alt_keypress = true;   /* track left-Alt held down */
                                    }
                                }
                                break;
                            case GST_NAVIGATION_EVENT_KEY_RELEASE:
                                if (gst_navigation_event_parse_key_event (event, &key)) {
                                    if (strcmp (key, "Alt_L") == 0) {
                                        alt_keypress = false;
                                    }
                                }
                                /* no break: falls through to default, which only breaks */
                            default:
                                break;
                        }
                    }
                    if (event) {
                        gst_event_unref (event);
                    }
                }
            }
            break;
#endif
        default:
            /* unhandled message */
            break;
    }
    return TRUE;
}
|
||||
|
||||
/* Select the mirror-mode renderer matching the stream codec: h264
 * (video_is_h265 == false) or h265 (true), based on the sps/pps data
 * received by raop_rtp_mirror. Only valid when not in HLS mode. */
void video_renderer_choose_codec (bool video_is_h265) {
    g_assert(!hls_video);
    video_renderer_t *selected = video_is_h265 ? renderer_type[1] : renderer_type[0];
    if (selected == renderer) {
        return;   /* codec unchanged: nothing to do */
    }
    video_renderer_t *previous = renderer;
    renderer = selected;
    gst_video_pipeline_base_time = gst_element_get_base_time(renderer->appsrc);
    /* A mid-connection codec change is unlikely, but if it happens the old
     * renderer is drained and dropped to GST_STATE_NULL; the bus watch
     * notices when that transition finishes and restarts the pipeline so it
     * can be reused if the codec changes back. */
    if (previous) {
        gst_app_src_end_of_stream(GST_APP_SRC(previous->appsrc));
        gst_bus_set_flushing(previous->bus, TRUE);
        /* NULL state should also close the previous renderer's video window */
        gst_element_set_state(previous->pipeline, GST_STATE_NULL);
        previous->state_pending = true;   /* handled by the bus callback */
    }
}
|
||||
|
||||
unsigned int video_reset_callback(void * loop) {
|
||||
if (video_terminate) {
|
||||
video_terminate = false;
|
||||
if (renderer->appsrc) {
|
||||
gst_app_src_end_of_stream (GST_APP_SRC(renderer->appsrc));
|
||||
}
|
||||
gboolean flushing = TRUE;
|
||||
gst_bus_set_flushing(renderer->bus, flushing);
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_NULL);
|
||||
g_main_loop_quit( (GMainLoop *) loop);
|
||||
}
|
||||
return (unsigned int) TRUE;
|
||||
}
|
||||
|
||||
bool video_get_playback_info(double *duration, double *position, float *rate) {
|
||||
gint64 pos = 0;
|
||||
GstState state;
|
||||
*duration = 0.0;
|
||||
*position = -1.0;
|
||||
*rate = 0.0f;
|
||||
if (!renderer) {
|
||||
|
||||
return true;
|
||||
}
|
||||
gst_element_get_state(renderer->pipeline, &state, NULL, 0);
|
||||
*rate = 0.0f;
|
||||
switch (state) {
|
||||
case GST_STATE_PLAYING:
|
||||
*rate = 1.0f;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
if (!GST_CLOCK_TIME_IS_VALID(renderer->duration)) {
|
||||
if (!gst_element_query_duration (renderer->pipeline, GST_FORMAT_TIME, &renderer->duration)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
*duration = ((double) renderer->duration) / GST_SECOND;
|
||||
if (*duration) {
|
||||
if (gst_element_query_position (renderer->pipeline, GST_FORMAT_TIME, &pos) &&
|
||||
GST_CLOCK_TIME_IS_VALID(pos)) {
|
||||
*position = ((double) pos) / GST_SECOND;
|
||||
}
|
||||
}
|
||||
|
||||
logger_log(logger, LOGGER_DEBUG, "********* video_get_playback_info: position %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT " %s *********",
|
||||
GST_TIME_ARGS (pos), GST_TIME_ARGS (renderer->duration), gst_element_state_get_name(state));
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void video_renderer_seek(float position) {
|
||||
double pos = (double) position;
|
||||
pos *= GST_SECOND;
|
||||
gint64 seek_position = (gint64) pos;
|
||||
seek_position = seek_position < 1000 ? 1000 : seek_position;
|
||||
seek_position = seek_position > renderer->duration - 1000 ? renderer->duration - 1000: seek_position;
|
||||
g_print("SCRUB: seek to %f secs = %" GST_TIME_FORMAT ", duration = %" GST_TIME_FORMAT "\n", position,
|
||||
GST_TIME_ARGS(seek_position), GST_TIME_ARGS(renderer->duration));
|
||||
gboolean result = gst_element_seek_simple(renderer->pipeline, GST_FORMAT_TIME,
|
||||
(GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT),
|
||||
seek_position);
|
||||
if (result) {
|
||||
g_print("seek succeeded\n");
|
||||
gst_element_set_state (renderer->pipeline, GST_STATE_PLAYING);
|
||||
} else {
|
||||
g_print("seek failed\n");
|
||||
}
|
||||
}
|
||||
|
||||
unsigned int video_renderer_listen(void *loop, int id) {
|
||||
g_assert(id >= 0 && id < n_renderers);
|
||||
return (unsigned int) gst_bus_add_watch(renderer_type[id]->bus,(GstBusFunc)
|
||||
gstreamer_pipeline_bus_callback, (gpointer) loop);
|
||||
}
|
||||
|
||||
/* Toggle the pipeline's input-selector between the live video source
 * (sink_1, use_video == true) and the static placeholder image (sink_0).
 * No-op in HLS mode or when no renderer is active. */
void video_renderer_switch_source(bool use_video) {
    if (!renderer || hls_video) {
        return;
    }

    GstElement *selector = gst_bin_get_by_name(GST_BIN(renderer->pipeline), "selector");
    if (selector == NULL) {
        logger_log(logger, LOGGER_ERR, "Failed to get input-selector element");
        return;
    }

    const char *pad_name = use_video ? "sink_1" : "sink_0";
    GstPad *pad = gst_element_get_static_pad(selector, pad_name);
    if (pad == NULL) {
        logger_log(logger, LOGGER_ERR, "Failed to get pad for %s", use_video ? "video source" : "static image");
        gst_object_unref(selector);
        return;
    }

    g_object_set(selector, "active-pad", pad, NULL);
    gst_object_unref(pad);
    gst_object_unref(selector);

    logger_log(logger, LOGGER_DEBUG, "Switched to %s source", use_video ? "video" : "static image");
}
|
75
renderers/video_renderer.h
Normal file
75
renderers/video_renderer.h
Normal file
@ -0,0 +1,75 @@
|
||||
/**
 * RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
 * Copyright (C) 2019 Florian Draschbacher
 * Modified for:
 * UxPlay - An open-source AirPlay mirroring server
 * Copyright (C) 2021-23 F. Duncanh
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/*
 * H264/H265 video renderer using gstreamer
 * (fix: the duplicate video_renderer_listen prototype was removed)
 */

#ifndef VIDEO_RENDERER_H
#define VIDEO_RENDERER_H

#ifdef __cplusplus
extern "C" {
#endif

#include <stdlib.h>
#include <stdint.h>
#include <stdbool.h>
#include "../lib/logger.h"

/* requested rotation / mirroring applied to the rendered video */
typedef enum videoflip_e {
    NONE,
    LEFT,
    RIGHT,
    INVERT,
    VFLIP,
    HFLIP,
} videoflip_t;

typedef struct video_renderer_s video_renderer_t;

void video_renderer_init (logger_t *logger, const char *server_name, videoflip_t videoflip[2], const char *parser,
                          const char *decoder, const char *converter, const char *videosink, const char *videosink_options,
                          bool initial_fullscreen, bool video_sync, bool h265_support, const char *uri, const char *default_image);
void video_renderer_start ();
void video_renderer_stop ();
void video_renderer_pause ();
void video_renderer_seek(float position);
void video_renderer_resume ();
bool video_renderer_is_paused();
void video_renderer_render_buffer (unsigned char* data, int *data_len, int *nal_count, uint64_t *ntp_time);
void video_renderer_flush ();
unsigned int video_renderer_listen(void *loop, int id);
void video_renderer_destroy ();
void video_renderer_size(float *width_source, float *height_source, float *width, float *height);
bool waiting_for_x11_window();
bool video_get_playback_info(double *duration, double *position, float *rate);
void video_renderer_choose_codec(bool is_h265);
unsigned int video_reset_callback(void *loop);
void video_renderer_switch_source(bool use_video);

#ifdef __cplusplus
}
#endif

#endif //VIDEO_RENDERER_H
|
||||
|
119
renderers/x_display_fix.h
Normal file
119
renderers/x_display_fix.h
Normal file
@ -0,0 +1,119 @@
|
||||
/**
|
||||
* RPiPlay - An open-source AirPlay mirroring server for Raspberry Pi
|
||||
* Copyright (C) 2019 Florian Draschbacher
|
||||
* Modified for:
|
||||
* UxPlay - An open-source AirPlay mirroring server
|
||||
* Copyright (C) 2021-23 F. Duncanh
|
||||
*
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program; if not, write to the Free Software Foundation,
|
||||
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
/* based on code from David Ventura https://github.com/DavidVentura/UxPlay */
|
||||
|
||||
/* This file should be only included from video_renderer.c as it defines static
|
||||
* functions and depends on video_renderer internals */
|
||||
|
||||
#ifndef X_DISPLAY_FIX_H
|
||||
#define X_DISPLAY_FIX_H
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include <string.h>
|
||||
#include <X11/Xlib.h>
|
||||
#include <X11/Xutil.h>
|
||||
#include <stdio.h>
|
||||
|
||||
struct X11_Window_s {
|
||||
Display * display;
|
||||
Window window;
|
||||
} typedef X11_Window_t;
|
||||
|
||||
/* Open the default X11 display (NULL on failure — callers must check)
 * and mark the window as not yet located. */
static void get_X11_Display(X11_Window_t * X11) {
    X11->window = (Window) NULL;
    X11->display = XOpenDisplay(NULL);
}
|
||||
|
||||
/* Depth-first search of the window tree below `window` for a window whose
 * name (as returned by XFetchName) equals `str`.
 * Returns the matching Window, or 0 (None) when there is no match.
 * Fixes vs. the original: the XFetchName string is XFree'd (it leaked on
 * every call, including the matching one); the unused XGetWMName call —
 * which leaked its XTextProperty value — is removed; and the children list
 * from XQueryTree is freed even when a match is found inside the loop
 * (previously the early `return w` leaked it). */
static Window enum_windows(const char * str, Display * display, Window window, int depth) {
    /* compare this window's name, then release the server-allocated string */
    char *name = NULL;
    XFetchName(display, window, &name);
    int match = (name != NULL && strcmp(str, name) == 0);
    if (name != NULL) {
        XFree(name);
    }
    if (match) {
        return window;
    }
    /* recurse into the children; free the list before returning */
    Window _root, parent;
    Window *children = NULL;
    unsigned int n = 0;
    Window result = (Window) NULL;
    XQueryTree(display, window, &_root, &parent, &children, &n);
    if (children != NULL) {
        for (unsigned int i = 0; i < n && !result; i++) {
            result = enum_windows(str, display, children[i], depth + 1);
        }
        XFree(children);
    }
    return result;
}
|
||||
|
||||
/* No-op X error handler installed around the window search in
 * get_x_window(): enum_windows() can race with windows being destroyed,
 * and the default Xlib handler would terminate the program on such
 * errors, so they are silently ignored instead. */
int X11_error_catcher( Display *disp, XErrorEvent *xe ) {
    // do nothing
    return 0;
}
|
||||
|
||||
/* Search the whole X11 window tree for a window whose name equals `name`
 * (the UxPlay server name) and store it in X11->window (0 if not found).
 * A no-op error handler is installed for the duration of the search so
 * races with vanishing windows do not abort the program. */
static void get_x_window(X11_Window_t * X11, const char * name) {
    Window root = XDefaultRootWindow(X11->display);
    XSetErrorHandler(X11_error_catcher);
    X11->window = enum_windows(name, X11->display, root, 0);
    XSetErrorHandler(NULL);   /* restore the default handler */
#ifdef ZOOM_WINDOW_NAME_FIX
    /* NOTE(review): presumably a workaround for GStreamer < 1.20 window
     * naming (cf. the ZOOMFIX logic in the build files): rewrite the
     * window's _NET_WM_NAME to the server name — confirm intent. */
    if (X11->window) {
        Atom _NET_WM_NAME = XInternAtom(X11->display, "_NET_WM_NAME", 0);
        Atom UTF8_STRING = XInternAtom(X11->display, "UTF8_STRING", 0);
        XChangeProperty(X11->display, X11->window, _NET_WM_NAME, UTF8_STRING,
                        8, 0, (const unsigned char *) name, strlen(name));
        XSync(X11->display, False);
    }
#endif
}
|
||||
|
||||
/* Ask the window manager to add (*fullscreen == true) or remove
 * (*fullscreen == false) the fullscreen state of X11->window, using an
 * EWMH _NET_WM_STATE client message sent to the root window. */
static void set_fullscreen(X11_Window_t * X11, bool * fullscreen) {
    Atom wm_state = XInternAtom(X11->display, "_NET_WM_STATE", True);
    Atom wm_fullscreen = XInternAtom(X11->display, "_NET_WM_STATE_FULLSCREEN", True);
    XClientMessageEvent msg = {
        .type = ClientMessage,
        .display = X11->display,
        .window = X11->window,
        .message_type = wm_state,
        .format = 32,
        /* data.l[0]: 1 = _NET_WM_STATE_ADD, 0 = _NET_WM_STATE_REMOVE */
        .data = { .l = { *fullscreen, wm_fullscreen, None, 0, 1 }}
    };
    Window root = XRootWindow(X11->display, XDefaultScreen(X11->display));
    XSendEvent(X11->display, root, False,
               SubstructureRedirectMask | SubstructureNotifyMask, (XEvent *) &msg);
    XSync(X11->display, False);
}
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
Reference in New Issue
Block a user