author | Ludovic Courtès <ludo@gnu.org> | 2022-05-09 14:44:48 +0200 |
---|---|---|
committer | Ludovic Courtès <ludo@gnu.org> | 2022-05-09 14:44:48 +0200 |
commit | da3be3ff4f6d8b643ed9bdf6834df366b1bcea44 (patch) | |
tree | 913ff6010447b6a31dfbe4be6eccc06b11fd83cf /gnu/packages/patches | |
parent | f5fe0082abe4547f3fb9f29d8351473cfb3a387b (diff) | |
parent | 4980630d1e3e6f9a6dc05438c7593a727207d9a0 (diff) | |
download | guix-da3be3ff4f6d8b643ed9bdf6834df366b1bcea44.tar.gz | |
Merge branch 'master' into staging
Diffstat (limited to 'gnu/packages/patches')
11 files changed, 226 insertions, 829 deletions
diff --git a/gnu/packages/patches/dealii-fix-compiliation-with-boost-1.78.patch b/gnu/packages/patches/dealii-fix-compiliation-with-boost-1.78.patch
deleted file mode 100644
index 1937aa9afc..0000000000
--- a/gnu/packages/patches/dealii-fix-compiliation-with-boost-1.78.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-From cbef761731627cece2a6f0276b87dacabbdc8a72 Mon Sep 17 00:00:00 2001
-From: David Wells <drwells@email.unc.edu>
-Date: Tue, 4 Jan 2022 12:46:30 -0500
-Subject: [PATCH] Fix compilation with boost 1.78.
-
-I bisected (fortunately Boost.Geometry a header-only library so adding the
-include directory sufficed) and
-https://github.com/boostorg/geometry/commit/6eb9e238bcb37e26dc31d16acf826784a2ba30f4
-is where this problem starts for us. See also
-https://github.com/boostorg/geometry/issues/792 - the easiest fix for all such
-issues is to just include the project header `boost/geometry/geometry.hpp`.
-
-In this particular case, if you look at the commit which causes grid_tools.cc
-fails to compile, its because we were relying on some implicit includes. In
-particular, we need the distance header to find the distance between points and
-boxes, but that was previously included in another file.
-
-This patch has been adapted from
-e0e76835519d122fd12b5858e16d08641a641c6a to apply to dealii 9.3.2.
-
-See https://github.com/dealii/dealii/pull/13165.
----
- include/deal.II/numerics/rtree.h | 1 +
- 1 file changed, 1 insertion(+)
-
-diff --git a/include/deal.II/numerics/rtree.h b/include/deal.II/numerics/rtree.h
-index 1b9d04dacd..1e1bfd2932 100644
---- a/include/deal.II/numerics/rtree.h
-+++ b/include/deal.II/numerics/rtree.h
-@@ -26,6 +26,7 @@
- #include <deal.II/boost_adaptors/segment.h>
-
- DEAL_II_DISABLE_EXTRA_DIAGNOSTICS
-+#include <boost/geometry/algorithms/distance.hpp>
- #include <boost/geometry/index/rtree.hpp>
- #include <boost/geometry/strategies/strategies.hpp>
- DEAL_II_ENABLE_EXTRA_DIAGNOSTICS
---
-2.30.2
-
diff --git a/gnu/packages/patches/dealii-fix-sundials.patch b/gnu/packages/patches/dealii-fix-sundials.patch
deleted file mode 100644
index 7dd5df45da..0000000000
--- a/gnu/packages/patches/dealii-fix-sundials.patch
+++ /dev/null
@@ -1,60 +0,0 @@
-From af73f368f7f9d4a00df075d1a9f50fc495f8e87a Mon Sep 17 00:00:00 2001
-From: Timo Heister <timo.heister@gmail.com>
-Date: Sat, 25 Dec 2021 12:30:45 -0500
-Subject: [PATCH] fix sundials compilation
-
----
- include/deal.II/sundials/n_vector.templates.h | 11 ++++++-----
- 1 file changed, 6 insertions(+), 5 deletions(-)
-
-diff --git a/include/deal.II/sundials/n_vector.templates.h b/include/deal.II/sundials/n_vector.templates.h
-index 2b49e3efc9..746f63a03b 100644
---- a/include/deal.II/sundials/n_vector.templates.h
-+++ b/include/deal.II/sundials/n_vector.templates.h
-@@ -253,13 +253,13 @@ namespace SUNDIALS
- template <
- typename VectorType,
- typename std::enable_if_t<!IsBlockVector<VectorType>::value, int> = 0>
-- MPI_Comm
-+ const MPI_Comm &
- get_communicator(N_Vector v);
-
- template <
- typename VectorType,
- typename std::enable_if_t<IsBlockVector<VectorType>::value, int> = 0>
-- MPI_Comm
-+ const MPI_Comm &
- get_communicator(N_Vector v);
-
- /**
-@@ -481,7 +481,7 @@ SUNDIALS::internal::NVectorOperations::destroy(N_Vector v)
-
- template <typename VectorType,
- std::enable_if_t<IsBlockVector<VectorType>::value, int>>
--MPI_Comm
-+const MPI_Comm &
- SUNDIALS::internal::NVectorOperations::get_communicator(N_Vector v)
- {
- return unwrap_nvector_const<VectorType>(v)->block(0).get_mpi_communicator();
-@@ -491,7 +491,7 @@ SUNDIALS::internal::NVectorOperations::get_communicator(N_Vector v)
-
- template <typename VectorType,
- std::enable_if_t<!IsBlockVector<VectorType>::value, int>>
--MPI_Comm
-+const MPI_Comm &
- SUNDIALS::internal::NVectorOperations::get_communicator(N_Vector v)
- {
- return unwrap_nvector_const<VectorType>(v)->get_mpi_communicator();
-@@ -519,7 +519,8 @@ SUNDIALS::internal::NVectorOperations::get_communicator_as_void_ptr(N_Vector v)
- (void)v;
- return nullptr;
- # else
-- return get_communicator<VectorType>(v);
-+ // We need to cast away const here, as SUNDIALS demands a pure `void *`.
-+ return &(const_cast<MPI_Comm &>(get_communicator<VectorType>(v)));
- # endif
- }
-
---
-2.30.2
-
diff --git a/gnu/packages/patches/emacs-git-email-missing-parens.patch b/gnu/packages/patches/emacs-git-email-missing-parens.patch
new file mode 100644
index 0000000000..051e2bf1eb
--- /dev/null
+++ b/gnu/packages/patches/emacs-git-email-missing-parens.patch
@@ -0,0 +1,27 @@
+From 820ad7eb2e919e3f880bec22bd4f737fa55c4d22 Mon Sep 17 00:00:00 2001
+From: Niklas Eklund <niklas.eklund@posteo.net>
+Date: Thu, 5 May 2022 12:43:49 +0200
+Subject: [PATCH] Fix missing parens in git-email-mu4e.el
+
+The incorrect number of parens in this file lead to Emacs failing to
+parse the file.
+---
+ git-email-mu4e.el | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/git-email-mu4e.el b/git-email-mu4e.el
+index d5b8770..9220bf9 100644
+--- a/git-email-mu4e.el
++++ b/git-email-mu4e.el
+@@ -51,7 +51,7 @@ from Lisp, enable the mode if ARG is omitted or nil."
+ ;; built in context feature.
+ (seq-filter (lambda (header)
+ (not (eq (car header) 'from)))
+- headers)
++ headers))))
+ (setq git-email-compose-email-function 'message-mail)))
+
+ (provide 'git-email-mu4e)
+--
+2.34.0
+
diff --git a/gnu/packages/patches/flatpak-unset-gdk-pixbuf-for-sandbox.patch b/gnu/packages/patches/flatpak-unset-gdk-pixbuf-for-sandbox.patch
new file mode 100644
index 0000000000..79fec8e526
--- /dev/null
+++ b/gnu/packages/patches/flatpak-unset-gdk-pixbuf-for-sandbox.patch
@@ -0,0 +1,19 @@
+Most Guix system setup with desktop evironment will install GDK_PIXBUF_MODULE_FILE
+environment variable in the system profile, and it'll be leaked into the sandbox
+environment of flatpak, so the applications in sandbox may fail to find correct
+GdkPixbuf loaders.
+
+This patch unset the GDK_PIXBUF_MODULE_FILE environment variable before running
+the sandboxed applications, prevents it to load GdkPixbuf loaders from the path
+of host system.
+
+--- a/common/flatpak-run.c
++++ b/common/flatpak-run.c
+@@ -1853,6 +1853,7 @@ static const ExportData default_exports[] = {
+ {"GST_PTP_HELPER", NULL},
+ {"GST_PTP_HELPER_1_0", NULL},
+ {"GST_INSTALL_PLUGINS_HELPER", NULL},
++ {"GDK_PIXBUF_MODULE_FILE", NULL},
+ };
+
+ static const ExportData no_ld_so_cache_exports[] = {
diff --git a/gnu/packages/patches/nftables-fix-makefile.patch b/gnu/packages/patches/nftables-fix-makefile.patch
new file mode 100644
index 0000000000..1660635874
--- /dev/null
+++ b/gnu/packages/patches/nftables-fix-makefile.patch
@@ -0,0 +1,34 @@
+From 18a08fb7f0443f8bde83393bd6f69e23a04246b3 Mon Sep 17 00:00:00 2001
+From: Pablo Neira Ayuso <pablo@netfilter.org>
+Date: Tue, 22 Feb 2022 00:56:36 +0100
+Subject: examples: compile with `make check' and add AM_CPPFLAGS
+
+Compile examples via `make check' like libnftnl does. Use AM_CPPFLAGS to
+specify local headers via -I.
+
+Unfortunately, `make distcheck' did not catch this compile time error in
+my system, since it was using the nftables/libnftables.h file of the
+previous nftables release.
+
+Fixes: 5b364657a35f ("build: missing SUBIRS update")
+Fixes: caf2a6ad2d22 ("examples: add libnftables example program")
+Signed-off-by: Pablo Neira Ayuso <pablo@netfilter.org>
+---
+ examples/Makefile.am | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/examples/Makefile.am b/examples/Makefile.am
+index c972170d..3b8b0b67 100644
+--- a/examples/Makefile.am
++++ b/examples/Makefile.am
+@@ -1,4 +1,6 @@
+-noinst_PROGRAMS = nft-buffer \
++check_PROGRAMS = nft-buffer \
+ nft-json-file
+
++AM_CPPFLAGS = -I$(top_srcdir)/include
++
+ LDADD = $(top_builddir)/src/libnftables.la
+--
+cgit v1.2.3
+
diff --git a/gnu/packages/patches/openssh-trust-guix-store-directory.patch b/gnu/packages/patches/openssh-trust-guix-store-directory.patch
new file mode 100644
index 0000000000..b3a9c1bdfc
--- /dev/null
+++ b/gnu/packages/patches/openssh-trust-guix-store-directory.patch
@@ -0,0 +1,40 @@
+From 0d85bbd42ddcd442864a9ba4719aca8b70d68048 Mon Sep 17 00:00:00 2001
+From: Alexey Abramov <levenson@mmer.org>
+Date: Fri, 22 Apr 2022 11:32:15 +0200
+Subject: [PATCH] Trust guix store directory
+
+To be able to execute binaries defined in OpenSSH configuration, we
+need to tell OpenSSH that we can trust Guix store objects. safe_path
+procedure takes a canonical path and for each component, walking
+upwards, checks ownership and permissions constrains which are: must
+be owned by root, not writable by group or others.
+---
+ misc.c | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/misc.c b/misc.c
+index 0134d69..7131d5e 100644
+--- a/misc.c
++++ b/misc.c
+@@ -2146,6 +2146,7 @@ int
+ safe_path(const char *name, struct stat *stp, const char *pw_dir,
+ uid_t uid, char *err, size_t errlen)
+ {
++ static const char guix_store[] = @STORE_DIRECTORY@;
+ char buf[PATH_MAX], homedir[PATH_MAX];
+ char *cp;
+ int comparehome = 0;
+@@ -2178,6 +2179,10 @@ safe_path(const char *name, struct stat *stp, const char *pw_dir,
+ }
+ strlcpy(buf, cp, sizeof(buf));
+
++ /* If we are past the Guix store then we can stop */
++ if (strcmp(guix_store, buf) == 0)
++ break;
++
+ if (stat(buf, &st) == -1 ||
+ (!platform_sys_dir_uid(st.st_uid) && st.st_uid != uid) ||
+ (st.st_mode & 022) != 0) {
+--
+2.34.0
+
diff --git a/gnu/packages/patches/tor-sandbox-i686.patch b/gnu/packages/patches/tor-sandbox-i686.patch
deleted file mode 100644
index 34b0a053b1..0000000000
--- a/gnu/packages/patches/tor-sandbox-i686.patch
+++ /dev/null
@@ -1,36 +0,0 @@
-This patch fixes sandboxing on i686 by allowing 'statx'. Without this,
-'src/test/test_include.sh' would fail.
-
-Patch adapted from:
-
- https://gitlab.torproject.org/tpo/core/tor/-/merge_requests/480
-
-From 001d880d1082f5d124e10554e2718e407c7e88c6 Mon Sep 17 00:00:00 2001
-From: Simon South <simon@simonsouth.net>
-Date: Fri, 5 Nov 2021 10:10:10 -0400
-Subject: [PATCH] sandbox: Allow "statx" syscall on i386 for glibc 2.33
-
-glibc versions 2.33 and newer use the modern "statx" system call in their
-implementations of stat() and opendir() for Linux on i386. Prevent failures in
-the sandbox unit tests by modifying the sandbox to allow this system call
-without restriction on i386 when it is available, and update the test suite to
-skip the "sandbox/stat_filename" test in this case as it is certain to fail.
----
- src/lib/sandbox/sandbox.c | 3 +++
- src/test/test_sandbox.c | 7 ++++---
- 2 files changed, 7 insertions(+), 3 deletions(-)
-
-diff --git a/src/lib/sandbox/sandbox.c b/src/lib/sandbox/sandbox.c
-index fb02a345ab..a15f99ad76 100644
---- a/src/lib/sandbox/sandbox.c
-+++ b/src/lib/sandbox/sandbox.c
-@@ -252,6 +252,9 @@ static int filter_nopar_gen[] = {
- SCMP_SYS(sigreturn),
- #endif
- SCMP_SYS(stat),
-+#if defined(__i386__) && defined(__NR_statx)
-+ SCMP_SYS(statx),
-+#endif
- SCMP_SYS(uname),
- SCMP_SYS(wait4),
- SCMP_SYS(write),
diff --git a/gnu/packages/patches/ungoogled-chromium-system-nspr.patch b/gnu/packages/patches/ungoogled-chromium-system-nspr.patch
index 831e2b899c..b39cf1281b 100644
--- a/gnu/packages/patches/ungoogled-chromium-system-nspr.patch
+++ b/gnu/packages/patches/ungoogled-chromium-system-nspr.patch
@@ -6,8 +6,8 @@ https://salsa.debian.org/chromium-team/chromium/-/blob/master/debian/patches/sys
 diff --git a/base/BUILD.gn b/base/BUILD.gn
 --- a/base/BUILD.gn
 +++ b/base/BUILD.gn
-@@ -141,6 +141,12 @@ config("perfetto_config") {
- }
+@@ -184,6 +184,12 @@ buildflag_header("ios_cronet_buildflags") {
+ flags = [ "CRONET_BUILD=$is_cronet_build" ]
 }
 +if (is_linux) {
@@ -19,7 +19,7 @@ diff --git a/base/BUILD.gn b/base/BUILD.gn
 # Base and everything it depends on should be a static library rather than
 # a source set. Base is more of a "library" in the classic sense in that many
 # small parts of it are used in many different contexts. This combined with a
-@@ -759,8 +765,6 @@ component("base") {
+@@ -838,8 +844,6 @@ mixed_component("base") {
 "third_party/cityhash_v103/src/city_v103.cc",
 "third_party/cityhash_v103/src/city_v103.h",
 "third_party/icu/icu_utf.h",
-
-
-
@@ -31,10 +31,10 @@ diff --git a/base/BUILD.gn b/base/BUILD.gn
 diff --git a/base/time/pr_time_unittest.cc b/base/time/pr_time_unittest.cc
 --- a/base/time/pr_time_unittest.cc
 +++ b/base/time/pr_time_unittest.cc
-@@ -7,7 +7,7 @@
+@@ -6,7 +6,7 @@
+ #include <time.h>
 #include "base/compiler_specific.h"
- #include "base/cxx17_backports.h"
 -#include "base/third_party/nspr/prtime.h"
 +#include <nspr/prtime.h>
 #include "base/time/time.h"
@@ -43,7 +43,7 @@ diff --git a/base/time/pr_time_unittest.cc b/base/time/pr_time_unittest.cc
 diff --git a/base/time/time.cc b/base/time/time.cc
 --- a/base/time/time.cc
 +++ b/base/time/time.cc
-@@ -18,7 +18,7 @@
+@@ -21,7 +21,7 @@
 #include <utility>
 #include "base/strings/stringprintf.h"
diff --git a/gnu/packages/patches/valgrind-fix-default-debuginfo-path.patch b/gnu/packages/patches/valgrind-fix-default-debuginfo-path.patch
new file mode 100644
index 0000000000..f78dec02dc
--- /dev/null
+++ b/gnu/packages/patches/valgrind-fix-default-debuginfo-path.patch
@@ -0,0 +1,100 @@
+From a7f17b57a94e9cde6d7fa96ac86be5c4fc4f9211 Mon Sep 17 00:00:00 2001
+From: Denis 'GNUtoo' Carikli <GNUtoo@cyberdimension.org>
+Date: Sun, 24 Apr 2022 22:20:50 +0200
+Subject: [PATCH] valgrind: fix default debuginfo path
+
+Description: Workaround for missing symbol in Guix's ld.so. The
+ correct fix (not stripping all the ld.so symbols) will be done in the
+ next Guix release as it requires to recompile a lot of packages.
+
+Forwarded: not-needed
+Bug-Guix: https://issues.guix.gnu.org/54728
+Author: Denis 'GNUtoo' Carikli <GNUtoo@cyberdimension.org>
+---
+ coregrind/m_debuginfo/readelf.c | 11 ++++++-----
+ docs/xml/manual-core-adv.xml | 4 ++--
+ docs/xml/manual-core.xml | 2 +-
+ 3 files changed, 9 insertions(+), 8 deletions(-)
+
+diff --git a/coregrind/m_debuginfo/readelf.c b/coregrind/m_debuginfo/readelf.c
+index c586e3f33..947fcc500 100644
+--- a/coregrind/m_debuginfo/readelf.c
++++ b/coregrind/m_debuginfo/readelf.c
+@@ -1509,9 +1509,9 @@ DiImage* find_debug_file( struct _DebugInfo* di,
+
+ if (buildid != NULL) {
+ debugpath = ML_(dinfo_zalloc)("di.fdf.1",
+- VG_(strlen)(buildid) + 33);
++ VG_(strlen)(buildid) + DEBUGPATH_EXTRA_BYTES_1);
+
+- VG_(sprintf)(debugpath, "/usr/lib/debug/.build-id/%c%c/%s.debug",
++ VG_(sprintf)(debugpath, "DEFAULT_DEBUGINFO_PATH/.build-id/%c%c/%s.debug",
+ buildid[0], buildid[1], buildid + 2);
+
+ dimg = open_debug_file(debugpath, buildid, 0, rel_ok, NULL);
+@@ -1536,7 +1536,8 @@ DiImage* find_debug_file( struct _DebugInfo* di,
+
+ debugpath = ML_(dinfo_zalloc)(
+ "di.fdf.3",
+- VG_(strlen)(objdir) + VG_(strlen)(debugname) + 64
++ VG_(strlen)(objdir) + VG_(strlen)(debugname)
++ + VG_(strlen)("DEFAULT_DEBUGINFO_PATH/") + 1
+ + (extrapath ? VG_(strlen)(extrapath) : 0)
+ + (serverpath ? VG_(strlen)(serverpath) : 0));
+
+@@ -1561,7 +1562,7 @@ DiImage* find_debug_file( struct _DebugInfo* di,
+
+ TRY_OBJDIR_USRMERGE_OBJDIR("%s/%s");
+ TRY_OBJDIR_USRMERGE_OBJDIR("%s/.debug/%s");
+- TRY_OBJDIR_USRMERGE_OBJDIR("/usr/lib/debug%s/%s");
++ TRY_OBJDIR_USRMERGE_OBJDIR("DEFAULT_DEBUGINFO_PATH%s/%s");
+
+ if (extrapath) {
+ TRY_OBJDIR("%s%s/%s", extrapath, objdir, debugname);
+@@ -1631,7 +1632,7 @@ DiImage* find_debug_file_ad_hoc( const DebugInfo* di,
+
+ debugpath = ML_(dinfo_zalloc)(
+ "di.fdfah.3",
+- VG_(strlen)(objdir) + 64
++ VG_(strlen)(objdir) + DEBUGPATH_EXTRA_BYTES_2
+ + (extrapath ? VG_(strlen)(extrapath) : 0)
+ + (serverpath ? VG_(strlen)(serverpath) : 0));
+
+diff --git a/docs/xml/manual-core-adv.xml b/docs/xml/manual-core-adv.xml
+index 1fa801edc..a7c01d5e6 100644
+--- a/docs/xml/manual-core-adv.xml
++++ b/docs/xml/manual-core-adv.xml
+@@ -447,7 +447,7 @@ Valgrind embedded gdbserver:</para>
+ Remote debugging using | vgdb
+ relaying data between gdb and process 2418
+ Reading symbols from /lib/ld-linux.so.2...done.
+-Reading symbols from /usr/lib/debug/lib/ld-2.11.2.so.debug...done.
++Reading symbols from DEFAULT_DEBUGINFO_PATH/lib/ld-2.11.2.so.debug...done.
+ Loaded symbols for /lib/ld-linux.so.2
+ [Switching to Thread 2418]
+ 0x001f2850 in _start () from /lib/ld-linux.so.2
+@@ -475,7 +475,7 @@ Remote communication error: Resource temporarily unavailable.
+ Remote debugging using | vgdb --pid=2479
+ relaying data between gdb and process 2479
+ Reading symbols from /lib/ld-linux.so.2...done.
+-Reading symbols from /usr/lib/debug/lib/ld-2.11.2.so.debug...done.
++Reading symbols from DEFAULT_DEBUGINFO_PATH/lib/ld-2.11.2.so.debug...done.
+ Loaded symbols for /lib/ld-linux.so.2
+ [Switching to Thread 2479]
+ 0x001f2850 in _start () from /lib/ld-linux.so.2
+diff --git a/docs/xml/manual-core.xml b/docs/xml/manual-core.xml
+index dc33e1269..f6eb60086 100644
+--- a/docs/xml/manual-core.xml
++++ b/docs/xml/manual-core.xml
+@@ -1409,7 +1409,7 @@ that can report errors, e.g.
Memcheck, but not Cachegrind.</para> + <listitem> + <para>By default Valgrind searches in several well-known paths + for debug objects, such +- as <computeroutput>/usr/lib/debug/</computeroutput>.</para> ++ as <computeroutput>DEFAULT_DEBUGINFO_PATH/</computeroutput>.</para> + + <para>However, there may be scenarios where you may wish to put + debug objects at an arbitrary location, such as external storage +-- +2.35.1 + diff --git a/gnu/packages/patches/ytfzf-programs.patch b/gnu/packages/patches/ytfzf-programs.patch deleted file mode 100644 index 005ce2cf99..0000000000 --- a/gnu/packages/patches/ytfzf-programs.patch +++ /dev/null @@ -1,643 +0,0 @@ -From 3f1eaf5a1645b28ca18cfa028417dc225b7a557f Mon Sep 17 00:00:00 2001 -From: Raghav Gururajan <rg@raghavgururajan.name> -Date: Mon, 5 Jul 2021 06:45:49 -0400 -Subject: [PATCH] Modify the strings of referenced programs. - -Pattern the strings of referenced programs, so that they can be easily -substituted with absolute paths using a custom-phase. - -Co-authored-by: jgart <jgart@dismail.de> ---- - ytfzf | 198 +++++++++++++++++++++++++++++----------------------------- - 1 file changed, 99 insertions(+), 99 deletions(-) - -diff --git a/ytfzf b/ytfzf -index f4d2e0d..e8bb60b 100755 ---- a/ytfzf -+++ b/ytfzf -@@ -49,17 +49,17 @@ cache_dir=${YTFZF_CACHE-${cache_dir-$HOME/.cache/ytfzf}} - #video type preference (mp4/1080p, mp4/720p, etc..) - video_pref=${YTFZF_PREF-${video_pref-}} - #the menu to use instead of fzf when -D is specified --external_menu=${YTFZF_EXTMENU-${external_menu-dmenu -i -l 30 -p Search:}} -+external_menu=${YTFZF_EXTMENU-${external_menu-@dmenu@ -i -l 30 -p Search:}} - #number of columns (characters on a line) the external menu can have - #necessary for formatting text for external menus - external_menu_len=${YTFZF_EXTMENU_LEN-${external_menu_len-220}} - #player settings (players need to support streaming with youtube-dl) - #player to use for watching the video --video_player=${YTFZF_PLAYER-${video_player-mpv}} -+video_player=${YTFZF_PLAYER-${video_player-@mpv@}} - #if YTFZF_PREF is specified, use this player instead --video_player_format=${YTFZF_PLAYER_FORMAT-${video_player_format-mpv --ytdl-format=}} -+video_player_format=${YTFZF_PLAYER_FORMAT-${video_player_format-@mpv@ --ytdl-format=}} - #player to use for audio only --audio_player=${YTFZF_AUDIO_PLAYER-${audio_player-mpv --no-video}} -+audio_player=${YTFZF_AUDIO_PLAYER-${audio_player-@mpv@ --no-video}} - #the command to use for displaying thumbnails - thumb_disp_method=${YTFZF_THUMB_DISP_METHOD-${thumb_disp_method-ueberzug}} - #Storing the argument and location for autogenerated subtitles -@@ -85,8 +85,8 @@ subscriptions_file=${subscriptions_file-$config_dir/subscriptions} - #> stores the pid of running ytfzf sessions - pid_file="$cache_dir/.pid" - #> make folders that don't exist --[ -d "$cache_dir" ] || mkdir -p "$cache_dir" --[ -d "$thumb_dir" ] || mkdir -p "$thumb_dir" -+[ -d "$cache_dir" ] || @mkdir@ -p "$cache_dir" -+[ -d "$thumb_dir" ] || @mkdir@ -p "$thumb_dir" - - #> config settings - #list of shortcuts to use in fzf -@@ -177,12 +177,12 @@ dep_ck () { - done - unset Dep - } --dep_ck "jq" "youtube-dl" "curl" -+dep_ck "@jq@" "@youtube-dl@" "@curl@" - - - #only check for mpv if $YTFZF_PLAYER is set to it - #don't check $YTFZF_PLAYER as it could be multiple commands --[ "$video_player" = "mpv" ] && dep_ck "mpv" -+[ "$video_player" = "@mpv@" ] && dep_ck "@mpv@" - - ############################ - # Help Texts # -@@ -326,8 +326,8 @@ print_info () { - } - - print_error () { -- [ 
$ext_menu_notifs -eq 1 ] && notify-send "error" "$*" || printf "\033[31m$*\033[0m" >&2 -- [ $ext_menu_notifs -eq 1 ] && notify-send "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" || printf "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" >&2 -+ [ $ext_menu_notifs -eq 1 ] && @notify-send@ "error" "$*" || printf "\033[31m$*\033[0m" >&2 -+ [ $ext_menu_notifs -eq 1 ] && @notify-send@ "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" || printf "Check for new versions and report at: https://github.com/pystardust/ytfzf\n" >&2 - } - - ############################ -@@ -398,12 +398,12 @@ format_fzf () { - format_menu () { - if [ "$is_ext_menu" -eq 0 ]; then - #dep_ck fzf here because it is only necessary to use here -- dep_ck "fzf" -- menu_command='column -t -s "$tab_space" | fzf -m --bind change:top --tabstop=1 --layout=reverse --delimiter="$tab_space" --nth=1,2 --expect="$shortcuts" $FZF_DEFAULT_OPTS' -+ dep_ck "@fzf@" -+ menu_command='@column@ -t -s "$tab_space" | @fzf@ -m --bind change:top --tabstop=1 --layout=reverse --delimiter="$tab_space" --nth=1,2 --expect="$shortcuts" $FZF_DEFAULT_OPTS' - format_fzf - else - # Dmenu doesn't render tabs so removing it -- menu_command='tr -d "$tab_space" | '"$external_menu" -+ menu_command='@tr@ -d "$tab_space" | '"$external_menu" - format_ext_menu - fi - } -@@ -461,13 +461,13 @@ ID="ytfzf-ueberzug" - WIDTH=$FZF_PREVIEW_COLUMNS - HEIGHT=$FZF_PREVIEW_LINES - start_ueberzug () { -- [ -e $FIFO ] || { mkfifo "$FIFO" || exit 1 ; } -- ueberzug layer --parser json --silent < "$FIFO" & -+ [ -e $FIFO ] || { @mkfifo@ "$FIFO" || exit 1 ; } -+ @ueberzug@ layer --parser json --silent < "$FIFO" & - exec 3>"$FIFO" - } - stop_ueberzug () { - exec 3>&- -- rm "$FIFO" > /dev/null 2>&1 -+ @rm@ "$FIFO" > /dev/null 2>&1 - } - - preview_img () { -@@ -476,12 +476,12 @@ preview_img () { - shorturl=${args##*${tab_space}|} - shorturl="${shorturl%% *}" - -- json_obj=$(printf "%s" "$videos_json" | jq '.[]|select( .videoID == "'"$shorturl"'")') -+ json_obj=$(printf "%s" "$videos_json" | @jq@ '.[]|select( .videoID == "'"$shorturl"'")') - - - IFS=$tab_space read -r title channel duration views date description <<-EOF - $( -- printf "%s" "$json_obj" | jq -r \ -+ printf "%s" "$json_obj" | @jq@ -r \ - ' - [.title,.channel,.duration,.views,.date,.description]|@tsv - ' -@@ -530,31 +530,31 @@ preview_img () { - } > "$FIFO" ;; - catimg) - printf "\n" -- catimg -w "$((thumb_width * 2))" "$IMAGE" ;; -+ @catimg@ -w "$((thumb_width * 2))" "$IMAGE" ;; - jp2a) - printf "\n" -- jp2a --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=24 "$IMAGE" ;; -+ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=24 "$IMAGE" ;; - jp2a-8) - printf "\n" -- jp2a --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=8 "$IMAGE" ;; -+ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=8 "$IMAGE" ;; - jp2a-4) - printf "\n" -- jp2a --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=4 "$IMAGE" ;; -+ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" --colors --color-depth=4 "$IMAGE" ;; - jp2a-gray|jp2a-grey) - printf "\n" -- jp2a --size="${thumb_width}x$((thumb_height / 2))" "$IMAGE" ;; -+ @jp2a@ --size="${thumb_width}x$((thumb_height / 2))" "$IMAGE" ;; - chafa) - printf "\n" -- chafa --size="${thumb_width}x${thumb_height}" "$IMAGE" ;; -+ @chafa@ --size="${thumb_width}x${thumb_height}" "$IMAGE" ;; - chafa-gray|chafa-grey) - printf "\n" 
-- chafa --size="${thumb_width}x${thumb_height}" --colors=2 "$IMAGE" ;; -+ @chafa@ --size="${thumb_width}x${thumb_height}" --colors=2 "$IMAGE" ;; - chafa-4) - printf "\n" -- chafa --size="${thumb_width}x${thumb_height}" --colors=16 "$IMAGE" ;; -+ @chafa@ --size="${thumb_width}x${thumb_height}" --colors=16 "$IMAGE" ;; - chafa-8) - printf "\n" -- chafa --size="${thumb_width}x${thumb_height}" --colors=256 "$IMAGE" ;; -+ @chafa@ --size="${thumb_width}x${thumb_height}" --colors=256 "$IMAGE" ;; - custom) - if ! function_exists "handle_display_img"; then - printf "\033[031mERROR[#07]: \033[0m\033[1mhandle_display_img\033[0m is not defined" >&2 -@@ -585,20 +585,20 @@ download_thumbnails () { - if [ "$thumbnail_quality" -eq 1 ]; then - image_download () { - # higher quality images -- curl -s "$Url" -G --data-urlencode "sqp=" > "$thumb_dir/$Name.png" -+ @curl@ -s "$Url" -G --data-urlencode "sqp=" > "$thumb_dir/$Name.png" - } - else - image_download () { -- curl -s "$Url" > "$thumb_dir/$Name.png" -+ @curl@ -s "$Url" > "$thumb_dir/$Name.png" - } - fi - - print_info "Downloading Thumbnails...\n" - thumb_urls=$(printf "%s" "$*" |\ -- jq -r '.[]|[.thumbs,.videoID]|@tsv' ) -+ @jq@ -r '.[]|[.thumbs,.videoID]|@tsv' ) - - while IFS=$tab_space read -r Url Name; do -- sleep 0.001 -+ @sleep@ 0.001 - { - image_download - } & -@@ -628,7 +628,7 @@ get_sp_filter () { - #another example is sort by filter + upload date filter only changes one character as well - if [ -n "$filter_id" ]; then - #gets the character in the filter_id that needs to be replaced if upload_date_filter is also given -- upload_date_character=$(printf "%s" "$filter_id" | awk '{print substr($1, 8, 1)}') -+ upload_date_character=$(printf "%s" "$filter_id" | @awk@ '{print substr($1, 8, 1)}') - fi - - #For each of these, if upload_date_character is unset, the filter_id should be the normal filter -@@ -650,7 +650,7 @@ get_sp_filter () { - if [ -n "$upload_date_character" ]; then - #replaces the 8th character in the filter_id with the appropriate character - #the 8th character specifies the upload_date_filter -- sp=$(printf "%s" "$filter_id" | sed 's/\(.\{7\}\)./\1'"$upload_date_character"'/') -+ sp=$(printf "%s" "$filter_id" | @sed@ 's/\(.\{7\}\)./\1'"$upload_date_character"'/') - #otherwise set it to the filter_id - else - sp=$filter_id -@@ -660,15 +660,15 @@ get_sp_filter () { - - get_yt_json () { - # scrapes the json embedded in the youtube html page -- printf "%s" "$*" | sed -n '/var *ytInitialData/,$p' | tr -d '\n' |\ -- sed -E ' s_^.*var ytInitialData ?=__ ; s_;</script>.*__ ;' -+ printf "%s" "$*" | @sed@ -n '/var *ytInitialData/,$p' | @tr@ -d '\n' |\ -+ @sed@ -E ' s_^.*var ytInitialData ?=__ ; s_;</script>.*__ ;' - } - - get_yt_html () { - link=$1 - query=$2 - printf "%s" "$( -- curl "$link" -s \ -+ @curl@ "$link" -s \ - -G --data-urlencode "search_query=$query" \ - -G --data-urlencode "sp=$sp" \ - -H 'Authority: www.youtube.com' \ -@@ -684,7 +684,7 @@ get_video_data () { - # outputs tab and pipe separated fields: title, channel, view count, video length, video upload date, and the video id/url - # from the videos_json - printf "%s" "$*" |\ -- jq -r '.[]| "\(.title)'"$tab_space"'|\(.channel)'"$tab_space"'|\(.views)'"$tab_space"'|\(.duration)'"$tab_space"'|\(.date)'"$tab_space"'|\(.videoID)"' -+ @jq@ -r '.[]| "\(.title)'"$tab_space"'|\(.channel)'"$tab_space"'|\(.views)'"$tab_space"'|\(.duration)'"$tab_space"'|\(.date)'"$tab_space"'|\(.videoID)"' - } - - scrape_channel () { -@@ -694,7 +694,7 @@ scrape_channel () { - channel_url=$* - - # 
Converting channel title page url to channel video url -- if ! printf "%s" "$channel_url" | grep -q '/videos *$'; then -+ if ! printf "%s" "$channel_url" | @grep@ -q '/videos *$'; then - channel_url=${channel_url%/featured}/videos - fi - -@@ -706,8 +706,8 @@ scrape_channel () { - fi - - #gets the channel name from title of page -- channel_name=$(printf "%s" "$yt_html" | grep -o '<title>.*</title>' | -- sed \ -+ channel_name=$(printf "%s" "$yt_html" | @grep@ -o '<title>.*</title>' | -+ @sed@ \ - -e 's/ - YouTube//' \ - -e 's/<\/\?title>//g' \ - -e "s/'/'/g" \ -@@ -723,7 +723,7 @@ scrape_channel () { - - #gets a list of videos - videos_json=$(printf "%s" "$yt_json" |\ -- jq '[ .contents | ..|.gridVideoRenderer? | -+ @jq@ '[ .contents | ..|.gridVideoRenderer? | - select(. !=null) | - { - title: .title.runs[0].text, -@@ -736,7 +736,7 @@ scrape_channel () { - } - ]') - -- videos_json=$(printf "%s" "$videos_json" | jq '.[0:'$sub_link_count']') -+ videos_json=$(printf "%s" "$videos_json" | @jq@ '.[0:'$sub_link_count']') - printf "%s\n" "$videos_json" >> "$tmp_video_json_file" - #checks if it's empty in case it was defined in a config function eg: on_get_search - [ -z "$videos_data" ] && videos_data=$(get_video_data "$videos_json") -@@ -768,11 +768,11 @@ get_trending_url_data () { - scrape_pt () { - #gets a list of videos - pt_json=$( -- curl \ -+ @curl@ \ - -s "https://sepiasearch.org/api/v1/search/videos" \ - -G --data-urlencode "search=$*") - videos_json=$(printf "%s" "$pt_json" |\ -- jq '[ .data | .[] | -+ @jq@ '[ .data | .[] | - { - title: .name, - channel: .channel.displayName, -@@ -829,7 +829,7 @@ scrape_yt () { - fi - - #gets a list of videos -- videos_json=$(printf "%s" "$yt_json" | jq '[ .contents| -+ videos_json=$(printf "%s" "$yt_json" | @jq@ '[ .contents| - ..|.videoRenderer? | - select(. !=null) | - { -@@ -844,7 +844,7 @@ scrape_yt () { - } - ]') - -- playlist_json=$(printf "%s" "$yt_json" | jq '[ .contents| -+ playlist_json=$(printf "%s" "$yt_json" | @jq@ '[ .contents| - ..|.playlistRenderer? | - select(. 
!=null) | - { -@@ -904,28 +904,28 @@ get_search_query () { - #> To select videos from videos_data - user_selection () { - #remove subscription separators -- videos_data_clean=$(printf "%s" "$videos_data" | sed "/.*$tab_space$/d") -+ videos_data_clean=$(printf "%s" "$videos_data" | @sed@ "/.*$tab_space$/d") - - #$selected_data is the video the user picked - #picks the first n videos - if [ "$select_all" -eq 1 ] ; then - selected_data=$videos_data_clean - elif [ "$auto_select" -eq 1 ] ; then -- selected_data=$(printf "%s\n" "$videos_data_clean" | sed "${link_count}"q ) -+ selected_data=$(printf "%s\n" "$videos_data_clean" | @sed@ "${link_count}"q ) - #picks n random videos - elif [ "$random_select" -eq 1 ] ; then -- selected_data=$(printf "%s\n" "$videos_data_clean" | posix_shuf | head -n${link_count}) -+ selected_data=$(printf "%s\n" "$videos_data_clean" | posix_shuf | @head@ -n${link_count}) - #posix_shuf, pick the first $link_count videos - - #show thumbnail menu - elif [ "$show_thumbnails" -eq 1 ] ; then -- dep_ck "ueberzug" "fzf" -+ dep_ck "@ueberzug@" "@fzf@" - export YTFZF_THUMB_DISP_METHOD="$thumb_disp_method" - [ "$thumb_disp_method" = "ueberzug" ] && start_ueberzug - #thumbnails only work in fzf, use fzf -- menu_command="fzf -m --tabstop=1 --bind change:top --delimiter=\"$tab_space\" \ -+ menu_command="@fzf@ -m --tabstop=1 --bind change:top --delimiter=\"$tab_space\" \ - --nth=1,2 --expect='$shortcuts' $FZF_DEFAULT_OPTS \ -- --layout=reverse --preview \"sh $0 -U {}\" \ -+ --layout=reverse --preview \"@sh@ $0 -U {}\" \ - --preview-window \"$PREVIEW_SIDE:50%:noborder:wrap\"" - selected_data=$( title_len=200 video_menu "$videos_data" ) - [ "$thumb_disp_method" = "ueberzug" ] && stop_ueberzug -@@ -951,10 +951,10 @@ handle_shortcuts () { - case $selected_key in - "$urls_shortcut") printf "%s\n" $selected_urls; return 1 ;; - "$title_shortcut") -- printf "%s\n" "$selected_data" | awk -F " " '{print $1}'; return 1 ;; -+ printf "%s\n" "$selected_data" | @awk@ -F " " '{print $1}'; return 1 ;; - "$open_browser_shortcut") - for url in $selected_urls; do -- nohup $BROWSER "$url" >/dev/null 2>&1 -+ @nohup@ $BROWSER "$url" >/dev/null 2>&1 - done - return 1 ;; - "$watch_shortcut") is_download=0; is_audio_only=0; return 0;; -@@ -988,10 +988,10 @@ format_user_selection () { - 11) selected_urls=$selected_urls$new_line'https://www.youtube.com/watch?v='$surl ;; - 34) selected_urls=$selected_urls$new_line'https://www.youtube.com/playlist?list='$surl ;; - 36) -- selected_urls=$selected_urls$new_line"$(printf "%s" "$videos_json" | jq '.[].url' | grep -F "$surl" | tr -d '"')" ;; -+ selected_urls=$selected_urls$new_line"$(printf "%s" "$videos_json" | @jq@ '.[].url' | @grep@ -F "$surl" | @tr@ -d '"')" ;; - *) continue ;; - esac -- refined_selected_data=$refined_selected_data$new_line$(printf '%s' "$videos_data" | grep "|$surl" ) -+ refined_selected_data=$refined_selected_data$new_line$(printf '%s' "$videos_data" | @grep@ "|$surl" ) - done<<-EOF - $selected_data - EOF -@@ -1014,9 +1014,9 @@ print_data () { - get_video_format () { - # select format if flag given - [ $show_format -eq 0 ] && return -- formats=$(youtube-dl -F "$(printf "$selected_urls")") -- line_number=$(printf "$formats" | grep -n '.*extension resolution.*' | cut -d: -f1) -- quality=$(printf "$formats \n1 2 xAudio" | awk -v lineno=$line_number 'FNR > lineno {print $3}' | sort -n | awk -F"x" '{print $2 "p"}' | uniq | sed -e "s/Audiop/Audio/" -e "/^p$/d" | eval "$menu_command" | sed "s/p//g") -+ formats=$(@youtube-dl@ -F "$(printf 
"$selected_urls")") -+ line_number=$(printf "$formats" | @grep@ -n '.*extension resolution.*' | @cut@ -d: -f1) -+ quality=$(printf "$formats \n1 2 xAudio" | @awk@ -v lineno=$line_number 'FNR > lineno {print $3}' | @sort@ -n | @awk@ -F"x" '{print $2 "p"}' | @uniq@ | @sed@ -e "s/Audiop/Audio/" -e "/^p$/d" | eval "$menu_command" | @sed@ "s/p//g") - [ -z "$quality" ] && exit; - [ $quality = "Audio" ] && video_pref= && video_player="$audio_player" || video_pref="bestvideo[height=?$quality][vcodec!=?vp9]+bestaudio/best" - -@@ -1026,9 +1026,9 @@ get_video_format () { - get_sub_lang () { - if [ $auto_caption -eq 1 ]; then - #Gets the auto generated subs and stores them in a file -- sub_list=$(youtube-dl --list-subs --write-auto-sub "$selected_urls" | sed '/Available subtitles/,$d' | awk '{print $1}' | sed '1d;2d;3d') -+ sub_list=$(@youtube-dl@ --list-subs --write-auto-sub "$selected_urls" | @sed@ '/Available subtitles/,$d' | @awk@ '{print $1}' | @sed@ '1d;2d;3d') - if [ -n "$sub_list" ]; then -- [ -n "$selected_sub" ] || selected_sub=$(printf "$sub_list" | eval "$menu_command") && youtube-dl --sub-lang $selected_sub --write-auto-sub --skip-download "$selected_urls" -o /tmp/ytfzf && YTFZF_SUBT_NAME="--sub-file=/tmp/ytfzf.$selected_sub.vtt" || printf "Auto generated subs not available." -+ [ -n "$selected_sub" ] || selected_sub=$(printf "$sub_list" | eval "$menu_command") && @youtube-dl@ --sub-lang $selected_sub --write-auto-sub --skip-download "$selected_urls" -o /tmp/ytfzf && YTFZF_SUBT_NAME="--sub-file=/tmp/ytfzf.$selected_sub.vtt" || printf "Auto generated subs not available." - fi - unset sub_list - fi -@@ -1046,10 +1046,10 @@ open_player () { - if [ $detach_player -eq 1 ]; then - if [ -z "$video_pref" ] || [ $is_audio_only -eq 1 ]; then - printf "Opening Player: %s\n" "$video_player $*" -- setsid -f $video_player "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1 -+ @setsid@ -f $video_player "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1 - else - printf "Opening Player: %s\n" "$video_player_format$video_pref $*" -- setsid -f $video_player_format"$video_pref" "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1 -+ @setsid@ -f $video_player_format"$video_pref" "$@" $YTFZF_SUBT_NAME >/dev/null 2>&1 - fi - return - fi -@@ -1064,9 +1064,9 @@ open_player () { - fi - elif [ $is_download -eq 1 ]; then - if [ -z "$video_pref" ]; then -- youtube-dl "$@" "$YTFZF_SUBT_NAME" -+ @youtube-dl@ "$@" "$YTFZF_SUBT_NAME" - else -- youtube-dl -f "$video_pref" "$@" $YTFZF_SUBT_NAME || video_pref= open_player "$@" -+ @youtube-dl@ -f "$video_pref" "$@" $YTFZF_SUBT_NAME || video_pref= open_player "$@" - fi - fi - } -@@ -1087,7 +1087,7 @@ play_url () { - fi - - #Delete the temp auto-gen subtitle file -- [ $auto_caption -eq 1 ] && rm -f "${YTFZF_SUBT_NAME#*=}" -+ [ $auto_caption -eq 1 ] && @rm@ -f "${YTFZF_SUBT_NAME#*=}" - - unset player_urls - } -@@ -1102,7 +1102,7 @@ session_is_running () { - #> removes tmp files and clutter - clean_up () { - if ! 
session_is_running ; then -- [ -d "$thumb_dir" ] && rm -r "$thumb_dir" -+ [ -d "$thumb_dir" ] && @rm@ -r "$thumb_dir" - : > "$pid_file" - function_exists "on_exit" && on_exit - fi -@@ -1124,9 +1124,9 @@ save_before_exit () { - check_if_url () { - # to check if given input is a url - url_regex='^https\?://.*' -- if printf "%s" "$1" | grep -q "$url_regex"; then -+ if printf "%s" "$1" | @grep@ -q "$url_regex"; then - is_url=1 -- selected_urls=$(printf "%s" "$1" | tr ' ' '\n') -+ selected_urls=$(printf "%s" "$1" | @tr@ ' ' '\n') - scrape="url" - else - is_url=0 -@@ -1139,10 +1139,10 @@ get_history () { - if [ "$enable_hist" -eq 1 ]; then - [ -e "$history_file" ] || : > "$history_file" - #gets history data in reverse order (makes it most recent to least recent) -- hist_data=$( sed '1!G; h; $!d' "$history_file" ) -+ hist_data=$( @sed@ '1!G; h; $!d' "$history_file" ) - [ -z "$hist_data" ] && printf "History is empty!\n" >&2 && return 1; - #removes duplicate values from $history_data -- videos_data=$(printf "%s" "$hist_data" | uniq ) -+ videos_data=$(printf "%s" "$hist_data" | @uniq@ ) - [ "$sort_videos_data" -eq 1 ] && videos_data="$(printf "%s" "$videos_data" | sort_video_data_fn)" - else - printf "History is not enabled. Please enable it to use this option (-H).\n" >&2; -@@ -1177,10 +1177,10 @@ get_search_history () { - if [ "$enable_search_hist" -eq 1 ]; then - [ -e "$search_history_file" ] || : > "$search_history_file" - #gets history data in reverse order (makes it most recent to least recent) -- hist_data=$( sed '1!G; h; $!d' "$search_history_file" ) -+ hist_data=$( @sed@ '1!G; h; $!d' "$search_history_file" ) - [ -z "$hist_data" ] && printf "Search history is empty!\n" >&2 && return 1; - #removes duplicate values from $history_data -- search_history=$(printf "%s" "$hist_data" | uniq ) -+ search_history=$(printf "%s" "$hist_data" | @uniq@ ) - else - printf "Search history is not enabled. Please enable it to use this option (-q).\n" >&2; - exit 1; -@@ -1190,7 +1190,7 @@ get_search_history () { - - set_search_history () { - [ -z "$search_query" ] && return -- [ $enable_search_hist -eq 1 ] && printf "%s\t%s\n" "$(date '+%Y-%m-%d %H:%M:%S')" "$search_query" >> "$search_history_file" ; -+ [ $enable_search_hist -eq 1 ] && printf "%s\t%s\n" "$(@date@ '+%Y-%m-%d %H:%M:%S')" "$search_query" >> "$search_history_file" ; - } - - search_history_menu () { -@@ -1200,15 +1200,15 @@ search_history_menu () { - #when using an external menu, the search history will be done there - choice=$( printf "%s\n" "$search_history" | eval "$external_menu" ) - else -- choice="$( printf "%s\n" "$search_history" | fzf --prompt="$search_history_prompt" --print-query --no-multi -d '\t' --with-nth=2.. --expect='alt-enter' --bind='tab:replace-query' )" -+ choice="$( printf "%s\n" "$search_history" | @fzf@ --prompt="$search_history_prompt" --print-query --no-multi -d '\t' --with-nth=2.. 
--expect='alt-enter' --bind='tab:replace-query' )" - fi - - # first line is the fzf query (what the user types in fzf) - # second line is the fzf --expect key pressed - # third line is the search_history selection made -- query="$( printf "%s" "$choice" | sed -n '1p' )" -- key="$( printf "%s" "$choice" | sed -n '2p' )" -- selection="$( printf "%s" "$choice" | sed -n '3p' )" -+ query="$( printf "%s" "$choice" | @sed@ -n '1p' )" -+ key="$( printf "%s" "$choice" | @sed@ -n '2p' )" -+ selection="$( printf "%s" "$choice" | @sed@ -n '3p' )" - - # if no search history selection has been made - # and the user typed a query, use that instead -@@ -1225,7 +1225,7 @@ search_history_menu () { - search_query="$query" - return;; - esac -- search_query="$( printf "%s" "$selection" | awk -F'\t' '{printf "%s", $NF}' )" -+ search_query="$( printf "%s" "$selection" | @awk@ -F'\t' '{printf "%s", $NF}' )" - } - - ! function_exists "send_select_video_notif" && send_select_video_notif () { -@@ -1244,13 +1244,13 @@ search_history_menu () { - - #if downloading, say Downloading not currently playing - [ $is_download -eq 1 ] && title="Downloading" || title="Currently playing" -- notify-send "$title" "$message" -i "$video_thumb" -+ @notify-send@ "$title" "$message" -i "$video_thumb" - - unset message video_thumb title - } - - send_notify () { -- videos_selected_count=$(printf "%s\n" "$*" | wc -l) -+ videos_selected_count=$(printf "%s\n" "$*" | @wc@ -l) - while IFS=$tab_space read -r video_title video_channel video_views video_duration video_date video_shorturl; do - send_select_video_notif - done << EOF -@@ -1284,14 +1284,14 @@ if ! function_exists "data_sort_key"; then - sort_by="${5#|}" - sort_by="${sort_by#Streamed}" - #print the data that should be sorted by -- printf "%d" "$(date -d "${sort_by}" '+%s')" -+ printf "%d" "$(@date@ -d "${sort_by}" '+%s')" - unset sort_by - } - fi - #the function to use for sorting - if ! function_exists "data_sort_fn"; then - data_sort_fn () { -- sort -nr -+ @sort@ -nr - } - fi - sort_video_data_fn () { -@@ -1300,7 +1300,7 @@ sort_video_data_fn () { - IFS="$tab_space" - #run the key function to get the value to sort by - printf "%s\t%s\n" "$(data_sort_key $line)" "$line" -- done | data_sort_fn | cut -f2- -+ done | data_sort_fn | @cut@ -f2- - unset IFS line - } - -@@ -1314,19 +1314,19 @@ scrape_subscriptions () { - while IFS= read -r url; do - scrape_channel "$url" & - done <<-EOF -- $( sed \ -+ $( @sed@ \ - -e "s/#.*//" \ - -e "/^[[:space:]]*$/d" \ - -e "s/[[:space:]]*//g" \ - "$subscriptions_file") - EOF - wait -- videos_json="$(cat "$tmp_video_json_file")" -+ videos_json="$(@cat@ "$tmp_video_json_file")" - export videos_json - if [ $sort_videos_data -eq 1 ]; then - videos_data=$(sort_video_data_fn < "$tmp_video_data_file") - else -- videos_data=$(cat "$tmp_video_data_file") -+ videos_data=$(@cat@ "$tmp_video_data_file") - fi - } - -@@ -1346,11 +1346,11 @@ create_subs () { - : > "$config_dir/subscriptions" - - # check how many subscriptions there are in the file -- sublength=$( jq '. | length' < "$yt_sub_import_file" ) -+ sublength=$( @jq@ '. 
| length' < "$yt_sub_import_file" ) - -- for i in $(seq $((sublength - 1))); do -- channelInfo=$(jq --argjson index ${i} '[ "https://www.youtube.com/channel/" + .[$index].snippet.resourceId.channelId + "/videos", "#" + .[$index].snippet.title ]' < "$yt_sub_import_file") -- printf "%s\n" "$(printf "%s" "$channelInfo" | tr -d '[]"\n,')" >> "$subscriptions_file" -+ for i in $(@seq@ $((sublength - 1))); do -+ channelInfo=$(@jq@ --argjson index ${i} '[ "https://www.youtube.com/channel/" + .[$index].snippet.resourceId.channelId + "/videos", "#" + .[$index].snippet.title ]' < "$yt_sub_import_file") -+ printf "%s\n" "$(printf "%s" "$channelInfo" | @tr@ -d '[]"\n,')" >> "$subscriptions_file" - done - exit - } -@@ -1367,10 +1367,10 @@ verify_thumb_disp_method () { - - #sort -R is not posix - posix_shuf () { -- awk -F '\n' ' -+ @awk@ -F '\n' ' - BEGIN {srand()} #set the random seed at the start - {print rand() " " $0} #prepend a random number for each line' |\ -- sort | sed -E 's/[^ ]* //' -+ @sort@ | @sed@ -E 's/[^ ]* //' - #sort by the random numbers, remove the random number - } - -@@ -1486,8 +1486,8 @@ parse_opt () { - exit ;; - version) - printf "\033[1mytfzf:\033[0m %s\n" "$YTFZF_VERSION" -- printf "\033[1myoutube-dl:\033[0m %s\n" "$(youtube-dl --version)" -- command -v "fzf" 1>/dev/null && printf "\033[1mfzf:\033[0m %s\n" "$(fzf --version)" -+ printf "\033[1myoutube-dl:\033[0m %s\n" "$(@youtube-dl@ --version)" -+ command -v "@fzf@" 1>/dev/null && printf "\033[1mfzf:\033[0m %s\n" "$(@fzf@ --version)" - exit ;; - - subt) -@@ -1559,19 +1559,19 @@ done - shift $((OPTIND-1)) - - #only apply to ext_menu since they dont have a terminal to print to --[ $is_ext_menu -eq 1 ] && command -v notify-send 1>/dev/null 2>&1 && ext_menu_notifs=1 || ext_menu_notifs=0 -+[ $is_ext_menu -eq 1 ] && command -v @notify-send@ 1>/dev/null 2>&1 && ext_menu_notifs=1 || ext_menu_notifs=0 - - #used for thumbnail previews in ueberzug - if [ $is_ext_menu -eq 0 ]; then -- export TTY_LINES=$(tput lines) -- export TTY_COLS=$(tput cols) -+ export TTY_LINES=$(@tput@ lines) -+ export TTY_COLS=$(@tput@ cols) - fi - - #if both are true, it defaults to using fzf, and if fzf isnt installed it will throw an error - #so print this error instead and set $show_thumbnails to 0 - if [ $is_ext_menu -eq 1 ] && [ $show_thumbnails -eq 1 ]; then - [ $ext_menu_notifs -eq 1 ] &&\ -- notify-send "warning" "Currently thumbnails do not work in external menus" ||\ -+ @notify-send@ "warning" "Currently thumbnails do not work in external menus" ||\ - printf "\033[33mWARNING: Currently thumbnails do not work in external menus\033[0m\n" >&2 - show_thumbnails=0 - fi --- -2.32.0 - diff --git a/gnu/packages/patches/ytfzf-updates.patch b/gnu/packages/patches/ytfzf-updates.patch deleted file mode 100644 index 40e7c138b0..0000000000 --- a/gnu/packages/patches/ytfzf-updates.patch +++ /dev/null @@ -1,44 +0,0 @@ -From ceb6836cd31653267506957cd0ccf78046404d3b Mon Sep 17 00:00:00 2001 -From: Raghav Gururajan <rg@raghavgururajan.name> -Date: Mon, 5 Jul 2021 06:47:38 -0400 -Subject: [PATCH 2/2] Disable updates within the application. - -Patch the code responsible for self-updating the application. 
- -Co-authored-by: jgart <jgart@dismail.de> ---- - ytfzf | 18 ++---------------- - 1 file changed, 2 insertions(+), 16 deletions(-) - -diff --git a/ytfzf b/ytfzf -index f0f2e16..2d1bb2e 100755 ---- a/ytfzf -+++ b/ytfzf -@@ -1260,22 +1260,8 @@ EOF - } - - update_ytfzf () { -- branch="$1" -- updatefile="/tmp/ytfzf-update" -- curl -L "https://raw.githubusercontent.com/pystardust/ytfzf/$branch/ytfzf" -o "$updatefile" -- -- if sed -n '1p' < "$updatefile" | grep -q '#!/bin/sh'; then -- chmod 755 "$updatefile" -- [ "$(uname)" = "Darwin" ] && prefix="/usr/local/bin" || prefix="/usr/bin" -- function_exists "sudo" && doasroot="sudo" || doasroot="doas" -- $doasroot cp "$updatefile" "$prefix/ytfzf" -- unset prefix doasroot -- else -- printf "%bFailed to update ytfzf. Try again later.%b" "$c_red" "$c_reset" -- fi -- -- rm "$updatefile" -- exit 0 -+ printf "%bUpdates have to be installed with Guix.%b\n" "$c_red" "$c_reset" -+ exit 1 - } - - #gives a value to sort by (this will give the unix time the video was uploaded) --- -2.32.0 - |
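Note on the placeholders left by several of the patches above: openssh-trust-guix-store-directory.patch inserts @STORE_DIRECTORY@, valgrind-fix-default-debuginfo-path.patch inserts DEFAULT_DEBUGINFO_PATH and DEBUGPATH_EXTRA_BYTES_*, and the (here removed) ytfzf-programs.patch wrapped command names as @mpv@, @jq@ and so on, precisely so that the corresponding package definitions can rewrite them with absolute store file names at build time. The snippet below is only a minimal sketch of that kind of custom phase, assuming a gexp-style recipe on gnu-build-system; the phase name, the target file "ytfzf" and the short command list are illustrative, not the actual phases from this commit.

;; Sketch of a placeholder-substitution phase (illustrative, not the real
;; package code).  substitute* and search-input-file come from
;; (guix build utils) and are available inside build phases.
(arguments
 (list
  #:phases
  #~(modify-phases %standard-phases
      (add-after 'unpack 'substitute-program-placeholders
        (lambda* (#:key inputs #:allow-other-keys)
          ;; Replace each "@command@" marker with the absolute file name
          ;; of that command found among the package inputs.
          (substitute* "ytfzf"
            (("@mpv@") (search-input-file inputs "/bin/mpv"))
            (("@jq@")  (search-input-file inputs "/bin/jq"))
            (("@fzf@") (search-input-file inputs "/bin/fzf"))))))))

Pinning references this way keeps the patched sources working even when none of these tools are on the user's PATH, since every reference points at an exact /gnu/store item.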