Diffstat (limited to 'infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks')
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh | 37
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh | 41
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh | 237
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh | 7
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh | 9
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh | 32
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh | 42
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/die.sh | 21
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh | 20
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh | 22
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh | 40
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh | 11
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh | 230
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh | 6
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh | 5
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh | 25
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh | 28
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh | 146
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh | 23
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh | 22
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh | 19
-rwxr-xr-x  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh | 25
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh | 199
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh | 119
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh | 22
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh | 4
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/role.bash | 71
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh | 37
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh | 13
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh | 34
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh | 5
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh | 88
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/strip.sh | 57
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh | 12
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh | 1
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh | 18
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh | 45
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix | 177
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix | 30
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile | 30
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh | 93
41 files changed, 2103 insertions, 0 deletions
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh
new file mode 100644
index 000000000000..6a40073fb234
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-blas.sh
@@ -0,0 +1,37 @@
+# Ensure that we are always linking against “libblas.so.3” and
+# “liblapack.so.3”.
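+#
+# For illustration only (the derivation-side snippet is an assumption based on
+# the message printed below, not part of this hook): depend on the generic
+# providers so the implementation stays switchable, e.g.
+#
+#   buildInputs = [ blas lapack ];   # rather than mkl or openblas directly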
+
+auditBlas() {
+ local dir="$prefix"
+ [ -e "$dir" ] || return 0
+
+ local i
+ while IFS= read -r -d $'\0' i; do
+ if ! isELF "$i"; then continue; fi
+
+ if $OBJDUMP -p "$i" | grep 'NEEDED' | awk '{ print $2; }' | grep -q '\(libmkl_rt.so\|libopenblas.so.0\)'; then
+ echo "$i refers to a specific implementation of BLAS or LAPACK."
+ echo "This prevents users from switching BLAS/LAPACK implementations."
+ echo "Add \`blas' or \`lapack' to buildInputs instead of \`mkl' or \`openblas'."
+ exit 1
+ fi
+
+ (IFS=:
+ for dir in "$(patchelf --print-rpath "$i")"; do
+ if [ -f "$dir/libblas.so.3" ] || [ -f "$dir/libblas.so" ]; then
+ if [ "$dir" != "@blas@/lib" ]; then
+ echo "$dir is not allowed to contain a library named libblas.so.3"
+ exit 1
+ fi
+ fi
+ if [ -f "$dir/liblapack.so.3" ] || [ -f "$dir/liblapack.so" ]; then
+ if [ "$dir" != "@lapack@/lib" ]; then
+ echo "$dir is not allowed to contain a library named liblapack.so.3"
+ exit 1
+ fi
+ fi
+ done)
+ done < <(find "$dir" -type f -print0)
+}
+
+fixupOutputHooks+=(auditBlas)
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh
new file mode 100644
index 000000000000..c9dd32d1dd22
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/audit-tmpdir.sh
@@ -0,0 +1,41 @@
+# Check whether RPATHs or wrapper scripts contain references to
+# $TMPDIR. This is a serious security bug because it allows any user
+# to inject files into search paths of other users' processes.
+#
+# It might be better to have Nix scan build output for any occurrence
+# of $TMPDIR (which would also be good for reproducibility), but at
+# the moment that would produce too many spurious errors (e.g. debug
+# info or assertion messages that refer to $TMPDIR).
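+#
+# The check can be skipped per package; a minimal sketch (assuming the usual
+# mkDerivation attribute convention matching the "noAuditTmpdir" test below):
+#
+#   noAuditTmpdir = true;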
+
+fixupOutputHooks+=('if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi')
+
+auditTmpdir() {
+ local dir="$1"
+ [ -e "$dir" ] || return 0
+
+ header "checking for references to $TMPDIR/ in $dir..."
+
+ local i
+ while IFS= read -r -d $'\0' i; do
+ if [[ "$i" =~ .build-id ]]; then continue; fi
+
+ if isELF "$i"; then
+ if { printf :; patchelf --print-rpath "$i"; } | grep -q -F ":$TMPDIR/"; then
+ echo "RPATH of binary $i contains a forbidden reference to $TMPDIR/"
+ exit 1
+ fi
+ fi
+
+ if isScript "$i"; then
+ if [ -e "$(dirname "$i")/.$(basename "$i")-wrapped" ]; then
+ if grep -q -F "$TMPDIR/" "$i"; then
+ echo "wrapper script $i contains a forbidden reference to $TMPDIR/"
+ exit 1
+ fi
+ fi
+ fi
+
+ done < <(find "$dir" -type f -print0)
+
+ stopNest
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh
new file mode 100644
index 000000000000..4f7c0c14304c
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/auto-patchelf.sh
@@ -0,0 +1,237 @@
+declare -a autoPatchelfLibs
+
+gatherLibraries() {
+ autoPatchelfLibs+=("$1/lib")
+}
+
+addEnvHooks "$targetOffset" gatherLibraries
+
+isExecutable() {
+ # For dynamically linked ELF files it would be enough to check just for the
+ # INTERP section. However, we won't catch statically linked executables as
+ # they only have an ELF type of EXEC but no INTERP.
+ #
+ # So what we do here is just check whether *either* the ELF type is EXEC
+ # *or* there is an INTERP section. This also catches position-independent
+ # executables, as they typically have an INTERP section but their ELF type
+ # is DYN.
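+    #
+    # Illustrative readelf output (exact wording varies between binutils
+    # versions): a position-independent executable typically shows
+    #     Type:    DYN (Shared object file)
+    #     INTERP   0x...
+    # while a statically linked executable only matches the "Type: EXEC" line.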
+ isExeResult="$(LANG=C $READELF -h -l "$1" 2> /dev/null \
+ | grep '^ *Type: *EXEC\>\|^ *INTERP\>')"
+ # not using grep -q, because it can cause Broken pipe
+ [ -n "$isExeResult" ]
+}
+
+# We cache dependencies so that we don't need to search through all of them on
+# every consecutive call to findDependency.
+declare -a cachedDependencies
+
+addToDepCache() {
+ local existing
+ for existing in "${cachedDependencies[@]}"; do
+ if [ "$existing" = "$1" ]; then return; fi
+ done
+ cachedDependencies+=("$1")
+}
+
+declare -gi depCacheInitialised=0
+declare -gi doneRecursiveSearch=0
+declare -g foundDependency
+
+getDepsFromSo() {
+ ldd "$1" 2> /dev/null | sed -n -e 's/[^=]*=> *\(.\+\) \+([^)]*)$/\1/p'
+}
+
+populateCacheWithRecursiveDeps() {
+ local so found foundso
+ for so in "${cachedDependencies[@]}"; do
+ for found in $(getDepsFromSo "$so"); do
+ local libdir="${found%/*}"
+ local base="${found##*/}"
+ local soname="${base%.so*}"
+ for foundso in "${found%/*}/$soname".so*; do
+ addToDepCache "$foundso"
+ done
+ done
+ done
+}
+
+getSoArch() {
+ objdump -f "$1" | sed -ne 's/^architecture: *\([^,]\+\).*/\1/p'
+}
+
+# NOTE: If you want to use this function outside of the autoPatchelf function,
+# keep in mind that the dependency cache is only valid inside the subshell
+# spawned by the autoPatchelf function, so invoking this directly will possibly
+# rebuild the dependency cache. See the autoPatchelf function below for more
+# information.
+findDependency() {
+ local filename="$1"
+ local arch="$2"
+ local lib dep
+
+ if [ $depCacheInitialised -eq 0 ]; then
+ for lib in "${autoPatchelfLibs[@]}"; do
+ for so in "$lib/"*.so*; do addToDepCache "$so"; done
+ done
+ depCacheInitialised=1
+ fi
+
+ for dep in "${cachedDependencies[@]}"; do
+ if [ "$filename" = "${dep##*/}" ]; then
+ if [ "$(getSoArch "$dep")" = "$arch" ]; then
+ foundDependency="$dep"
+ return 0
+ fi
+ fi
+ done
+
+ # Populate the dependency cache with recursive dependencies *only* if we
+ # didn't find the right dependency so far and afterwards run findDependency
+ # again, but this time with $doneRecursiveSearch set to 1 so that it won't
+ # recurse again (and thus infinitely).
+ if [ $doneRecursiveSearch -eq 0 ]; then
+ populateCacheWithRecursiveDeps
+ doneRecursiveSearch=1
+ findDependency "$filename" "$arch" || return 1
+ return 0
+ fi
+ return 1
+}
+
+autoPatchelfFile() {
+ local dep rpath="" toPatch="$1"
+
+ local interpreter="$(< "$NIX_CC/nix-support/dynamic-linker")"
+ if isExecutable "$toPatch"; then
+ patchelf --set-interpreter "$interpreter" "$toPatch"
+ if [ -n "$runtimeDependencies" ]; then
+ for dep in $runtimeDependencies; do
+ rpath="$rpath${rpath:+:}$dep/lib"
+ done
+ fi
+ fi
+
+ echo "searching for dependencies of $toPatch" >&2
+
+ # We're going to find all dependencies based on ldd output, so we need to
+ # clear the RPATH first.
+ patchelf --remove-rpath "$toPatch"
+
+ local missing="$(
+ ldd "$toPatch" 2> /dev/null | \
+ sed -n -e 's/^[\t ]*\([^ ]\+\) => not found.*/\1/p'
+ )"
+
+ # This ensures that we get the output of all missing dependencies instead
+ # of failing at the first one, because it's more useful when working on a
+ # new package where you don't yet know its dependencies.
+ local -i depNotFound=0
+
+ for dep in $missing; do
+ echo -n " $dep -> " >&2
+ if findDependency "$dep" "$(getSoArch "$toPatch")"; then
+ rpath="$rpath${rpath:+:}${foundDependency%/*}"
+ echo "found: $foundDependency" >&2
+ else
+ echo "not found!" >&2
+ depNotFound=1
+ fi
+ done
+
+ # This makes sure the builder fails if we didn't find a dependency, because
+ # the stdenv setup script is run with set -e. The actual error is emitted
+ # earlier in the previous loop.
+ [ $depNotFound -eq 0 -o -n "$autoPatchelfIgnoreMissingDeps" ]
+
+ if [ -n "$rpath" ]; then
+ echo "setting RPATH to: $rpath" >&2
+ patchelf --set-rpath "$rpath" "$toPatch"
+ fi
+}
+
+# Can be used to manually add additional directories with shared object files
+# to be included for the next autoPatchelf invocation.
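+#
+# Illustrative call (the path is a made-up example):
+#
+#   addAutoPatchelfSearchPath "$out/opt/some-app/lib"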
+addAutoPatchelfSearchPath() {
+ local -a findOpts=()
+
+ # XXX: Somewhat similar to the one in the autoPatchelf function, maybe make
+ # it DRY someday...
+ while [ $# -gt 0 ]; do
+ case "$1" in
+ --) shift; break;;
+ --no-recurse) shift; findOpts+=("-maxdepth" 1);;
+ --*)
+ echo "addAutoPatchelfSearchPath: ERROR: Invalid command line" \
+ "argument: $1" >&2
+ return 1;;
+ *) break;;
+ esac
+ done
+
+ cachedDependencies+=(
+ $(find "$@" "${findOpts[@]}" \! -type d \
+ \( -name '*.so' -o -name '*.so.*' \))
+ )
+}
+
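+# Sketch of a manual invocation, e.g. from postFixup (the hook at the bottom
+# of this file normally does this automatically):
+#
+#   autoPatchelf -- "$out"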
+autoPatchelf() {
+ local norecurse=
+
+ while [ $# -gt 0 ]; do
+ case "$1" in
+ --) shift; break;;
+ --no-recurse) shift; norecurse=1;;
+ --*)
+ echo "autoPatchelf: ERROR: Invalid command line" \
+ "argument: $1" >&2
+ return 1;;
+ *) break;;
+ esac
+ done
+
+ if [ $# -eq 0 ]; then
+ echo "autoPatchelf: No paths to patch specified." >&2
+ return 1
+ fi
+
+ echo "automatically fixing dependencies for ELF files" >&2
+
+ # Add all shared objects of the current output path to the start of
+    # cachedDependencies so that it's chosen first in findDependency.
+ addAutoPatchelfSearchPath ${norecurse:+--no-recurse} -- "$@"
+
+ # Here we actually have a subshell, which also means that
+ # $cachedDependencies is final at this point, so whenever we want to run
+ # findDependency outside of this, the dependency cache needs to be rebuilt
+ # from scratch, so keep this in mind if you want to run findDependency
+ # outside of this function.
+ while IFS= read -r -d $'\0' file; do
+ isELF "$file" || continue
+ segmentHeaders="$(LANG=C $READELF -l "$file")"
+ # Skip if the ELF file doesn't have segment headers (eg. object files).
+ # not using grep -q, because it can cause Broken pipe
+ [ -n "$(echo "$segmentHeaders" | grep '^Program Headers:')" ] || continue
+ if isExecutable "$file"; then
+ # Skip if the executable is statically linked.
+ [ -n "$(echo "$segmentHeaders" | grep "^ *INTERP\\>")" ] || continue
+ fi
+ autoPatchelfFile "$file"
+ done < <(find "$@" ${norecurse:+-maxdepth 1} -type f -print0)
+}
+
+# XXX: This should ultimately use fixupOutputHooks but we currently don't have
+# a way to enforce the order. If we have $runtimeDependencies set, the setup
+# hook of patchelf is going to ruin everything and strip out those additional
+# RPATHs.
+#
+# So what we do here is basically run in postFixup and emulate the same
+# behaviour as fixupOutputHooks because the setup hook for patchelf is run in
+# fixupOutput and the postFixup hook runs later.
+postFixupHooks+=('
+ if [ -z "${dontAutoPatchelf-}" ]; then
+ autoPatchelf -- $(for output in $outputs; do
+ [ -e "${!output}" ] || continue
+ echo "${!output}"
+ done)
+ fi
+')
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh
new file mode 100644
index 000000000000..c08cab158688
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/autoreconf.sh
@@ -0,0 +1,7 @@
+preConfigurePhases+=" autoreconfPhase"
+
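+# autoreconfPhase below honours an optional autoreconfFlags variable; a hedged
+# sketch of overriding it from a derivation (attribute name as used below):
+#
+#   autoreconfFlags = "--install --force --verbose -Wall";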
+autoreconfPhase() {
+ runHook preAutoreconf
+ autoreconf ${autoreconfFlags:---install --force --verbose}
+ runHook postAutoreconf
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh
new file mode 100644
index 000000000000..6bef786ac3ac
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/breakpoint-hook.sh
@@ -0,0 +1,9 @@
+breakpointHook() {
+ local red='\033[0;31m'
+ local no_color='\033[0m'
+
+ echo -e "${red}build failed in ${curPhase} with exit code ${exitCode}${no_color}"
+ printf "To attach install cntr and run the following command as root:\n\n"
+ sh -c "echo ' cntr attach -t command cntr-${out}'; while true; do sleep 99999999; done"
+}
+failureHooks+=(breakpointHook)
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh
new file mode 100644
index 000000000000..82e48cd8aa77
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/compress-man-pages.sh
@@ -0,0 +1,32 @@
+fixupOutputHooks+=('if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi')
+
+compressManPages() {
+ local dir="$1"
+
+ if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ]
+ then return
+ fi
+ echo "gzipping man pages under $dir/share/man/"
+
+ # Compress all uncompressed manpages. Don't follow symlinks, etc.
+ find "$dir"/share/man/ -type f -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
+ | while IFS= read -r -d $'\0' f
+ do
+ if gzip -c -n "$f" > "$f".gz; then
+ rm "$f"
+ else
+ rm "$f".gz
+ fi
+ done
+
+ # Point symlinks to compressed manpages.
+ find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
+ | while IFS= read -r -d $'\0' f
+ do
+ local target
+ target="$(readlink -f "$f")"
+ if [ -f "$target".gz ]; then
+ ln -sf "$target".gz "$f".gz && rm "$f"
+ fi
+ done
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh
new file mode 100644
index 000000000000..f96a10f33d5c
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/copy-desktop-items.sh
@@ -0,0 +1,42 @@
+# shellcheck shell=bash
+
+# Setup hook that installs specified desktop items.
+#
+# Example usage in a derivation:
+#
+# { …, makeDesktopItem, copyDesktopItems, … }:
+#
+# let desktopItem = makeDesktopItem { … }; in
+# stdenv.mkDerivation {
+# …
+# nativeBuildInputs = [ copyDesktopItems ];
+#
+# desktopItems = [ desktopItem ];
+# …
+# }
+#
+# This hook will copy files which are either given by full path
+# or all '*.desktop' files placed inside the 'share/applications'
+# folder of each `desktopItems` argument.
+
+postInstallHooks+=(copyDesktopItems)
+
+copyDesktopItems() {
+ if [ "${dontCopyDesktopItems-}" = 1 ]; then return; fi
+
+ if [ -z "$desktopItems" ]; then
+ return
+ fi
+
+ for desktopItem in $desktopItems; do
+ if [[ -f "$desktopItem" ]]; then
+ echo "Copying '$f' into '$out/share/applications'"
+ install -D -m 444 -t "$out"/share/applications "$f"
+ else
+ for f in "$desktopItem"/share/applications/*.desktop; do
+ echo "Copying '$f' into '$out/share/applications'"
+ install -D -m 444 -t "$out"/share/applications "$f"
+ done
+ fi
+ done
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/die.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/die.sh
new file mode 100644
index 000000000000..0db41e030f4c
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/die.sh
@@ -0,0 +1,21 @@
+# Exit with backtrace and error message
+#
+# Usage: die "Error message"
+die() {
+ # Let us be a little sloppy with errors, because otherwise the final
+ # invocation of `caller` below will cause the script to exit.
+ set +e
+
+ # Print our error message
+ printf "\nBuilder called die: %b\n" "$*"
+ printf "Backtrace:\n"
+
+ # Print a backtrace.
+ local frame=0
+ while caller $frame; do
+ ((frame++));
+ done
+ printf "\n"
+
+ exit 1
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh
new file mode 100644
index 000000000000..2b48fea4ff0b
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/enable-coverage-instrumentation.sh
@@ -0,0 +1,20 @@
+postPhases+=" cleanupBuildDir"
+
+# Force GCC to build with coverage instrumentation. Also disable
+# optimisation, since it may confuse things.
+export NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -O0 --coverage"
+
+# Get rid of everything that isn't a gcno file or a C source file.
+# Also strip the `.tmp_' prefix from gcno files. (The Linux kernel
+# creates these.)
+cleanupBuildDir() {
+ if ! [ -e $out/.build ]; then return; fi
+
+ find $out/.build/ -type f -a ! \
+ \( -name "*.c" -o -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hh" -o -name "*.y" -o -name "*.l" -o -name "*.gcno" \) \
+ | xargs rm -f --
+
+ for i in $(find $out/.build/ -name ".tmp_*.gcno"); do
+ mv "$i" "$(echo $i | sed s/.tmp_//)"
+ done
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh
new file mode 100644
index 000000000000..f446a6f27fd9
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/find-xml-catalogs.sh
@@ -0,0 +1,22 @@
+addXMLCatalogs () {
+ local d i
+ # ‘xml/dtd’ and ‘xml/xsl’ are deprecated. Catalogs should be
+ # installed underneath ‘share/xml’.
+ for d in $1/share/xml $1/xml/dtd $1/xml/xsl; do
+ if [ -d $d ]; then
+ for i in $(find $d -name catalog.xml); do
+ XML_CATALOG_FILES+=" $i"
+ done
+ fi
+ done
+}
+
+if [ -z "${libxmlHookDone-}" ]; then
+ libxmlHookDone=1
+
+ # Set up XML_CATALOG_FILES. An empty initial value prevents
+ # xmllint and xsltproc from looking in /etc/xml/catalog.
+ export XML_CATALOG_FILES=''
+ if [ -z "$XML_CATALOG_FILES" ]; then XML_CATALOG_FILES=" "; fi
+ addEnvHooks "$hostOffset" addXMLCatalogs
+fi
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh
new file mode 100644
index 000000000000..af2ff0cc9662
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/fix-darwin-dylib-names.sh
@@ -0,0 +1,40 @@
+# On macOS, binaries refer to dynamic library dependencies using
+# either relative paths (e.g. "libicudata.dylib", searched relative to
+# $DYLD_LIBRARY_PATH) or absolute paths
+# (e.g. "/nix/store/.../lib/libicudata.dylib"). In Nix, the latter is
+# preferred since it allows programs to just work. When linking
+# against a library (e.g. "-licudata"), the linker uses the install
+# name embedded in the dylib (which can be shown using "otool -D").
+# Most packages create dylibs with absolute install names, but some do
+# not. This setup hook fixes dylibs by setting their install names to
+# their absolute path (using "install_name_tool -id"). It also
+# rewrites references in other dylibs to absolute paths.
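+#
+# Rough illustration (hypothetical store path): a dylib whose install name is
+# plain "libfoo.dylib" is effectively rewritten with something like
+#
+#   install_name_tool -id /nix/store/<hash>-foo/lib/libfoo.dylib \
+#       /nix/store/<hash>-foo/lib/libfoo.dylib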
+
+fixupOutputHooks+=('fixDarwinDylibNamesIn $prefix')
+
+fixDarwinDylibNames() {
+ local flags=()
+ local old_id
+
+ for fn in "$@"; do
+ flags+=(-change "$(basename "$fn")" "$fn")
+ done
+
+ for fn in "$@"; do
+ if [ -L "$fn" ]; then continue; fi
+ echo "$fn: fixing dylib"
+ int_out=$(install_name_tool -id "$fn" "${flags[@]}" "$fn" 2>&1)
+ result=$?
+ if [ "$result" -ne 0 ] &&
+ ! grep "shared library stub file and can't be changed" <<< "$out"
+ then
+ echo "$int_out" >&2
+ exit "$result"
+ fi
+ done
+}
+
+fixDarwinDylibNamesIn() {
+ local dir="$1"
+ fixDarwinDylibNames $(find "$dir" -name "*.dylib")
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh
new file mode 100644
index 000000000000..559b543fadfc
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/gog-unpack.sh
@@ -0,0 +1,11 @@
+unpackPhase="unpackGog"
+
+unpackGog() {
+ runHook preUnpackGog
+
+ innoextract --silent --extract --exclude-temp "${src}"
+
+ find . -depth -print -execdir rename -f 'y/A-Z/a-z/' '{}' \;
+
+ runHook postUnpackGog
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh
new file mode 100644
index 000000000000..194b408b1050
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/install-shell-files.sh
@@ -0,0 +1,230 @@
+# shellcheck shell=bash
+# Setup hook for the `installShellFiles` package.
+#
+# Example usage in a derivation:
+#
+# { …, installShellFiles, … }:
+# stdenv.mkDerivation {
+# …
+# nativeBuildInputs = [ installShellFiles ];
+# postInstall = ''
+# installManPage share/doc/foobar.1
+# installShellCompletion share/completions/foobar.{bash,fish,zsh}
+# '';
+# …
+# }
+#
+# See comments on each function for more details.
+
+# installManPage <path> [...<path>]
+#
+# Each argument is checked for its man section suffix and installed into the appropriate
+# share/man/man<n>/ directory. The function returns an error if any paths don't have the man
+# section suffix (with optional .gz compression).
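+#
+# Illustrative call (file names are made up):
+#
+#   installManPage doc/foo.1 doc/libfoo.3 doc/foo.conf.5.gz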
+installManPage() {
+ local path
+ for path in "$@"; do
+ if (( "${NIX_DEBUG:-0}" >= 1 )); then
+ echo "installManPage: installing $path"
+ fi
+ if test -z "$path"; then
+ echo "installManPage: error: path cannot be empty" >&2
+ return 1
+ fi
+ local basename
+ basename=$(stripHash "$path") # use stripHash in case it's a nix store path
+ local trimmed=${basename%.gz} # don't get fooled by compressed manpages
+ local suffix=${trimmed##*.}
+ if test -z "$suffix" -o "$suffix" = "$trimmed"; then
+ echo "installManPage: error: path missing manpage section suffix: $path" >&2
+ return 1
+ fi
+ local outRoot
+ if test "$suffix" = 3; then
+ outRoot=${!outputDevman:?}
+ else
+ outRoot=${!outputMan:?}
+ fi
+ install -Dm644 -T "$path" "${outRoot}/share/man/man$suffix/$basename" || return
+ done
+}
+
+# installShellCompletion [--cmd <name>] ([--bash|--fish|--zsh] [--name <name>] <path>)...
+#
+# Each path is installed into the appropriate directory for shell completions for the given shell.
+# If one of `--bash`, `--fish`, or `--zsh` is given the path is assumed to belong to that shell.
+# Otherwise the file extension will be examined to pick a shell. If the shell is unknown a warning
+# will be logged and the command will return a non-zero status code after processing any remaining
+# paths. Any of the shell flags will affect all subsequent paths (unless another shell flag is
+# given).
+#
+# If the shell completion needs to be renamed before installing the optional `--name <name>` flag
+# may be given. Any name provided with this flag only applies to the next path.
+#
+# If all shell completions need to be renamed before installing the optional `--cmd <name>` flag
+# may be given. This will synthesize a name for each file, unless overridden with an explicit
+# `--name` flag. For example, `--cmd foobar` will synthesize the name `_foobar` for zsh and
+# `foobar.bash` for bash.
+#
+# For zsh completions, if the `--name` flag is not given, the path will be automatically renamed
+# such that `foobar.zsh` becomes `_foobar`.
+#
+# A path may be a named fd, such as produced by the bash construct `<(cmd)`. When using a named fd,
+# the shell type flag must be provided, and either the `--name` or `--cmd` flag must be provided.
+# This might look something like:
+#
+# installShellCompletion --zsh --name _foobar <($out/bin/foobar --zsh-completion)
+#
+# This command accepts multiple shell flags in conjunction with multiple paths if you wish to
+# install them all in one command:
+#
+# installShellCompletion share/completions/foobar.{bash,fish} --zsh share/completions/_foobar
+#
+# However it may be easier to read if each shell is split into its own invocation, especially when
+# renaming is involved:
+#
+# installShellCompletion --bash --name foobar.bash share/completions.bash
+# installShellCompletion --fish --name foobar.fish share/completions.fish
+# installShellCompletion --zsh --name _foobar share/completions.zsh
+#
+# Or to use shell newline escaping to split a single invocation across multiple lines:
+#
+# installShellCompletion --cmd foobar \
+# --bash <($out/bin/foobar --bash-completion) \
+# --fish <($out/bin/foobar --fish-completion) \
+# --zsh <($out/bin/foobar --zsh-completion)
+#
+# If any argument is `--` the remaining arguments will be treated as paths.
+installShellCompletion() {
+ local shell='' name='' cmdname='' retval=0 parseArgs=1 arg
+ while { arg=$1; shift; }; do
+ # Parse arguments
+ if (( parseArgs )); then
+ case "$arg" in
+ --bash|--fish|--zsh)
+ shell=${arg#--}
+ continue;;
+ --name)
+ name=$1
+ shift || {
+ echo 'installShellCompletion: error: --name flag expected an argument' >&2
+ return 1
+ }
+ continue;;
+ --name=*)
+ # treat `--name=foo` the same as `--name foo`
+ name=${arg#--name=}
+ continue;;
+ --cmd)
+ cmdname=$1
+ shift || {
+ echo 'installShellCompletion: error: --cmd flag expected an argument' >&2
+ return 1
+ }
+ continue;;
+ --cmd=*)
+ # treat `--cmd=foo` the same as `--cmd foo`
+ cmdname=${arg#--cmd=}
+ continue;;
+ --?*)
+ echo "installShellCompletion: warning: unknown flag ${arg%%=*}" >&2
+ retval=2
+ continue;;
+ --)
+ # treat remaining args as paths
+ parseArgs=0
+ continue;;
+ esac
+ fi
+ if (( "${NIX_DEBUG:-0}" >= 1 )); then
+ echo "installShellCompletion: installing $arg${name:+ as $name}"
+ fi
+ # if we get here, this is a path or named pipe
+ # Identify shell and output name
+ local curShell=$shell
+ local outName=''
+ if [[ -z "$arg" ]]; then
+ echo "installShellCompletion: error: empty path is not allowed" >&2
+ return 1
+ elif [[ -p "$arg" ]]; then
+ # this is a named fd or fifo
+ if [[ -z "$curShell" ]]; then
+ echo "installShellCompletion: error: named pipe requires one of --bash, --fish, or --zsh" >&2
+ return 1
+ elif [[ -z "$name" && -z "$cmdname" ]]; then
+ echo "installShellCompletion: error: named pipe requires one of --cmd or --name" >&2
+ return 1
+ fi
+ else
+ # this is a path
+ local argbase
+ argbase=$(stripHash "$arg")
+ if [[ -z "$curShell" ]]; then
+ # auto-detect the shell
+ case "$argbase" in
+ ?*.bash) curShell=bash;;
+ ?*.fish) curShell=fish;;
+ ?*.zsh) curShell=zsh;;
+ *)
+ if [[ "$argbase" = _* && "$argbase" != *.* ]]; then
+ # probably zsh
+ echo "installShellCompletion: warning: assuming path \`$arg' is zsh; please specify with --zsh" >&2
+ curShell=zsh
+ else
+ echo "installShellCompletion: warning: unknown shell for path: $arg" >&2
+ retval=2
+ continue
+ fi;;
+ esac
+ fi
+ outName=$argbase
+ fi
+ # Identify output path
+ if [[ -n "$name" ]]; then
+ outName=$name
+ elif [[ -n "$cmdname" ]]; then
+ case "$curShell" in
+ bash|fish) outName=$cmdname.$curShell;;
+ zsh) outName=_$cmdname;;
+ *)
+ # Our list of shells is out of sync with the flags we accept or extensions we detect.
+ echo 'installShellCompletion: internal error' >&2
+ return 1;;
+ esac
+ fi
+ local sharePath
+ case "$curShell" in
+ bash) sharePath=bash-completion/completions;;
+ fish) sharePath=fish/vendor_completions.d;;
+ zsh)
+ sharePath=zsh/site-functions
+ # only apply automatic renaming if we didn't have a manual rename
+ if [[ -z "$name" && -z "$cmdname" ]]; then
+ # convert a name like `foo.zsh` into `_foo`
+ outName=${outName%.zsh}
+ outName=_${outName#_}
+ fi;;
+ *)
+ # Our list of shells is out of sync with the flags we accept or extensions we detect.
+ echo 'installShellCompletion: internal error' >&2
+ return 1;;
+ esac
+ # Install file
+ local outDir="${!outputBin:?}/share/$sharePath"
+ local outPath="$outDir/$outName"
+ if [[ -p "$arg" ]]; then
+ # install handles named pipes on NixOS but not on macOS
+ mkdir -p "$outDir" \
+ && cat "$arg" > "$outPath"
+ else
+ install -Dm644 -T "$arg" "$outPath"
+ fi || return
+ # Clear the per-path flags
+ name=
+ done
+ if [[ -n "$name" ]]; then
+ echo 'installShellCompletion: error: --name flag given with no path' >&2
+ return 1
+ fi
+ return $retval
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh
new file mode 100644
index 000000000000..754900bfc337
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/keep-build-tree.sh
@@ -0,0 +1,6 @@
+prePhases+=" moveBuildDir"
+
+moveBuildDir() {
+ mkdir -p $out/.build
+ cd $out/.build
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh
new file mode 100644
index 000000000000..b53e184b0956
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/ld-is-cc-hook.sh
@@ -0,0 +1,5 @@
+ld-is-cc-hook() {
+ LD=$CC
+}
+
+preConfigureHooks+=(ld-is-cc-hook)
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh
new file mode 100644
index 000000000000..9108b4c50355
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-coverage-analysis-report.sh
@@ -0,0 +1,25 @@
+postPhases+=" coverageReportPhase"
+
+coverageReportPhase() {
+ lcov --directory . --capture --output-file app.info
+ set -o noglob
+ lcov --remove app.info ${lcovFilter:-"/nix/store/*"} > app2.info
+ set +o noglob
+ mv app2.info app.info
+
+ mkdir -p $out/coverage
+ genhtml app.info $lcovExtraTraceFiles -o $out/coverage > log
+
+ # Grab the overall coverage percentage so that Hydra can plot it over time.
+ mkdir -p $out/nix-support
+ lineCoverage="$(sed 's/.*lines\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
+ functionCoverage="$(sed 's/.*functions\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
+ if [ -z "$lineCoverage" -o -z "$functionCoverage" ]; then
+ echo "failed to get coverage statistics"
+ exit 1
+ fi
+ echo "lineCoverage $lineCoverage %" >> $out/nix-support/hydra-metrics
+ echo "functionCoverage $functionCoverage %" >> $out/nix-support/hydra-metrics
+
+ echo "report coverage $out/coverage" >> $out/nix-support/hydra-build-products
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh
new file mode 100644
index 000000000000..0608d3ca81c4
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-symlinks-relative.sh
@@ -0,0 +1,28 @@
+fixupOutputHooks+=(_makeSymlinksRelative)
+
+# For every symlink in $output that refers to another file in $output,
+# ensure that the symlink is relative. This removes references to the output
+# hash from the resulting store paths and thus the NAR files.
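+# For example (hypothetical layout), a link $out/bin/foo -> $out/lib/foo
+# would be rewritten to point at the relative target ../lib/foo.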
+_makeSymlinksRelative() {
+ local symlinkTarget
+
+ if [ -n "${dontRewriteSymlinks-}" ]; then
+ return 0
+ fi
+
+ while IFS= read -r -d $'\0' f; do
+ symlinkTarget=$(readlink "$f")
+ if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then
+ # skip this symlink as it doesn't point to $prefix
+ continue
+ fi
+
+ if [ ! -e "$symlinkTarget" ]; then
+ echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)"
+ fi
+
+ echo "rewriting symlink $f to be relative to $prefix"
+ ln -snrf "$symlinkTarget" "$f"
+
+ done < <(find $prefix -type l -print0)
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh
new file mode 100644
index 000000000000..8b7012677cd5
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/make-wrapper.sh
@@ -0,0 +1,146 @@
+# Assert that FILE exists and is executable
+#
+# assertExecutable FILE
+assertExecutable() {
+ local file="$1"
+ [[ -f "$file" && -x "$file" ]] || \
+ die "Cannot wrap '$file' because it is not an executable file"
+}
+
+# construct an executable file that wraps the actual executable
+# makeWrapper EXECUTABLE OUT_PATH ARGS
+
+# ARGS:
+# --argv0 NAME : set name of executed process to NAME
+# (otherwise it’s called …-wrapped)
+# --set VAR VAL : add VAR with value VAL to the executable’s
+# environment
+# --set-default VAR VAL : like --set, but only adds VAR if not already set in
+# the environment
+# --unset VAR : remove VAR from the environment
+# --run COMMAND : run command before the executable
+# --add-flags FLAGS : add FLAGS to invocation of executable
+
+# --prefix ENV SEP VAL : suffix/prefix ENV with VAL, separated by SEP
+# --suffix
+# --suffix-each ENV SEP VALS : like --suffix, but VALS is a list
+# --prefix-contents ENV SEP FILES : like --suffix-each, but contents of FILES
+# are read first and used as VALS
+# --suffix-contents
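+#
+# A minimal illustrative call (paths, variables and flags are assumptions):
+#
+#   makeWrapper "$somepkg/bin/tool" "$out/bin/tool" \
+#     --set LANG C.UTF-8 \
+#     --prefix PATH : "$dep/bin" \
+#     --add-flags "--config $out/etc/tool.conf"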
+makeWrapper() {
+ local original="$1"
+ local wrapper="$2"
+ local params varName value command separator n fileNames
+ local argv0 flagsBefore flags
+
+ assertExecutable "$original"
+
+ mkdir -p "$(dirname "$wrapper")"
+
+ echo "#! @shell@ -e" > "$wrapper"
+
+ params=("$@")
+ for ((n = 2; n < ${#params[*]}; n += 1)); do
+ p="${params[$n]}"
+
+ if [[ "$p" == "--set" ]]; then
+ varName="${params[$((n + 1))]}"
+ value="${params[$((n + 2))]}"
+ n=$((n + 2))
+ echo "export $varName=${value@Q}" >> "$wrapper"
+ elif [[ "$p" == "--set-default" ]]; then
+ varName="${params[$((n + 1))]}"
+ value="${params[$((n + 2))]}"
+ n=$((n + 2))
+ echo "export $varName=\${$varName-${value@Q}}" >> "$wrapper"
+ elif [[ "$p" == "--unset" ]]; then
+ varName="${params[$((n + 1))]}"
+ n=$((n + 1))
+ echo "unset $varName" >> "$wrapper"
+ elif [[ "$p" == "--run" ]]; then
+ command="${params[$((n + 1))]}"
+ n=$((n + 1))
+ echo "$command" >> "$wrapper"
+ elif [[ ("$p" == "--suffix") || ("$p" == "--prefix") ]]; then
+ varName="${params[$((n + 1))]}"
+ separator="${params[$((n + 2))]}"
+ value="${params[$((n + 3))]}"
+ n=$((n + 3))
+ if test -n "$value"; then
+ if test "$p" = "--suffix"; then
+ echo "export $varName=\$$varName\${$varName:+${separator@Q}}${value@Q}" >> "$wrapper"
+ else
+ echo "export $varName=${value@Q}\${$varName:+${separator@Q}}\$$varName" >> "$wrapper"
+ fi
+ fi
+ elif [[ "$p" == "--suffix-each" ]]; then
+ varName="${params[$((n + 1))]}"
+ separator="${params[$((n + 2))]}"
+ values="${params[$((n + 3))]}"
+ n=$((n + 3))
+ for value in $values; do
+ echo "export $varName=\$$varName\${$varName:+$separator}${value@Q}" >> "$wrapper"
+ done
+ elif [[ ("$p" == "--suffix-contents") || ("$p" == "--prefix-contents") ]]; then
+ varName="${params[$((n + 1))]}"
+ separator="${params[$((n + 2))]}"
+ fileNames="${params[$((n + 3))]}"
+ n=$((n + 3))
+ for fileName in $fileNames; do
+ contents="$(cat "$fileName")"
+ if test "$p" = "--suffix-contents"; then
+ echo "export $varName=\$$varName\${$varName:+$separator}${contents@Q}" >> "$wrapper"
+ else
+ echo "export $varName=${contents@Q}\${$varName:+$separator}\$$varName" >> "$wrapper"
+ fi
+ done
+ elif [[ "$p" == "--add-flags" ]]; then
+ flags="${params[$((n + 1))]}"
+ n=$((n + 1))
+ flagsBefore="$flagsBefore $flags"
+ elif [[ "$p" == "--argv0" ]]; then
+ argv0="${params[$((n + 1))]}"
+ n=$((n + 1))
+ else
+ die "makeWrapper doesn't understand the arg $p"
+ fi
+ done
+
+ echo exec ${argv0:+-a \"$argv0\"} \""$original"\" \
+ "$flagsBefore" '"$@"' >> "$wrapper"
+
+ chmod +x "$wrapper"
+}
+
+addSuffix() {
+ suffix="$1"
+ shift
+ for name in "$@"; do
+ echo "$name$suffix"
+ done
+}
+
+filterExisting() {
+ for fn in "$@"; do
+ if test -e "$fn"; then
+ echo "$fn"
+ fi
+ done
+}
+
+# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...>
+wrapProgram() {
+ local prog="$1"
+ local hidden
+
+ assertExecutable "$prog"
+
+ hidden="$(dirname "$prog")/.$(basename "$prog")"-wrapped
+ while [ -e "$hidden" ]; do
+ hidden="${hidden}_"
+ done
+ mv "$prog" "$hidden"
+ # Silence warning about unexpanded $0:
+ # shellcheck disable=SC2016
+ makeWrapper "$hidden" "$prog" --argv0 '$0' "${@:2}"
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh
new file mode 100644
index 000000000000..ef31dcdce274
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-docs.sh
@@ -0,0 +1,23 @@
+# This setup hook moves $out/{man,doc,info} to $out/share; moves
+# $out/share/man to $man/share/man; and moves $out/share/doc to
+# $man/share/doc.
+
+preFixupHooks+=(_moveToShare)
+
+_moveToShare() {
+ forceShare=${forceShare:=man doc info}
+ if [ -z "$forceShare" -o -z "$out" ]; then return; fi
+
+ for d in $forceShare; do
+ if [ -d "$out/$d" ]; then
+ if [ -d "$out/share/$d" ]; then
+ echo "both $d/ and share/$d/ exist!"
+ else
+ echo "moving $out/$d to $out/share/$d"
+ mkdir -p $out/share
+ mv $out/$d $out/share/
+ fi
+ fi
+ done
+}
+
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh
new file mode 100644
index 000000000000..9517af797323
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-lib64.sh
@@ -0,0 +1,22 @@
+# This setup hook, for each output, moves everything in $output/lib64
+# to $output/lib, and replaces $output/lib64 with a symlink to
+# $output/lib. The rationale is that lib64 directories are unnecessary
+# in Nix (since 32-bit and 64-bit builds of a package are in different
+# store paths anyway).
+# If the move would overwrite anything, it should fail on rmdir.
+
+fixupOutputHooks+=(_moveLib64)
+
+_moveLib64() {
+ if [ "${dontMoveLib64-}" = 1 ]; then return; fi
+ if [ ! -e "$prefix/lib64" -o -L "$prefix/lib64" ]; then return; fi
+ echo "moving $prefix/lib64/* to $prefix/lib"
+ mkdir -p $prefix/lib
+ shopt -s dotglob
+ for i in $prefix/lib64/*; do
+ mv --no-clobber "$i" $prefix/lib
+ done
+ shopt -u dotglob
+ rmdir $prefix/lib64
+ ln -s lib $prefix/lib64
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh
new file mode 100644
index 000000000000..1c0c4dc9f2d9
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-sbin.sh
@@ -0,0 +1,19 @@
+# This setup hook, for each output, moves everything in $output/sbin
+# to $output/bin, and replaces $output/sbin with a symlink to
+# $output/bin.
+
+fixupOutputHooks+=(_moveSbin)
+
+_moveSbin() {
+ if [ "${dontMoveSbin-}" = 1 ]; then return; fi
+ if [ ! -e "$prefix/sbin" -o -L "$prefix/sbin" ]; then return; fi
+ echo "moving $prefix/sbin/* to $prefix/bin"
+ mkdir -p $prefix/bin
+ shopt -s dotglob
+ for i in $prefix/sbin/*; do
+ mv "$i" $prefix/bin
+ done
+ shopt -u dotglob
+ rmdir $prefix/sbin
+ ln -s bin $prefix/sbin
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh
new file mode 100755
index 000000000000..5963d87c7515
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/move-systemd-user-units.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+# This setup hook, for each output, moves everything in
+# $output/lib/systemd/user to $output/share/systemd/user, and replaces
+# $output/lib/systemd/user with a symlink to
+# $output/share/systemd/user.
+
+fixupOutputHooks+=(_moveSystemdUserUnits)
+
+_moveSystemdUserUnits() {
+ if [ "${dontMoveSystemdUserUnits:-0}" = 1 ]; then return; fi
+ if [ ! -e "${prefix:?}/lib/systemd/user" ]; then return; fi
+ local source="$prefix/lib/systemd/user"
+ local target="$prefix/share/systemd/user"
+ echo "moving $source/* to $target"
+ mkdir -p "$target"
+ (
+ shopt -s dotglob
+ for i in "$source"/*; do
+ mv "$i" "$target"
+ done
+ )
+ rmdir "$source"
+ ln -s "$target" "$source"
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh
new file mode 100644
index 000000000000..bfa47e3b20e1
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/multiple-outputs.sh
@@ -0,0 +1,199 @@
+# The base package for automatic multiple-output splitting. Used in stdenv as well.
+preConfigureHooks+=(_multioutConfig)
+preFixupHooks+=(_multioutDocs)
+preFixupHooks+=(_multioutDevs)
+postFixupHooks+=(_multioutPropagateDev)
+
+# Assign to the variable named $1 the first remaining argument that names a nonempty variable
+_assignFirst() {
+ local varName="$1"
+ local REMOVE=REMOVE # slightly hacky - we allow REMOVE (i.e. not a variable name)
+ shift
+ while (( $# )); do
+ if [ -n "${!1-}" ]; then eval "${varName}"="$1"; return; fi
+ shift
+ done
+ echo "Error: _assignFirst found no valid variant!"
+ return 1 # none found
+}
+
+# Same as _assignFirst, but only if "$1" = ""
+_overrideFirst() {
+ if [ -z "${!1-}" ]; then
+ _assignFirst "$@"
+ fi
+}
+
+
+# Setup chains of sane default values with easy overridability.
+# The variables are global to be usable anywhere during the build.
+# Typical usage in package is defining outputBin = "dev";
+
+_overrideFirst outputDev "dev" "out"
+_overrideFirst outputBin "bin" "out"
+
+_overrideFirst outputInclude "$outputDev"
+
+# so-libs are often among the main things to keep, and so go to $out
+_overrideFirst outputLib "lib" "out"
+
+_overrideFirst outputDoc "doc" "out"
+_overrideFirst outputDevdoc "devdoc" REMOVE # documentation for developers
+# man and info pages are small and often useful to distribute with binaries
+_overrideFirst outputMan "man" "$outputBin"
+_overrideFirst outputDevman "devman" "devdoc" "$outputMan"
+_overrideFirst outputInfo "info" "$outputBin"
+
+
+# Add standard flags to put files into the desired outputs.
+_multioutConfig() {
+ if [ "$outputs" = "out" ] || [ -z "${setOutputFlags-1}" ]; then return; fi;
+
+ # try to detect share/doc/${shareDocName}
+ # Note: sadly, $configureScript detection comes later in configurePhase,
+ # and reordering would cause more trouble than worth.
+ if [ -z "$shareDocName" ]; then
+ local confScript="$configureScript"
+ if [ -z "$confScript" ] && [ -x ./configure ]; then
+ confScript=./configure
+ fi
+ if [ -f "$confScript" ]; then
+ local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")"
+ fi
+ # PACKAGE_TARNAME sometimes contains garbage.
+ if [ -z "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then
+ shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')"
+ fi
+ fi
+
+ configureFlags="\
+ --bindir=${!outputBin}/bin --sbindir=${!outputBin}/sbin \
+ --includedir=${!outputInclude}/include --oldincludedir=${!outputInclude}/include \
+ --mandir=${!outputMan}/share/man --infodir=${!outputInfo}/share/info \
+ --docdir=${!outputDoc}/share/doc/${shareDocName} \
+ --libdir=${!outputLib}/lib --libexecdir=${!outputLib}/libexec \
+ --localedir=${!outputLib}/share/locale \
+ $configureFlags"
+
+ installFlags="\
+ pkgconfigdir=${!outputDev}/lib/pkgconfig \
+ m4datadir=${!outputDev}/share/aclocal aclocaldir=${!outputDev}/share/aclocal \
+ $installFlags"
+}
+
+
+# Add rpath prefixes to library paths, and avoid stdenv doing it for $out.
+_addRpathPrefix "${!outputLib}"
+NIX_NO_SELF_RPATH=1
+
+
+# Move subpaths that match pattern $1 from under any output/ to the $2 output/
+# Beware: only globbing patterns are accepted, e.g.: * ? {foo,bar}
+# A special target "REMOVE" is allowed: moveToOutput foo REMOVE
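+# For illustration (conventional output variables assumed):
+#   moveToOutput bin "${!outputBin}"
+#   moveToOutput share/doc "${!outputDoc}"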
+moveToOutput() {
+ local patt="$1"
+ local dstOut="$2"
+ local output
+ for output in $outputs; do
+ if [ "${!output}" = "$dstOut" ]; then continue; fi
+ local srcPath
+ for srcPath in "${!output}"/$patt; do
+ # apply to existing files/dirs, *including* broken symlinks
+ if [ ! -e "$srcPath" ] && [ ! -L "$srcPath" ]; then continue; fi
+
+ if [ "$dstOut" = REMOVE ]; then
+ echo "Removing $srcPath"
+ rm -r "$srcPath"
+ else
+ local dstPath="$dstOut${srcPath#${!output}}"
+ echo "Moving $srcPath to $dstPath"
+
+ if [ -d "$dstPath" ] && [ -d "$srcPath" ]
+ then # attempt directory merge
+ # check the case of trying to move an empty directory
+ rmdir "$srcPath" --ignore-fail-on-non-empty
+ if [ -d "$srcPath" ]; then
+ mv -t "$dstPath" "$srcPath"/*
+ rmdir "$srcPath"
+ fi
+ else # usual move
+ mkdir -p "$(readlink -m "$dstPath/..")"
+ mv "$srcPath" "$dstPath"
+ fi
+ fi
+
+ # remove empty directories, printing iff at least one gets removed
+ local srcParent="$(readlink -m "$srcPath/..")"
+ if rmdir "$srcParent"; then
+ echo "Removing empty $srcParent/ and (possibly) its parents"
+ rmdir -p --ignore-fail-on-non-empty "$(readlink -m "$srcParent/..")" \
+ 2> /dev/null || true # doesn't ignore failure for some reason
+ fi
+ done
+ done
+}
+
+# Move documentation to the desired outputs.
+_multioutDocs() {
+ local REMOVE=REMOVE # slightly hacky - we expand ${!outputFoo}
+
+ moveToOutput share/info "${!outputInfo}"
+ moveToOutput share/doc "${!outputDoc}"
+ moveToOutput share/gtk-doc "${!outputDevdoc}"
+ moveToOutput share/devhelp/books "${!outputDevdoc}"
+
+ # the default outputMan is in $bin
+ moveToOutput share/man "${!outputMan}"
+ moveToOutput share/man/man3 "${!outputDevman}"
+}
+
+# Move development-only stuff to the desired outputs.
+_multioutDevs() {
+ if [ "$outputs" = "out" ] || [ -z "${moveToDev-1}" ]; then return; fi;
+ moveToOutput include "${!outputInclude}"
+ # these files are sometimes provided even without using the corresponding tool
+ moveToOutput lib/pkgconfig "${!outputDev}"
+ moveToOutput share/pkgconfig "${!outputDev}"
+ moveToOutput lib/cmake "${!outputDev}"
+ moveToOutput share/aclocal "${!outputDev}"
+ # don't move *.la, as libtool needs them in the directory of the library
+
+ for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; do
+ echo "Patching '$f' includedir to output ${!outputInclude}"
+ sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f"
+ done
+}
+
+# Make the "dev" propagate other outputs needed for development.
+_multioutPropagateDev() {
+ if [ "$outputs" = "out" ]; then return; fi;
+
+ local outputFirst
+ for outputFirst in $outputs; do
+ break
+ done
+ local propagaterOutput="$outputDev"
+ if [ -z "$propagaterOutput" ]; then
+ propagaterOutput="$outputFirst"
+ fi
+
+ # Default value: propagate binaries, includes and libraries
+ if [ -z "${propagatedBuildOutputs+1}" ]; then
+ local po_dirty="$outputBin $outputInclude $outputLib"
+ set +o pipefail
+ propagatedBuildOutputs=`echo "$po_dirty" \
+ | tr -s ' ' '\n' | grep -v -F "$propagaterOutput" \
+ | sort -u | tr '\n' ' ' `
+ set -o pipefail
+ fi
+
+    # The variable was explicitly set to empty, or we resolved it to empty
+    # above, so there is nothing to propagate.
+ if [ -z "$propagatedBuildOutputs" ]; then
+ return
+ fi
+
+ mkdir -p "${!propagaterOutput}"/nix-support
+ for output in $propagatedBuildOutputs; do
+ echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs
+ done
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh
new file mode 100644
index 000000000000..b48b0c50f577
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/patch-shebangs.sh
@@ -0,0 +1,119 @@
+# This setup hook causes the fixup phase to rewrite all script
+# interpreter file names (`#! /path') to paths found in $PATH. E.g.,
+# /bin/sh will be rewritten to /nix/store/<hash>-some-bash/bin/sh.
+# /usr/bin/env gets special treatment so that ".../bin/env python" is
+# rewritten to /nix/store/<hash>/bin/python. Interpreters that are
+# already in the store are left untouched.
+# A script file must be marked as executable, otherwise it will not be
+# considered.
+
+fixupOutputHooks+=(patchShebangsAuto)
+
+# Run patch shebangs on a directory or file.
+# Can take multiple paths as arguments.
+# patchShebangs [--build | --host] PATH...
+
+# Flags:
+# --build : Lookup commands available at build-time
+# --host : Lookup commands available at runtime
+
+# Example use cases,
+# $ patchShebangs --host /nix/store/...-hello-1.0/bin
+# $ patchShebangs --build configure
+
+patchShebangs() {
+ local pathName
+
+ if [ "$1" = "--host" ]; then
+ pathName=HOST_PATH
+ shift
+ elif [ "$1" = "--build" ]; then
+ pathName=PATH
+ shift
+ fi
+
+ echo "patching script interpreter paths in $@"
+ local f
+ local oldPath
+ local newPath
+ local arg0
+ local args
+ local oldInterpreterLine
+ local newInterpreterLine
+
+ if [ $# -eq 0 ]; then
+ echo "No arguments supplied to patchShebangs" >&2
+ return 0
+ fi
+
+ local f
+ while IFS= read -r -d $'\0' f; do
+ isScript "$f" || continue
+
+ oldInterpreterLine=$(head -1 "$f" | tail -c+3)
+ read -r oldPath arg0 args <<< "$oldInterpreterLine"
+
+ if [ -z "$pathName" ]; then
+ if [ -n "$strictDeps" ] && [[ "$f" = "$NIX_STORE"* ]]; then
+ pathName=HOST_PATH
+ else
+ pathName=PATH
+ fi
+ fi
+
+ if $(echo "$oldPath" | grep -q "/bin/env$"); then
+ # Check for unsupported 'env' functionality:
+ # - options: something starting with a '-'
+ # - environment variables: foo=bar
+ if $(echo "$arg0" | grep -q -- "^-.*\|.*=.*"); then
+ echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)" >&2
+ exit 1
+ fi
+
+ newPath="$(PATH="${!pathName}" command -v "$arg0" || true)"
+ else
+ if [ "$oldPath" = "" ]; then
+                # If no interpreter is specified, Linux will use /bin/sh. Set
+                # oldPath="/bin/sh" so that we get /nix/store/.../sh.
+ oldPath="/bin/sh"
+ fi
+
+ newPath="$(PATH="${!pathName}" command -v "$(basename "$oldPath")" || true)"
+
+ args="$arg0 $args"
+ fi
+
+ # Strip trailing whitespace introduced when no arguments are present
+ newInterpreterLine="$(echo "$newPath $args" | sed 's/[[:space:]]*$//')"
+
+ if [ -n "$oldPath" -a "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ]; then
+ if [ -n "$newPath" -a "$newPath" != "$oldPath" ]; then
+ echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""
+ # escape the escape chars so that sed doesn't interpret them
+ escapedInterpreterLine=$(echo "$newInterpreterLine" | sed 's|\\|\\\\|g')
+ # Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281
+ timestamp=$(mktemp)
+ touch -r "$f" "$timestamp"
+ sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"
+ touch -r "$timestamp" "$f"
+ rm "$timestamp"
+ fi
+ fi
+ done < <(find "$@" -type f -perm -0100 -print0)
+
+ stopNest
+}
+
+patchShebangsAuto () {
+ if [ -z "${dontPatchShebangs-}" -a -e "$prefix" ]; then
+
+ # Dev output will end up being run on the build platform. An
+ # example case of this is sdl2-config. Otherwise, we can just
+ # use the runtime path (--host).
+ if [ "$output" != out ] && [ "$output" = "$outputDev" ]; then
+ patchShebangs --build "$prefix"
+ else
+ patchShebangs --host "$prefix"
+ fi
+ fi
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh
new file mode 100644
index 000000000000..0ec56549645c
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/prune-libtool-files.sh
@@ -0,0 +1,22 @@
+# Clear dependency_libs in libtool files for shared libraries.
+
+# Shared libraries already encode their dependencies with locations. .la
+# files do not always encode those locations, and sometimes encode the
+# locations in the wrong Nix output. .la files are not needed for shared
+# libraries, but without dependency_libs they do not hurt either.
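+#
+# Effectively, a line such as (illustrative)
+#   dependency_libs=' -L/nix/store/<hash>-zlib/lib -lz'
+# in a shared-library .la file is rewritten by the sed command below to
+#   dependency_libs='' #pruned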
+
+fixupOutputHooks+=(_pruneLibtoolFiles)
+
+_pruneLibtoolFiles() {
+ if [ "${dontPruneLibtoolFiles-}" ] || [ ! -e "$prefix" ]; then
+ return
+ fi
+
+ # Libtool uses "dlname" and "library_names" fields for shared libraries and
+ # the "old_library" field for static libraries. We are processing only
+ # those .la files that do not describe static libraries.
+ find "$prefix" -type f -name '*.la' \
+ -exec grep -q '^# Generated by .*libtool' {} \; \
+ -exec grep -q "^old_library=''" {} \; \
+ -exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \;
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh
new file mode 100644
index 000000000000..2d8db6ff7d3c
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/reproducible-builds.sh
@@ -0,0 +1,4 @@
+# Use the last part of the out path as hash input for the build.
+# This should ensure that it is deterministic across rebuilds of the same
+# derivation and not easily collide with other builds.
+export NIX_CFLAGS_COMPILE+=" -frandom-seed=${out##*/}"
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/role.bash b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/role.bash
new file mode 100644
index 000000000000..cf69e732e7c3
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/role.bash
@@ -0,0 +1,71 @@
+# Since the same derivation can be depended on in multiple ways, we need to
+# accumulate *each* role (i.e. host and target platforms relative to the depending
+# derivation) in which the derivation is used.
+#
+# The role is intended to be used as part of other variable names, like
+# - $NIX_SOMETHING${role_post}
+
+function getRole() {
+ case $1 in
+ -1)
+ role_post='_FOR_BUILD'
+ ;;
+ 0)
+ role_post=''
+ ;;
+ 1)
+ role_post='_FOR_TARGET'
+ ;;
+ *)
+ echo "@name@: used as improper sort of dependency" >2
+ return 1
+ ;;
+ esac
+}
+
+# `hostOffset` describes how the host platform of the package is slid relative
+# to the depending package. `targetOffset` likewise describes the target
+# platform of the package. Both are brought into scope of the setup hook defined
+# for the dependency whose setup hook is being processed, relative to the package
+# being built.
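+#
+# A concrete (illustrative) reading: a dependency listed in nativeBuildInputs
+# has hostOffset -1, so getRole selects the "_FOR_BUILD" suffix; an ordinary
+# buildInput has hostOffset 0 and gets the empty suffix.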
+
+function getHostRole() {
+ getRole "$hostOffset"
+}
+function getTargetRole() {
+ getRole "$targetOffset"
+}
+
+# `depHostOffset` describes how the host platform of the dependencies is slid
+# relative to the depending package. `depTargetOffset` likewise describes the
+# target platform of the dependencies. Both are brought into the scope of the
+# environment hook defined for the dependency being applied, relative to the
+# package being built.
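+#
+# A minimal sketch (hypothetical hook, for illustration only) of how these are
+# typically consumed by an environment hook:
+#
+#   someEnvHook() {
+#       getHostRoleEnvHook                  # sets role_post from depHostOffset
+#       export "NIX_SOMETHING${role_post}=..."
+#   }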
+
+function getHostRoleEnvHook() {
+ getRole "$depHostOffset"
+}
+function getTargetRoleEnvHook() {
+ getRole "$depTargetOffset"
+}
+
+# This variant is intended specifically for code-producing tool wrapper scripts.
+# `NIX_@wrapperName@_TARGET_*_@suffixSalt@` tracks this (needs to be an exported
+# env var so can't use fancier data structures).
+function getTargetRoleWrapper() {
+ case $targetOffset in
+ -1)
+ export NIX_@wrapperName@_TARGET_BUILD_@suffixSalt@=1
+ ;;
+ 0)
+ export NIX_@wrapperName@_TARGET_HOST_@suffixSalt@=1
+ ;;
+ 1)
+ export NIX_@wrapperName@_TARGET_TARGET_@suffixSalt@=1
+ ;;
+ *)
+ echo "@name@: used as improper sort of dependency" >2
+ return 1
+ ;;
+ esac
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh
new file mode 100644
index 000000000000..19dbb10d18e7
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh
@@ -0,0 +1,37 @@
+export NIX_SET_BUILD_ID=1
+export NIX_LDFLAGS+=" --compress-debug-sections=zlib"
+export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections"
+dontStrip=1
+
+fixupOutputHooks+=(_separateDebugInfo)
+
+_separateDebugInfo() {
+ [ -e "$prefix" ] || return 0
+
+ local dst="${debug:-$out}"
+ if [ "$prefix" = "$dst" ]; then return 0; fi
+
+ dst="$dst/lib/debug/.build-id"
+
+ # Find executables and dynamic libraries.
+ local i magic
+ while IFS= read -r -d $'\0' i; do
+ if ! isELF "$i"; then continue; fi
+
+ # Extract the Build ID. FIXME: there's probably a cleaner way.
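+ # ($READELF -n prints a line like "Build ID: <40 hex characters>"; the sed
+ # expression keeps only those hex digits.)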
+ local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
+ if [ "${#id}" != 40 ]; then
+ echo "could not find build ID of $i, skipping" >&2
+ continue
+ fi
+
+ # Extract the debug info.
+ header "separating debug info from $i (build ID $id)"
+ mkdir -p "$dst/${id:0:2}"
+ $OBJCOPY --only-keep-debug "$i" "$dst/${id:0:2}/${id:2}.debug"
+ $STRIP --strip-debug "$i"
+
+ # Also create a symlink <original-name>.debug.
+ ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")"
+ done < <(find "$prefix" -type f -print0)
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh
new file mode 100644
index 000000000000..445fa56d61de
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-java-classpath.sh
@@ -0,0 +1,13 @@
+# This setup hook adds every JAR in the share/java subdirectories of
+# the build inputs to $CLASSPATH.
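+#
+# For example (illustrative): a build input that installs
+# $dep/share/java/foo.jar causes that path to be appended to CLASSPATH,
+# separated by colons.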
+
+export CLASSPATH
+
+addPkgToClassPath () {
+ local jar
+ for jar in $1/share/java/*.jar; do
+ export CLASSPATH=${CLASSPATH-}${CLASSPATH:+:}${jar}
+ done
+}
+
+addEnvHooks "$targetOffset" addPkgToClassPath
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh
new file mode 100644
index 000000000000..ae34ffec4854
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh
@@ -0,0 +1,34 @@
+updateSourceDateEpoch() {
+ local path="$1"
+
+ # Get the last modification time of all regular files, sort them,
+ # and get the most recent. Maybe we should use
+ # https://github.com/0-wiz-0/findnewest here.
+ local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." -printf '%T@ %p\0' \
+ | sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1))
+ local time="${res[0]//\.[0-9]*/}" # remove the fraction part
+ local newestFile="${res[1]}"
+
+ # Update $SOURCE_DATE_EPOCH if the most recent file we found is newer.
+ if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then
+ echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile"
+ export SOURCE_DATE_EPOCH="$time"
+
+ # Warn if the new timestamp is too close to the present. This
+ # may indicate that we were being applied to a file generated
+ # during the build, or that an unpacker didn't restore
+ # timestamps properly.
+ local now="$(date +%s)"
+ if [ "$time" -gt $((now - 60)) ]; then
+ echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic"
+ fi
+ fi
+}
+
+postUnpackHooks+=(_updateSourceDateEpochFromSourceRoot)
+
+_updateSourceDateEpochFromSourceRoot() {
+ if [ -n "$sourceRoot" ]; then
+ updateSourceDateEpoch "$sourceRoot"
+ fi
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh
new file mode 100644
index 000000000000..96bf48cf123a
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/setup-debug-info-dirs.sh
@@ -0,0 +1,5 @@
+setupDebugInfoDirs () {
+ addToSearchPath NIX_DEBUG_INFO_DIRS $1/lib/debug
+}
+
+addEnvHooks "$targetOffset" setupDebugInfoDirs
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh
new file mode 100644
index 000000000000..4bf7c0ff1af4
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/shorten-perl-shebang.sh
@@ -0,0 +1,88 @@
+# This setup hook modifies a Perl script so that any "-I" flags in its shebang
+# line are rewritten into a "use lib ..." statement on the next line. This gets
+# around a limitation in Darwin, which will not properly handle a script whose
+# shebang line exceeds 511 characters.
+#
+# Each occurrence of "-I /path/to/lib1" or "-I/path/to/lib2" is removed from
+# the shebang line, along with the single space that preceded it. These library
+# paths are placed into a new line of the form
+#
+# use lib "/path/to/lib1", "/path/to/lib2";
+#
+# immediately following the shebang line. If a library appeared in the original
+# list more than once, only its first occurrence will appear in the output
+# list. In other words, the libraries are deduplicated, but the ordering of the
+# first appearance of each one is preserved.
+#
+# Any flags other than "-I" in the shebang line are left as-is, and the
+# interpreter is also left alone (although the script will abort if the
+# interpreter does not seem to be either "perl" or else "env" with "perl" as
+# its argument). Each line after the shebang line is left unchanged. Each file
+# is modified in place.
+#
+# Usage:
+# shortenPerlShebang SCRIPT...
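+#
+# For illustration (hypothetical store paths), a shebang line such as
+#
+#   #!/nix/store/...-perl/bin/perl -w -I/nix/store/...-foo/lib/perl5 -I /nix/store/...-bar/lib/perl5
+#
+# would be rewritten to
+#
+#   #!/nix/store/...-perl/bin/perl -w
+#   use lib "/nix/store/...-foo/lib/perl5", "/nix/store/...-bar/lib/perl5";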
+
+shortenPerlShebang() {
+ while [ $# -gt 0 ]; do
+ _shortenPerlShebang "$1"
+ shift
+ done
+}
+
+_shortenPerlShebang() {
+ local program="$1"
+
+ echo "shortenPerlShebang: rewriting shebang line in $program"
+
+ if ! isScript "$program"; then
+ die "shortenPerlShebang: refusing to modify $program because it is not a script"
+ fi
+
+ local temp="$(mktemp)"
+
+ gawk '
+ (NR == 1) {
+ if (!($0 ~ /\/(perl|env +perl)\>/)) {
+ print "shortenPerlShebang: script does not seem to be a Perl script" > "/dev/stderr"
+ exit 1
+ }
+ idx = 0
+ while (match($0, / -I ?([^ ]+)/, pieces)) {
+ matches[idx] = pieces[1]
+ idx++
+ $0 = gensub(/ -I ?[^ ]+/, "", 1, $0)
+ }
+ print $0
+ if (idx > 0) {
+ prefix = "use lib "
+ for (idx in matches) {
+ path = matches[idx]
+ if (!(path in seen)) {
+ printf "%s\"%s\"", prefix, path
+ seen[path] = 1
+ prefix = ", "
+ }
+ }
+ print ";"
+ }
+ }
+ (NR > 1 ) {
+ print
+ }
+ ' "$program" > "$temp" || die
+ # Preserve the mode of the original file
+ cp --preserve=mode --attributes-only "$program" "$temp"
+ mv "$temp" "$program"
+
+ # Measure the new shebang line length and make sure it's okay. We subtract
+ # one to account for the trailing newline that "head" included in its
+ # output.
+ local new_length=$(( $(head -n 1 "$program" | wc -c) - 1 ))
+
+ # Darwin is okay when the shebang line contains 511 characters, but not
+ # when it contains 512 characters.
+ if [ $new_length -ge 512 ]; then
+ die "shortenPerlShebang: shebang line is $new_length characters--still too long for Darwin!"
+ fi
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/strip.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/strip.sh
new file mode 100644
index 000000000000..f5fa9378fd7e
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/strip.sh
@@ -0,0 +1,57 @@
+# This setup hook strips libraries and executables in the fixup phase.
+
+fixupOutputHooks+=(_doStrip)
+
+_doStrip() {
+ # We don't bother to strip build platform code because it shouldn't make it
+ # to $out anyway---if it does, that's a bigger problem that a lack of
+ # stripping will help catch.
+ local -ra flags=(dontStripHost dontStripTarget)
+ local -ra stripCmds=(STRIP TARGET_STRIP)
+
+ # Optimization
+ if [[ "${STRIP-}" == "${TARGET_STRIP-}" ]]; then
+ dontStripTarget+=1
+ fi
+
+ local i
+ for i in ${!stripCmds[@]}; do
+ local -n flag="${flags[$i]}"
+ local -n stripCmd="${stripCmds[$i]}"
+
+ # `dontStrip` disables them all
+ if [[ "${dontStrip-}" || "${flag-}" ]] || ! type -f "${stripCmd-}" 2>/dev/null
+ then continue; fi
+
+ stripDebugList=${stripDebugList:-lib lib32 lib64 libexec bin sbin}
+ if [ -n "$stripDebugList" ]; then
+ stripDirs "$stripCmd" "$stripDebugList" "${stripDebugFlags:--S}"
+ fi
+
+ stripAllList=${stripAllList:-}
+ if [ -n "$stripAllList" ]; then
+ stripDirs "$stripCmd" "$stripAllList" "${stripAllFlags:--s}"
+ fi
+ done
+}
+
+stripDirs() {
+ local cmd="$1"
+ local dirs="$2"
+ local stripFlags="$3"
+ local dirsNew=
+
+ local d
+ for d in ${dirs}; do
+ if [ -d "$prefix/$d" ]; then
+ dirsNew="${dirsNew} $prefix/$d "
+ fi
+ done
+ dirs=${dirsNew}
+
+ if [ -n "${dirs}" ]; then
+ header "stripping (with command $cmd and flags $stripFlags) in$dirs"
+ find $dirs -type f -print0 | xargs -0 ${xargsFlags:--r} $cmd $commonStripFlags $stripFlags 2>/dev/null || true
+ stopNest
+ fi
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh
new file mode 100644
index 000000000000..ebd3afa05d94
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/update-autotools-gnu-config-scripts.sh
@@ -0,0 +1,12 @@
+preConfigurePhases+=" updateAutotoolsGnuConfigScriptsPhase"
+
+updateAutotoolsGnuConfigScriptsPhase() {
+ if [ -n "${dontUpdateAutotoolsGnuConfigScripts-}" ]; then return; fi
+
+ for script in config.sub config.guess; do
+ for f in $(find . -type f -name "$script"); do
+ echo "Updating Autotools / GNU config script to a newer upstream version: $f"
+ cp -f "@gnu_config@/$script" "$f"
+ done
+ done
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh
new file mode 100644
index 000000000000..53335d7a9a7a
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/use-old-cxx-abi.sh
@@ -0,0 +1 @@
+export NIX_CFLAGS_COMPILE+=" -D_GLIBCXX_USE_CXX11_ABI=0"
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh
new file mode 100644
index 000000000000..ada1b56760d6
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/validate-pkg-config.sh
@@ -0,0 +1,18 @@
+# This setup hook validates each pkgconfig file in each output.
+
+fixupOutputHooks+=(_validatePkgConfig)
+
+_validatePkgConfig() {
+ local bail=0
+ for pc in $(find "$prefix" -name '*.pc'); do
+ # Do not fail immediately. It's nice to see all errors when
+ # there are multiple pkgconfig files.
+ if ! pkg-config --validate "$pc"; then
+ bail=1
+ fi
+ done
+
+ if [ $bail -eq 1 ]; then
+ exit 1
+ fi
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh
new file mode 100644
index 000000000000..6130f32bef86
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/win-dll-link.sh
@@ -0,0 +1,45 @@
+
+fixupOutputHooks+=(_linkDLLs)
+
+# For every *.{exe,dll} in $output/bin/ we try to find all (potential)
+# transitive dependencies and symlink those DLLs into $output/bin
+# so they are found on invocation.
+# (DLLs are first searched in the directory of the running exe file.)
+# The links are relative, so relocating whole /nix/store won't break them.
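+# For example (illustrative): if $prefix/bin/foo.exe imports bar.dll and
+# bar.dll is found in another output's bin directory on DLLPATH, a relative
+# symlink to bar.dll is created next to foo.exe, together with links to any
+# other DLLs that live in the same directory as bar.dll.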
+_linkDLLs() {
+(
+ if [ ! -d "$prefix/bin" ]; then exit; fi
+ cd "$prefix/bin"
+
+ # Compose path list where DLLs should be located:
+ # prefix $PATH by currently-built outputs
+ local DLLPATH=""
+ local outName
+ for outName in $outputs; do
+ addToSearchPath DLLPATH "${!outName}/bin"
+ done
+ DLLPATH="$DLLPATH:$PATH"
+
+ echo DLLPATH="'$DLLPATH'"
+
+ linkCount=0
+ # Iterate over any DLL that we depend on.
+ local dll
+ for dll in $($OBJDUMP -p *.{exe,dll} | sed -n 's/.*DLL Name: \(.*\)/\1/p' | sort -u); do
+ if [ -e "./$dll" ]; then continue; fi
+ # Locate the DLL - it should be an *executable* file on $DLLPATH.
+ local dllPath="$(PATH="$DLLPATH" type -P "$dll")"
+ if [ -z "$dllPath" ]; then continue; fi
+ # That DLL might have its own (transitive) dependencies,
+ # so add also all DLLs from its directory to be sure.
+ local dllPath2
+ for dllPath2 in "$dllPath" "$(dirname $(readlink "$dllPath" || echo "$dllPath"))"/*.dll; do
+ if [ -e ./"$(basename "$dllPath2")" ]; then continue; fi
+ CYGWIN+=\ winsymlinks:nativestrict ln -sr "$dllPath2" .
+ linkCount=$(($linkCount+1))
+ done
+ done
+ echo "Created $linkCount DLL link(s) in $prefix/bin"
+)
+}
+
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix
new file mode 100644
index 000000000000..d0ea088bf71e
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/default.nix
@@ -0,0 +1,177 @@
+{ stdenv
+, lib
+, makeSetupHook
+, makeWrapper
+, gobject-introspection
+, isGraphical ? true
+, gtk3
+, librsvg
+, dconf
+, callPackage
+, wrapGAppsHook
+, writeTextFile
+}:
+
+makeSetupHook {
+ deps = lib.optionals (!stdenv.isDarwin) [
+ # It is highly probable that a program will use GSettings,
+ # at minimum through GTK file chooser dialogue.
+ # Let’s add a GIO module for “dconf” GSettings backend
+ # to avoid falling back to “memory” backend. This is
+ # required for GSettings-based settings to be persisted.
+ # Unfortunately, it also requires the user to have dconf
+ # D-Bus service enabled globally (e.g. through a NixOS module).
+ dconf.lib
+ ] ++ lib.optionals isGraphical [
+ # TODO: remove this, packages should depend on GTK explicitly.
+ gtk3
+
+ # librsvg provides a module for gdk-pixbuf to allow rendering
+ # SVG icons. Most icon themes are SVG-based and so are some
+ # graphics in GTK (e.g. cross for closing window in window title bar)
+ # so it is pretty much required for applications using GTK.
+ librsvg
+ ] ++ [
+
+ # We use the wrapProgram function.
+ makeWrapper
+ ];
+ substitutions = {
+ passthru.tests = let
+ sample-project = ./tests/sample-project;
+
+ testLib = callPackage ./tests/lib.nix { };
+ inherit (testLib) expectSomeLineContainingYInFileXToMentionZ;
+ in rec {
+ # Simple derivation containing a program and a daemon.
+ basic = stdenv.mkDerivation {
+ name = "basic";
+
+ src = sample-project;
+
+ nativeBuildInputs = [ wrapGAppsHook ];
+
+ installFlags = [ "bin-foo" "libexec-bar" ];
+ };
+
+ # The wrapper for executable files should add path to dconf GIO module.
+ basic-contains-dconf = let
+ tested = basic;
+ in testLib.runTest "basic-contains-dconf" (
+ testLib.skip stdenv.isDarwin ''
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GIO_EXTRA_MODULES=" "${dconf.lib}/lib/gio/modules"}
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GIO_EXTRA_MODULES=" "${dconf.lib}/lib/gio/modules"}
+ ''
+ );
+
+ # Simple derivation containing a gobject-introspection typelib.
+ typelib-Mahjong = stdenv.mkDerivation {
+ name = "typelib-Mahjong";
+
+ src = sample-project;
+
+ installFlags = [ "typelib-Mahjong" ];
+ };
+
+ # Simple derivation using a typelib.
+ typelib-user = stdenv.mkDerivation {
+ name = "typelib-user";
+
+ src = sample-project;
+
+ nativeBuildInputs = [
+ gobject-introspection
+ wrapGAppsHook
+ ];
+
+ buildInputs = [
+ typelib-Mahjong
+ ];
+
+ installFlags = [ "bin-foo" "libexec-bar" ];
+ };
+
+ # Testing cooperation with gobject-introspection setup hook,
+ # which should populate GI_TYPELIB_PATH variable with paths
+ # to typelibs among the derivation’s dependencies.
+ # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
+ typelib-user-has-gi-typelib-path = let
+ tested = typelib-user;
+ in testLib.runTest "typelib-user-has-gi-typelib-path" ''
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH=" "${typelib-Mahjong}/lib/girepository-1.0"}
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH=" "${typelib-Mahjong}/lib/girepository-1.0"}
+ '';
+
+ # Simple derivation containing a gobject-introspection typelib in lib output.
+ typelib-Bechamel = stdenv.mkDerivation {
+ name = "typelib-Bechamel";
+
+ outputs = [ "out" "lib" ];
+
+ src = sample-project;
+
+ makeFlags = [
+ "LIBDIR=${placeholder "lib"}/lib"
+ ];
+
+ installFlags = [ "typelib-Bechamel" ];
+ };
+
+ # Simple derivation using a typelib from non-default output.
+ typelib-multiout-user = stdenv.mkDerivation {
+ name = "typelib-multiout-user";
+
+ src = sample-project;
+
+ nativeBuildInputs = [
+ gobject-introspection
+ wrapGAppsHook
+ ];
+
+ buildInputs = [
+ typelib-Bechamel
+ ];
+
+ installFlags = [ "bin-foo" "libexec-bar" ];
+ };
+
+ # Testing cooperation with gobject-introspection setup hook,
+ # which should populate GI_TYPELIB_PATH variable with paths
+ # to typelibs among the derivation’s dependencies,
+ # even when they are not in default output.
+ # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
+ typelib-multiout-user-has-gi-typelib-path = let
+ tested = typelib-multiout-user;
+ in testLib.runTest "typelib-multiout-user-has-gi-typelib-path" ''
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH=" "${typelib-Bechamel.lib}/lib/girepository-1.0"}
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH=" "${typelib-Bechamel.lib}/lib/girepository-1.0"}
+ '';
+
+ # Simple derivation that contains a typelib as well as a program using it.
+ typelib-self-user = stdenv.mkDerivation {
+ name = "typelib-self-user";
+
+ src = sample-project;
+
+ nativeBuildInputs = [
+ gobject-introspection
+ wrapGAppsHook
+ ];
+
+ installFlags = [ "typelib-Cow" "bin-foo" "libexec-bar" ];
+ };
+
+ # Testing cooperation with gobject-introspection setup hook,
+ # which should add the path to derivation’s own typelibs
+ # to GI_TYPELIB_PATH variable.
+ # The resulting GI_TYPELIB_PATH should be picked up by the wrapper.
+ # https://github.com/NixOS/nixpkgs/issues/85515
+ typelib-self-user-has-gi-typelib-path = let
+ tested = typelib-self-user;
+ in testLib.runTest "typelib-self-user-has-gi-typelib-path" ''
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/bin/foo" "GI_TYPELIB_PATH=" "${typelib-self-user}/lib/girepository-1.0"}
+ ${expectSomeLineContainingYInFileXToMentionZ "${tested}/libexec/bar" "GI_TYPELIB_PATH=" "${typelib-self-user}/lib/girepository-1.0"}
+ '';
+ };
+ };
+} ./wrap-gapps-hook.sh
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix
new file mode 100644
index 000000000000..1757bdbbe250
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/lib.nix
@@ -0,0 +1,30 @@
+{ runCommand
+}:
+
+rec {
+ runTest = name: body: runCommand name { } ''
+ set -o errexit
+ ${body}
+ touch $out
+ '';
+
+ skip = cond: text:
+ if cond then ''
+ echo "Skipping test $name" > /dev/stderr
+ '' else text;
+
+ fail = text: ''
+ echo "FAIL: $name: ${text}" > /dev/stderr
+ exit 1
+ '';
+
+ expectSomeLineContainingYInFileXToMentionZ = file: filter: expected: ''
+ if ! cat "${file}" | grep "${filter}"; then
+ ${fail "The file “${file}” should include a line containing “${filter}”."}
+ fi
+
+ if ! cat "${file}" | grep "${filter}" | grep ${expected}; then
+ ${fail "The file “${file}” should include a line containing “${filter}” that also contains “${expected}”."}
+ fi
+ '';
+}
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile
new file mode 100644
index 000000000000..5d234db11a0b
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/tests/sample-project/Makefile
@@ -0,0 +1,30 @@
+PREFIX = $(out)
+BINDIR = $(PREFIX)/bin
+LIBEXECDIR = $(PREFIX)/libexec
+LIBDIR = $(PREFIX)/lib
+TYPELIBDIR = $(LIBDIR)/girepository-1.0
+
+all:
+ echo "Compiling…"
+install:
+ echo "Installing…"
+
+bin:
+ mkdir -p $(BINDIR)
+# Adds `bin-${foo}` targets that install the `${foo}` executable to `$(BINDIR)`.
+bin-%: bin
+ touch $(BINDIR)/$(@:bin-%=%)
+ chmod +x $(BINDIR)/$(@:bin-%=%)
+
+libexec:
+ mkdir -p $(LIBEXECDIR)
+# Adds `libexec-${foo}` targets that install the `${foo}` executable to `$(LIBEXECDIR)`.
+libexec-%: libexec
+ touch $(LIBEXECDIR)/$(@:libexec-%=%)
+ chmod +x $(LIBEXECDIR)/$(@:libexec-%=%)
+
+typelib:
+ mkdir -p $(TYPELIBDIR)
+# Adds `typelib-${foo}` targets that install the `${foo}-1.0.typelib` file to `$(TYPELIBDIR)`.
+typelib-%: typelib
+ touch $(TYPELIBDIR)/$(@:typelib-%=%)-1.0.typelib
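+
+# For example (illustrative), `make install bin-foo typelib-Mahjong` creates
+# $(BINDIR)/foo and $(TYPELIBDIR)/Mahjong-1.0.typelib.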
diff --git a/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh
new file mode 100644
index 000000000000..1a46e075dbe7
--- /dev/null
+++ b/infra/libkookie/nixpkgs/pkgs/build-support/setup-hooks/wrap-gapps-hook/wrap-gapps-hook.sh
@@ -0,0 +1,93 @@
+# shellcheck shell=bash
+gappsWrapperArgs=()
+
+find_gio_modules() {
+ if [ -d "$1/lib/gio/modules" ] && [ -n "$(ls -A "$1/lib/gio/modules")" ] ; then
+ gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$1/lib/gio/modules")
+ fi
+}
+
+addEnvHooks "${targetOffset:?}" find_gio_modules
+
+gappsWrapperArgsHook() {
+ if [ -n "$GDK_PIXBUF_MODULE_FILE" ]; then
+ gappsWrapperArgs+=(--set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE")
+ fi
+
+ if [ -n "$XDG_ICON_DIRS" ]; then
+ gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$XDG_ICON_DIRS")
+ fi
+
+ if [ -n "$GSETTINGS_SCHEMAS_PATH" ]; then
+ gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH")
+ fi
+
+ # Check for prefix as well
+ if [ -d "${prefix:?}/share" ]; then
+ gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$prefix/share")
+ fi
+
+ if [ -d "$prefix/lib/gio/modules" ] && [ -n "$(ls -A "$prefix/lib/gio/modules")" ]; then
+ gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$prefix/lib/gio/modules")
+ fi
+
+ for v in ${wrapPrefixVariables:-} GST_PLUGIN_SYSTEM_PATH_1_0 GI_TYPELIB_PATH GRL_PLUGIN_PATH; do
+ if [ -n "${!v}" ]; then
+ gappsWrapperArgs+=(--prefix "$v" : "${!v}")
+ fi
+ done
+}
+
+preFixupPhases+=" gappsWrapperArgsHook"
+
+wrapGApp() {
+ local program="$1"
+ shift 1
+ wrapProgram "$program" "${gappsWrapperArgs[@]}" "$@"
+}
+
+# Note: $gappsWrapperArgs still gets defined even if ${dontWrapGApps-} is set.
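+# A common pattern (sketch, not prescriptive): a package sets dontWrapGApps and
+# reuses the collected arguments in a manual wrapProgram call, e.g. in
+# postFixup:
+#
+#   wrapProgram "$out/bin/foo" "${gappsWrapperArgs[@]}"
+#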
+wrapGAppsHook() {
+ # guard against running multiple times (e.g. due to propagation)
+ [ -z "$wrapGAppsHookHasRun" ] || return 0
+ wrapGAppsHookHasRun=1
+
+ if [[ -z "${dontWrapGApps:-}" ]]; then
+ targetDirsThatExist=()
+ targetDirsRealPath=()
+
+ # wrap binaries
+ targetDirs=("${prefix}/bin" "${prefix}/libexec")
+ for targetDir in "${targetDirs[@]}"; do
+ if [[ -d "${targetDir}" ]]; then
+ targetDirsThatExist+=("${targetDir}")
+ targetDirsRealPath+=("$(realpath "${targetDir}")/")
+ find "${targetDir}" -type f -executable -print0 |
+ while IFS= read -r -d '' file; do
+ echo "Wrapping program '${file}'"
+ wrapGApp "${file}"
+ done
+ fi
+ done
+
+ # wrap links to binaries that point outside targetDirs
+ # Note: links to binaries within targetDirs do not need
+ # to be wrapped as the binaries have already been wrapped
+ if [[ ${#targetDirsThatExist[@]} -ne 0 ]]; then
+ find "${targetDirsThatExist[@]}" -type l -xtype f -executable -print0 |
+ while IFS= read -r -d '' linkPath; do
+ linkPathReal=$(realpath "${linkPath}")
+ for targetPath in "${targetDirsRealPath[@]}"; do
+ if [[ "$linkPathReal" == "$targetPath"* ]]; then
+ echo "Not wrapping link: '$linkPath' (already wrapped)"
+ continue 2
+ fi
+ done
+ echo "Wrapping link: '$linkPath'"
+ wrapGApp "${linkPath}"
+ done
+ fi
+ fi
+}
+
+fixupOutputHooks+=(wrapGAppsHook)