aboutsummaryrefslogtreecommitdiff
path: root/nixpkgs/maintainers/scripts
diff options
context:
space:
mode:
authorKatharina Fey <kookie@spacekookie.de>2019-10-05 22:42:42 +0000
committerKatharina Fey <kookie@spacekookie.de>2019-10-05 22:44:50 +0000
commit73d865b1dae7585d0eff167271dabe77c9d0b8e6 (patch)
tree337324fab29014f3d60a8bff4979e397fb556d88 /nixpkgs/maintainers/scripts
parent670a2de0037acadb83433165344710dd3ac03adf (diff)
parente14d8e29606feddb29d7c27ea62dd514ef80f1e4 (diff)
Replacing nixcfg with libkookierebuild
Generally, nixcfg grew out of a dotfiles repository, that happened to also have some scripts in it. As more and more of the configuration was replaced with nix specifics (home-manager, etc...), so did nixcfg change over time (previously "stuff"). As part of this, kookiepkgs was introduced alongside nixcfg, to make it easier to add custom things to nixpkgs-based systems (NixOS). Additionally, the core system configuration was handled via private infrastructure repositories, each specific to the machine in question. The problem with this approach is a lot of redundancy when building non-userspace (read home-manager) systems and a lot of chaos with having to cherry-pick commits from different branches to work with nixpkgs trees in development. Ultimately, keeping both new package definitions, patches and configuration for the root system and userspace (home-manager) in the same repository is a _much_ better approach to solving these issues. And as such, libkookie was started: the general idea is that it includes all nix expressions that are relevant to _any_ of my computers. Under `roots`, a machine can have its primary configuration file which is built and copied into the nix store (so that nixpkgs can always point at the version a generation was built with, not what is on disk). Overlays contains everything that kookiepkgs used to, modules contains both system-level modules (only required on NixOS), as well as anything that is being built with home-manager. Modules are all kept in the same tree, however some require system-level access while others don't. There could be some kind of list to distinguish the two, so that userspace-only systems can still take advantage of libkookie.
Diffstat (limited to 'nixpkgs/maintainers/scripts')
-rw-r--r--nixpkgs/maintainers/scripts/all-tarballs.nix16
-rwxr-xr-xnixpkgs/maintainers/scripts/check-maintainer-github-handles.sh66
-rwxr-xr-xnixpkgs/maintainers/scripts/copy-tarballs.pl226
-rwxr-xr-xnixpkgs/maintainers/scripts/debian-patches.sh34
-rwxr-xr-xnixpkgs/maintainers/scripts/dep-licenses.sh57
-rw-r--r--nixpkgs/maintainers/scripts/eval-release.nix24
-rwxr-xr-xnixpkgs/maintainers/scripts/eval-release.sh11
-rwxr-xr-xnixpkgs/maintainers/scripts/fetch-kde-qt.sh61
-rw-r--r--nixpkgs/maintainers/scripts/find-tarballs.nix50
-rwxr-xr-xnixpkgs/maintainers/scripts/hydra-eval-failures.py112
-rwxr-xr-xnixpkgs/maintainers/scripts/hydra_eval_check13
-rw-r--r--nixpkgs/maintainers/scripts/luarocks-config.lua4
-rw-r--r--nixpkgs/maintainers/scripts/luarocks-packages.csv70
-rwxr-xr-xnixpkgs/maintainers/scripts/nix-call-package5
-rwxr-xr-xnixpkgs/maintainers/scripts/nix-diff.sh277
-rw-r--r--nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix25
-rwxr-xr-xnixpkgs/maintainers/scripts/nix-generate-from-cpan.pl471
-rw-r--r--nixpkgs/maintainers/scripts/nixpkgs-lint.nix23
-rwxr-xr-xnixpkgs/maintainers/scripts/nixpkgs-lint.pl173
-rwxr-xr-xnixpkgs/maintainers/scripts/patchelf-hints.sh84
-rwxr-xr-xnixpkgs/maintainers/scripts/rebuild-amount.sh127
-rwxr-xr-xnixpkgs/maintainers/scripts/update-channel-branches.sh112
-rwxr-xr-xnixpkgs/maintainers/scripts/update-discord36
-rwxr-xr-xnixpkgs/maintainers/scripts/update-luarocks-packages136
-rw-r--r--nixpkgs/maintainers/scripts/update-luarocks-shell.nix9
-rwxr-xr-xnixpkgs/maintainers/scripts/update-python-libraries5
-rwxr-xr-xnixpkgs/maintainers/scripts/update-ruby-packages13
-rwxr-xr-xnixpkgs/maintainers/scripts/update.nix157
-rw-r--r--nixpkgs/maintainers/scripts/update.py79
-rw-r--r--nixpkgs/maintainers/scripts/vanity-manual-equalities.txt7
-rwxr-xr-xnixpkgs/maintainers/scripts/vanity.sh122
31 files changed, 2605 insertions, 0 deletions
diff --git a/nixpkgs/maintainers/scripts/all-tarballs.nix b/nixpkgs/maintainers/scripts/all-tarballs.nix
new file mode 100644
index 00000000000..6a4de8a4b95
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/all-tarballs.nix
@@ -0,0 +1,16 @@
+/* Helper expression for copy-tarballs. This returns (nearly) all
+ tarballs used by the free packages in Nixpkgs.
+
+ Typical usage:
+
+ $ copy-tarballs.pl --expr 'import <nixpkgs/maintainers/scripts/all-tarballs.nix>'
+*/
+
+import ../../pkgs/top-level/release.nix
+ { # Don't apply ‘hydraJob’ to jobs, because then we can't get to the
+ # dependency graph.
+ scrubJobs = false;
+ # No need to evaluate on i686.
+ supportedSystems = [ "x86_64-linux" ];
+ limitedSupportedSystems = [];
+ }
diff --git a/nixpkgs/maintainers/scripts/check-maintainer-github-handles.sh b/nixpkgs/maintainers/scripts/check-maintainer-github-handles.sh
new file mode 100755
index 00000000000..879a2e452cb
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/check-maintainer-github-handles.sh
@@ -0,0 +1,66 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p jq parallel
+
+# Example how to work with the `lib.maintainers` attrset.
+# Can be used to check whether all user handles are still valid.
+
+set -e
+
+# nixpkgs='<nixpkgs>'
+# if [ -n "$1" ]; then
+
+function checkCommits {
+ local user="$1"
+ local tmp=$(mktemp)
+ curl --silent -w "%{http_code}" \
+ "https://github.com/NixOS/nixpkgs/commits?author=$user" \
+ > "$tmp"
+ # the last line of tmp contains the http status
+ local status=$(tail -n1 "$tmp")
+ local ret=
+ case $status in
+ 200) if <"$tmp" grep -i "no commits found" > /dev/null; then
+ ret=1
+ else
+ ret=0
+ fi
+ ;;
+ # because of github’s hard request limits, this can take some time
+ 429) sleep 2
+ printf "."
+ checkCommits "$user"
+ ret=$?
+ ;;
+ *) printf "BAD STATUS: $(tail -n1 $tmp) for %s\n" "$user"; ret=1
+ ret=1
+ ;;
+ esac
+ rm "$tmp"
+ return $ret
+}
+export -f checkCommits
+
+function checkUser {
+ local user="$1"
+ local status=
+ status="$(curl --silent --head "https://github.com/${user}" | grep Status)"
+ # checks whether a user handle can be found on github
+ if [[ "$status" =~ 404 ]]; then
+ printf "%s\t\t\t\t%s\n" "$status" "$user"
+ # checks whether the user handle has any nixpkgs commits
+ elif checkCommits "$user"; then
+ printf "OK!\t\t\t\t%s\n" "$user"
+ else
+ printf "No Commits!\t\t\t%s\n" "$user"
+ fi
+}
+export -f checkUser
+
+# output the maintainers set as json
+# and filter out the github username of each maintainer (if it exists)
+# then check some at the same time
+nix-instantiate -A lib.maintainers --eval --strict --json \
+ | jq -r '.[]|.github|select(.)' \
+ | parallel -j5 checkUser
+
+# parallel -j100 checkUser ::: "eelco" "profpatsch" "Profpatsch" "a"
diff --git a/nixpkgs/maintainers/scripts/copy-tarballs.pl b/nixpkgs/maintainers/scripts/copy-tarballs.pl
new file mode 100755
index 00000000000..59696a4432d
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/copy-tarballs.pl
@@ -0,0 +1,226 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.FileSlurp perlPackages.JSON perlPackages.LWPProtocolHttps nixUnstable nixUnstable.perl-bindings
+
+# This command uploads tarballs to tarballs.nixos.org, the
+# content-addressed cache used by fetchurl as a fallback for when
+# upstream tarballs disappear or change. Usage:
+#
+# 1) To upload one or more files:
+#
+# $ copy-tarballs.pl --file /path/to/tarball.tar.gz
+#
+# 2) To upload all files obtained via calls to fetchurl in a Nix derivation:
+#
+# $ copy-tarballs.pl --expr '(import <nixpkgs> {}).hello'
+
+use strict;
+use warnings;
+use File::Basename;
+use File::Path;
+use File::Slurp;
+use JSON;
+use Net::Amazon::S3;
+use Nix::Store;
+
+isValidPath("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-foo"); # FIXME: forces Nix::Store initialisation
+
+sub usage {
+ die "Syntax: $0 [--dry-run] [--exclude REGEXP] [--expr EXPR | --file FILES...]\n";
+}
+
+my $dryRun = 0;
+my $expr;
+my @fileNames;
+my $exclude;
+
+while (@ARGV) {
+ my $flag = shift @ARGV;
+
+ if ($flag eq "--expr") {
+ $expr = shift @ARGV or die "--expr requires an argument";
+ } elsif ($flag eq "--file") {
+ @fileNames = @ARGV;
+ last;
+ } elsif ($flag eq "--dry-run") {
+ $dryRun = 1;
+ } elsif ($flag eq "--exclude") {
+ $exclude = shift @ARGV or die "--exclude requires an argument";
+ } else {
+ usage();
+ }
+}
+
+
+# S3 setup.
+my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "AWS_ACCESS_KEY_ID not set\n";
+my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "AWS_SECRET_ACCESS_KEY not set\n";
+
+my $s3 = Net::Amazon::S3->new(
+ { aws_access_key_id => $aws_access_key_id,
+ aws_secret_access_key => $aws_secret_access_key,
+ retry => 1,
+ host => "s3-eu-west-1.amazonaws.com",
+ });
+
+my $bucket = $s3->bucket("nixpkgs-tarballs") or die;
+
+my $doWrite = 0;
+my $cacheFile = ($ENV{"HOME"} or die "\$HOME is not set") . "/.cache/nix/copy-tarballs";
+my %cache;
+$cache{$_} = 1 foreach read_file($cacheFile, err_mode => 'quiet', chomp => 1);
+$doWrite = 1;
+
+END() {
+ File::Path::mkpath(dirname($cacheFile), 0, 0755);
+ write_file($cacheFile, map { "$_\n" } keys %cache) if $doWrite;
+}
+
+sub alreadyMirrored {
+ my ($algo, $hash) = @_;
+ my $key = "$algo/$hash";
+ return 1 if defined $cache{$key};
+ my $res = defined $bucket->get_key($key);
+ $cache{$key} = 1 if $res;
+ return $res;
+}
+
+sub uploadFile {
+ my ($fn, $name) = @_;
+
+ my $md5_16 = hashFile("md5", 0, $fn) or die;
+ my $sha1_16 = hashFile("sha1", 0, $fn) or die;
+ my $sha256_32 = hashFile("sha256", 1, $fn) or die;
+ my $sha256_16 = hashFile("sha256", 0, $fn) or die;
+ my $sha512_32 = hashFile("sha512", 1, $fn) or die;
+ my $sha512_16 = hashFile("sha512", 0, $fn) or die;
+
+ my $mainKey = "sha512/$sha512_16";
+
+ # Create redirects from the other hash types.
+ sub redirect {
+ my ($name, $dest) = @_;
+ #print STDERR "linking $name to $dest...\n";
+ $bucket->add_key($name, "", {
+ 'x-amz-website-redirect-location' => "/" . $dest,
+ 'x-amz-acl' => "public-read"
+ })
+ or die "failed to create redirect from $name to $dest\n";
+ $cache{$name} = 1;
+ }
+ redirect "md5/$md5_16", $mainKey;
+ redirect "sha1/$sha1_16", $mainKey;
+ redirect "sha256/$sha256_32", $mainKey;
+ redirect "sha256/$sha256_16", $mainKey;
+ redirect "sha512/$sha512_32", $mainKey;
+
+ # Upload the file as sha512/<hash-in-base-16>.
+ print STDERR "uploading $fn to $mainKey...\n";
+ $bucket->add_key_filename($mainKey, $fn, {
+ 'x-amz-meta-original-name' => $name,
+ 'x-amz-acl' => "public-read"
+ })
+ or die "failed to upload $fn to $mainKey\n";
+ $cache{$mainKey} = 1;
+}
+
+if (scalar @fileNames) {
+ my $res = 0;
+ foreach my $fn (@fileNames) {
+ eval {
+ if (alreadyMirrored("sha512", hashFile("sha512", 0, $fn))) {
+ print STDERR "$fn is already mirrored\n";
+ } else {
+ uploadFile($fn, basename $fn);
+ }
+ };
+ if ($@) {
+ warn "$@";
+ $res = 1;
+ }
+ }
+ exit $res;
+}
+
+elsif (defined $expr) {
+
+ # Evaluate find-tarballs.nix.
+ my $pid = open(JSON, "-|", "nix-instantiate", "--eval", "--json", "--strict",
+ "<nixpkgs/maintainers/scripts/find-tarballs.nix>",
+ "--arg", "expr", $expr);
+ my $stdout = <JSON>;
+ waitpid($pid, 0);
+ die "$0: evaluation failed\n" if $?;
+ close JSON;
+
+ my $fetches = decode_json($stdout);
+
+ print STDERR "evaluation returned ", scalar(@{$fetches}), " tarballs\n";
+
+ # Check every fetchurl call discovered by find-tarballs.nix.
+ my $mirrored = 0;
+ my $have = 0;
+ foreach my $fetch (sort { $a->{url} cmp $b->{url} } @{$fetches}) {
+ my $url = $fetch->{url};
+ my $algo = $fetch->{type};
+ my $hash = $fetch->{hash};
+ my $name = $fetch->{name};
+
+ if (defined $ENV{DEBUG}) {
+ print "$url $algo $hash\n";
+ next;
+ }
+
+ if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
+ print STDERR "skipping $url (unsupported scheme)\n";
+ next;
+ }
+
+ next if defined $exclude && $url =~ /$exclude/;
+
+ if (alreadyMirrored($algo, $hash)) {
+ $have++;
+ next;
+ }
+
+ my $storePath = makeFixedOutputPath(0, $algo, $hash, $name);
+
+ print STDERR "mirroring $url ($storePath)...\n";
+
+ if ($dryRun) {
+ $mirrored++;
+ next;
+ }
+
+ # Substitute the output.
+ if (!isValidPath($storePath)) {
+ system("nix-store", "-r", $storePath);
+ }
+
+ # Otherwise download the file using nix-prefetch-url.
+ if (!isValidPath($storePath)) {
+ $ENV{QUIET} = 1;
+ $ENV{PRINT_PATH} = 1;
+ my $fh;
+ my $pid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
+ waitpid($pid, 0) or die;
+ if ($? != 0) {
+ print STDERR "failed to fetch $url: $?\n";
+ next;
+ }
+ <$fh>; my $storePath2 = <$fh>; chomp $storePath2;
+ if ($storePath ne $storePath2) {
+ warn "strange: $storePath != $storePath2\n";
+ next;
+ }
+ }
+
+ uploadFile($storePath, $url);
+ $mirrored++;
+ }
+
+ print STDERR "mirrored $mirrored files, already have $have files\n";
+}
+
+else {
+ usage();
+}
diff --git a/nixpkgs/maintainers/scripts/debian-patches.sh b/nixpkgs/maintainers/scripts/debian-patches.sh
new file mode 100755
index 00000000000..b4923fb537e
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/debian-patches.sh
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+# Download patches from debian project
+# Usage $0 debian-patches.txt debian-patches.nix
+# An example input and output files can be found in applications/graphics/xara/
+
+DEB_URL=https://sources.debian.org/data/main
+declare -a deb_patches
+mapfile -t deb_patches < $1
+
+# First letter
+deb_prefix="${deb_patches[0]:0:1}"
+prefix="${DEB_URL}/${deb_prefix}/${deb_patches[0]}/debian/patches"
+
+if [[ -n "$2" ]]; then
+ exec 1> $2
+fi
+
+cat <<EOF
+# Generated by $(basename $0) from $(basename $1)
+let
+ prefix = "${prefix}";
+in
+[
+EOF
+for ((i=1;i < ${#deb_patches[@]}; ++i)); do
+ url="${prefix}/${deb_patches[$i]}"
+ sha256=$(nix-prefetch-url $url)
+ echo " {"
+ echo " url = \"\${prefix}/${deb_patches[$i]}\";"
+ echo " sha256 = \"$sha256\";"
+ echo " }"
+done
+echo "]"
diff --git a/nixpkgs/maintainers/scripts/dep-licenses.sh b/nixpkgs/maintainers/scripts/dep-licenses.sh
new file mode 100755
index 00000000000..28ad22c334f
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/dep-licenses.sh
@@ -0,0 +1,57 @@
+#!/bin/sh
+
+attr=$1
+
+: ${NIXPKGS=/etc/nixos/nixpkgs}
+
+tmp=$(mktemp --tmpdir -d nixpkgs-dep-license.XXXXXX)
+
+exitHandler() {
+ exitCode=$?
+ rm -rf "$tmp"
+ exit $exitCode
+}
+
+trap "exitHandler" EXIT
+
+# fetch the trace and the drvPath of the attribute.
+nix-instantiate $NIXPKGS -A $attr --show-trace > "$tmp/drvPath" 2> "$tmp/trace" || {
+ cat 1>&2 - "$tmp/trace" <<EOF
+An error occurred while evaluating $attr.
+EOF
+ exit 1
+}
+
+# generate a sed script based on the trace output.
+sed '
+ \,@:.*:@, {
+ # \1 *.drv file
+ # \2 License terms
+ s,.*@:drv:\(.*\):\(.*\):@.*,s!\1!\1: \2!; t;,
+ s!Str(\\\"\([^,]*\)\\\",\[\])!\1!g
+ b
+ }
+ d
+' "$tmp/trace" > "$tmp/filter.sed"
+
+if test $(wc -l "$tmp/filter.sed" | sed 's/ .*//') == 0; then
+ echo 1>&2 "
+No derivation mentionned in the stack trace. Either your derivation does
+not use stdenv.mkDerivation or you forgot to use the stdenv adapter named
+traceDrvLicenses.
+
+- defaultStdenv = allStdenvs.stdenv;
++ defaultStdenv = traceDrvLicenses allStdenvs.stdenv;
+"
+ exit 1
+fi
+
+
+# remove all dependencies which are using stdenv.mkDerivation
+echo '
+d
+' >> "$tmp/filter.sed"
+
+nix-store -q --tree $(cat "$tmp/drvPath") | sed -f "$tmp/filter.sed"
+
+exit 0;
diff --git a/nixpkgs/maintainers/scripts/eval-release.nix b/nixpkgs/maintainers/scripts/eval-release.nix
new file mode 100644
index 00000000000..bb9572cbc79
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/eval-release.nix
@@ -0,0 +1,24 @@
+# Evaluate `release.nix' like Hydra would. Too bad nix-instantiate
+# can't do this.
+
+with import ../../lib;
+
+let
+ trace = if builtins.getEnv "VERBOSE" == "1" then builtins.trace else (x: y: y);
+
+ rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" "xbursttools" ];
+
+ # Add the ‘recurseForDerivations’ attribute to ensure that
+ # nix-instantiate recurses into nested attribute sets.
+ recurse = path: attrs:
+ if (builtins.tryEval attrs).success then
+ if isDerivation attrs
+ then
+ if (builtins.tryEval attrs.drvPath).success
+ then { inherit (attrs) name drvPath; }
+ else { failed = true; }
+ else { recurseForDerivations = true; } //
+ mapAttrs (n: v: let path' = path ++ [n]; in trace path' (recurse path' v)) attrs
+ else { };
+
+in recurse [] rel
diff --git a/nixpkgs/maintainers/scripts/eval-release.sh b/nixpkgs/maintainers/scripts/eval-release.sh
new file mode 100755
index 00000000000..e0dfaf1de74
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/eval-release.sh
@@ -0,0 +1,11 @@
+#! /bin/sh
+
+if [[ -z "$VERBOSE" ]]; then
+ echo "You may set VERBOSE=1 to see debug output or to any other non-empty string to make this script completely silent"
+fi
+unset HOME NIXPKGS_CONFIG # Force empty config
+
+# With the default heap size (380MB), nix-instantiate fails:
+# Too many heap sections: Increase MAXHINCR or MAX_HEAP_SECTS
+export GC_INITIAL_HEAP_SIZE=${GC_INITIAL_HEAP_SIZE:-2000000000} # 2GB
+nix-instantiate --strict --eval-only --xml --show-trace "$(dirname "$0")"/eval-release.nix 2>&1 > /dev/null
diff --git a/nixpkgs/maintainers/scripts/fetch-kde-qt.sh b/nixpkgs/maintainers/scripts/fetch-kde-qt.sh
new file mode 100755
index 00000000000..a267a5fa871
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/fetch-kde-qt.sh
@@ -0,0 +1,61 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils findutils gnused nix wget
+
+set -efuo pipefail
+
+SRCS=
+if [ -d "$1" ]; then
+ SRCS="$(pwd)/$1/srcs.nix"
+ . "$1/fetch.sh"
+else
+ SRCS="$(pwd)/$(dirname $1)/srcs.nix"
+ . "$1"
+fi
+
+tmp=$(mktemp -d)
+pushd $tmp >/dev/null
+wget -nH -r -c --no-parent "${WGET_ARGS[@]}" -A '*.tar.xz.sha256' -A '*.mirrorlist' >/dev/null
+find -type f -name '*.mirrorlist' -delete
+
+csv=$(mktemp)
+find . -type f | while read src; do
+ # Sanitize file name
+ filename=$(gawk '{ print $2 }' "$src" | tr '@' '_')
+ nameVersion="${filename%.tar.*}"
+ name=$(echo "$nameVersion" | sed -e 's,-[[:digit:]].*,,' | sed -e 's,-opensource-src$,,' | sed -e 's,-everywhere-src$,,')
+ version=$(echo "$nameVersion" | sed -e 's,^\([[:alpha:]][[:alnum:]]*-\)\+,,')
+ echo "$name,$version,$src,$filename" >>$csv
+done
+
+cat >"$SRCS" <<EOF
+# DO NOT EDIT! This file is generated automatically by fetch-kde-qt.sh
+{ fetchurl, mirror }:
+
+{
+EOF
+
+gawk -F , "{ print \$1 }" $csv | sort | uniq | while read name; do
+ versions=$(gawk -F , "/^$name,/ { print \$2 }" $csv)
+ latestVersion=$(echo "$versions" | sort -rV | head -n 1)
+ src=$(gawk -F , "/^$name,$latestVersion,/ { print \$3 }" $csv)
+ filename=$(gawk -F , "/^$name,$latestVersion,/ { print \$4 }" $csv)
+ url="$(dirname "${src:2}")/$filename"
+ sha256=$(gawk '{ print $1 }' "$src")
+ cat >>"$SRCS" <<EOF
+ $name = {
+ version = "$latestVersion";
+ src = fetchurl {
+ url = "\${mirror}/$url";
+ sha256 = "$sha256";
+ name = "$filename";
+ };
+ };
+EOF
+done
+
+echo "}" >>"$SRCS"
+
+popd >/dev/null
+rm -fr $tmp >/dev/null
+
+rm -f $csv >/dev/null
diff --git a/nixpkgs/maintainers/scripts/find-tarballs.nix b/nixpkgs/maintainers/scripts/find-tarballs.nix
new file mode 100644
index 00000000000..52cce909918
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/find-tarballs.nix
@@ -0,0 +1,50 @@
+# This expression returns a list of all fetchurl calls used by ‘expr’.
+
+with import ../.. { };
+with lib;
+
+{ expr }:
+
+let
+
+ root = expr;
+
+ uniqueUrls = map (x: x.file) (genericClosure {
+ startSet = map (file: { key = file.url; inherit file; }) urls;
+ operator = const [ ];
+ });
+
+ urls = map (drv: { url = head (drv.urls or [ drv.url ]); hash = drv.outputHash; type = drv.outputHashAlgo; name = drv.name; }) fetchurlDependencies;
+
+ fetchurlDependencies =
+ filter
+ (drv: drv.outputHash or "" != "" && drv.outputHashMode or "flat" == "flat"
+ && drv.postFetch or "" == "" && (drv ? url || drv ? urls))
+ dependencies;
+
+ dependencies = map (x: x.value) (genericClosure {
+ startSet = map keyDrv (derivationsIn' root);
+ operator = { key, value }: map keyDrv (immediateDependenciesOf value);
+ });
+
+ derivationsIn' = x:
+ if !canEval x then []
+ else if isDerivation x then optional (canEval x.drvPath) x
+ else if isList x then concatLists (map derivationsIn' x)
+ else if isAttrs x then concatLists (mapAttrsToList (n: v: addErrorContext "while finding tarballs in '${n}':" (derivationsIn' v)) x)
+ else [ ];
+
+ keyDrv = drv: if canEval drv.drvPath then { key = drv.drvPath; value = drv; } else { };
+
+ immediateDependenciesOf = drv:
+ concatLists (mapAttrsToList (n: v: derivationsIn v) (removeAttrs drv ["meta" "passthru"]));
+
+ derivationsIn = x:
+ if !canEval x then []
+ else if isDerivation x then optional (canEval x.drvPath) x
+ else if isList x then concatLists (map derivationsIn x)
+ else [ ];
+
+ canEval = val: (builtins.tryEval val).success;
+
+in uniqueUrls
diff --git a/nixpkgs/maintainers/scripts/hydra-eval-failures.py b/nixpkgs/maintainers/scripts/hydra-eval-failures.py
new file mode 100755
index 00000000000..6e7ec2dbc00
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/hydra-eval-failures.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i python3 -p 'python3.withPackages(ps: with ps; [ requests pyquery click ])'
+
+# To use, just execute this script with --help to display help.
+
+import subprocess
+import json
+import sys
+
+import click
+import requests
+from pyquery import PyQuery as pq
+
+def map_dict (f, d):
+ for k,v in d.items():
+ d[k] = f(v)
+
+maintainers_json = subprocess.check_output([
+ 'nix-instantiate', '-A', 'lib.maintainers', '--eval', '--strict', '--json'
+])
+maintainers = json.loads(maintainers_json)
+MAINTAINERS = map_dict(lambda v: v.get('github', None), maintainers)
+
+def get_response_text(url):
+ return pq(requests.get(url).text) # IO
+
+EVAL_FILE = {
+ 'nixos': 'nixos/release.nix',
+ 'nixpkgs': 'pkgs/top-level/release.nix',
+}
+
+
+def get_maintainers(attr_name):
+ try:
+ nixname = attr_name.split('.')
+ meta_json = subprocess.check_output([
+ 'nix-instantiate',
+ '--eval',
+ '--strict',
+ '-A',
+ '.'.join(nixname[1:]) + '.meta',
+ EVAL_FILE[nixname[0]],
+ '--arg',
+ 'nixpkgs',
+ './.',
+ '--json'])
+ meta = json.loads(meta_json)
+ return meta.get('maintainers', [])
+ except:
+ return []
+
+def filter_github_users(maintainers):
+ github_only = []
+ for i in maintainers:
+ if i.get('github'):
+ github_only.append(i)
+ return github_only
+
+def print_build(table_row):
+ a = pq(table_row)('a')[1]
+ print("- [ ] [{}]({})".format(a.text, a.get('href')), flush=True)
+
+ job_maintainers = filter_github_users(get_maintainers(a.text))
+ if job_maintainers:
+ print(" - maintainers: {}".format(" ".join(map(lambda u: '@' + u.get('github'), job_maintainers))))
+ # TODO: print last three persons that touched this file
+ # TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
+
+ sys.stdout.flush()
+
+@click.command()
+@click.option(
+ '--jobset',
+ default="nixos/release-19.09",
+ help='Hydra project like nixos/release-19.09')
+def cli(jobset):
+ """
+ Given a Hydra project, inspect latest evaluation
+ and print a summary of failed builds
+ """
+
+ url = "http://hydra.nixos.org/jobset/{}".format(jobset)
+
+ # get the last evaluation
+ click.echo(click.style(
+ 'Getting latest evaluation for {}'.format(url), fg='green'))
+ d = get_response_text(url)
+ evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
+ latest_eval_url = evaluations[0].get('href')
+
+ # parse last evaluation page
+ click.echo(click.style(
+ 'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
+ d = get_response_text(latest_eval_url + '?full=1')
+
+ # TODO: aborted evaluations
+ # TODO: dependency failed without propagated builds
+ print('\nFailures:')
+ for tr in d('img[alt="Failed"]').parents('tr'):
+ print_build(tr)
+
+ print('\nDependency failures:')
+ for tr in d('img[alt="Dependency failed"]').parents('tr'):
+ print_build(tr)
+
+
+
+if __name__ == "__main__":
+ try:
+ cli()
+ except Exception as e:
+ import pdb;pdb.post_mortem()
diff --git a/nixpkgs/maintainers/scripts/hydra_eval_check b/nixpkgs/maintainers/scripts/hydra_eval_check
new file mode 100755
index 00000000000..c8e03424f32
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/hydra_eval_check
@@ -0,0 +1,13 @@
+#! /bin/sh
+
+# give absolute path of release.nix as argument
+hydra_eval_jobs \
+ --argstr system x86_64-linux \
+ --argstr system i686-linux \
+ --argstr system x86_64-darwin \
+ --argstr system i686-cygwin \
+ --argstr system x86_64-cygwin \
+ --argstr system i686-freebsd \
+ --arg officialRelease false \
+ --arg nixpkgs "{ outPath = builtins.storePath ./. ; rev = 1234; }" \
+ $@
diff --git a/nixpkgs/maintainers/scripts/luarocks-config.lua b/nixpkgs/maintainers/scripts/luarocks-config.lua
new file mode 100644
index 00000000000..89e74c00ea8
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/luarocks-config.lua
@@ -0,0 +1,4 @@
+rocks_servers = {
+ "https://luarocks.org"
+}
+version_check_on_fail = false
diff --git a/nixpkgs/maintainers/scripts/luarocks-packages.csv b/nixpkgs/maintainers/scripts/luarocks-packages.csv
new file mode 100644
index 00000000000..a6fbcd5a0be
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/luarocks-packages.csv
@@ -0,0 +1,70 @@
+# nix name, luarocks name, server, version,luaversion,maintainers
+alt-getopt,,,,,arobyn
+ansicolors,,,,,
+argparse,,,,,
+basexx,,,,,
+binaryheap,,,,,vcunat
+bit32,,,,lua5_1,lblasc
+busted,,,,,
+cjson,lua-cjson,,,,
+compat53,,,,,vcunat
+coxpcall,,,1.17.0-1,,
+cqueues,,,,,vcunat
+cyrussasl,,,,,vcunat
+digestif,,http://luarocks.org/dev,,lua5_3,
+dkjson,,,,,
+fifo,,,,,
+http,,,,,vcunat
+inspect,,,,,
+ldoc,,,,,
+lgi,,,,,
+ljsyscall,,,,lua5_1,lblasc
+lpeg,,,,,vyp
+lpeg_patterns,,,,,
+lpeglabel,,,,,
+lpty,,,,,
+lrexlib-gnu,,,,,
+lrexlib-pcre,,,,,vyp
+lrexlib-posix,,,,,
+ltermbox,,,,,
+lua-cmsgpack,,,,,
+lua-iconv,,,,,
+lua-lsp,,http://luarocks.org/dev,,,
+lua-messagepack,,,,,
+lua-term,,,,,
+lua-toml,,,,,
+lua-zlib,,,,,koral
+lua_cliargs,,,,,
+luabitop,,,,,
+luacheck,,,,,
+luadbi,,,,,
+luadbi-mysql,,,,,
+luadbi-postgresql,,,,,
+luadbi-sqlite3,,,,,
+luaevent,,,,,
+luaexpat,,,1.3.0-1,,arobyn flosse
+luaffi,,http://luarocks.org/dev,,,
+luafilesystem,,,1.7.0-2,,flosse vcunat
+luaossl,,,,lua5_1,vcunat
+luaposix,,,,,vyp lblasc
+luasec,,,,,flosse
+luasocket,,,,,
+luasql-sqlite3,,,,,vyp
+luassert,,,,,
+luasystem,,,,,
+luautf8,,,,,pstn
+luazip,,,,,
+lua-yajl,,,,,pstn
+luuid,,,,,
+luv,,,,,
+markdown,,,,,
+mediator_lua,,,,,
+mpack,,,,,
+moonscript,,,,,arobyn
+nvim-client,,,,,
+penlight,,,,,
+rapidjson,,,,,
+say,,,,,
+std__debug,std._debug,,,,
+std_normalize,std.normalize,,,,
+stdlib,,,,,vyp
diff --git a/nixpkgs/maintainers/scripts/nix-call-package b/nixpkgs/maintainers/scripts/nix-call-package
new file mode 100755
index 00000000000..be478fca2b7
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-call-package
@@ -0,0 +1,5 @@
+#! /bin/sh
+
+echo "let pkgs = import <nixpkgs$2> {}; x = pkgs.callPackage $1 { $3 }; in ${4:-x}" |
+nix-instantiate --show-trace - |
+xargs nix-store -r -K
diff --git a/nixpkgs/maintainers/scripts/nix-diff.sh b/nixpkgs/maintainers/scripts/nix-diff.sh
new file mode 100755
index 00000000000..0c65e29cf43
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-diff.sh
@@ -0,0 +1,277 @@
+#!/usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils gnugrep gnused
+
+################################################################################
+# nix-diff.sh #
+################################################################################
+# This script "diffs" Nix profile generations. #
+# #
+# Example: #
+################################################################################
+# > nix-diff.sh 90 92 #
+# + gnumake-4.2.1 #
+# + gnumake-4.2.1-doc #
+# - htmldoc-1.8.29 #
+################################################################################
+# The example shows that as of generation 92 and since generation 90, #
+# gnumake-4.2.1 and gnumake-4.2.1-doc have been installed, while #
+# htmldoc-1.8.29 has been removed. #
+# #
+# The example above shows the default, minimal output mode of this script. #
+# For more features, run `nix-diff.sh -h` for usage instructions. #
+################################################################################
+
+usage() {
+ cat <<EOF
+usage: nix-diff.sh [-h | [-p profile | -s] [-q] [-l] [range]]
+-h: print this message before exiting
+-q: list the derivations installed in the parent generation
+-l: diff every available intermediate generation between parent and
+ child
+-p profile: specify the Nix profile to use
+ * defaults to ~/.nix-profile
+-s: use the system profile
+ * equivalent to: -p /nix/var/nix/profiles/system
+profile: * should be something like /nix/var/nix/profiles/default, not a
+ generation link like /nix/var/nix/profiles/default-2-link
+range: the range of generations to diff
+ * the following patterns are allowed, where A, B, and N are positive
+ integers, and G is the currently active generation:
+ A..B => diffs from generation A to generation B
+ ~N => diffs from the Nth newest generation (older than G) to G
+ A => diffs from generation A to G
+ * defaults to ~1
+EOF
+}
+
+usage_tip() {
+ echo 'run `nix-diff.sh -h` for usage instructions' >&2
+ exit 1
+}
+
+while getopts :hqlp:s opt; do
+ case $opt in
+ h)
+ usage
+ exit
+ ;;
+ q)
+ opt_query=1
+ ;;
+ l)
+ opt_log=1
+ ;;
+ p)
+ opt_profile=$OPTARG
+ ;;
+ s)
+ opt_profile=/nix/var/nix/profiles/system
+ ;;
+ \?)
+ echo "error: invalid option -$OPTARG" >&2
+ usage_tip
+ ;;
+ esac
+done
+shift $((OPTIND-1))
+
+if [ -n "$opt_profile" ]; then
+ if ! [ -L "$opt_profile" ]; then
+ echo "error: expecting \`$opt_profile\` to be a symbolic link" >&2
+ usage_tip
+ fi
+else
+ opt_profile=$(readlink ~/.nix-profile)
+ if (( $? != 0 )); then
+ echo 'error: unable to dereference `~/.nix-profile`' >&2
+ echo 'specify the profile manually with the `-p` flag' >&2
+ usage_tip
+ fi
+fi
+
+list_gens() {
+ nix-env -p "$opt_profile" --list-generations \
+ | sed -r 's:^\s*::' \
+ | cut -d' ' -f1
+}
+
+current_gen() {
+ nix-env -p "$opt_profile" --list-generations \
+ | grep -E '\(current\)\s*$' \
+ | sed -r 's:^\s*::' \
+ | cut -d' ' -f1
+}
+
+neg_gen() {
+ local i=0 from=$1 n=$2 tmp
+ for gen in $(list_gens | sort -rn); do
+ if ((gen < from)); then
+ tmp=$gen
+ ((i++))
+ ((i == n)) && break
+ fi
+ done
+ if ((i < n)); then
+ echo -n "error: there aren't $n generation(s) older than" >&2
+ echo " generation $from" >&2
+ return 1
+ fi
+ echo $tmp
+}
+
+match() {
+ argv=("$@")
+ for i in $(seq $(($#-1))); do
+ if grep -E "^${argv[$i]}\$" <(echo "$1") >/dev/null; then
+ echo $i
+ return
+ fi
+ done
+ echo 0
+}
+
+case $(match "$1" '' '[0-9]+' '[0-9]+\.\.[0-9]+' '~[0-9]+') in
+ 1)
+ diffTo=$(current_gen)
+ diffFrom=$(neg_gen $diffTo 1)
+ (($? == 1)) && usage_tip
+ ;;
+ 2)
+ diffFrom=$1
+ diffTo=$(current_gen)
+ ;;
+ 3)
+ diffFrom=${1%%.*}
+ diffTo=${1##*.}
+ ;;
+ 4)
+ diffTo=$(current_gen)
+ diffFrom=$(neg_gen $diffTo ${1#*~})
+ (($? == 1)) && usage_tip
+ ;;
+ 0)
+ echo 'error: invalid invocation' >&2
+ usage_tip
+ ;;
+esac
+
+dirA="${opt_profile}-${diffFrom}-link"
+dirB="${opt_profile}-${diffTo}-link"
+
+declare -a temp_files
+temp_length() {
+ echo -n ${#temp_files[@]}
+}
+temp_make() {
+ temp_files[$(temp_length)]=$(mktemp)
+}
+temp_clean() {
+ rm -f ${temp_files[@]}
+}
+temp_name() {
+ echo -n "${temp_files[$(($(temp_length)-1))]}"
+}
+trap 'temp_clean' EXIT
+
+temp_make
+versA=$(temp_name)
+refs=$(nix-store -q --references "$dirA")
+(( $? != 0 )) && exit 1
+echo "$refs" \
+ | grep -v env-manifest.nix \
+ | sort \
+ > "$versA"
+
+print_tag() {
+ local gen=$1
+ nix-env -p "$opt_profile" --list-generations \
+ | grep -E "^\s*${gen}" \
+ | sed -r 's:^\s*::' \
+ | sed -r 's:\s*$::'
+}
+
+if [ -n "$opt_query" ]; then
+ print_tag $diffFrom
+ cat "$versA" \
+ | sed -r 's:^[^-]+-(.*)$: \1:'
+
+ print_line=1
+fi
+
+if [ -n "$opt_log" ]; then
+ gens=$(for gen in $(list_gens); do
+ ((diffFrom < gen && gen < diffTo)) && echo $gen
+ done)
+ # Force the $diffTo generation to be included in this list, instead of using
+ # `gen <= diffTo` in the preceding loop, so we encounter an error upon the
+ # event of its nonexistence.
+ gens=$(echo "$gens"
+ echo $diffTo)
+else
+ gens=$diffTo
+fi
+
+temp_make
+add=$(temp_name)
+temp_make
+rem=$(temp_name)
+temp_make
+out=$(temp_name)
+
+for gen in $gens; do
+
+ [ -n "$print_line" ] && echo
+
+ temp_make
+ versB=$(temp_name)
+
+ dirB="${opt_profile}-${gen}-link"
+ refs=$(nix-store -q --references "$dirB")
+ (( $? != 0 )) && exit 1
+ echo "$refs" \
+ | grep -v env-manifest.nix \
+ | sort \
+ > "$versB"
+
+ in=$(comm -3 -1 "$versA" "$versB")
+ sed -r 's:^[^-]*-(.*)$:\1+:' <(echo "$in") \
+ | sort -f \
+ > "$add"
+
+ un=$(comm -3 -2 "$versA" "$versB")
+ sed -r 's:^[^-]*-(.*)$:\1-:' <(echo "$un") \
+ | sort -f \
+ > "$rem"
+
+ cat "$rem" "$add" \
+ | sort -f \
+ | sed -r 's:(.*)-$:- \1:' \
+ | sed -r 's:(.*)\+$:\+ \1:' \
+ | grep -v '^$' \
+ > "$out"
+
+ if [ -n "$opt_query" -o -n "$opt_log" ]; then
+
+ lines=$(wc -l "$out" | cut -d' ' -f1)
+ tag=$(print_tag "$gen")
+ (( $? != 0 )) && exit 1
+ if [ $lines -eq 0 ]; then
+ echo "$tag (no change)"
+ else
+ echo "$tag"
+ fi
+ cat "$out" \
+ | sed 's:^: :'
+
+ print_line=1
+
+ else
+ echo "diffing from generation $diffFrom to $diffTo"
+ cat "$out"
+ fi
+
+ versA=$versB
+
+done
+
+exit 0
diff --git a/nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix
new file mode 100644
index 00000000000..5c4cf0f6c55
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.nix
@@ -0,0 +1,25 @@
+{ stdenv, makeWrapper, perl, perlPackages }:
+
+stdenv.mkDerivation {
+ name = "nix-generate-from-cpan-3";
+
+ buildInputs = with perlPackages; [
+ makeWrapper perl GetoptLongDescriptive CPANPLUS Readonly LogLog4perl
+ ];
+
+ phases = [ "installPhase" ];
+
+ installPhase =
+ ''
+ mkdir -p $out/bin
+ cp ${./nix-generate-from-cpan.pl} $out/bin/nix-generate-from-cpan
+ patchShebangs $out/bin/nix-generate-from-cpan
+ wrapProgram $out/bin/nix-generate-from-cpan --set PERL5LIB $PERL5LIB
+ '';
+
+ meta = {
+ maintainers = with stdenv.lib.maintainers; [ eelco rycee ];
+ description = "Utility to generate a Nix expression for a Perl package from CPAN";
+ platforms = stdenv.lib.platforms.unix;
+ };
+}
diff --git a/nixpkgs/maintainers/scripts/nix-generate-from-cpan.pl b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.pl
new file mode 100755
index 00000000000..e04d3713e9a
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nix-generate-from-cpan.pl
@@ -0,0 +1,471 @@
+#!/usr/bin/env perl
+
+use utf8;
+use strict;
+use warnings;
+
+use CPAN::Meta();
+use CPANPLUS::Backend();
+use Getopt::Long::Descriptive qw( describe_options );
+use JSON::PP qw( encode_json );
+use Log::Log4perl qw(:easy);
+use Readonly();
+
+# Readonly hash that maps CPAN style license strings to information
+# necessary to generate a Nixpkgs style license attribute.
+Readonly::Hash my %LICENSE_MAP => (
+
+ # The Perl 5 License (Artistic 1 & GPL 1 or later).
+ perl_5 => {
+ licenses => [qw( artistic1 gpl1Plus )]
+ },
+
+ # GNU Affero General Public License, Version 3.
+ agpl_3 => {
+ licenses => [qw( agpl3Plus )],
+ amb => 1
+ },
+
+ # Apache Software License, Version 1.1.
+ apache_1_1 => {
+ licenses => ["Apache License 1.1"],
+ in_set => 0
+ },
+
+ # Apache License, Version 2.0.
+ apache_2_0 => {
+ licenses => [qw( asl20 )]
+ },
+
+ # Artistic License, (Version 1).
+ artistic_1 => {
+ licenses => [qw( artistic1 )]
+ },
+
+ # Artistic License, Version 2.0.
+ artistic_2 => {
+ licenses => [qw( artistic2 )]
+ },
+
+ # BSD License (three-clause).
+ bsd => {
+ licenses => [qw( bsd3 )],
+ amb => 1
+ },
+
+ # FreeBSD License (two-clause).
+ freebsd => {
+ licenses => [qw( bsd2 )]
+ },
+
+ # GNU Free Documentation License, Version 1.2.
+ gfdl_1_2 => {
+ licenses => [qw( fdl12 )]
+ },
+
+ # GNU Free Documentation License, Version 1.3.
+ gfdl_1_3 => {
+ licenses => [qw( fdl13 )]
+ },
+
+ # GNU General Public License, Version 1.
+ gpl_1 => {
+ licenses => [qw( gpl1Plus )],
+ amb => 1
+ },
+
+ # GNU General Public License, Version 2. Note, we will interpret
+ # "gpl" alone as GPL v2+.
+ gpl_2 => {
+ licenses => [qw( gpl2Plus )],
+ amb => 1
+ },
+
+ # GNU General Public License, Version 3.
+ gpl_3 => {
+ licenses => [qw( gpl3Plus )],
+ amb => 1
+ },
+
+ # GNU Lesser General Public License, Version 2.1. Note, we will
+ # interpret "gpl" alone as LGPL v2.1+.
+ lgpl_2_1 => {
+ licenses => [qw( lgpl21Plus )],
+ amb => 1
+ },
+
+ # GNU Lesser General Public License, Version 3.0.
+ lgpl_3_0 => {
+ licenses => [qw( lgpl3Plus )],
+ amb => 1
+ },
+
+ # MIT (aka X11) License.
+ mit => {
+ licenses => [qw( mit )]
+ },
+
+ # Mozilla Public License, Version 1.0.
+ mozilla_1_0 => {
+ licenses => [qw( mpl10 )]
+ },
+
+ # Mozilla Public License, Version 1.1.
+ mozilla_1_1 => {
+ licenses => [qw( mpl11 )]
+ },
+
+ # OpenSSL License.
+ openssl => {
+ licenses => [qw( openssl )]
+ },
+
+ # Q Public License, Version 1.0.
+ qpl_1_0 => {
+ licenses => [qw( qpl )]
+ },
+
+ # Original SSLeay License.
+ ssleay => {
+ licenses => ["Original SSLeay License"],
+ in_set => 0
+ },
+
+ # Sun Internet Standards Source License (SISSL).
+ sun => {
+ licenses => ["Sun Industry Standards Source License v1.1"],
+ in_set => 0
+ },
+
+ # zlib License.
+ zlib => {
+ licenses => [qw( zlib )]
+ },
+
+ # Other Open Source Initiative (OSI) approved license.
+ open_source => {
+ licenses => [qw( free )],
+ amb => 1
+ },
+
+ # Requires special permission from copyright holder.
+ restricted => {
+ licenses => [qw( unfree )],
+ amb => 1
+ },
+
+ # Not an OSI approved license, but not restricted. Note, we
+ # currently map this to unfreeRedistributable, which is a
+ # conservative choice.
+ unrestricted => {
+ licenses => [qw( unfreeRedistributable )],
+ amb => 1
+ },
+
+ # License not provided in metadata.
+ unknown => {
+ licenses => [qw( unknown )],
+ amb => 1
+ }
+);
+
+sub handle_opts {
+ my ( $opt, $usage ) = describe_options(
+ 'usage: $0 %o MODULE',
+ [ 'maintainer|m=s', 'the package maintainer' ],
+ [ 'debug|d', 'enable debug output' ],
+ [ 'help', 'print usage message and exit' ]
+ );
+
+ if ( $opt->help ) {
+ print $usage->text;
+ exit;
+ }
+
+ my $module_name = $ARGV[0];
+
+ if ( !defined $module_name ) {
+ print STDERR "Missing module name\n";
+ print STDERR $usage->text;
+ exit 1;
+ }
+
+ return ( $opt, $module_name );
+}
+
+# Takes a Perl package attribute name and returns 1 if the name cannot
+# be referred to as a bareword. This typically happens if the package
+# name is a reserved Nix keyword.
+sub is_reserved {
+ my ($pkg) = @_;
+
+ return $pkg =~ /^(?: assert |
+ else |
+ if |
+ import |
+ in |
+ inherit |
+ let |
+ rec |
+ then |
+ while |
+ with )$/x;
+}
+
+sub pkg_to_attr {
+ my ($module) = @_;
+ my $attr_name = $module->package_name;
+ if ( $attr_name eq "libwww-perl" ) {
+ return "LWP";
+ }
+ else {
+ $attr_name =~ s/-//g;
+ return $attr_name;
+ }
+}
+
+sub get_pkg_name {
+ my ($module) = @_;
+ return ( $module->package_name, $module->package_version =~ s/^v(\d)/$1/r );
+}
+
+sub read_meta {
+ my ($pkg_path) = @_;
+
+ my $yaml_path = "$pkg_path/META.yml";
+ my $json_path = "$pkg_path/META.json";
+ my $meta;
+
+ if ( -r $json_path ) {
+ $meta = CPAN::Meta->load_file($json_path);
+ }
+ elsif ( -r $yaml_path ) {
+ $meta = CPAN::Meta->load_file($yaml_path);
+ }
+ else {
+ WARN("package has no META.yml or META.json");
+ }
+
+ return $meta;
+}
+
+# Map a module to the attribute corresponding to its package
+# (e.g. HTML::HeadParser will be mapped to HTMLParser, because that
+# module is in the HTML-Parser package).
+sub module_to_pkg {
+ my ( $cb, $module_name ) = @_;
+ my @modules = $cb->search( type => "name", allow => [$module_name] );
+ if ( scalar @modules == 0 ) {
+
+ # Fallback.
+ $module_name =~ s/:://g;
+ return $module_name;
+ }
+ my $module = $modules[0];
+ my $attr_name = pkg_to_attr($module);
+ DEBUG("mapped dep $module_name to $attr_name");
+ return $attr_name;
+}
+
+sub get_deps {
+ my ( $cb, $meta, $type ) = @_;
+
+ return if !defined $meta;
+
+ my $prereqs = $meta->effective_prereqs;
+ my $deps = $prereqs->requirements_for( $type, "requires" );
+ my @res;
+ foreach my $n ( $deps->required_modules ) {
+ next if $n eq "perl";
+
+ # Figure out whether the module is a core module by attempting
+ # to `use` the module in a pure Perl interpreter and checking
+ # whether it succeeded. Note, $^X is a magic variable holding
+ # the path to the running Perl interpreter.
+ if ( system("env -i $^X -M$n -e1 >/dev/null 2>&1") == 0 ) {
+ DEBUG("skipping Perl-builtin module $n");
+ next;
+ }
+
+ my $pkg = module_to_pkg( $cb, $n );
+
+ # If the package name is reserved then we need to refer to it
+ # through the "self" variable.
+ $pkg = "self.\"$pkg\"" if is_reserved($pkg);
+
+ push @res, $pkg;
+ }
+ return @res;
+}
+
+sub uniq {
+ return keys %{ { map { $_ => 1 } @_ } };
+}
+
+sub render_license {
+ my ($cpan_license) = @_;
+
+ return if !defined $cpan_license;
+
+ my $licenses;
+
+ # If the license is ambiguous then we'll print an extra warning.
+ # For example, "gpl_2" is ambiguous since it may refer to exactly
+ # "GPL v2" or to "GPL v2 or later".
+ my $amb = 0;
+
+ # Whether the license is available inside `stdenv.lib.licenses`.
+ my $in_set = 1;
+
+ my $nix_license = $LICENSE_MAP{$cpan_license};
+ if ( !$nix_license ) {
+ WARN("Unknown license: $cpan_license");
+ $licenses = [$cpan_license];
+ $in_set = 0;
+ }
+ else {
+ $licenses = $nix_license->{licenses};
+ $amb = $nix_license->{amb};
+ $in_set = !$nix_license->{in_set};
+ }
+
+ my $license_line;
+
+ if ( @$licenses == 0 ) {
+
+ # Avoid defining the license line.
+ }
+ elsif ($in_set) {
+ my $lic = 'stdenv.lib.licenses';
+ if ( @$licenses == 1 ) {
+ $license_line = "$lic.$licenses->[0]";
+ }
+ else {
+ $license_line = "with $lic; [ " . join( ' ', @$licenses ) . " ]";
+ }
+ }
+ else {
+ if ( @$licenses == 1 ) {
+ $license_line = $licenses->[0];
+ }
+ else {
+ $license_line = '[ ' . join( ' ', @$licenses ) . ' ]';
+ }
+ }
+
+ INFO("license: $cpan_license");
+ WARN("License '$cpan_license' is ambiguous, please verify") if $amb;
+
+ return $license_line;
+}
+
+my ( $opt, $module_name ) = handle_opts();
+
+Log::Log4perl->easy_init(
+ {
+ level => $opt->debug ? $DEBUG : $INFO,
+ layout => '%m%n'
+ }
+);
+
+my $cb = CPANPLUS::Backend->new;
+
+my @modules = $cb->search( type => "name", allow => [$module_name] );
+die "module $module_name not found\n" if scalar @modules == 0;
+die "multiple packages that match module $module_name\n" if scalar @modules > 1;
+my $module = $modules[0];
+
+my ($pkg_name, $pkg_version) = get_pkg_name $module;
+my $attr_name = pkg_to_attr $module;
+
+INFO( "attribute name: ", $attr_name );
+INFO( "module: ", $module->module );
+INFO( "version: ", $module->version );
+INFO( "package: ", $module->package, " (", "$pkg_name-$pkg_version", ", ", $attr_name, ")" );
+INFO( "path: ", $module->path );
+
+my $tar_path = $module->fetch();
+INFO( "downloaded to: ", $tar_path );
+INFO( "sha-256: ", $module->status->checksum_value );
+
+my $pkg_path = $module->extract();
+INFO( "unpacked to: ", $pkg_path );
+
+my $meta = read_meta($pkg_path);
+
+DEBUG( "metadata: ", encode_json( $meta->as_struct ) ) if defined $meta;
+
+my @runtime_deps = sort( uniq( get_deps( $cb, $meta, "runtime" ) ) );
+INFO("runtime deps: @runtime_deps");
+
+my @build_deps = sort( uniq(
+ get_deps( $cb, $meta, "configure" ),
+ get_deps( $cb, $meta, "build" ),
+ get_deps( $cb, $meta, "test" )
+) );
+
+# Filter out runtime dependencies since those are already handled.
+my %in_runtime_deps = map { $_ => 1 } @runtime_deps;
+@build_deps = grep { not $in_runtime_deps{$_} } @build_deps;
+
+INFO("build deps: @build_deps");
+
+my $homepage = $meta ? $meta->resources->{homepage} : undef;
+INFO("homepage: $homepage") if defined $homepage;
+
+my $description = $meta ? $meta->abstract : undef;
+if ( defined $description ) {
+ $description = uc( substr( $description, 0, 1 ) )
+ . substr( $description, 1 ); # capitalise first letter
+ $description =~ s/\.$//; # remove period at the end
+ $description =~ s/\s*$//;
+ $description =~ s/^\s*//;
+ $description =~ s/\n+/ /; # Replace new lines by space.
+ INFO("description: $description");
+}
+
+#print(Data::Dumper::Dumper($meta->licenses) . "\n");
+my $license = $meta ? render_license( $meta->licenses ) : undef;
+
+INFO( "RSS feed: https://metacpan.org/feed/distribution/",
+ $module->package_name );
+
+my $build_fun = -e "$pkg_path/Build.PL"
+ && !-e "$pkg_path/Makefile.PL" ? "buildPerlModule" : "buildPerlPackage";
+
+print STDERR "===\n";
+
+print <<EOF;
+ ${\(is_reserved($attr_name) ? "\"$attr_name\"" : $attr_name)} = $build_fun {
+ pname = "$pkg_name";
+ version = "$pkg_version";
+ src = fetchurl {
+ url = "mirror://cpan/${\$module->path}/${\$module->package}";
+ sha256 = "${\$module->status->checksum_value}";
+ };
+EOF
+print <<EOF if scalar @build_deps > 0;
+ buildInputs = [ @build_deps ];
+EOF
+print <<EOF if scalar @runtime_deps > 0;
+ propagatedBuildInputs = [ @runtime_deps ];
+EOF
+print <<EOF;
+ meta = {
+EOF
+print <<EOF if defined $homepage;
+ homepage = $homepage;
+EOF
+print <<EOF if defined $description && $description ne "Unknown";
+ description = "$description";
+EOF
+print <<EOF if defined $license;
+ license = $license;
+EOF
+print <<EOF if $opt->maintainer;
+ maintainers = [ maintainers.${\$opt->maintainer} ];
+EOF
+print <<EOF;
+ };
+ };
+EOF
diff --git a/nixpkgs/maintainers/scripts/nixpkgs-lint.nix b/nixpkgs/maintainers/scripts/nixpkgs-lint.nix
new file mode 100644
index 00000000000..6d99c94bf33
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nixpkgs-lint.nix
@@ -0,0 +1,23 @@
+{ stdenv, makeWrapper, perl, perlPackages }:
+
+stdenv.mkDerivation {
+ name = "nixpkgs-lint-1";
+
+ buildInputs = [ makeWrapper perl perlPackages.XMLSimple ];
+
+ dontUnpack = true;
+ buildPhase = "true";
+
+ installPhase =
+ ''
+ mkdir -p $out/bin
+ cp ${./nixpkgs-lint.pl} $out/bin/nixpkgs-lint
+ wrapProgram $out/bin/nixpkgs-lint --set PERL5LIB $PERL5LIB
+ '';
+
+ meta = {
+ maintainers = [ stdenv.lib.maintainers.eelco ];
+ description = "A utility for Nixpkgs contributors to check Nixpkgs for common errors";
+ platforms = stdenv.lib.platforms.unix;
+ };
+}
diff --git a/nixpkgs/maintainers/scripts/nixpkgs-lint.pl b/nixpkgs/maintainers/scripts/nixpkgs-lint.pl
new file mode 100755
index 00000000000..638d1b2aaa1
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/nixpkgs-lint.pl
@@ -0,0 +1,173 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i perl -p perl perlPackages.XMLSimple
+
+use strict;
+use List::Util qw(min);
+use XML::Simple qw(:strict);
+use Getopt::Long qw(:config gnu_getopt);
+
+# Parse the command line.
+my $path = "<nixpkgs>";
+my $filter = "*";
+my $maintainer;
+
+sub showHelp {
+ print <<EOF;
+Usage: $0 [--package=NAME] [--maintainer=REGEXP] [--file=PATH]
+
+Check Nixpkgs for common errors/problems.
+
+ -p, --package filter packages by name (default is ‘*’)
+ -m, --maintainer filter packages by maintainer (case-insensitive regexp)
+ -f, --file path to Nixpkgs (default is ‘<nixpkgs>’)
+
+Examples:
+ \$ nixpkgs-lint -f /my/nixpkgs -p firefox
+ \$ nixpkgs-lint -f /my/nixpkgs -m eelco
+EOF
+ exit 0;
+}
+
+GetOptions("package|p=s" => \$filter,
+ "maintainer|m=s" => \$maintainer,
+ "file|f=s" => \$path,
+ "help" => sub { showHelp() }
+ ) or exit 1;
+
+# Evaluate Nixpkgs into an XML representation.
+my $xml = `nix-env -f '$path' -qa '$filter' --xml --meta --drv-path`;
+die "$0: evaluation of ‘$path’ failed\n" if $? != 0;
+
+my $info = XMLin($xml, KeyAttr => { 'item' => '+attrPath', 'meta' => 'name' }, ForceArray => 1, SuppressEmpty => '' ) or die "cannot parse XML output";
+
+# Check meta information.
+print "=== Package meta information ===\n\n";
+my $nrBadNames = 0;
+my $nrMissingMaintainers = 0;
+my $nrMissingPlatforms = 0;
+my $nrMissingDescriptions = 0;
+my $nrBadDescriptions = 0;
+my $nrMissingLicenses = 0;
+
+foreach my $attr (sort keys %{$info->{item}}) {
+ my $pkg = $info->{item}->{$attr};
+
+ my $pkgName = $pkg->{name};
+ my $pkgVersion = "";
+ if ($pkgName =~ /(.*)(-[0-9].*)$/) {
+ $pkgName = $1;
+ $pkgVersion = $2;
+ }
+
+ # Check the maintainers.
+ my @maintainers;
+ my $x = $pkg->{meta}->{maintainers};
+ if (defined $x && $x->{type} eq "strings") {
+ @maintainers = map { $_->{value} } @{$x->{string}};
+ } elsif (defined $x->{value}) {
+ @maintainers = ($x->{value});
+ }
+
+ if (defined $maintainer && scalar(grep { $_ =~ /$maintainer/i } @maintainers) == 0) {
+ delete $info->{item}->{$attr};
+ next;
+ }
+
+ if (scalar @maintainers == 0) {
+ print "$attr: Lacks a maintainer\n";
+ $nrMissingMaintainers++;
+ }
+
+ # Check the platforms.
+ if (!defined $pkg->{meta}->{platforms}) {
+ print "$attr: Lacks a platform\n";
+ $nrMissingPlatforms++;
+ }
+
+ # Package names should not be capitalised.
+ if ($pkgName =~ /^[A-Z]/) {
+ print "$attr: package name ‘$pkgName’ should not be capitalised\n";
+ $nrBadNames++;
+ }
+
+ if ($pkgVersion eq "") {
+ print "$attr: package has no version\n";
+ $nrBadNames++;
+ }
+
+ # Check the license.
+ if (!defined $pkg->{meta}->{license}) {
+ print "$attr: Lacks a license\n";
+ $nrMissingLicenses++;
+ }
+
+ # Check the description.
+ my $description = $pkg->{meta}->{description}->{value};
+ if (!$description) {
+ print "$attr: Lacks a description\n";
+ $nrMissingDescriptions++;
+ } else {
+ my $bad = 0;
+ if ($description =~ /^\s/) {
+ print "$attr: Description starts with whitespace\n";
+ $bad = 1;
+ }
+ if ($description =~ /\s$/) {
+ print "$attr: Description ends with whitespace\n";
+ $bad = 1;
+ }
+ if ($description =~ /\.$/) {
+ print "$attr: Description ends with a period\n";
+ $bad = 1;
+ }
+ if (index(lc($description), lc($attr)) != -1) {
+ print "$attr: Description contains package name\n";
+ $bad = 1;
+ }
+ $nrBadDescriptions++ if $bad;
+ }
+}
+
+print "\n";
+
+# Find packages that have the same name.
+print "=== Package name collisions ===\n\n";
+
+my %pkgsByName;
+
+foreach my $attr (sort keys %{$info->{item}}) {
+ my $pkg = $info->{item}->{$attr};
+ #print STDERR "attr = $attr, name = $pkg->{name}\n";
+ $pkgsByName{$pkg->{name}} //= [];
+ push @{$pkgsByName{$pkg->{name}}}, $pkg;
+}
+
+my $nrCollisions = 0;
+foreach my $name (sort keys %pkgsByName) {
+ my @pkgs = @{$pkgsByName{$name}};
+
+ # Filter attributes that are aliases of each other (e.g. yield the
+ # same derivation path).
+ my %drvsSeen;
+ @pkgs = grep { my $x = $drvsSeen{$_->{drvPath}}; $drvsSeen{$_->{drvPath}} = 1; !defined $x } @pkgs;
+
+ # Filter packages that have a lower priority.
+ my $highest = min (map { $_->{meta}->{priority}->{value} // 0 } @pkgs);
+ @pkgs = grep { ($_->{meta}->{priority}->{value} // 0) == $highest } @pkgs;
+
+ next if scalar @pkgs == 1;
+
+ $nrCollisions++;
+ print "The following attributes evaluate to a package named ‘$name’:\n";
+ print " ", join(", ", map { $_->{attrPath} } @pkgs), "\n\n";
+}
+
+print "=== Bottom line ===\n";
+print "Number of packages: ", scalar(keys %{$info->{item}}), "\n";
+print "Number of bad names: $nrBadNames\n";
+print "Number of missing maintainers: $nrMissingMaintainers\n";
+print "Number of missing platforms: $nrMissingPlatforms\n";
+print "Number of missing licenses: $nrMissingLicenses\n";
+print "Number of missing descriptions: $nrMissingDescriptions\n";
+print "Number of bad descriptions: $nrBadDescriptions\n";
+print "Number of name collisions: $nrCollisions\n";
diff --git a/nixpkgs/maintainers/scripts/patchelf-hints.sh b/nixpkgs/maintainers/scripts/patchelf-hints.sh
new file mode 100755
index 00000000000..5fdfc15dc23
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/patchelf-hints.sh
@@ -0,0 +1,84 @@
+
+usage() {
+ echo "
+$0 <path to unpacked binary distribution directory>
+
+This program return the list of libraries and where to find them based on
+your currently installed programs.
+";
+ exit 1
+}
+
+if test $# -ne 1; then
+ usage
+fi
+
+binaryDist=$1
+
+hasBinaries=false
+for bin in $(find $binaryDist -executable -type f) :; do
+ if test $bin = ":"; then
+ $hasBinaries || \
+ echo "No patchable found in this directory."
+ break
+ fi
+ hasBinaries=true
+
+ echo ""
+ echo "$bin:"
+ hasLibraries=false
+ unset interpreter
+ unset addRPath
+ for lib in $(strings $bin | grep '^\(/\|\)lib.*\.so' | sort | uniq) :; do
+ if test $lib = ":"; then
+ $hasLibraries || \
+ echo " This program is a script or it is statically linked."
+ break
+ fi
+ hasLibraries=true
+
+ echo " $lib:";
+
+ libPath=$lib
+ lib=$(basename $lib)
+
+ #versionLessLib=$(echo $lib | sed 's,[.][.0-9]*$,,')
+
+ libs="$(
+ find /nix/store/*/lib* \( -type f -or -type l \) -name $lib |
+ grep -v '\(bootstrap-tools\|system-path\|user-environment\|extra-utils\)'
+ )"
+
+ echo "$libs" |
+ sed 's,^/nix/store/[a-z0-9]*-\([^/]*\)/.*/\([^/]*\)$, \1 -> \2,' |
+ sort |
+ uniq;
+
+ names=$(
+ echo "$libs" |
+ sed 's,^/nix/store/[a-z0-9]*-\([^/]*\)-[.0-9]*/.*$,\1,' |
+ sort |
+ uniq;
+ )
+
+ if test "$names" = "glibc"; then names="stdenv.glibc"; fi
+ if echo $names | grep -c "gcc" &> /dev/null; then names="stdenv.cc.cc"; fi
+
+ if test $lib != $libPath; then
+ interpreter="--interpreter \${$names}/lib/$lib"
+ elif echo $addRPath | grep -c "$names" &> /dev/null; then
+ :
+ else
+ addRPath=${addRPath+$addRPath:}"\${$names}/lib"
+ fi
+ done;
+ $hasLibraries && \
+ echo "
+ Patchelf command:
+
+ patchelf $interpreter \\
+ ${addRPath+--set-rpath $addRPath \\
+} \$out/$bin
+
+"
+done;
diff --git a/nixpkgs/maintainers/scripts/rebuild-amount.sh b/nixpkgs/maintainers/scripts/rebuild-amount.sh
new file mode 100755
index 00000000000..1a54cada8af
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/rebuild-amount.sh
@@ -0,0 +1,127 @@
+#!/usr/bin/env bash
+set -e
+
+# --print: avoid dependency on environment
+optPrint=
+if [ "$1" == "--print" ]; then
+ optPrint=true
+ shift
+fi
+
+if [ "$#" != 1 ] && [ "$#" != 2 ]; then
+ cat <<-EOF
+ Usage: $0 [--print] commit-spec [commit-spec]
+ You need to be in a git-controlled nixpkgs tree.
+ The current state of the tree will be used if the second commit is missing.
+ EOF
+ exit 1
+fi
+
+# A slightly hacky way to get the config.
+parallel="$(echo 'config.rebuild-amount.parallel or false' | nix-repl . 2>/dev/null \
+ | grep -v '^\(nix-repl.*\)\?$' | tail -n 1 || true)"
+
+echo "Estimating rebuild amount by counting changed Hydra jobs."
+
+toRemove=()
+
+cleanup() {
+ rm -rf "${toRemove[@]}"
+}
+trap cleanup EXIT SIGINT SIGQUIT ERR
+
+MKTEMP='mktemp --tmpdir nix-rebuild-amount-XXXXXXXX'
+
+nixexpr() {
+ cat <<-EONIX
+ let
+ lib = import $1/lib;
+ hydraJobs = import $1/pkgs/top-level/release.nix
+ # Compromise: accuracy vs. resources needed for evaluation.
+ { supportedSystems = cfg.systems or [ "x86_64-linux" "x86_64-darwin" ]; };
+ cfg = (import $1 {}).config.rebuild-amount or {};
+
+ recurseIntoAttrs = attrs: attrs // { recurseForDerivations = true; };
+
+ # hydraJobs leaves recurseForDerivations as empty attrmaps;
+ # that would break nix-env and we also need to recurse everywhere.
+ tweak = lib.mapAttrs
+ (name: val:
+ if name == "recurseForDerivations" then true
+ else if lib.isAttrs val && val.type or null != "derivation"
+ then recurseIntoAttrs (tweak val)
+ else val
+ );
+
+ # Some of these contain explicit references to platform(s) we want to avoid;
+ # some even (transitively) depend on ~/.nixpkgs/config.nix (!)
+ blacklist = [
+ "tarball" "metrics" "manual"
+ "darwin-tested" "unstable" "stdenvBootstrapTools"
+ "moduleSystem" "lib-tests" # these just confuse the output
+ ];
+
+ in
+ tweak (builtins.removeAttrs hydraJobs blacklist)
+ EONIX
+}
+
+# Output packages in tree $2 that weren't in $1.
+# Changing the output hash or name is taken as a change.
+# Extra nix-env parameters can be in $3
+newPkgs() {
+ # We use files instead of pipes, as running multiple nix-env processes
+ # could eat too much memory for a standard 4GiB machine.
+ local -a list
+ for i in 1 2; do
+ local l="$($MKTEMP)"
+ list[$i]="$l"
+ toRemove+=("$l")
+
+ local expr="$($MKTEMP)"
+ toRemove+=("$expr")
+ nixexpr "${!i}" > "$expr"
+
+ nix-env -f "$expr" -qaP --no-name --out-path --show-trace $3 \
+ | sort > "${list[$i]}" &
+
+ if [ "$parallel" != "true" ]; then
+ wait
+ fi
+ done
+
+ wait
+ comm -13 "${list[@]}"
+}
+
+# Prepare nixpkgs trees.
+declare -a tree
+for i in 1 2; do
+ if [ -n "${!i}" ]; then # use the given commit
+ dir="$($MKTEMP -d)"
+ tree[$i]="$dir"
+ toRemove+=("$dir")
+
+ git clone --shared --no-checkout --quiet . "${tree[$i]}"
+ (cd "${tree[$i]}" && git checkout --quiet "${!i}")
+ else #use the current tree
+ tree[$i]="$(pwd)"
+ fi
+done
+
+newlist="$($MKTEMP)"
+toRemove+=("$newlist")
+# Notes:
+# - the evaluation is done on x86_64-linux, like on Hydra.
+# - using $newlist file so that newPkgs() isn't in a sub-shell (because of toRemove)
+newPkgs "${tree[1]}" "${tree[2]}" '--argstr system "x86_64-linux"' > "$newlist"
+
+# Hacky: keep only the last word of each attribute path and sort.
+sed -n 's/\([^. ]*\.\)*\([^. ]*\) .*$/\2/p' < "$newlist" \
+ | sort | uniq -c
+
+if [ -n "$optPrint" ]; then
+ echo
+ cat "$newlist"
+fi
+
diff --git a/nixpkgs/maintainers/scripts/update-channel-branches.sh b/nixpkgs/maintainers/scripts/update-channel-branches.sh
new file mode 100755
index 00000000000..d65cf3ec5f6
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-channel-branches.sh
@@ -0,0 +1,112 @@
+#!/bin/sh
+set -e
+
+: ${NIXOS_CHANNELS:=https://nixos.org/channels/}
+: ${CHANNELS_NAMESPACE:=refs/heads/channels/}
+
+# List all channels which are currently in the repository which we would
+# have to remove if they are not found again.
+deadChannels=$(git for-each-ref --format="%(refname)" "$CHANNELS_NAMESPACE")
+
+updateRef() {
+ local channelName=$1
+ local newRev=$2
+
+ # if the inputs are not valid, then we do not update any branch.
+ test -z "$newRev" -o -z "$channelName" && return;
+
+ # Update the local refs/heads/channels/* branches to be in-sync with the
+ # channel references.
+ local branch=$CHANNELS_NAMESPACE$channelName
+ oldRev=$(git rev-parse --short "$branch" 2>/dev/null || true)
+ if test "$oldRev" != "$newRev"; then
+ if git update-ref "$branch" "$newRev" 2>/dev/null; then
+ if test -z "$oldRev"; then
+ echo " * [new branch] $newRev -> ${branch#refs/heads/}"
+ else
+ echo " $oldRev..$newRev -> ${branch#refs/heads/}"
+ fi
+ else
+ if test -z "$oldRev"; then
+ echo " * [missing rev] $newRev -> ${branch#refs/heads/}"
+ else
+ echo " [missing rev] $oldRev..$newRev -> ${branch#refs/heads/}"
+ fi
+ fi
+ fi
+
+ # Filter out the current channel from the list of dead channels.
+ deadChannels=$(grep -v "$CHANNELS_NAMESPACE$channelName" <<EOF
+$deadChannels
+EOF
+) ||true
+}
+
+# Find the name of all channels which are listed in the directory.
+echo "Fetching channels from $NIXOS_CHANNELS:"
+for channelName in : $(curl -s "$NIXOS_CHANNELS" | sed -n '/folder/ { s,.*href=",,; s,/".*,,; p }'); do
+ test "$channelName" = : && continue;
+
+ # Do not follow redirections, such that we can extract the
+ # short-changeset from the name of the directory where we are
+ # redirected to.
+ sha1=$(curl -sI "$NIXOS_CHANNELS$channelName" | sed -n '/Location/ { s,.*\.\([a-f0-9]*\)[ \r]*$,\1,; p; }')
+
+ updateRef "remotes/$channelName" "$sha1"
+done
+
+echo "Fetching channels from nixos-version:"
+if currentSystem=$(nixos-version 2>/dev/null); then
+ # If the system is entirely build from a custom nixpkgs version,
+ # then the version is not annotated in git version. This sed
+ # expression is basically matching that the expressions end with
+ # ".<sha1> (Name)" to extract the sha1.
+ sha1=$(echo "$currentSystem" | sed -n 's,^.*\.\([a-f0-9]*\) *(.*)$,\1,; T skip; p; :skip;')
+
+ updateRef current-system "$sha1"
+fi
+
+echo "Fetching channels from $HOME/.nix-defexpr:"
+for revFile in : $(find -L "$HOME/.nix-defexpr/" -maxdepth 4 -name svn-revision); do
+ test "$revFile" = : && continue;
+
+ # Deconstruct a path such as, into:
+ #
+ # /home/luke/.nix-defexpr/channels_root/nixos/nixpkgs/svn-revision
+ # channelName = root/nixos
+ #
+ # /home/luke/.nix-defexpr/channels/nixpkgs/svn-revision
+ # channelName = nixpkgs
+ #
+ user=${revFile#*.nix-defexpr/channels}
+ repo=${user#*/}
+ repo=${repo%%/*}
+ user=${user%%/*}
+ user=${user#_}
+ test -z "$user" && user=$USER
+ channelName="$user${user:+/}$repo"
+
+ sha1=$(sed -n 's,^.*\.\([a-f0-9]*\)$,\1,; T skip; p; :skip;' "$revFile")
+
+ updateRef "$channelName" "$sha1"
+done
+
+# Suggest to remove channel branches which are no longer found by this
+# script. This is to handle the cases where a local/remote channel
+# disappear. We should not attempt to remove manually any branches, as they
+# might be user branches.
+if test -n "$deadChannels"; then
+
+ echo "
+Some old channel branches are still in your repository, if you
+want to remove them, run the following command(s):
+"
+
+ while read branch; do
+ echo " git update-ref -d $branch"
+ done <<EOF
+$deadChannels
+EOF
+
+ echo
+fi
diff --git a/nixpkgs/maintainers/scripts/update-discord b/nixpkgs/maintainers/scripts/update-discord
new file mode 100755
index 00000000000..23ec6e401be
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-discord
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+# script to generate `pkgs/networking/instant-messengers/discord/default.nix`
+
+set -e
+exec >${1:?usage: $0 <output-file>}
+
+cat <<EOF
+{ branch ? "stable", pkgs }:
+
+let
+ inherit (pkgs) callPackage fetchurl;
+in {
+EOF
+
+for branch in "" ptb canary; do
+ url=$(curl -sI "https://discordapp.com/api/download${branch:+/}${branch}?platform=linux&format=tar.gz" | grep -oP 'location: \K\S+')
+ version=${url##https://dl*.discordapp.net/apps/linux/}
+ version=${version%%/*.tar.gz}
+ echo " ${branch:-stable} = callPackage ./base.nix {"
+ echo " pname = \"discord${branch:+-}${branch}\";"
+ case $branch in
+ "") suffix="" ;;
+ ptb) suffix="PTB" ;;
+ canary) suffix="Canary" ;;
+ esac
+ echo " binaryName = \"Discord${suffix}\";"
+ echo " desktopName = \"Discord${suffix:+ }${suffix}\";"
+ echo " version = \"${version}\";"
+ echo " src = fetchurl {"
+ echo " url = \"${url}\";"
+ echo " sha256 = \"$(nix-prefetch-url "$url")\";"
+ echo " };"
+ echo " };"
+done
+
+echo "}.\${branch}"
diff --git a/nixpkgs/maintainers/scripts/update-luarocks-packages b/nixpkgs/maintainers/scripts/update-luarocks-packages
new file mode 100755
index 00000000000..1a31d71086f
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-luarocks-packages
@@ -0,0 +1,136 @@
+#!/usr/bin/env nix-shell
+#!nix-shell update-luarocks-shell.nix -i bash
+
+# You'll likely want to use
+# ``
+# nixpkgs $ maintainers/scripts/update-luarocks-packages pkgs/development/lua-modules/generated-packages.nix
+# ``
+# to update all libraries in that folder.
+# to debug, redirect stderr to stdout with 2>&1
+
+# stop the script upon C-C
+set -eu -o pipefail
+
+CSV_FILE="maintainers/scripts/luarocks-packages.csv"
+TMP_FILE="$(mktemp)"
+# Set in the update-luarocks-shell.nix
+NIXPKGS_PATH="$LUAROCKS_NIXPKGS_PATH"
+export LUAROCKS_CONFIG="$NIXPKGS_PATH/maintainers/scripts/luarocks-config.lua"
+
+# 10 is a pretty arbitrary number of simultaneous jobs, but it is generally
+# impolite to hit a webserver with *too* many simultaneous connections :)
+PARALLEL_JOBS=10
+
+exit_trap() {
+ local lc="$BASH_COMMAND" rc=$?
+ test $rc -eq 0 || echo -e "*** error $rc: $lc.\nGenerated temporary file in $TMP_FILE" >&2
+}
+
+print_help() {
+ echo "Usage: $0 <GENERATED_FILE>"
+ echo "(most likely pkgs/development/lua-modules/generated-packages.nix)"
+ echo ""
+ echo " -c <CSV_FILE> to set the list of luarocks package to generate"
+ exit 1
+}
+
+if [ $# -lt 1 ]; then
+ print_help
+ exit 1
+fi
+
+trap exit_trap EXIT
+
+while getopts ":hc:" opt; do
+ case $opt in
+ h)
+ print_help
+ ;;
+ c)
+ echo "Loading package list from $OPTARG !" >&2
+ CSV_FILE="$OPTARG"
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ ;;
+ esac
+ shift $((OPTIND - 1))
+done
+
+GENERATED_NIXFILE="$1"
+
+HEADER="
+/* ${GENERATED_NIXFILE} is an auto-generated file -- DO NOT EDIT!
+Regenerate it with:
+nixpkgs$ ${0} ${GENERATED_NIXFILE}
+
+These packages are manually refined in lua-overrides.nix
+*/
+{ self, stdenv, fetchurl, fetchgit, pkgs, ... } @ args:
+self: super:
+with self;
+{
+"
+
+FOOTER="
+}
+/* GENERATED */
+"
+
+function convert_pkg() {
+ nix_pkg_name="$1"
+ lua_pkg_name="$2"
+ server="$3"
+ pkg_version="$4"
+ lua_version="$5"
+ maintainers="$6"
+
+ if [ "${nix_pkg_name:0:1}" == "#" ]; then
+ echo "Skipping comment ${*}" >&2
+ return
+ fi
+ if [ -z "$lua_pkg_name" ]; then
+ echo "Using nix_name as lua_pkg_name for '$nix_pkg_name'" >&2
+ lua_pkg_name="$nix_pkg_name"
+ fi
+
+ echo "Building expression for $lua_pkg_name (version $pkg_version) from server [$server]" >&2
+ luarocks_args=(nix)
+ if [[ -n $server ]]; then
+ luarocks_args+=("--only-server=$server")
+ fi
+ if [[ -n $maintainers ]]; then
+ luarocks_args+=("--maintainers=$maintainers")
+ fi
+ if [[ -n $lua_version ]]; then
+ lua_drv_path=$(nix-build --no-out-link "$NIXPKGS_PATH" -A "$lua_version")
+ luarocks_args+=("--lua-dir=$lua_drv_path/bin")
+ fi
+ luarocks_args+=("$lua_pkg_name")
+ if [[ -n $pkg_version ]]; then
+ luarocks_args+=("$pkg_version")
+ fi
+ echo "Running 'luarocks ${luarocks_args[*]}'" >&2
+ if drv="$nix_pkg_name = $(luarocks "${luarocks_args[@]}")"; then
+ echo "$drv"
+ else
+ echo "Failed to convert $nix_pkg_name" >&2
+ return 1
+ fi
+}
+
+# params needed when called via callPackage
+echo "$HEADER" | tee "$TMP_FILE"
+
+# Ensure parallel can run our bash function
+export -f convert_pkg
+export SHELL=bash
+# Read each line in the csv file and run convert_pkg for each, in parallel
+parallel --group --keep-order --halt now,fail=1 --jobs "$PARALLEL_JOBS" --colsep ',' convert_pkg {} <"$CSV_FILE" | tee -a "$TMP_FILE"
+
+# close the set
+echo "$FOOTER" | tee -a "$TMP_FILE"
+
+cp "$TMP_FILE" "$GENERATED_NIXFILE"
+
+# vim: set ts=4 sw=4 ft=sh:
diff --git a/nixpkgs/maintainers/scripts/update-luarocks-shell.nix b/nixpkgs/maintainers/scripts/update-luarocks-shell.nix
new file mode 100644
index 00000000000..23a940b3691
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-luarocks-shell.nix
@@ -0,0 +1,9 @@
# Development shell providing the tools used by the luarocks package
# generation script (bash + GNU parallel + luarocks-nix).
{ nixpkgs ? import ../.. { }
}:
with nixpkgs;
mkShell {
  buildInputs = [
    bash luarocks-nix nix-prefetch-scripts parallel
  ];
  # Points the generator at this nixpkgs checkout.
  LUAROCKS_NIXPKGS_PATH = toString nixpkgs.path;
}
diff --git a/nixpkgs/maintainers/scripts/update-python-libraries b/nixpkgs/maintainers/scripts/update-python-libraries
new file mode 100755
index 00000000000..4a6024c4038
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-python-libraries
@@ -0,0 +1,5 @@
#!/bin/sh
# Run the update-python-libraries helper with a python interpreter that has
# its dependencies (packaging, requests, toolz) available, built from the
# nixpkgs-unstable channel.
#
# Fixes: $(...) instead of backticks; the -E expression is single-quoted so
# the channel URL can be double-quoted inside nix (the original relied on
# shell string concatenation producing a bare, deprecated URL literal);
# "$@" and "$python" are quoted so arguments and paths with spaces survive.
build=$(nix-build -E 'with import (fetchTarball "channel:nixpkgs-unstable") {}; python3.withPackages(ps: with ps; [ packaging requests toolz ])')
python="${build}/bin/python"
exec "${python}" pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py "$@"
diff --git a/nixpkgs/maintainers/scripts/update-ruby-packages b/nixpkgs/maintainers/scripts/update-ruby-packages
new file mode 100755
index 00000000000..fef6b75ded0
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update-ruby-packages
@@ -0,0 +1,13 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p bundler bundix
# Regenerate pkgs/top-level/ruby-packages.nix: resolve the with-packages
# Gemfile with bundler, convert the lockfile to nix with bundix, and move
# the result into place.  Run from the root of a nixpkgs checkout.

set -euf -o pipefail

(
  # Subshell so the cd does not leak into the caller's environment.
  cd pkgs/development/ruby-modules/with-packages
  rm -f gemset.nix Gemfile.lock
  bundle lock
  bundix
  mv gemset.nix ../../../top-level/ruby-packages.nix
  rm -f Gemfile.lock
)
diff --git a/nixpkgs/maintainers/scripts/update.nix b/nixpkgs/maintainers/scripts/update.nix
new file mode 100755
index 00000000000..04723cb8a36
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update.nix
@@ -0,0 +1,157 @@
# Run the passthru.updateScript of selected packages; see helpText below.
# Meant to be evaluated with nix-shell (building it only prints usage).
{ package ? null
, maintainer ? null
, path ? null
, max-workers ? null
, keep-going ? null
}:

# TODO: add assert statements

let
  /* Remove duplicate elements from the list based on some extracted value. O(n^2) complexity.
  */
  nubOn = f: list:
    if list == [] then
      []
    else
      let
        x = pkgs.lib.head list;
        xs = pkgs.lib.filter (p: f x != f p) (pkgs.lib.drop 1 list);
      in
      [x] ++ nubOn f xs;

  pkgs = import ./../../default.nix {
    # No overlays, so the evaluation matches a pristine checkout.
    overlays = [];
  };
+
  # Recursively collect from `set` every derivation satisfying `cond`,
  # mapped through `return`, deduplicated by updateScript.  Attribute sets
  # marked recurseForDerivations/recurseForRelease are descended into;
  # tryEval shields the walk from packages that fail to evaluate.
  packagesWith = cond: return: set:
    nubOn (pkg: pkg.updateScript)
      (pkgs.lib.flatten
        (pkgs.lib.mapAttrsToList
          (name: pkg:
            let
              result = builtins.tryEval (
                if pkgs.lib.isDerivation pkg && cond name pkg
                then [(return name pkg)]
                else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
                then packagesWith cond return pkg
                else []
              );
            in
              if result.success then result.value
              else []
          )
          set
        )
      );
+
+ packagesWithUpdateScriptAndMaintainer = maintainer':
+ let
+ maintainer =
+ if ! builtins.hasAttr maintainer' pkgs.lib.maintainers then
+ builtins.throw "Maintainer with name `${maintainer'} does not exist in `maintainers/maintainer-list.nix`."
+ else
+ builtins.getAttr maintainer' pkgs.lib.maintainers;
+ in
+ packagesWith (name: pkg: builtins.hasAttr "updateScript" pkg &&
+ (if builtins.hasAttr "maintainers" pkg.meta
+ then (if builtins.isList pkg.meta.maintainers
+ then builtins.elem maintainer pkg.meta.maintainers
+ else maintainer == pkg.meta.maintainers
+ )
+ else false
+ )
+ )
+ (name: pkg: pkg)
+ pkgs;
+
+ packagesWithUpdateScript = path:
+ let
+ attrSet = pkgs.lib.attrByPath (pkgs.lib.splitString "." path) null pkgs;
+ in
+ if attrSet == null then
+ builtins.throw "Attribute path `${path}` does not exists."
+ else
+ packagesWith (name: pkg: builtins.hasAttr "updateScript" pkg)
+ (name: pkg: pkg)
+ attrSet;
+
+ packageByName = name:
+ let
+ package = pkgs.lib.attrByPath (pkgs.lib.splitString "." name) null pkgs;
+ in
+ if package == null then
+ builtins.throw "Package with an attribute name `${name}` does not exists."
+ else if ! builtins.hasAttr "updateScript" package then
+ builtins.throw "Package with an attribute name `${name}` does not have a `passthru.updateScript` attribute defined."
+ else
+ package;
+
  # Resolve the requested selection; exactly one of package/maintainer/path
  # is expected, checked in that priority order.
  packages =
    if package != null then
      [ (packageByName package) ]
    else if maintainer != null then
      packagesWithUpdateScriptAndMaintainer maintainer
    else if path != null then
      packagesWithUpdateScript path
    else
      builtins.throw "No arguments provided.\n\n${helpText}";

  helpText = ''
    Please run:

        % nix-shell maintainers/scripts/update.nix --argstr maintainer garbas

    to run all update scripts for all packages that lists \`garbas\` as a maintainer
    and have \`updateScript\` defined, or:

        % nix-shell maintainers/scripts/update.nix --argstr package garbas

    to run update script for specific package, or

        % nix-shell maintainers/scripts/update.nix --argstr path gnome3

    to run update script for all package under an attribute path.

    You can also add

        --argstr max-workers 8

    to increase the number of jobs in parallel, or

        --argstr keep-going true

    to continue running when a single update fails.
  '';

  # JSON-serializable description of one package, consumed by update.py.
  packageData = package: {
    name = package.name;
    pname = (builtins.parseDrvName package.name).name;
    # updateScript may be a single path or a [command args...] list.
    updateScript = map builtins.toString (pkgs.lib.toList package.updateScript);
  };

  packagesJson = pkgs.writeText "packages.json" (builtins.toJSON (map packageData packages));

  # NOTE(review): the interpolation below needs strings — pass these with
  # --argstr, not --arg (an integer max-workers would fail to coerce).
  optionalArgs =
    pkgs.lib.optional (max-workers != null) "--max-workers=${max-workers}"
    ++ pkgs.lib.optional (keep-going == "true") "--keep-going";

  args = [ packagesJson ] ++ optionalArgs;
+
in pkgs.stdenv.mkDerivation {
  name = "nixpkgs-update-script";
  # Reached via nix-build: refuse with usage text — updating packages must
  # happen through nix-shell (see shellHook below).
  buildCommand = ''
    echo ""
    echo "----------------------------------------------------------------"
    echo ""
    echo "Not possible to update packages using \`nix-build\`"
    echo ""
    echo "${helpText}"
    echo "----------------------------------------------------------------"
    exit 1
  '';
  # Reached via nix-shell: replace the shell with the python driver,
  # passing the generated packages.json plus any optional flags.
  shellHook = ''
    unset shellHook # do not contaminate nested shells
    exec ${pkgs.python3.interpreter} ${./update.py} ${builtins.concatStringsSep " " args}
  '';
}
diff --git a/nixpkgs/maintainers/scripts/update.py b/nixpkgs/maintainers/scripts/update.py
new file mode 100644
index 00000000000..eb7d0ef2647
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/update.py
@@ -0,0 +1,79 @@
+import argparse
+import concurrent.futures
+import json
+import os
+import subprocess
+import sys
+
# Maps submitted futures to their package dicts.  Module-level so the
# KeyboardInterrupt handler at the bottom of the file can cancel them.
updates = {}

def eprint(*args, **kwargs):
    # All progress/diagnostic output goes to stderr; stdout stays clean.
    print(*args, file=sys.stderr, **kwargs)
+
def run_update_script(package):
    """Run one package's update script to completion.

    stdout and stderr are captured together, so on failure the raised
    CalledProcessError carries the full combined log in its ``stdout``
    attribute.
    """
    eprint(f" - {package['name']}: UPDATING ...")

    subprocess.run(package['updateScript'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, check=True)
+
+
def main(max_workers, keep_going, packages):
    """Load the packages JSON file, confirm with the user, then run every
    update script through a process pool.

    Args:
        max_workers: how many update scripts run concurrently.
        keep_going: if True, report a failing update and continue;
            otherwise exit with status 1 on the first failure.
        packages: path to the JSON file listing packages and their update
            scripts (produced by update.nix).
    """
    # Fix: use the path argparse parsed for us.  The original re-read
    # sys.argv[1], which opens the wrong "file" whenever an option such as
    # --max-workers precedes the positional argument.
    with open(packages) as f:
        packages = json.load(f)

    eprint()
    eprint('Going to be running update for following packages:')
    for package in packages:
        eprint(f" - {package['name']}")
    eprint()

    confirm = input('Press Enter key to continue...')
    if confirm == '':
        eprint()
        eprint('Running update for:')

        # Leaving the `with` block waits for all submitted scripts, so by
        # the time results are examined below every future has finished.
        with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
            for package in packages:
                updates[executor.submit(run_update_script, package)] = package

        for future in concurrent.futures.as_completed(updates):
            package = updates[future]

            try:
                future.result()
                eprint(f" - {package['name']}: DONE.")
            except subprocess.CalledProcessError as e:
                eprint(f" - {package['name']}: ERROR")
                eprint()
                eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
                eprint()
                eprint(e.stdout.decode('utf-8'))
                # Keep the failure log on disk for later inspection.
                with open(f"{package['pname']}.log", 'wb') as f:
                    f.write(e.stdout)
                eprint()
                eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")

                if not keep_going:
                    sys.exit(1)

        eprint()
        eprint('Packages updated!')
        sys.exit()
    else:
        eprint('Aborting!')
        sys.exit(130)
+
parser = argparse.ArgumentParser(description='Update packages')
parser.add_argument('--max-workers', '-j', dest='max_workers', type=int, help='Number of updates to run concurrently', nargs='?', default=4)
parser.add_argument('--keep-going', '-k', dest='keep_going', action='store_true', help='Do not stop after first failure')
parser.add_argument('packages', help='JSON file containing the list of package names and their update scripts')

if __name__ == '__main__':
    args = parser.parse_args()

    try:
        main(args.max_workers, args.keep_going, args.packages)
    except (KeyboardInterrupt, SystemExit) as e:
        # Cancel anything still queued; futures already running are not
        # interrupted by cancel().
        for update in updates:
            update.cancel()

        # Preserve SystemExit's code; map Ctrl-C to the conventional 130.
        sys.exit(e.code if isinstance(e, SystemExit) else 130)
diff --git a/nixpkgs/maintainers/scripts/vanity-manual-equalities.txt b/nixpkgs/maintainers/scripts/vanity-manual-equalities.txt
new file mode 100644
index 00000000000..4a7bc3aea44
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/vanity-manual-equalities.txt
@@ -0,0 +1,7 @@
+viric viriketo@gmail.com
+Pjotr Prins pjotr.public01@thebird.nl
+Pjotr Prins pjotr.public05@thebird.nl
+Wouter den Breejen wbreejen
+MarcWeber marcweber
+Ricardo Correia Ricardo M. Correia
+ertesx@gmx.de ertes
diff --git a/nixpkgs/maintainers/scripts/vanity.sh b/nixpkgs/maintainers/scripts/vanity.sh
new file mode 100755
index 00000000000..aa7d4ec967d
--- /dev/null
+++ b/nixpkgs/maintainers/scripts/vanity.sh
@@ -0,0 +1,122 @@
+#! /bin/sh
+
+export LANG=C LC_ALL=C LC_COLLATE=C
+
+# Load git log
+raw_git_log="$(git log)"
+git_data="$(echo "$raw_git_log" | grep 'Author:' |
+ sed -e 's/^ *Author://; s/\\//g; s/^ *//; s/ *$//;
+ s/ @ .*//; s/ *[<]/\t/; s/[>]//')"
+
+# Name - nick - email correspondence from log and from maintainer list
+# Also there are a few manual entries
+maintainers="$(cat "$(dirname "$0")/../maintainer-list.nix" |
+ grep '=' | sed -re 's/\\"/''/g;
+ s/[ ]*([^ =]*)[ ]*=[ ]*" *(.*[^ ]) *[<](.*)[>] *".*/\1\t\2\t\3/')"
+git_lines="$( ( echo "$git_data";
+ cat "$(dirname "$0")/vanity-manual-equalities.txt") | sort |uniq)"
+
+emails="$(
+ ( echo "$maintainers" | cut -f 3; echo "$git_data" | cut -f 2 ) |
+ sort | uniq | grep -E ".+@.+[.].+"
+ )"
+
# Resolve an email address ($1) to a GitHub login by fetching the GitHub
# page of one of that author's commits and scraping the committer link.
# NOTE(review): screen-scrapes github.com HTML — brittle; verify the
# markers ('committed', href="/") still match the current page layout.
fetchGithubName () {
  commitid="$(
    echo "$raw_git_log" | grep -B3 "Author: .*[<]$1[>]" | head -n 3 |
    grep '^commit ' | tail -n 1 | sed -e 's/^commit //'
  )"
  userid="$(
    curl https://github.com/NixOS/nixpkgs/commit/"$commitid" 2>/dev/null |
    grep committed -B10 | grep 'href="/' |
    sed -re 's@.* href="/@@; s@".*@@' |
    grep -v "/commit/"
  )";
  echo "$userid"
}
+
# Fill the on-disk "email<space>github-login" cache with any address not
# seen before; skipped entirely when no cache file is configured.
# Fixes: read -r (backslashes in input were being interpreted) and
# grep -F -- (the email was used as a regex, so dots matched any
# character and could produce false cache hits).
[ -n "$NIXPKGS_GITHUB_NAME_CACHE" ] && {
  echo "$emails" | while read -r email; do
    line="$(grep -F -- "$email " "$NIXPKGS_GITHUB_NAME_CACHE")"
    [ -z "$line" ] && {
      echo "$email $(fetchGithubName "$email")" >> \
        "$NIXPKGS_GITHUB_NAME_CACHE"
    }
  done
}
+
# For RDF: percent-encode the characters that would break the <my://...>
# IRIs generated below.  '%' must be encoded first so later escapes are
# not double-encoded.
normalize_name () {
  sed -e 's/%/%25/g' \
      -e 's/ /%20/g' \
      -e "s/'/%27/g" \
      -e 's/"/%22/g' \
      -e 's/`/%60/g' \
      -e 's/\^/%5e/g'
}
+
# Inverse of normalize_name: decode the percent-escapes.  '%25' is decoded
# last so re-introduced '%' characters are not expanded a second time.
denormalize_name () {
  sed -e 's/%20/ /g' \
      -e "s/%27/'/g" \
      -e 's/%22/"/g' \
      -e 's/%60/`/g' \
      -e 's/%5e/^/g' \
      -e 's/%25/%/g'
}
+
n3="$(mktemp --suffix .n3)"

# «The same person» relation and a sorting hint
# Full name is something with a space
# Emits N3 triples: <can-be> links alternate spellings of one person (both
# directions, and around the name/nick/email triangle from the maintainer
# list); <is-name> tags full names (my://0) versus nicks (my://1) so full
# names sort first; <at-github> maps names to cached GitHub logins.
(
echo "$git_lines" | sed -re 's@(.*)\t(.*)@<my://name/\1> <my://can-be> <my://name/\2>.@'
echo "$git_lines" | sed -re 's@(.*)\t(.*)@<my://name/\2> <my://can-be> <my://name/\1>.@'
echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\1> <my://can-be> <my://name/\2>.@'
echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\2> <my://can-be> <my://name/\3>.@'
echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\3> <my://can-be> <my://name/\1>.@'
echo "$git_lines" | grep ' ' | cut -f 1 | sed -e 's@.*@<my://name/&> <my://is-name> <my://0>.@'
echo "$git_lines" | grep -v ' ' | cut -f 1 | sed -e 's@.*@<my://name/&> <my://is-name> <my://1>.@'
echo "$maintainers" | cut -f 2 | sed -e 's@.*@<my://name/&> <my://is-name> <my://0>.@'
[ -n "$NIXPKGS_GITHUB_NAME_CACHE" ] && cat "$NIXPKGS_GITHUB_NAME_CACHE" |
  grep -v " $" |
  sed -re 's@(.*)\t(.*)@<my://name/\1> <my://at-github> <my://github/\2>.@'
) | normalize_name | grep -E '<my://[-a-z]+>' | sort | uniq > "$n3"
+
+# Get transitive closure
+sparql="$(nix-build '<nixpkgs>' -Q -A apache-jena --no-out-link)/bin/sparql"
+name_list="$(
+ "$sparql" --results=TSV --data="$n3" "
+ select ?x ?y ?g where {
+ ?x <my://can-be>+ ?y.
+ ?x <my://is-name> ?g.
+ }
+ " | tail -n +2 |
+ sed -re 's@<my://name/@@g; s@<my://@@g; s@>@@g;' |
+ sort -k 2,3 -t ' '
+)"
+github_name_list="$(
+ "$sparql" --results=TSV --data="$n3" "
+ select ?x ?y where {
+ ?x (<my://can-be>+ / <my://at-github>) ?y.
+ }
+ " | tail -n +2 |
+ sed -re 's@<my://(name|github)/@@g; s@<my://@@g; s@>@@g;'
+)"
+
# Take first spelling option for every person
name_list_canonical="$(echo "$name_list" | cut -f 1,2 | uniq -f1)"

# A generated sed program rewriting every known spelling to its canonical
# form (one s### command per spelling).
cleaner_script="$(echo "$name_list_canonical" | denormalize_name |
  sed -re 's/(.*)\t(.*)/s#^\2$#\1#g/g')"

# Add github usernames
# Another generated sed program appending "<TAB>login" to canonical names;
# falls back to /dev/null (a no-op script) when there is no cache.
if [ -n "$NIXPKGS_GITHUB_NAME_CACHE" ]; then
  github_adder_script="$(mktemp)"
  echo "$github_name_list" |
    grep -E "$(echo "$name_list_canonical" | cut -f 2 |
    tr '\n' '|' )" |
    sort | uniq |
    sed -re 's/(.*)\t(.*)/s| \1$| \1\t\2|g;/' |
    denormalize_name > "$github_adder_script"
else
  github_adder_script='/dev/null'
fi

# Dump the raw equivalence table, then the ranked per-person commit counts.
echo "$name_list" | denormalize_name

echo

echo "$git_data" | cut -f 1 |
  sed -e "$cleaner_script" |
  sort | uniq -c | sort -k1n | sed -rf "$github_adder_script" |
  sed -re 's/^ *([0-9]+) /\1\t/'