Diffstat (limited to 'maintainers')
-rw-r--r--  maintainers/build-release-notes.py | 147
-rwxr-xr-x  maintainers/release-notes          |   2
-rwxr-xr-x  maintainers/upload-release.pl      | 256
-rwxr-xr-x  maintainers/upload_manual.sh       |  40
4 files changed, 160 insertions(+), 285 deletions(-)
diff --git a/maintainers/build-release-notes.py b/maintainers/build-release-notes.py
index 2a154a61e..45d5d6ff9 100644
--- a/maintainers/build-release-notes.py
+++ b/maintainers/build-release-notes.py
@@ -1,25 +1,78 @@
+from collections import defaultdict
 import frontmatter
 import sys
 import pathlib
 import textwrap
+from typing import Any, Tuple
+import dataclasses
+import yaml
 
-GH_BASE = "https://github.com/NixOS/nix"
-FORGEJO_BASE = "https://git.lix.systems/lix-project/lix"
+GH_ROOT = "https://github.com/"
+GH_REPO_BASE = "https://github.com/NixOS/nix"
+FORGEJO_REPO_BASE = "https://git.lix.systems/lix-project/lix"
+FORGEJO_ROOT = "https://git.lix.systems/"
 GERRIT_BASE = "https://gerrit.lix.systems/c/lix/+"
+KNOWN_KEYS = ('synopsis', 'cls', 'issues', 'prs', 'significance', 'category', 'credits')
 
 SIGNIFICANCECES = {
     None: 0,
     'significant': 10,
 }
 
+# This is just hardcoded for better validation. If you think there should be
+# more of them, feel free to add more.
+CATEGORIES = [
+    'Breaking Changes',
+    'Features',
+    'Improvements',
+    'Fixes',
+    'Packaging',
+    'Miscellany',
+]
+
+
+@dataclasses.dataclass
+class AuthorInfo:
+    name: str
+    github: str | None = None
+    forgejo: str | None = None
+    display_name: str | None = None
+
+    def show_name(self) -> str:
+        return self.display_name or self.name
+
+    def __str__(self) -> str:
+        if self.forgejo:
+            return f'[{self.show_name()}]({FORGEJO_ROOT}{self.forgejo})'
+        elif self.github:
+            return f'[{self.show_name()}]({GH_ROOT}{self.github})'
+        else:
+            return self.show_name()
+
+
+class AuthorInfoDB:
+    def __init__(self, author_info: dict[str, dict], throw_on_missing: bool):
+        self.author_info = {name: AuthorInfo(name=name, **d) for (name, d) in author_info.items()}
+        self.throw_on_missing = throw_on_missing
+
+    def __getitem__(self, name) -> str:
+        if name in self.author_info:
+            return str(self.author_info[name])
+        else:
+            if self.throw_on_missing:
+                raise Exception(f'Missing author info for author {name}')
+            else:
+                return name
+
+
 def format_link(ident: str, gh_part: str, fj_part: str) -> str:
     # FIXME: deprecate github as default
     if ident.isdigit():
-        num, link, base = int(ident), f"#{ident}", f"{GH_BASE}/{gh_part}"
+        num, link, base = int(ident), f"#{ident}", f"{GH_REPO_BASE}/{gh_part}"
     elif ident.startswith("gh#"):
-        num, link, base = int(ident[3:]), ident, f"{GH_BASE}/{gh_part}"
+        num, link, base = int(ident[3:]), ident, f"{GH_REPO_BASE}/{gh_part}"
     elif ident.startswith("fj#"):
-        num, link, base = int(ident[3:]), ident, f"{FORGEJO_BASE}/{fj_part}"
+        num, link, base = int(ident[3:]), ident, f"{FORGEJO_REPO_BASE}/{fj_part}"
     else:
         raise Exception("unrecognized reference format", ident)
     return f"[{link}]({base}/{num})"
@@ -31,28 +84,20 @@ def format_pr(pr: str) -> str:
 def format_cl(clid: int) -> str:
     return f"[cl/{clid}]({GERRIT_BASE}/{clid})"
 
-def run_on_dir(d):
-    paths = pathlib.Path(d).glob('*.md')
-    entries = []
-    for p in paths:
-        try:
-            e = frontmatter.load(p)
-            if 'synopsis' not in e.metadata:
-                raise Exception('missing synopsis')
-            unknownKeys = set(e.metadata.keys()) - set(('synopsis', 'cls', 'issues', 'prs', 'significance'))
-            if unknownKeys:
-                raise Exception('unknown keys', unknownKeys)
-            entries.append((p, e))
-        except Exception as e:
-            e.add_note(f"in {p}")
-            raise
+def plural_list(strs: list[str]) -> str:
+    if len(strs) <= 1:
+        return ''.join(strs)
+    else:
+        comma = ',' if len(strs) >= 3 else ''
+        return '{}{} and {}'.format(', '.join(strs[:-1]), comma, strs[-1])
 
-    def listify(l: list | int) -> list:
-        if not isinstance(l, list):
-            return [l]
-        else:
-            return l
+def listify(l: list | int) -> list:
+    if not isinstance(l, list):
+        return [l]
+    else:
+        return l
 
+def do_category(author_info: AuthorInfoDB, entries: list[Tuple[pathlib.Path, Any]]):
     for p, entry in sorted(entries, key=lambda e: (-SIGNIFICANCECES[e[1].metadata.get('significance')], e[0])):
         try:
             header = entry.metadata['synopsis']
@@ -66,11 +111,57 @@ def run_on_dir(d):
             print(f"- {header}")
             print()
             print(textwrap.indent(entry.content, '  '))
-            print()
+            if credits := listify(entry.metadata.get('credits', [])):
+                print()
+                print(textwrap.indent('Many thanks to {} for this.'.format(plural_list(list(author_info[c] for c in credits))), '  '))
         except Exception as e:
             e.add_note(f"in {p}")
            raise
 
+
+def run_on_dir(author_info: AuthorInfoDB, d):
+    d = pathlib.Path(d)
+    if not d.is_dir():
+        raise ValueError(f'provided path {d} is not a directory')
+    paths = pathlib.Path(d).glob('*.md')
+    entries = defaultdict(list)
+    for p in paths:
+        try:
+            e = frontmatter.load(p)
+            if 'synopsis' not in e.metadata:
+                raise Exception('missing synopsis')
+            unknownKeys = set(e.metadata.keys()) - set(KNOWN_KEYS)
+            if unknownKeys:
+                raise Exception('unknown keys', unknownKeys)
+            category = e.metadata.get('category', 'Miscellany')
+            if category not in CATEGORIES:
+                raise Exception('unknown category', category)
+            entries[category].append((p, e))
+        except Exception as e:
+            e.add_note(f"in {p}")
+            raise
+
+    for category in CATEGORIES:
+        if entries[category]:
+            print('\n#', category)
+            do_category(author_info, entries[category])
+
+def main():
+    import argparse
+
+    ap = argparse.ArgumentParser()
+    ap.add_argument('--change-authors', help='File name of the change authors metadata YAML file', type=argparse.FileType('r'))
+    ap.add_argument('dirs', help='Directories to run on', nargs='+')
+
+    args = ap.parse_args()
+
+    author_info = AuthorInfoDB(yaml.safe_load(args.change_authors), throw_on_missing=True) \
+        if args.change_authors \
+        else AuthorInfoDB({}, throw_on_missing=False)
+
+    for d in args.dirs:
+        run_on_dir(author_info, d)
+
+
 if __name__ == '__main__':
-    for d in sys.argv[1:]:
-        run_on_dir(d)
+    main()
diff --git a/maintainers/release-notes b/maintainers/release-notes
index 477df31f1..51864cbc2 100755
--- a/maintainers/release-notes
+++ b/maintainers/release-notes
@@ -152,7 +152,7 @@ section_title="Release $version_full ($DATE)"
   # TODO add minor number, and append?
   echo "# $section_title"
   echo
-  build-release-notes doc/manual/rl-next
+  build-release-notes --change-authors doc/manual/change-authors.yml doc/manual/rl-next
 ) | tee -a $file
 
 log "Wrote $file"
diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl
deleted file mode 100755
index ebc536f12..000000000
--- a/maintainers/upload-release.pl
+++ /dev/null
@@ -1,256 +0,0 @@
-#! /usr/bin/env nix-shell
-#! nix-shell -i perl -p perl perlPackages.LWPUserAgent perlPackages.LWPProtocolHttps perlPackages.FileSlurp perlPackages.NetAmazonS3 gnupg1
-
-use strict;
-use Data::Dumper;
-use File::Basename;
-use File::Path;
-use File::Slurp;
-use File::Copy;
-use JSON::PP;
-use LWP::UserAgent;
-use Net::Amazon::S3;
-
-my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
-
-my $releasesBucketName = "nix-releases";
-my $channelsBucketName = "nix-channels";
-
-my $TMPDIR = $ENV{'TMPDIR'} // "/tmp";
-
-my $isLatest = ($ENV{'IS_LATEST'} // "") eq "1";
-
-# FIXME: cut&paste from nixos-channel-scripts.
-sub fetch {
-    my ($url, $type) = @_;
-
-    my $ua = LWP::UserAgent->new;
-    $ua->default_header('Accept', $type) if defined $type;
-
-    my $response = $ua->get($url);
-    die "could not download $url: ", $response->status_line, "\n" unless $response->is_success;
-
-    return $response->decoded_content;
-}
-
-my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
-my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
-#print Dumper($evalInfo);
-my $flakeUrl = $evalInfo->{flake} or die;
-my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die);
-my $nixRev = $flakeInfo->{revision} or die;
-
-my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
-#print Dumper($buildInfo);
-
-my $releaseName = $buildInfo->{nixname};
-$releaseName =~ /nix-(.*)$/ or die;
-my $version = $1;
-
-print STDERR "Flake URL is $flakeUrl, Nix revision is $nixRev, version is $version\n";
-
-my $releaseDir = "nix/$releaseName";
-
-my $tmpDir = "$TMPDIR/nix-release/$releaseName";
-File::Path::make_path($tmpDir);
-
-my $narCache = "$TMPDIR/nar-cache";
-File::Path::make_path($narCache);
-
-my $binaryCache = "https://cache.nixos.org/?local-nar-cache=$narCache";
-
-# S3 setup.
-my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID given.";
-my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No AWS_SECRET_ACCESS_KEY given.";
-
-my $s3 = Net::Amazon::S3->new(
-    { aws_access_key_id => $aws_access_key_id,
-      aws_secret_access_key => $aws_secret_access_key,
-      retry => 1,
-      host => "s3-eu-west-1.amazonaws.com",
-    });
-
-my $releasesBucket = $s3->bucket($releasesBucketName) or die;
-
-my $s3_us = Net::Amazon::S3->new(
-    { aws_access_key_id => $aws_access_key_id,
-      aws_secret_access_key => $aws_secret_access_key,
-      retry => 1,
-    });
-
-my $channelsBucket = $s3_us->bucket($channelsBucketName) or die;
-
-sub getStorePath {
-    my ($jobName, $output) = @_;
-    my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
-    return $buildInfo->{buildoutputs}->{$output or "out"}->{path} or die "cannot get store path for '$jobName'";
-}
-
-sub copyManual {
-    my $manual = getStorePath("build.x86_64-linux", "doc");
-    print "$manual\n";
-
-    my $manualNar = "$tmpDir/$releaseName-manual.nar.xz";
-    print "$manualNar\n";
-
-    unless (-e $manualNar) {
-        system("NIX_REMOTE=$binaryCache nix store dump-path '$manual' | xz > '$manualNar'.tmp") == 0
-            or die "unable to fetch $manual\n";
-        rename("$manualNar.tmp", $manualNar) or die;
-    }
-
-    unless (-e "$tmpDir/manual") {
-        system("xz -d < '$manualNar' | nix-store --restore $tmpDir/manual.tmp") == 0
-            or die "unable to unpack $manualNar\n";
-        rename("$tmpDir/manual.tmp/share/doc/nix/manual", "$tmpDir/manual") or die;
-        system("rm -rf '$tmpDir/manual.tmp'") == 0 or die;
-    }
-
-    system("aws s3 sync '$tmpDir/manual' s3://$releasesBucketName/$releaseDir/manual") == 0
-        or die "syncing manual to S3\n";
-}
-
-copyManual;
-
-sub downloadFile {
-    my ($jobName, $productNr, $dstName) = @_;
-
-    my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
-    #print STDERR "$jobName: ", Dumper($buildInfo), "\n";
-
-    my $srcFile = $buildInfo->{buildproducts}->{$productNr}->{path} or die "job '$jobName' lacks product $productNr\n";
-    $dstName //= basename($srcFile);
-    my $tmpFile = "$tmpDir/$dstName";
-
-    if (!-e $tmpFile) {
-        print STDERR "downloading $srcFile to $tmpFile...\n";
-
-        my $fileInfo = decode_json(`NIX_REMOTE=$binaryCache nix store ls --json '$srcFile'`);
-
-        $srcFile = $fileInfo->{target} if $fileInfo->{type} eq 'symlink';
-
-        #print STDERR $srcFile, " ", Dumper($fileInfo), "\n";
-
-        system("NIX_REMOTE=$binaryCache nix store cat '$srcFile' > '$tmpFile'.tmp") == 0
-            or die "unable to fetch $srcFile\n";
-        rename("$tmpFile.tmp", $tmpFile) or die;
-    }
-
-    my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash};
-    my $sha256_actual = `nix hash file --base16 --type sha256 '$tmpFile'`;
-    chomp $sha256_actual;
-    if (defined($sha256_expected) && $sha256_expected ne $sha256_actual) {
-        print STDERR "file $tmpFile is corrupt, got $sha256_actual, expected $sha256_expected\n";
-        exit 1;
-    }
-
-    write_file("$tmpFile.sha256", $sha256_actual);
-
-    return $sha256_expected;
-}
-
-downloadFile("binaryTarball.i686-linux", "1");
-downloadFile("binaryTarball.x86_64-linux", "1");
-downloadFile("binaryTarball.aarch64-linux", "1");
-downloadFile("binaryTarball.x86_64-darwin", "1");
-downloadFile("binaryTarball.aarch64-darwin", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1");
-downloadFile("installerScript", "1");
-
-# Upload docker images to dockerhub.
-my $dockerManifest = "";
-my $dockerManifestLatest = "";
-
-for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
-    my $system = $platforms->[0];
-    my $dockerPlatform = $platforms->[1];
-    my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
-    downloadFile("dockerImage.$system", "1", $fn);
-
-    print STDERR "loading docker image for $dockerPlatform...\n";
-    system("docker load -i $tmpDir/$fn") == 0 or die;
-
-    my $tag = "nixos/nix:$version-$dockerPlatform";
-    my $latestTag = "nixos/nix:latest-$dockerPlatform";
-
-    print STDERR "tagging $version docker image for $dockerPlatform...\n";
-    system("docker tag nix:$version $tag") == 0 or die;
-
-    if ($isLatest) {
-        print STDERR "tagging latest docker image for $dockerPlatform...\n";
-        system("docker tag nix:$version $latestTag") == 0 or die;
-    }
-
-    print STDERR "pushing $version docker image for $dockerPlatform...\n";
-    system("docker push -q $tag") == 0 or die;
-
-    if ($isLatest) {
-        print STDERR "pushing latest docker image for $dockerPlatform...\n";
-        system("docker push -q $latestTag") == 0 or die;
-    }
-
-    $dockerManifest .= " --amend $tag";
-    $dockerManifestLatest .= " --amend $latestTag"
-}
-
-print STDERR "creating multi-platform docker manifest...\n";
-system("docker manifest rm nixos/nix:$version");
-system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
-if ($isLatest) {
-    print STDERR "creating latest multi-platform docker manifest...\n";
-    system("docker manifest rm nixos/nix:latest");
-    system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
-}
-
-print STDERR "pushing multi-platform docker manifest...\n";
-system("docker manifest push nixos/nix:$version") == 0 or die;
-
-if ($isLatest) {
-    print STDERR "pushing latest multi-platform docker manifest...\n";
-    system("docker manifest push nixos/nix:latest") == 0 or die;
-}
-
-# Upload nix-fallback-paths.nix.
-write_file("$tmpDir/fallback-paths.nix",
-    "{\n" .
-    "  x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" .
-    "  i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" .
-    "  aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" .
-    "  x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" .
-    "  aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" .
-    "}\n");
-
-# Upload release files to S3.
-for my $fn (glob "$tmpDir/*") {
-    my $name = basename($fn);
-    next if $name eq "manual";
-    my $dstKey = "$releaseDir/" . $name;
-    unless (defined $releasesBucket->head_key($dstKey)) {
-        print STDERR "uploading $fn to s3://$releasesBucketName/$dstKey...\n";
-
-        my $configuration = ();
-        $configuration->{content_type} = "application/octet-stream";
-
-        if ($fn =~ /.sha256|install|\.nix$/) {
-            $configuration->{content_type} = "text/plain";
-        }
-
-        $releasesBucket->add_key_filename($dstKey, $fn, $configuration)
-            or die $releasesBucket->err . ": " . $releasesBucket->errstr;
-    }
-}
-
-# Update the "latest" symlink.
-$channelsBucket->add_key(
-    "nix-latest/install", "",
-    { "x-amz-website-redirect-location" => "https://releases.nixos.org/$releaseDir/install" })
-    or die $channelsBucket->err . ": " . $channelsBucket->errstr
-    if $isLatest;
-
-# Tag the release in Git.
-chdir("/home/eelco/Dev/nix-pristine") or die;
-system("git remote update origin") == 0 or die;
-system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die;
-system("git push --tags") == 0 or die;
-system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die if $isLatest;
diff --git a/maintainers/upload_manual.sh b/maintainers/upload_manual.sh
new file mode 100755
index 000000000..f50520490
--- /dev/null
+++ b/maintainers/upload_manual.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+cd "$(dirname -- "$0")/.."
+
+# This script uploads the Lix manual to the Lix s3 store.
+# It expects credentials to be configured like so:
+#
+# ~/.aws/credentials:
+#
+# [default]
+# aws_access_key_id = SOMEACCESSKEY
+# aws_secret_access_key = SOMESECRETKEY
+#
+# default can also be replaced by some other string if AWS_PROFILE is set in
+# environment.
+#
+# See: https://rclone.org/s3/#authentication
+#
+# To obtain such a key, log into the garage host and run:
+# (obtain GARAGE_RPC_SECRET into environment perhaps by systemctl cat garage)
+# garage key create SOME-KEY-NAME
+# garage bucket allow --read --write docs --key SOME-KEY-NAME
+
+if [[ ! -f result-doc/share/doc/nix/manual/index.html ]]; then
+    echo -e "result-doc does not appear to contain a Lix manual. You can build one with:\n  nix build '.#default^*'" >&2
+    exit 1
+fi
+
+# --checksum: https://rclone.org/s3/#avoiding-head-requests-to-read-the-modification-time
+# By default rclone uses the modification time to determine if something needs
+# syncing. This is actually very bad for our use case, since we have small
+# files that have meaningless (Unix epoch) local modification time data. We can
+# make it go both 16x faster and more correct by using md5s instead.
+rclone \
+    --config doc/manual/rclone.conf \
+    -vv \
+    sync \
+    --checksum \
+    result-doc/share/doc/nix/manual/ lix-docs:docs/manual/nightly/