aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/base.conf13
l---------src/collections/blog1
l---------src/collections/pastebins1
l---------src/collections/podcasts1
l---------src/collections/screencasts1
l---------src/collections/tils1
-rw-r--r--src/content/about.adoc9
-rw-r--r--src/content/blog/2018/07/17/guix-nixos.adoc196
-rw-r--r--src/content/blog/2018/08/01/npm-ci-reproducibility.adoc148
-rw-r--r--src/content/blog/2018/12/21/ytdl-subs.adoc274
-rw-r--r--src/content/blog/2019/06/02/nixos-stateless-workstation.adoc150
-rw-r--r--src/content/blog/2020/08/10/guix-srht.adoc128
-rw-r--r--src/content/blog/2020/08/31/database-i-with-i-had.adoc295
-rw-r--r--src/content/blog/2020/10/05/cargo2nix-demo.tar.gzbin0 -> 174080 bytes
-rw-r--r--src/content/blog/2020/10/05/cargo2nix.adoc80
-rw-r--r--src/content/blog/2020/10/05/cargo2nix.tar.gzbin0 -> 143360 bytes
-rw-r--r--src/content/blog/2020/10/05/swift2nix-demo.tar.gzbin0 -> 174080 bytes
-rw-r--r--src/content/blog/2020/10/05/swift2nix.adoc199
-rw-r--r--src/content/blog/2020/10/05/swift2nix.tar.gzbin0 -> 143360 bytes
-rw-r--r--src/content/blog/2020/10/19/feature-flags.adoc305
-rw-r--r--src/content/blog/2020/10/20/wrong-interviewing.adoc331
-rw-r--r--src/content/blog/2020/11/07/diy-bugs.adoc108
-rw-r--r--src/content/blog/2020/11/08/paradigm-shift-review.adoc164
-rw-r--r--src/content/blog/2020/11/12/database-parsers-trees.adoc233
-rw-r--r--src/content/blog/2020/11/14/local-first-review.adoc304
-rw-r--r--src/content/blog/2021/01/26/remembering-ann.adoc190
-rw-r--r--src/content/blog/2021/02/17/fallible.adoc244
-rw-r--r--src/content/blog/2021/02/17/fallible.tar.gzbin0 -> 3174400 bytes
-rw-r--r--src/content/blog/2021/04/29/relational-review.adoc130
-rw-r--r--src/content/blog/index.adoc1
-rw-r--r--src/content/img/atom.svg5
-rw-r--r--src/content/img/envelope.svg4
-rw-r--r--src/content/img/favicon.svg62
-rw-r--r--src/content/img/link.svg5
-rw-r--r--src/content/img/lock.svg4
-rw-r--r--src/content/index.adoc1
-rw-r--r--src/content/pastebins/2016/04/05/rpn.adoc34
-rw-r--r--src/content/pastebins/2018/07/11/nix-pinning.adoc38
-rw-r--r--src/content/pastebins/2018/07/13/guix-nixos-systemd.adoc33
-rw-r--r--src/content/pastebins/2018/07/13/guixbuilder-nixos.adoc53
-rw-r--r--src/content/pastebins/2018/07/13/guixbuilder.adoc26
-rw-r--r--src/content/pastebins/2018/07/13/nix-strpad.adoc19
-rw-r--r--src/content/pastebins/2018/07/25/nix-exps.adoc58
-rw-r--r--src/content/pastebins/2018/07/25/nix-showdrv.adoc86
-rw-r--r--src/content/pastebins/2019/06/08/inconsistent-hash.adoc1061
-rw-r--r--src/content/pastebins/2019/12/29/raku-tuple-type.adoc37
-rw-r--r--src/content/pastebins/2020/01/04/guix-import-failure.adoc47
-rw-r--r--src/content/pastebins/2020/02/14/guix-shebang.adoc23
-rw-r--r--src/content/pastebins/2020/11/27/guix-build-local.adoc60
-rw-r--r--src/content/pastebins/2020/12/15/guix-pack-fail.adoc96
-rw-r--r--src/content/pastebins/2021/04/03/naive-slugify-js.adoc40
-rw-r--r--src/content/pastebins/2021/06/08/reading-session-pt1.adoc77
-rw-r--r--src/content/pastebins/2021/06/22/curl-wget.adoc102
-rw-r--r--src/content/pastebins/2021/08/11/h1-spacing.adoc96
-rw-r--r--src/content/pastebins/2021/09/02/sicp-3-19.adoc42
-rw-r--r--src/content/pastebins/2021/09/03/sicp-persistent-queue.adoc85
-rw-r--r--src/content/pastebins/2022/07/14/git-cleanup.adoc70
-rw-r--r--src/content/pastebins/2023/07/22/funcallable-amop.adoc43
-rw-r--r--src/content/pastebins/index.adoc1
-rw-r--r--src/content/podcasts/2020/12/19/test-entry.adoc103
-rw-r--r--src/content/podcasts/2020/12/19/test-entry.flacbin0 -> 462864 bytes
-rw-r--r--src/content/podcasts/index.adoc1
-rw-r--r--src/content/pt/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md45
-rw-r--r--src/content/screencasts/2021/02/07/autoqemu.adoc42
-rw-r--r--src/content/screencasts/2021/02/07/autoqemu.tar.gzbin0 -> 808960 bytes
-rw-r--r--src/content/screencasts/2021/02/07/autoqemu.webmbin0 -> 12103021 bytes
-rw-r--r--src/content/screencasts/index.adoc1
-rw-r--r--src/content/slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides343
-rw-r--r--src/content/slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides266
-rw-r--r--src/content/tils/2020/08/12/filename-timestamp.adoc44
-rw-r--r--src/content/tils/2020/08/13/code-jekyll.adoc155
-rw-r--r--src/content/tils/2020/08/14/browse-git.adoc84
-rw-r--r--src/content/tils/2020/08/16/git-search.adoc59
-rw-r--r--src/content/tils/2020/08/28/grep-online.adoc139
-rw-r--r--src/content/tils/2020/09/04/email-cli-fun-profit.adoc80
-rw-r--r--src/content/tils/2020/09/05/oldschool-pr.adoc118
-rw-r--r--src/content/tils/2020/10/11/search-git-history.adoc41
-rw-r--r--src/content/tils/2020/11/08/find-broken-symlink.adoc36
-rw-r--r--src/content/tils/2020/11/12/diy-nix-bash-ci.adoc74
-rw-r--r--src/content/tils/2020/11/12/git-bisect-automation.adoc35
-rw-r--r--src/content/tils/2020/11/12/useful-bashvars.adoc72
-rw-r--r--src/content/tils/2020/11/14/gpodder-media.adoc33
-rw-r--r--src/content/tils/2020/11/30/git-notes-ci.adoc122
-rw-r--r--src/content/tils/2020/12/15/shellcheck-repo.adoc171
-rw-r--r--src/content/tils/2020/12/29/svg.adoc134
-rw-r--r--src/content/tils/2021/01/12/curl-awk-emails.adoc142
-rw-r--r--src/content/tils/2021/01/17/posix-shebang.adoc55
-rw-r--r--src/content/tils/2021/04/24/cl-generic-precedence.adoc137
-rw-r--r--src/content/tils/2021/04/24/clojure-autocurry.adoc135
-rw-r--r--src/content/tils/2021/04/24/scm-nif.adoc63
-rw-r--r--src/content/tils/2021/07/23/git-tls-gpg.adoc56
-rw-r--r--src/content/tils/2021/08/11/js-bigint-reviver.adoc100
-rw-r--r--src/content/tils/index.adoc1
-rw-r--r--src/headers.txt1
l---------src/pages/root1
95 files changed, 8704 insertions, 0 deletions
diff --git a/src/base.conf b/src/base.conf
new file mode 100644
index 0000000..54ccb99
--- /dev/null
+++ b/src/base.conf
@@ -0,0 +1,13 @@
+export root_dir=src/content
+export header_links=src/headers.txt
+export url_pre='https://euandre.org'
+export email='eu@euandre.org'
+export list_addr='~euandreh/public-inbox@lists.sr.ht'
+export discussions_url_prefix='https://lists.sr.ht/~euandreh/public-inbox'
+export sourcecode_url='http://euandre.org/git'
+export sourcecode_url_prefix="$sourcecode_url/tree"
+export author='EuAndreh'
+export site_name="EuAndreh's website"
+export feed_title="EuAndreh's blog"
+export feed_url='feed.articles.en.xml'
+export feed_alternate_url='./'
diff --git a/src/collections/blog b/src/collections/blog
new file mode 120000
index 0000000..8c3a331
--- /dev/null
+++ b/src/collections/blog
@@ -0,0 +1 @@
+../content/blog \ No newline at end of file
diff --git a/src/collections/pastebins b/src/collections/pastebins
new file mode 120000
index 0000000..61731f2
--- /dev/null
+++ b/src/collections/pastebins
@@ -0,0 +1 @@
+../content/pastebins \ No newline at end of file
diff --git a/src/collections/podcasts b/src/collections/podcasts
new file mode 120000
index 0000000..8e5f3ba
--- /dev/null
+++ b/src/collections/podcasts
@@ -0,0 +1 @@
+../content/podcasts \ No newline at end of file
diff --git a/src/collections/screencasts b/src/collections/screencasts
new file mode 120000
index 0000000..09a6cdb
--- /dev/null
+++ b/src/collections/screencasts
@@ -0,0 +1 @@
+../content/screencasts \ No newline at end of file
diff --git a/src/collections/tils b/src/collections/tils
new file mode 120000
index 0000000..435da6e
--- /dev/null
+++ b/src/collections/tils
@@ -0,0 +1 @@
+../content/tils \ No newline at end of file
diff --git a/src/content/about.adoc b/src/content/about.adoc
new file mode 100644
index 0000000..f380ba9
--- /dev/null
+++ b/src/content/about.adoc
@@ -0,0 +1,9 @@
+= About
+
+Hi, I'm EuAndreh. I write software and occasionally music. You can find my
+contact information in the footer of this page, or mail my
+mailto:~euandreh/public-inbox@lists.sr.ht[public inbox]
+(https://lists.sr.ht/~euandreh/public-inbox[archive]).
+
+This is my personal website where I write articles, publish software and more
+related work.
diff --git a/src/content/blog/2018/07/17/guix-nixos.adoc b/src/content/blog/2018/07/17/guix-nixos.adoc
new file mode 100644
index 0000000..6005f9f
--- /dev/null
+++ b/src/content/blog/2018/07/17/guix-nixos.adoc
@@ -0,0 +1,196 @@
+---
+title: Running Guix on NixOS
+date: 2018-07-17
+layout: post
+lang: en
+ref: running-guix-on-nixos
+---
+I wanted to run
+Guix on a NixOS machine. Even though the Guix manual explains how to do it
+[step by step][0], I needed a few extra ones to make it work properly.
+
+[0]: https://www.gnu.org/software/guix/manual/en/html_node/Binary-Installation.html#Binary-Installation
+
+I couldn't just install GuixSD because my wireless network card
+doesn't have any free drivers (yet).
+
+## Creating `guixbuilder` users
+
+Guix requires you to create non-root users that will be used to perform
+the builds in the isolated environments.
+
+The [manual][1] already provides you with a ready to run (as root) command for
+creating the build users:
+
+[1]: https://www.gnu.org/software/guix/manual/en/html_node/Build-Environment-Setup.html#Build-Environment-Setup
+
+```bash
+groupadd --system guixbuild
+for i in `seq -w 1 10`;
+do
+ useradd -g guixbuild -G guixbuild \
+ -d /var/empty -s `which nologin` \
+ -c "Guix build user $i" --system \
+ guixbuilder$i;
+done
+```
+
+However, in my personal NixOS I have disabled [`users.mutableUsers`][2], which
+means that even if I run the above command, the users will be removed once
+I rebuild my OS:
+
+[2]: https://nixos.org/nixos/manual/index.html#sec-user-management
+
+```shell
+$ sudo nixos-rebuild switch
+(...)
+removing user ‘guixbuilder7’
+removing user ‘guixbuilder3’
+removing user ‘guixbuilder10’
+removing user ‘guixbuilder1’
+removing user ‘guixbuilder6’
+removing user ‘guixbuilder9’
+removing user ‘guixbuilder4’
+removing user ‘guixbuilder2’
+removing user ‘guixbuilder8’
+removing user ‘guixbuilder5’
+(...)
+```
+
+Instead of enabling `users.mutableUsers` I could add the Guix users by
+adding them to my system configuration:
+
+```nix
+{ config, pkgs, ...}:
+
+{
+
+  # ... NixOS usual config elided ...
+
+ users = {
+ mutableUsers = false;
+
+ extraUsers =
+ let
+ andrehUser = {
+ andreh = {
+ # my custom user config
+ };
+ };
+ buildUser = (i:
+ {
+ "guixbuilder${i}" = { # guixbuilder$i
+ group = "guixbuild"; # -g guixbuild
+ extraGroups = ["guixbuild"]; # -G guixbuild
+ home = "/var/empty"; # -d /var/empty
+ shell = pkgs.nologin; # -s `which nologin`
+          description = "Guix build user ${i}"; # -c "Guix build user $i"
+ isSystemUser = true; # --system
+ };
+ }
+ );
+ in
+ # merge all users
+ pkgs.lib.fold (str: acc: acc // buildUser str)
+ andrehUser
+ # for i in `seq -w 1 10`
+ (map (pkgs.lib.fixedWidthNumber 2) (builtins.genList (n: n+1) 10));
+
+ extraGroups.guixbuild = {
+ name = "guixbuild";
+ };
+ };
+}
+```
+
+Here I used `fold` and the `//` operator to merge all of the
+configuration sets into a single `extraUsers` value.
+
+## Creating the `systemd` service
+
+One other thing missing was the `systemd` service.
+
+First I couldn't just copy the `.service` file to `/etc` since in NixOS
+that folder isn't writable. But also I wanted the service to be better
+integrated with the OS.
+
+That was a little easier than creating the users, all I had to do was translate
+the provided [`guix-daemon.service.in`][3] configuration to an equivalent Nix
+expression
+
+[3]: https://git.savannah.gnu.org/cgit/guix.git/tree/etc/guix-daemon.service.in?id=00c86a888488b16ce30634d3a3a9d871ed6734a2
+
+```ini
+# This is a "service unit file" for the systemd init system to launch
+# 'guix-daemon'. Drop it in /etc/systemd/system or similar to have
+# 'guix-daemon' automatically started.
+
+[Unit]
+Description=Build daemon for GNU Guix
+
+[Service]
+ExecStart=/var/guix/profiles/per-user/root/guix-profile/bin/guix-daemon --build-users-group=guixbuild
+Environment=GUIX_LOCPATH=/root/.guix-profile/lib/locale
+RemainAfterExit=yes
+StandardOutput=syslog
+StandardError=syslog
+
+# See <https://lists.gnu.org/archive/html/guix-devel/2016-04/msg00608.html>.
+# Some package builds (for example, go@1.8.1) may require even more than
+# 1024 tasks.
+TasksMax=8192
+
+[Install]
+WantedBy=multi-user.target
+```
+
+This sample `systemd` configuration file became:
+
+```nix
+guix-daemon = {
+ enable = true;
+ description = "Build daemon for GNU Guix";
+ serviceConfig = {
+ ExecStart = "/var/guix/profiles/per-user/root/guix-profile/bin/guix-daemon --build-users-group=guixbuild";
+ Environment="GUIX_LOCPATH=/root/.guix-profile/lib/locale";
+ RemainAfterExit="yes";
+ StandardOutput="syslog";
+ StandardError="syslog";
+    TasksMax = "8192";
+ };
+ wantedBy = [ "multi-user.target" ];
+};
+```
+
+There you go! After running `sudo nixos-rebuild switch` I could get Guix
+up and running:
+
+```bash
+$ guix package -i hello
+The following package will be installed:
+ hello 2.10 /gnu/store/bihfrh609gkxb9dp7n96wlpigiv3krfy-hello-2.10
+
+substitute: updating substitutes from 'https://mirror.hydra.gnu.org'... 100.0%
+The following derivations will be built:
+ /gnu/store/nznmdn6inpwxnlkrasydmda4s2vsp9hg-profile.drv
+ /gnu/store/vibqrvw4c8lacxjrkqyzqsdrmckv77kq-fonts-dir.drv
+ /gnu/store/hi8alg7wi0wgfdi3rn8cpp37zhx8ykf3-info-dir.drv
+ /gnu/store/cvkbp378cvfjikz7mjymhrimv7j12p0i-ca-certificate-bundle.drv
+ /gnu/store/d62fvxymnp95rzahhmhf456bsf0xg1c6-manual-database.drv
+Creating manual page database...
+1 entries processed in 0.0 s
+2 packages in profile
+$ hello
+Hello, world!
+```
+
+Some improvements to this approach are:
+
+1. looking into [NixOS modules][4] and trying to bundle everything together
+ into a single logical unit;
+2. [build Guix from source][5] and share the Nix store and daemon with Guix.
+
+Happy Guix/Nix hacking!
+
+[4]: https://nixos.org/nixos/manual/index.html#sec-writing-modules
+[5]: https://www.gnu.org/software/guix/manual/en/html_node/Requirements.html#Requirements
diff --git a/src/content/blog/2018/08/01/npm-ci-reproducibility.adoc b/src/content/blog/2018/08/01/npm-ci-reproducibility.adoc
new file mode 100644
index 0000000..f896c6c
--- /dev/null
+++ b/src/content/blog/2018/08/01/npm-ci-reproducibility.adoc
@@ -0,0 +1,148 @@
+---
+title: Verifying "npm ci" reproducibility
+date: 2018-08-01
+layout: post
+lang: en
+ref: verifying-npm-ci-reproducibility
+updated_at: 2019-05-22
+---
+When [npm@5](https://blog.npmjs.org/post/161081169345/v500) came bringing
+[package-locks](https://docs.npmjs.com/files/package-locks) with it, I was
+confused about the benefits it provided, since running `npm install` more than
+once could resolve all the dependencies again and yield yet another fresh
+`package-lock.json` file. The message saying "you should add this file to
+version control" left me hesitant on what to do[^package-lock-message].
+
+However the [addition of `npm ci`](https://blog.npmjs.org/post/171556855892/introducing-npm-ci-for-faster-more-reliable)
+filled this gap: it's a stricter variation of `npm install` which
+guarantees that "[subsequent installs are able to generate identical trees](https://docs.npmjs.com/files/package-lock.json)". But are they
+really identical? I could see that I didn't have the same problems of
+different installation outputs, but I didn't know for **sure** if it
+was really identical.
+
+## Computing the hash of a directory's content
+
+I quickly searched for a way to check for the hash signature of an
+entire directory tree, but I couldn't find one. I've made a poor
+man's [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree)
+implementation using `sha256sum` and a few piped commands at the
+terminal:
+
+```bash
+merkle-tree () {
+ dirname="${1-.}"
+ pushd "$dirname"
+ find . -type f | \
+ sort | \
+ xargs -I{} sha256sum "{}" | \
+ sha256sum | \
+ awk '{print $1}'
+ popd
+}
+```
+
+Going through it line by line:
+
+- #1 we define a Bash function called `merkle-tree`;
+- #2 it accepts a single argument: the directory to compute the
+ merkle tree from. If nothing is given, it runs on the current
+ directory (`.`);
+- #3 we go to the directory, so we don't get different prefixes in
+ `find`'s output (like `../a/b`);
+- #4 we get all files from the directory tree. Since we're using
+ `sha256sum` to compute the hash of the file contents, we need to
+ filter out folders from it;
+- #5 we need to sort the output, since different file systems and
+ `find` implementations may return files in different orders;
+- #6 we use `xargs` to compute the hash of each file individually
+ through `sha256sum`. Since a file may contain spaces we need to
+ escape it with quotes;
+- #7 we compute the hash of the combined hashes. Since `sha256sum`
+ output is formatted like `<hash> <filename>`, it produces a
+ different final hash if a file ever changes name without changing
+ it's content;
+- #8 we get the final hash output, excluding the `<filename>` (which
+ is `-` in this case, aka `stdin`).
+
+### Positive points:
+
+1. ignore timestamp: running more than once on different installation
+ yields the same hash;
+2. the name of the file is included in the final hash computation.
+
+### Limitations:
+
+1. it ignores empty folders from the hash computation;
+2. the implementation's only goal is to represent using a digest
+ whether the content of a given directory is the same or not. Leaf
+ presence checking is obviously missing from it.
+
+### Testing locally with sample data
+
+```bash
+mkdir /tmp/merkle-tree-test/
+cd /tmp/merkle-tree-test/
+mkdir -p a/b/ a/c/ d/
+echo "one" > a/b/one.txt
+echo "two" > a/c/two.txt
+echo "three" > d/three.txt
+merkle-tree . # output is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+merkle-tree . # output still is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+echo "four" > d/four.txt
+merkle-tree . # output is now b5464b958969ed81815641ace96b33f7fd52c20db71a7fccc45a36b3a2ae4d4c
+rm d/four.txt
+merkle-tree . # output back to be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+echo "hidden-five" > a/b/one.txt
+merkle-tree . # output changed 471fae0d074947e4955e9ac53e95b56e4bc08d263d89d82003fb58a0ffba66f5
+```
+
+It seems to work for this simple test case.
+
+You can try copying and pasting it to verify the hash signatures.
+
+## Using `merkle-tree` to check the output of `npm ci`
+
+*I've done all of the following using Node.js v8.11.3 and npm@6.1.0.*
+
+In this test case I'll take the main repo of
+[Lerna](https://lernajs.io/)[^lerna-package-lock]:
+
+```bash
+cd /tmp/
+git clone https://github.com/lerna/lerna.git
+cd lerna/
+git checkout 57ff865c0839df75dbe1974971d7310f235e1109
+npm ci
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+rm -rf node_modules/
+npm ci
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+npm ci # test if it also works with an existing node_modules/ folder
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+```
+
+Good job `npm ci` :)
+
+#6 and #9 take some time to run (21 seconds in my machine), but this
+specific use case isn't performance sensitive. The slowest step is
+computing the hash of each individual file.
+
+## Conclusion
+
+`npm ci` really "generates identical trees".
+
+I'm not aware of any other existing solution for verifying the hash
+signature of a directory. If you know any I'd
+[like to know](mailto:{{ site.author.email }}).
+
+## *Edit*
+
+2019-05-22: Fix spelling.
+
+[^package-lock-message]: The
+ [documentation](https://docs.npmjs.com/cli/install#description) claims `npm
+ install` is driven by the existing `package-lock.json`, but that's actually
+ [a little bit tricky](https://github.com/npm/npm/issues/17979#issuecomment-332701215).
+
+[^lerna-package-lock]: Finding a big known repo that actually committed the
+ `package-lock.json` file was harder than I expected.
diff --git a/src/content/blog/2018/12/21/ytdl-subs.adoc b/src/content/blog/2018/12/21/ytdl-subs.adoc
new file mode 100644
index 0000000..183c624
--- /dev/null
+++ b/src/content/blog/2018/12/21/ytdl-subs.adoc
@@ -0,0 +1,274 @@
+---
+title: Using "youtube-dl" to manage YouTube subscriptions
+date: 2018-12-21
+layout: post
+lang: en
+ref: using-youtube-dl-to-manage-youtube-subscriptions
+---
+I've recently read the
+[announcement](https://www.reddit.com/r/DataHoarder/comments/9sg8q5/i_built_a_selfhosted_youtube_subscription_manager/)
+of a very nice [self-hosted YouTube subscription
+manager](https://github.com/chibicitiberiu/ytsm). I haven't used
+YouTube's built-in subscriptions for a while now, and haven't missed
+it at all. When I saw the announcement, I considered writing about the
+solution I've built on top of [youtube-dl](https://youtube-dl.org/).
+
+## Background: the problem with YouTube
+
+In many ways, I agree with [André Staltz's view on data ownership and
+privacy](https://staltz.com/what-happens-when-you-block-internet-giants.html):
+
+> I started with the basic premise that "I want to be in control of my
+> data". Sometimes that meant choosing when to interact with an internet
+> giant and how much I feel like revealing to them. Most of times it
+> meant not interacting with them at all. I don't want to let them be in
+> full control of how much they can know about me. I don't want to be in
+> autopilot mode. (...) Which leads us to YouTube. While I was able to
+> find alternatives to Gmail (Fastmail), Calendar (Fastmail), Translate
+> (Yandex Translate), *etc.* YouTube remains as the most indispensable
+> Google-owned web service. It is really really hard to avoid consuming
+> YouTube content. It was probably the smartest startup acquisition
+> ever. My privacy-oriented alternative is to watch YouTube videos
+> through Tor, which is technically feasible but not polite to use the
+> Tor bandwidth for these purposes. I'm still scratching my head with
+> this issue.
+
+Even though I don't use most alternative services he mentions, I do
+watch videos from YouTube. But I also feel uncomfortable logging in to
+YouTube with a Google account, watching videos, creating playlists and
+similar things.
+
+Using the mobile app is worse: you can't even block ads in there.
+You're in less control on what you share with YouTube and Google.
+
+## youtube-dl
+
+youtube-dl is a command-line tool for downloading videos, from YouTube
+and [many other sites](https://rg3.github.io/youtube-dl/supportedsites.html):
+
+```shell
+$ youtube-dl https://www.youtube.com/watch?v=rnMYZnY3uLA
+[youtube] rnMYZnY3uLA: Downloading webpage
+[youtube] rnMYZnY3uLA: Downloading video info webpage
+[download] Destination: A Origem da Vida _ Nerdologia-rnMYZnY3uLA.mp4
+[download] 100% of 32.11MiB in 00:12
+```
+
+It can be used to download individual videos as showed above, but it
+also has some interesting flags that we can use:
+
+- `--output`: use a custom template to create the name of the
+ downloaded file;
+- `--download-archive`: use a text file for recording and remembering
+ which videos were already downloaded;
+- `--prefer-free-formats`: prefer free video formats, like `webm`,
+ `ogv` and Matroska `mkv`;
+- `--playlist-end`: how many videos to download from a "playlist" (a
+ channel, a user or an actual playlist);
+- `--write-description`: write the video description to a
+ `.description` file, useful for accessing links and extra content.
+
+Putting it all together:
+
+```shell
+$ youtube-dl "https://www.youtube.com/channel/UClu474HMt895mVxZdlIHXEA" \
+ --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
+ --prefer-free-formats \
+ --playlist-end 20 \
+ --write-description \
+ --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
+```
+
+This will download the latest 20 videos from the selected channel, and
+write down the video IDs in the `youtube-dl-seen.conf` file. Running it
+one more time immediately afterwards won't have any effect.
+
+If the channel posts one more video, running the same command again will
+download only the last video, since the other 19 were already
+downloaded.
+
+With this basic setup you have a minimal subscription system at work,
+and you can create some functions to help you manage that:
+
+```shell
+#!/bin/sh
+
+export DEFAULT_PLAYLIST_END=15
+
+download() {
+ youtube-dl "$1" \
+ --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
+ --prefer-free-formats \
+ --playlist-end $2 \
+ --write-description \
+ --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
+}
+export -f download
+
+
+download_user() {
+ download "https://www.youtube.com/user/$1" ${2-$DEFAULT_PLAYLIST_END}
+}
+export -f download_user
+
+
+download_channel() {
+ download "https://www.youtube.com/channel/$1" ${2-$DEFAULT_PLAYLIST_END}
+}
+export -f download_channel
+
+
+download_playlist() {
+ download "https://www.youtube.com/playlist?list=$1" ${2-$DEFAULT_PLAYLIST_END}
+}
+export -f download_playlist
+```
+
+With these functions, you now can have a subscription fetching script to
+download the latest videos from your favorite channels:
+
+```shell
+#!/bin/sh
+
+download_user ClojureTV 15
+download_channel "UCmEClzCBDx-vrt0GuSKBd9g" 100
+download_playlist "PLqG7fA3EaMRPzL5jzd83tWcjCUH9ZUsbX" 15
+```
+
+Now, whenever you want to watch the latest videos, just run the above
+script and you'll get all of them in your local machine.
+
+## Tradeoffs
+
+### I've made it for myself, with my use case in mind
+
+1. Offline
+
+    My internet speed is somewhat reasonable[^internet-speed], but it is really
+    unstable. Either at work or at home, it's not uncommon to lose internet
+    access for 2 minutes 3~5 times every day, and stay completely offline for a
+    couple of hours once every week.
+
+    Working through the hassle of keeping a playlist on disk has paid
+    off many, many times. Sometimes I don't even notice when the
+    connection drops for some minutes, because I'm watching a video and
+    working on some document, all on my local computer.
+
+ There's also no quality adjustment for YouTube's web player, I
+ always pick the higher quality and it doesn't change during the
+ video. For some types of content, like a podcast with some tiny
+ visual resources, this doesn't change much. For other types of
+ content, like a keynote presentation with text written on the
+ slides, watching on 144p isn't really an option.
+
+ If the internet connection drops during the video download,
+ youtube-dl will resume from where it stopped.
+
+ This is an offline first benefit that I really like, and works well
+ for me.
+
+2. Sync the "seen" file
+
+ I already have a running instance of Nextcloud, so just dumping the
+ `youtube-dl-seen.conf` file inside Nextcloud was a no-brainer.
+
+ You could try putting it in a dedicated git repository, and wrap the
+ script with an autocommit after every run. If you ever had a merge
+ conflict, you'd simply accept all changes and then run:
+
+ ```shell
+ $ uniq youtube-dl-seen.conf > youtube-dl-seen.conf
+ ```
+
+ to tidy up the file.
+
+3. Doesn't work on mobile
+
+ My primary device that I use everyday is my laptop, not my phone. It
+ works well for me this way.
+
+ Also, it's harder to add ad-blockers to mobile phones, and most
+ mobile software still depends on Google's and Apple's blessing.
+
+ If you wish, you can sync the videos to the SD card periodically,
+ but that's a bit of extra manual work.
+
+### The Good
+
+1. Better privacy
+
+ We don't even have to configure the ad-blocker to keep ads and
+ trackers away!
+
+ YouTube still has your IP address, so using a VPN is always a good
+ idea. However, a timing analysis would be able to identify you
+ (considering the current implementation).
+
+2. No need to self-host
+
+ There's no host that needs maintenance. Everything runs locally.
+
+ As long as you keep youtube-dl itself up to date and sync your
+ "seen" file, there's little extra work to do.
+
+3. Track your subscriptions with git
+
+ After creating a `subscriptions.sh` executable that downloads all
+ the videos, you can add it to git and use it to track metadata about
+ your subscriptions.
+
+### The Bad
+
+1. Maximum playlist size is your disk size
+
+ This is a good thing for getting a realistic view on your actual
+ "watch later" list. However I've run out of disk space many
+ times, and now I need to be more aware of how much is left.
+
+### The Ugly
+
+We can only avoid all the bad parts of YouTube with youtube-dl as long
+as YouTube keeps the videos public and programmatically accessible. If
+YouTube ever blocks that, we'd lose the ability to consume content this
+way, but also lose confidence in considering YouTube a healthy
+repository of videos on the internet.
+
+## Going beyond
+
+Since you're running everything locally, here are some possibilities to
+be explored:
+
+### A playlist that is too long for being downloaded all at once
+
+You can wrap the `download_playlist` function (let's call the wrapper
+`inc_download`) and instead of passing it a fixed number to the
+`--playlist-end` parameter, you can store the `$n` in a folder
+(something like `$HOME/.yt-db/$PLAYLIST_ID`) and increment it by `$step`
+every time you run `inc_download`.
+
+This way you can incrementally download videos from a huge playlist
+without filling your disk with gigabytes of content all at once.
+
+### Multiple computer scenario
+
+The `download_playlist` function could be aware of the specific machine
+that it is running on and apply specific policies depending on the
+machine: always download everything; only download videos that aren't
+present anywhere else; *etc.*
+
+## Conclusion
+
+youtube-dl is a great tool to keep at hand. It covers a really large
+range of video websites and works robustly.
+
+Feel free to copy and modify this code, and
+[send me](mailto:{{ site.author.email }}) suggestions of improvements or related
+content.
+
+## *Edit*
+
+2019-05-22: Fix spelling.
+
+[^internet-speed]: Considering how expensive it is and the many ways it could be
+ better, but also how much it has improved over the last years, I say it's
+ reasonable.
diff --git a/src/content/blog/2019/06/02/nixos-stateless-workstation.adoc b/src/content/blog/2019/06/02/nixos-stateless-workstation.adoc
new file mode 100644
index 0000000..c0cfe75
--- /dev/null
+++ b/src/content/blog/2019/06/02/nixos-stateless-workstation.adoc
@@ -0,0 +1,150 @@
+= Using NixOS as a stateless workstation
+
+date: 2019-06-02
+
+layout: post
+
+lang: en
+
+ref: using-nixos-as-an-stateless-workstation
+
+---
+
+Last week[^last-week] I changed back to an old[^old-computer] Samsung laptop, and installed
+[NixOS](https://nixos.org/) on it.
+
+After using NixOS on another laptop for around two years, I wanted to
+verify how reproducible my desktop environment was, and how far
+NixOS can actually go in recreating my whole OS from my configuration
+files and personal data. I gravitated towards NixOS after trying (and
+failing) to create an `install.sh` script that would imperatively
+install and configure my whole OS using apt-get. When I found a
+GNU/Linux distribution that was built on top of the idea of
+declaratively specifying the whole OS I was automatically convinced[^convinced-by-declarative-aspect].
+
+I was impressed. Even though I've been experiencing the benefits of Nix
+isolation daily, I always felt skeptical that something would be
+missing, because the devil is always on the details. But the result was
+much better than expected!
+
+There were only 2 missing configurations:
+
+1. tap-to-click on the touchpad wasn't enabled by default;
+2. the default theme from the gnome-terminal is "Black on white"
+ instead of "White on black".
+
+That's all.
+
+I haven't checked if I can configure those in NixOS GNOME module, but I
+guess both are scriptable and could be set in a fictional `setup.sh`
+run.
+
+This makes me really happy, actually. More happy than I anticipated.
+
+Having such a powerful declarative OS makes me feel like my data is the
+really important stuff (as it should be), and I can interact with it on
+any workstation. All I need is an internet connection and a few hours to
+download everything. It feels like my physical workstation and the
+installed OS are serving me and my data, instead of me feeling as
+hostage to the specific OS configuration at the moment. Having a few
+backup copies of everything important extends such peacefulness.
+
+After this positive experience with recreating my OS from simple Nix
+expressions, I started to wonder how far I could go with this, and
+started considering other areas of improvements:
+
+== First run on a fresh NixOS installation
+
+Right now the initial setup relies on non-declarative manual tasks, like
+decrypting some credentials, or manually downloading **this** git
+repository with specific configurations before **that** one.
+
+I wonder what some areas of improvements are on this topic, and if
+investing on it is worth it (both time-wise and happiness-wise).
+
+== Emacs
+
+Right now I'm using the [Spacemacs](http://spacemacs.org/), which is a
+community package curation and configuration on top of
+[Emacs](https://www.gnu.org/software/emacs/).
+
+Spacemacs does support the notion of
+[layers](http://spacemacs.org/doc/LAYERS.html), which you can
+declaratively specify and let Spacemacs do the rest.
+
+However this solution isn't nearly as robust as Nix: being purely
+functional, Nix does describe everything required to build a derivation,
+and knows how to do so. Spacemacs is closer to more traditional package
+managers: even though the layers list is declarative, the installation
+is still very much imperative. I've had trouble with Spacemacs not
+behaving the same on different computers, both with identical
+configurations, only brought to convergence back again after a
+`git clean -fdx` inside `~/.emacs.d/`.
+
+The ideal solution would be managing Emacs packages with Nix itself.
+After a quick search I did find that [there is support for Emacs
+packages in
+Nix](https://nixos.org/nixos/manual/index.html#module-services-emacs-adding-packages).
+So far I was only aware of [Guix support for Emacs packages](https://www.gnu.org/software/guix/manual/en/html_node/Application-Setup.html#Emacs-Packages).
+
+This isn't a trivial change because Spacemacs does include extra
+curation and configuration on top of Emacs packages. I'm not sure the
+best way to improve this right now.
+
+### myrepos
+
+I'm using [myrepos](https://myrepos.branchable.com/) to manage all my
+git repositories, and the general rule I apply is to add any repository
+specific configuration in myrepos' `checkout` phase:
+
+```shell
+# sample ~/.mrconfig file snippet
+[dev/guix/guix]
+checkout =
+ git clone https://git.savannah.gnu.org/git/guix.git guix
+ cd guix/
+ git config sendemail.to guix-patches@gnu.org
+```
+
+This way when I clone this repo again the email sending is already
+pre-configured.
+
+This works well enough, but the solution is too imperative, and my
+`checkout` phases tend to become brittle over time if not enough care is
+taken.
+
+### GNU Stow
+
+For my home profile and personal configuration I already have a few
+dozens of symlinks that I manage manually. This has worked so far, but
+the solution is sometimes fragile and [not declarative at all][symlinks]. I
+wonder if something like [GNU Stow][stow] can help me simplify this.
+
+[symlinks]: https://euandre.org/git/dotfiles/tree/bash/symlinks.sh?id=316939aa215181b1d22b69e94241eef757add98d
+[stow]: https://www.gnu.org/software/stow/
+
+## Conclusion
+
+I'm really satisfied with NixOS, and I intend to keep using it. If what
+I've said interests you, maybe try tinkering with the [Nix package
+manager](https://nixos.org/nix/) (not the whole NixOS) on your current
+distribution (it can live alongside any other package manager).
+
+If you have experience with declarative Emacs package management, GNU
+Stow or any similar tool, *etc.*,
+[I'd like some tips](mailto:{{ site.author.email }}). If you don't have any
+experience at all, I'd still love to hear from you.
+
+[^last-week]: "Last week" as of the start of this writing, so around the end of
+ May 2019.
+
+[^old-computer]: I was using a 32GB RAM, i7 and 250GB SSD Samsung laptop. The
+ switch was back to a 8GB RAM, i5 and 500GB HDD Dell laptop. The biggest
+ difference I noticed was on faster memory, both RAM availability and the
+ disk speed, but I had 250GB less local storage space.
+
+[^convinced-by-declarative-aspect]: The declarative configuration aspect is
+ something that I now completely take for granted, and wouldn't consider
+ using something which isn't declarative. A good metric to show this is me
+ realising that I can't pinpoint the moment when I decided to switch to
+ NixOS. It's like I had a distant past when this wasn't true.
diff --git a/src/content/blog/2020/08/10/guix-srht.adoc b/src/content/blog/2020/08/10/guix-srht.adoc
new file mode 100644
index 0000000..4d7e8d5
--- /dev/null
+++ b/src/content/blog/2020/08/10/guix-srht.adoc
@@ -0,0 +1,128 @@
+---
+title: Guix inside sourcehut builds.sr.ht CI
+date: 2020-08-10
+updated_at: 2020-08-19
+layout: post
+lang: en
+ref: guix-inside-sourcehut-builds-sr-ht-ci
+---
+After the release of the [NixOS images in builds.sr.ht][0] and much
+usage of it, I also started looking at [Guix][1] and
+wondered if I could get it on the awesome builds.sr.ht service.
+
+[0]: https://man.sr.ht/builds.sr.ht/compatibility.md#nixos
+[1]: https://guix.gnu.org/
+
+The Guix manual section on the [binary installation][2] is very thorough, and
+even a [shell installer script][3] is provided, but it is built towards someone
+installing Guix on their personal computer, and relies heavily on interactive
+input.
+
+[2]: https://guix.gnu.org/manual/en/guix.html#Binary-Installation
+[3]: https://git.savannah.gnu.org/cgit/guix.git/plain/etc/guix-install.sh
+
+I developed the following set of scripts that I have been using for some time to
+run Guix tasks inside builds.sr.ht jobs. First, `install-guix.sh`:
+
+```shell
+#!/usr/bin/env bash
+set -x
+set -Eeuo pipefail
+
+VERSION='1.0.1'
+SYSTEM='x86_64-linux'
+BINARY="guix-binary-${VERSION}.${SYSTEM}.tar.xz"
+
+cd /tmp
+wget "https://ftp.gnu.org/gnu/guix/${BINARY}"
+tar -xf "${BINARY}"
+
+sudo mv var/guix /var/
+sudo mv gnu /
+sudo mkdir -p ~root/.config/guix
+sudo ln -fs /var/guix/profiles/per-user/root/current-guix ~root/.config/guix/current
+
+GUIX_PROFILE="$(echo ~root)/.config/guix/current"
+source "${GUIX_PROFILE}/etc/profile"
+
+groupadd --system guixbuild
+for i in $(seq -w 1 10);
+do
+ useradd -g guixbuild \
+ -G guixbuild \
+ -d /var/empty \
+ -s "$(command -v nologin)" \
+ -c "Guix build user ${i}" --system \
+ "guixbuilder${i}";
+done
+
+mkdir -p /usr/local/bin
+cd /usr/local/bin
+ln -s /var/guix/profiles/per-user/root/current-guix/bin/guix .
+ln -s /var/guix/profiles/per-user/root/current-guix/bin/guix-daemon .
+
+guix archive --authorize < ~root/.config/guix/current/share/guix/ci.guix.gnu.org.pub
+```
+
+Almost all of it is taken directly from the [binary installation][2] section
+from the manual, with the interactive bits stripped out: after downloading and
+extracting the Guix tarball, we create some symlinks, add guixbuild users and
+authorize the `ci.guix.gnu.org.pub` signing key.
+
+After installing Guix, we perform a `guix pull` to update Guix inside `start-guix.sh`:
+```shell
+#!/usr/bin/env bash
+set -x
+set -Eeuo pipefail
+
+sudo guix-daemon --build-users-group=guixbuild &
+guix pull
+guix package -u
+guix --version
+```
+
+Then we can put it all together in a sample `.build.yml` configuration file I'm
+using myself:
+
+```yaml
+image: debian/stable
+packages:
+ - wget
+sources:
+ - https://git.sr.ht/~euandreh/songbooks
+tasks:
+ - install-guix: |
+ cd ./songbooks/
+ ./scripts/install-guix.sh
+ ./scripts/start-guix.sh
+ echo 'sudo guix-daemon --build-users-group=guixbuild &' >> ~/.buildenv
+ echo 'export PATH="${HOME}/.config/guix/current/bin${PATH:+:}$PATH"' >> ~/.buildenv
+ - tests: |
+ cd ./songbooks/
+ guix environment -m build-aux/guix.scm -- make check
+ - docs: |
+ cd ./songbooks/
+ guix environment -m build-aux/guix.scm -- make publish-dist
+```
+
+We have to add the `guix-daemon` to `~/.buildenv` so it can be started on every
+following task run. Also, since we used `wget` inside `install-guix.sh`, we had
+to add it to the image's package list.
+
+After the `install-guix` task, you can use Guix to build and test your project,
+or run any `guix environment --ad-hoc my-package -- my script` :)
+
+## Improvements
+
+When I originally created this code I had a reason why to have both a `sudo`
+call for `sudo ./scripts/install-guix.sh` and `sudo` usages inside
+`install-guix.sh` itself. I couldn't figure out why (it feels like my past self
+was a bit smarter 😬), but it feels ugly now. If it is truly required I could
+add an explanation for it, or remove this entirely in favor of a more elegant solution.
+
+I could also contribute the Guix image upstream to builds.sr.ht, but there
+wasn't any build or smoke tests in the original [repository][4], so I wasn't
+inclined to make something that just "works on my machine" or add a maintenance
+burden to the author. I didn't look at it again recently, though.
+
+[4]: https://git.sr.ht/~sircmpwn/builds.sr.ht
diff --git a/src/content/blog/2020/08/31/database-i-with-i-had.adoc b/src/content/blog/2020/08/31/database-i-with-i-had.adoc
new file mode 100644
index 0000000..7d127c1
--- /dev/null
+++ b/src/content/blog/2020/08/31/database-i-with-i-had.adoc
@@ -0,0 +1,295 @@
+---
+title: The database I wish I had
+date: 2020-08-31
+updated_at: 2020-09-03
+layout: post
+lang: en
+ref: the-database-i-wish-i-had
+eu_categories: mediator
+---
+
+I watched the talk
+"[Platform as a Reflection of Values: Joyent, Node.js and beyond][platform-values]"
+by Bryan Cantrill, and I think he was able to put into words something I already
+felt for some time: if there's no piece of software out there that reflects your
+values, it's time for you to build that software[^talk-time].
+
+[platform-values]: https://vimeo.com/230142234
+[^talk-time]: At the very end, at time 29:49. When talking about the draft of
+ this article with a friend, he noted that Bryan O'Sullivan (a different
+ Bryan) says a similar thing on his talk
+ "[Running a startup on Haskell](https://www.youtube.com/watch?v=ZR3Jirqk6W8)",
+ at time 4:15.
+
+I kind of agree with what he said, because this is already happening to me. I
+long for a database with a certain set of values, and for a few years I was just
+waiting for someone to finally write it. After watching his talk, Bryan is
+saying to me: "time to stop waiting, and start writing it yourself".
+
+So let me try to give an overview of such a database, and go over its values.
+
+## Overview
+
+I want a database that allows me to create decentralized client-side
+applications that can sync data.
+
+The best one-line description I can give right now is:
+
+> It's sort of like PouchDB, Git, Datomic, SQLite and Mentat.
+
+A more descriptive version could be:
+
+> An embedded, immutable, syncable relational database.
+
+Let's go over what I mean by each of those aspects one by one.
+
+### Embedded
+
+I think the server-side database landscape is diverse and mature enough for
+my needs (even though I end up choosing SQLite most of the time), and what I'm
+after is a database to be embedded on client-side applications itself, be it
+desktop, browser, mobile, *etc.*
+
+The purpose of such a database is not to keep some local cache of data in case of
+lost connectivity: we have good solutions for that already. It should serve as
+the source of truth, and allow the application to work on top of it.
+
+[**SQLite**][sqlite] is a great example of that: it is a very powerful
+relational database that runs [almost anywhere][sqlite-whentouse]. What I miss
+from it is the ability to run it in the browser:
+even though you could compile it to WebAssembly, ~~it assumes a POSIX filesystem
+that would have to be emulated~~[^posix-sqlite].
+
+[sqlite]: https://sqlite.org/index.html
+[sqlite-whentouse]: https://sqlite.org/whentouse.html
+[^posix-sqlite]: It was [pointed out to me](https://news.ycombinator.com/item?id=24338881)
+ that SQLite doesn't assume the existence of a POSIX filesystem, as I wrongly
+ stated. Thanks for the correction.
+
+ This makes me consider it as a storage backend all by itself. I
+ initially considered having an SQLite storage backend as one implementation
+ of the POSIX filesystem storage API that I mentioned. My goal was to rely on
+ it so I could validate the correctness of the actual implementation, given
+ SQLite's robustness.
+
+   However it may even be better to just use SQLite, and get an ACID backend
+ without recreating a big part of SQLite from scratch. In fact, both Datomic
+   and PouchDB didn't create a storage backend for themselves, they just
+ plugged on what already existed and already worked. I'm beginning to think
+ that it would be wiser to just do the same, and drop entirely the from
+ scratch implementation that I mentioned.
+
+ That's not to say that adding an IndexedDB compatibility layer to SQLite
+ would be enough to make it fit the other requirements I mention on this
+   page. SQLite still is an implementation of an update-in-place, SQL,
+ table-oriented database. It is probably true that cherry-picking the
+ relevant parts of SQLite (like storage access, consistency, crash recovery,
+ parser generator, *etc.*) and leaving out the unwanted parts (SQL, tables,
+ threading, *etc.*) would be better than including the full SQLite stack, but
+ that's simply an optimization. Both could even coexist, if desired.
+
+ SQLite would have to be treated similarly to how Datomic treats SQL
+   databases: instead of having a table for each entity, spread attributes
+ over the tables, *etc.*, it treats SQL databases as a key-value storage so it
+ doesn't have to re-implement interacting with the disk that other databases
+ do well.
+
+ The tables would contain blocks of binary data, so there isn't a difference
+ on how the SQLite storage backend behaves and how the IndexedDB storage
+ backend behaves, much like how Datomic works the same regardless of the
+ storage backend, same for PouchDB.
+
+ I welcome corrections on what I said above, too.
+
+[**PouchDB**][pouchdb] is another great example: it's a full reimplementation of
+[CouchDB][couchdb] that targets JavaScript environments, mainly the browser and
+Node.js. However I want a tool that can be deployed anywhere, and not limit its
+applications to places that already have a JavaScript runtime environment, or
+force the developer to bundle a JavaScript runtime environment with their
+application. This is true for GTK+ applications, command line programs, Android
+apps, *etc.*
+
+[pouchdb]: https://pouchdb.com/
+[couchdb]: https://couchdb.apache.org/
+
+[**Mentat**][mentat] was an interesting project, but its reliance on SQLite
+makes it inherit most of the downsides (and benefits too) of SQLite itself.
+
+[mentat]: https://github.com/mozilla/mentat
+
+Having such a requirement imposes a different approach to storage: we have to
+decouple the knowledge about the intricacies of storage from the usage of
+storage itself, so that a module (say query processing) can access storage
+through an API without needing to know about its implementation. This allows
+the database to target a POSIX filesystems storage API and an IndexedDB storage
+API, and make the rest of the code agnostic about storage. PouchDB has such
+mechanism (called [adapters][pouchdb-adapters]) and Datomic has them too (called
+[storage services][datomic-storage-services]).
+
+[pouchdb-adapters]: https://pouchdb.com/adapters.html
+[datomic-storage-services]: https://docs.datomic.com/on-prem/storage.html
+
+This would allow the database to adapt to where it is embedded: when targeting
+the browser the IndexedDB storage API would provide the persistence layer
+that the database requires, and similarly the POSIX filesystem storage API would
+provide the persistence layer when targeting POSIX systems (like desktops,
+mobile, *etc.*).
+
+But there's also an extra restriction that comes from being embedded: it
+needs to provide an embeddable artifact, most likely a binary library object
+that exposes a C compatible FFI, similar to
+[how SQLite does][sqlite-amalgamation]. Bundling a full runtime environment is
+possible, but doesn't make it a compelling solution for embedding. This rules
+out most languages, and leaves us with C, Rust, Zig, and similar options that
+can target POSIX systems and WebAssembly.
+
+[sqlite-amalgamation]: https://www.sqlite.org/amalgamation.html
+
+### Immutable
+
+Being immutable means that only new information is added, no in-place update
+ever happens, and nothing is ever deleted.
+
+Having an immutable database presents us with similar trade-offs found in
+persistent data structures, like lack of coordination when doing reads, caches
+being always coherent, and more usage of space.
+
+[**Datomic**][datomic] is the go to database example of this: it will only add
+information (datoms) and allows you to query them in a multitude of ways. Stuart
+Halloway calls it "accumulate-only" over "append-only"[^accumulate-only]:
+
+> It's accumulate-only, it is not append-only. So append-only, most people when
+> they say that they're implying something physical about what happens.
+
+[datomic]: https://www.datomic.com/
+[^accumulate-only]: Video "[Day of Datomic Part 2](https://vimeo.com/116315075)"
+ on Datomic's information model, at time 12:28.
+
+Also a database can be append-only and overwrite existing information with new
+information, by doing clean-ups of "stale" data. I prefer to adopt the
+"accumulate-only" naming and approach.
+
+[**Git**][git] is another example of this: new commits are always added on top
+of the previous data, and it grows by adding commits instead of replacing
+existing ones.
+
+[git]: https://git-scm.com/
+
+Git repositories can only grow in size, and that is not only an acceptable
+condition, but also one of the reasons to use it.
+
+All this means that no in-place updates happen on data, and the database will
+be much more concerned about how compact and efficiently it stores data than how
+fast it does writes to disk. Being embedded, the storage limitation is either a)
+how much storage the device has or b) how much storage was designed for the
+application to consume. So even though the database could theoretically operate
+with hundreds of TBs, a browser page or mobile application wouldn't have access
+to this amount of storage. SQLite even [says][sqlite-limits] that it does
+support approximately 280 TBs of data, but those limits are untested.
+
+The upside of keeping everything is that you can have historical views of your
+data, which is very powerful. This also means that applications should turn this
+off when not relevant[^no-history].
+
+[sqlite-limits]: https://sqlite.org/limits.html
+[^no-history]: Similar to
+ [Datomic's `:db/noHistory`](https://docs.datomic.com/cloud/best.html#nohistory-for-high-churn).
+
+### Syncable
+
+This is a frequent topic when talking about offline-first solutions. When
+building applications that:
+
+- can fully work offline,
+- stores data,
+- propagates that data to other application instances,
+
+then you'll need a conflict resolution strategy to handle all the situations
+where different application instances disagree. Those application instances
+could be a desktop and a browser version of the same application, or the same
+mobile app in different devices.
+
+A three-way merge seems to be the best approach, on top of which you could add
+application specific conflict resolution functions, like:
+
+- pick the change with higher timestamp;
+- if one change is a delete, pick it;
+- present the diff on the screen and allow the user to merge them.
+
+Some databases try to make this "easy", by choosing a strategy for you, but I've
+found that different applications require different conflict resolution
+strategies. Instead, the database should leave this up to the user to decide,
+and provide tools for them to do it.
+
+[**Three-way merges in version control**][3-way-merge] are the best example,
+performing automatic merges when possible and asking the user to resolve
+conflicts when they appear.
+
+The unit of conflict for a version control system is a line of text. The
+database equivalent would probably be a single attribute, not a full entity or a
+full row.
+
+Making all the conflict resolution logic be local should allow the database to
+have encrypted remotes similar to how [git-remote-gcrypt][git-remote-gcrypt]
+adds this functionality to Git. This would enable users to sync the application
+data across devices using an untrusted intermediary.
+
+[3-way-merge]: https://en.wikipedia.org/wiki/Merge_(version_control)
+[git-remote-gcrypt]: https://spwhitton.name/tech/code/git-remote-gcrypt/
+
+### Relational
+
+I want the power of relational queries on the client applications.
+
+Most of the arguments against traditional table-oriented relational databases
+are related to write performance, but those don't apply here. The bottlenecks
+for client applications usually aren't write throughput. Nobody is interested in
+differentiating between 1 MB/s and 10 MB/s when you're limited to 500 MB total.
+
+The relational model of the database could either be based on SQL and tables
+like in SQLite, or maybe [datalog][datalog] and [datoms][datoms] like in
+Datomic.
+
+[datalog]: https://docs.datomic.com/on-prem/query.html
+[datoms]: https://docs.datomic.com/cloud/whatis/data-model.html#datoms
+
+## From aspects to values
+
+Now let's try to translate the aspects above into values, as suggested by Bryan
+Cantrill.
+
+### Portability
+
+Being able to target so many different platforms is a bold goal, and the
+embedded nature of the database demands portability to be a core value.
+
+### Integrity
+
+When the local database becomes the source of truth of the application, it must
+provide consistency guarantees that enable applications to rely on it.
+
+### Expressiveness
+
+The database should empower applications to slice and dice the data in any way
+they want to.
+
+## Next steps
+
+Since I can't find any database that fits these requirements, I've finally come
+to terms with doing it myself.
+
+It's probably going to take me a few years to do it, and making it portable
+between POSIX and IndexedDB will probably be the biggest challenge. I got myself
+a few books on databases to start.
+
+I wonder if I'll ever be able to get this done.
+
+## External links
+
+See discussions on [Reddit][reddit], [lobsters][lobsters], [HN][hn] and
+[a lengthy email exchange][lengthy-email].
+
+[reddit]: https://www.reddit.com/r/programming/comments/ijwz5b/the_database_i_wish_i_had/
+[lobsters]: https://lobste.rs/s/m9vkg4/database_i_wish_i_had
+[hn]: https://news.ycombinator.com/item?id=24337244
+[lengthy-email]: https://lists.sr.ht/~euandreh/public-inbox/%3C010101744a592b75-1dce9281-f0b8-4226-9d50-fd2c7901fa72-000000%40us-west-2.amazonses.com%3E
diff --git a/src/content/blog/2020/10/05/cargo2nix-demo.tar.gz b/src/content/blog/2020/10/05/cargo2nix-demo.tar.gz
new file mode 100644
index 0000000..281a91c
--- /dev/null
+++ b/src/content/blog/2020/10/05/cargo2nix-demo.tar.gz
Binary files differ
diff --git a/src/content/blog/2020/10/05/cargo2nix.adoc b/src/content/blog/2020/10/05/cargo2nix.adoc
new file mode 100644
index 0000000..1db3d0c
--- /dev/null
+++ b/src/content/blog/2020/10/05/cargo2nix.adoc
@@ -0,0 +1,80 @@
+= cargo2nix: Dramatically simpler Rust in Nix
+
+date: 2020-10-05 2
+
+layout: post
+
+lang: en
+
+ref: cargo2nix-dramatically-simpler-rust-in-nix
+
+---
+
+In the same vein of my earlier post on
+[swift2nix]({% link _articles/2020-10-05-swift2nix-run-swift-inside-nix-builds.md %}), I
+was able to quickly prototype a Rust and Cargo variation of it:
+[cargo2nix].
+
+
+The initial prototype is even smaller than swift2nix: it has only
+37 lines of code.
+
+[cargo2nix]: https://euandre.org/static/attachments/cargo2nix.tar.gz
+
+Here's how to use it (snippet taken from the repo's README):
+
+```nix
+let
+ niv-sources = import ./nix/sources.nix;
+ mozilla-overlay = import niv-sources.nixpkgs-mozilla;
+ pkgs = import niv-sources.nixpkgs { overlays = [ mozilla-overlay ]; };
+ src = pkgs.nix-gitignore.gitignoreSource [ ] ./.;
+ cargo2nix = pkgs.callPackage niv-sources.cargo2nix {
+ lockfile = ./Cargo.lock;
+ };
+in pkgs.stdenv.mkDerivation {
+ inherit src;
+ name = "cargo-test";
+ buildInputs = [ pkgs.latest.rustChannels.nightly.rust ];
+ phases = [ "unpackPhase" "buildPhase" ];
+ buildPhase = ''
+ # Setup dependencies path to satisfy Cargo
+ mkdir .cargo/
+ ln -s ${cargo2nix.env.cargo-config} .cargo/config
+ ln -s ${cargo2nix.env.vendor} vendor
+
+ # Run the tests
+ cargo test
+ touch $out
+ '';
+}
+```
+
+That `cargo test` part on line 20 is what I have been fighting with every
+"\*2nix" available for Rust out there. I don't want to bash any of them. All I
+want is to have full control of what Cargo commands to run, and the "*2nix" tool
+should only setup the environment for me. Let me drive Cargo myself, no need to
+parameterize how the tool runs it for me, or even replicate its internal
+behaviour by calling the Rust compiler directly.
+
+Sure it doesn't support private registries or Git dependencies, but how much
+bigger does it have to be to support them? Also, it doesn't support those **yet**,
+there's no reason it can't be extended. I just haven't needed it yet, so I
+haven't added it. Patches welcome.
+
+The layout of the `vendor/` directory is more explicit and public than what
+swift2nix does: it is whatever the command `cargo vendor` returns. However I
+haven't checked if the shape of the `.cargo-checksum.json` is specified, or
+internal to Cargo.
+
+Try out the demo (also taken from the repo's README):
+
+```shell
+pushd "$(mktemp -d)"
+wget -O- https://euandre.org/static/attachments/cargo2nix-demo.tar.gz |
+ tar -xv
+cd cargo2nix-demo/
+nix-build
+```
+
+Report back if you wish.
diff --git a/src/content/blog/2020/10/05/cargo2nix.tar.gz b/src/content/blog/2020/10/05/cargo2nix.tar.gz
new file mode 100644
index 0000000..8a9985a
--- /dev/null
+++ b/src/content/blog/2020/10/05/cargo2nix.tar.gz
Binary files differ
diff --git a/src/content/blog/2020/10/05/swift2nix-demo.tar.gz b/src/content/blog/2020/10/05/swift2nix-demo.tar.gz
new file mode 100644
index 0000000..f688572
--- /dev/null
+++ b/src/content/blog/2020/10/05/swift2nix-demo.tar.gz
Binary files differ
diff --git a/src/content/blog/2020/10/05/swift2nix.adoc b/src/content/blog/2020/10/05/swift2nix.adoc
new file mode 100644
index 0000000..84f4d34
--- /dev/null
+++ b/src/content/blog/2020/10/05/swift2nix.adoc
@@ -0,0 +1,199 @@
+= swift2nix: Run Swift inside Nix builds
+
+date: 2020-10-05 1
+
+layout: post
+
+lang: en
+
+ref: swift2nix-run-swift-inside-nix-builds
+
+---
+
+While working on a Swift project, I didn't find any tool that would allow Swift
+to run inside [Nix][nix] builds. Even though you *can* run Swift, the real
+problem arises when using the package manager. It has many of the same problems
+that other package managers have when trying to integrate with Nix, more on this
+below.
+
+I wrote a simple little tool called [swift2nix] that allows you to trick
+Swift's package manager into assuming everything is set up. Here's the example
+from swift2nix's README file:
+
+```
+let
+ niv-sources = import ./nix/sources.nix;
+ pkgs = import niv-sources.nixpkgs { };
+ src = pkgs.nix-gitignore.gitignoreSource [ ] ./.;
+ swift2nix = pkgs.callPackage niv-sources.swift2nix {
+ package-resolved = ./Package.resolved;
+ };
+in pkgs.stdenv.mkDerivation {
+ inherit src;
+ name = "swift-test";
+ buildInputs = with pkgs; [ swift ];
+ phases = [ "unpackPhase" "buildPhase" ];
+ buildPhase = ''
+ # Setup dependencies path to satisfy SwiftPM
+ mkdir .build
+ ln -s ${swift2nix.env.dependencies-state-json} .build/dependencies-state.json
+ ln -s ${swift2nix.env.checkouts} .build/checkouts
+
+ # Run the tests
+ swift test
+ touch $out
+ '';
+}
+```
+
+The key parts are lines 15~17: we just fake enough files inside `.build/` that
+Swift believes it has already downloaded and checked-out all dependencies, and
+just moves on to building them.
+
+I've worked on it just enough to make it usable for myself, so beware of
+unimplemented cases.
+
+[nix]: https://nixos.org/
+[swift2nix]: https://euandre.org/static/attachments/swift2nix.tar.gz
+
+## Design
+
+What swift2nix does is just provide you with the bare minimum that Swift
+requires, and readily get out of the way:
+
+1. I explicitly did not want to generate a `Package.nix` file, since
+ `Package.resolved` already exists and contains the required information;
+2. I didn't want to have an "easy" interface right out of the gate, after
+ fighting with "*2nix" tools that focus too much on that.
+
+The final actual code was so small (46 lines) that it made me
+think about package managers, "*2nix" tools and some problems with many of them.
+
+## Problems with package managers
+
+I'm going to talk about solely language package managers. Think npm and cargo,
+not apt-get.
+
+Package managers want to do too much, or assume too much, or just want to take
+control of the entire build of the dependencies.
+
+This is a recurrent problem in package managers, but I don't see it as an
+intrinsic one. There's nothing about a "package manager" that prevents it from
+*declaring* what it expects to encounter and in which format. The *declaring*
+part is important: it should be data, not code, otherwise you're back in the
+same problem, just like lockfiles are just data. Those work in any language, and
+tools can cooperate happily.
+
+There's no need for this declarative expectation to be standardized, or be made
+compatible across languages. That would lead to a poor format that no package
+manager really likes. Instead, if every package manager could say out loud what
+it wants to see exactly, then more tools like swift2nix could exist, and they
+would be more reliable.
+
+This could even work fully offline, and be simply a mapping from the lockfile
+(the `Package.resolved` in Swift's case) to the filesystem representation. For
+Swift, the `.build/dependencies-state.json` comes very close, but it is internal
+to the package manager.
+
+Even though this pain only exists when trying to use Swift inside Nix, it sheds
+light into this common implicit coupling that package managers have. They
+usually have fuzzy boundaries and tight coupling between:
+
+1. resolving the dependency tree and using some heuristic to pick a package
+ version;
+2. generating a lockfile with the exact pinned versions;
+3. downloading the dependencies present on the lockfile into some local cache;
+4. arranging the dependencies from the cache in a meaningful way for itself inside
+ the project;
+5. work using the dependencies while *assuming* that step 4 was done.
+
+When you run `npm install` in a repository with no lockfile, it does 1~4. If you
+do the same with `cargo build`, it does 1~5. That's too much: many of those
+assumptions are implicit and internal to the package manager, and if you ever
+need to rearrange them, you're on your own. Even though you can perform some of
+those steps, you can't compose or rearrange them.
+
+Instead a much saner approach could be:
+
+1. this stays the same;
+2. this also stays the same;
+3. be able to generate some JSON/TOML/edn which represents the local expected
+ filesystem layout with dependencies (i.e. exposing what the package manager
+ expects to find), let's call it `local-registry.json`;
+4. if a `local-registry.json` was provided, do a build using that. Otherwise
+ generate its own, by downloading the dependencies, arranging them, *etc.*
+
+The point is just making what the package manager requires visible to the
+outside world via some declarative data. If this data wasn't provided, it can
+move on to doing its own automatic things.
+
+By making the expectation explicit and public, one can plug tools *à la carte*
+if desired, but doesn't prevent the default code path of doing things the exact
+same way they are now.
+
+## Problems with "*2nix" tools
+
+I have to admit: I'm unhappy with most of them.
+
+They conflate "using Nix" with "replicating every command of the package manager
+inside Nix".
+
+The avoidance of an "easy" interface that I mentioned above comes from me
+fighting with some of the "\*2nix" tools much like I have to fight with package
+managers: I don't want to offload all build responsibilities to the "*2nix"
+tool, I just want to let it download some of the dependencies and get out of the
+way. I want to stick with `npm test` or `cargo build`, and Nix should only
+provide the environment.
+
+This is something that [node2nix] does right. It allows you to build
+the Node.js environment to satisfy NPM, and you can keep using NPM for
+everything else:
+
+```shell
+ln -s ${node2nix-package.shell.nodeDependencies}/lib/node_modules ./node_modules
+npm test
+```
+
+It's natural to want to put as many things into Nix as possible to benefit from
+Nix's advantages. Isn't that how NixOS itself was born?
+
+But a "*2nix" tool should leverage Nix, not be coupled with it. The above
+example lets you run any arbitrary NPM command while profiting from isolation
+and reproducibility that Nix provides. It is even less brittle: any changes to
+how NPM runs some things will be future-compatible, since node2nix isn't trying
+to replicate what NPM does, or fiddling with NPM's internals.
+
+**A "*2nix" tool should build the environment, preferably from the lockfile
+directly and offload everything else to the package manager**. The rest is just
+nice-to-have.
+
+swift2nix itself could provide an "easy" interface, something that allows you to
+write:
+
+```shell
+nix-build -A swift2nix.release
+nix-build -A swift2nix.test
+```
+
+The implementation of those would be obvious: create a new
+`pkgs.stdenv.mkDerivation` and call `swift build -c release` and `swift test`
+while using `swift2nix.env` under the hood.
+
+[node2nix]: https://github.com/svanderburg/node2nix
+
+## Conclusion
+
+Package managers should provide exact dependencies via a data representation,
+i.e. lockfiles, and expose via another data representation how they expect those
+dependencies to appear on the filesystem, i.e. `local-registry.json`. This
+allows package managers to provide an API so that external tools can create
+mirrors, offline builds, other registries, isolated builds, *etc.*
+
+"\*2nix" tools should build simple functions that leverage that
+`local-registry.json`[^local-registry] data and offload all the rest back to the
+package manager itself. This allows the "*2nix" to not keep chasing the package
+manager evolution, always trying to duplicate its behaviour.
+
+[^local-registry]: This `local-registry.json` file doesn't have to be checked-in
+ the repository at all. It could be always generated on the fly, much like
+ how Swift's `dependencies-state.json` is.
diff --git a/src/content/blog/2020/10/05/swift2nix.tar.gz b/src/content/blog/2020/10/05/swift2nix.tar.gz
new file mode 100644
index 0000000..bfab3f1
--- /dev/null
+++ b/src/content/blog/2020/10/05/swift2nix.tar.gz
Binary files differ
diff --git a/src/content/blog/2020/10/19/feature-flags.adoc b/src/content/blog/2020/10/19/feature-flags.adoc
new file mode 100644
index 0000000..c62c2d1
--- /dev/null
+++ b/src/content/blog/2020/10/19/feature-flags.adoc
@@ -0,0 +1,305 @@
+---
+title: "Feature flags: differences between backend, frontend and mobile"
+date: 2020-10-19
+updated_at: 2020-11-03
+layout: post
+lang: en
+ref: feature-flags-differences-between-backend-frontend-and-mobile
+eu_categories: presentation
+---
+
+*This article is derived from a [presentation][presentation] on the same
+subject.*
+
+When discussing feature flags, I find that their
+costs and benefits are often well exposed and addressed. Online articles like
+"[Feature Toggle (aka Feature Flags)][feature-flags-article]" do a great job of
+explaining them in detail, giving great general guidance of how to apply
+techniques to adopt it.
+
+However the weight of those costs and benefits apply differently on backend,
+frontend or mobile, and those differences aren't covered. In fact, many of them
+stop making sense, or the decision of adopting a feature flag or not may change
+depending on the environment.
+
+In this article I try to make the distinction between environments and how
+feature flags apply to them, with some final best practices I've acquired when
+using them in production.
+
+[presentation]: {% link _slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides %}
+[feature-flags-article]: https://martinfowler.com/articles/feature-toggles.html
+
+## Why feature flags
+
+Feature flags in general tend to be cited on the context of
+[continuous deployment][cd]:
+
+> A: With continuous deployment, you deploy to production automatically
+
+> B: But how do I handle deployment failures, partial features, *etc.*?
+
+> A: With techniques like canary, monitoring and alarms, feature flags, *etc.*
+
+Though adopting continuous deployment doesn't force you to use feature
+flags, it creates a demand for it. The inverse is also true: using feature flags
+on the code points you more obviously to continuous deployment. Take the
+following code sample for example, that we will reference later on the article:
+
+```javascript
+function processTransaction() {
+ validate();
+ persist();
+ // TODO: add call to notifyListeners()
+}
+```
+
+While being developed, being tested for suitability or something similar,
+`notifyListeners()` may not be included in the code at once. So instead of
+keeping it on a separate, long-lived branch, a feature flag can decide when the
+new, partially implemented function will be called:
+
+```javascript
+function processTransaction() {
+ validate();
+ persist();
+ if (featureIsEnabled("activate-notify-listeners")) {
+ notifyListeners();
+ }
+}
+```
+
+This allows your code to include `notifyListeners()`, and decide when to call it
+at runtime. For the price of extra things around the code, you get more
+dynamicity.
+
+So the fundamental question to ask yourself when considering adding a feature
+flag should be:
+
+> Am I willing to pay with code complexity to get dynamicity?
+
+It is true that you can make the management of feature flags as
+straightforward as possible, but having no feature flags is simpler than having
+any. What you get in return is the ability to parameterize the behaviour of the
+application at runtime, without doing any code changes.
+
+Sometimes this added complexity may tilt the balance towards not using a feature
+flag, and sometimes the flexibility of changing behaviour at runtime is
+absolutely worth the added complexity. This can vary a lot by code base and
+feature, but fundamentally by environment: it's much cheaper to deploy a new version of a
+service than to release a new version of an app.
+
+So the question of which environment is being targeted is key when reasoning
+about costs and benefits of feature flags.
+
+[cd]: https://www.atlassian.com/continuous-delivery/principles/continuous-integration-vs-delivery-vs-deployment
+
+## Control over the environment
+
+The key differentiator that makes the trade-offs apply differently is how much
+control you have over the environment.
+
+When running a **backend** service, you usually are paying for the servers
+themselves, and can tweak them as you wish. This means you have full control to
+do code changes as you wish. Not only that, you decide when to do it, and for
+how long the transition will last.
+
+On the **frontend** you have less control: even though you can choose to make a
+new version available any time you wish, you can't force[^force] clients to
+immediately switch to the new version. That means that a) clients could skip
+upgrades at any time and b) you always have to keep backward and forward
+compatibility in mind.
+
+Even though I'm mentioning frontend directly, it applies to other environments
+with similar characteristics: desktop applications, command-line programs,
+*etc*.
+
+On **mobile** you have even less control: app stores need to allow your app to
+be updated, which could bite you when least desired. Theoretically you could
+make your APK available on third party stores like [F-Droid][f-droid], or even
+make the APK itself available for direct download, which would give you the same
+characteristics of a frontend application, but that happens less often.
+
+On iOS you can't even do that. You have to get Apple's blessing on every single
+update. Even though we already know that is a [bad idea][apple] for over a
+decade now, there isn't a way around it. This is where you have the least
+control.
+
+In practice, the amount of control you have will change how much you value
+dynamicity: the less control you have, the more valuable it is. In other words,
+having a dynamic flag on the backend may or may not be worth it since you could
+always update the code immediately after, but on iOS it is basically always
+worth it.
+
+[f-droid]: https://f-droid.org/
+[^force]: Technically you could force a reload with JavaScript using
+ `window.location.reload()`, but that not only is invasive and impolite, but
+ also gives you the illusion that you have control over the client when you
+ actually don't: clients with disabled JavaScript would be immune to such
+ tactics.
+
+[apple]: http://www.paulgraham.com/apple.html
+
+## Rollout
+
+A rollout is used to *roll out* a new version of software.
+
+They are usually short-lived, being relevant as long as the new code is being
+deployed. The most common rule is percentages.
+
+On the **backend**, it is common to find it on the deployment infrastructure
+itself, like canary servers, blue/green deployments,
+[a kubernetes deployment rollout][k8s], *etc*. You could do those manually, by
+having a dynamic control on the code itself, but rollbacks are cheap enough that
+people usually do a normal deployment and just give some extra attention to the
+metrics dashboard.
+
+Any time you see a blue/green deployment, there is a rollout happening: most
+likely a load balancer is starting to direct traffic to the new server, until
+reaching 100% of the traffic. Effectively, that is a rollout.
+
+On the **frontend**, you can selectively pick which users will be able to
+download the new version of a page. You could use geographical region, IP,
+cookie or something similar to make this decision.
+
+CDN propagation delays and people not refreshing their web
+pages are also rollouts by themselves, since old and new versions of the
+software will coexist.
+
+On **mobile**, the Play Store allows you to perform
+fine-grained [staged rollouts][staged-rollouts], and the App Store allows you to
+perform limited [phased releases][phased-releases].
+
+Both for Android and iOS, the user plays the role of making the download.
+
+In summary: since you control the servers on the backend, you can do rollouts at
+will, and those are often found automated away in base infrastructure. On the
+frontend and on mobile, there are ways to make new versions available, but users
+may not download them immediately, and many different versions of the software
+end up coexisting.
+
+[k8s]: https://kubernetes.io/docs/concepts/workloads/controllers/deployment/#creating-a-deployment
+[staged-rollouts]: https://support.google.com/googleplay/android-developer/answer/6346149?hl=en
+[phased-releases]: https://help.apple.com/app-store-connect/#/dev3d65fcee1
+
+## Feature flag
+
+A feature flag is a *flag* that tells the application on runtime to turn on or
+off a given *feature*. That means that the actual production code will have more
+than one possible code paths to go through, and that a new version of a feature
+coexists with the old version. The feature flag tells which part of the code to
+go through.
+
+They are usually medium-lived, being relevant as long as the new code is being
+developed. The most common rules are percentages, allow/deny lists, A/B groups
+and client version.
+
+On the **backend**, those are useful for things that have a long development
+cycle, or that need to be done in steps. Consider loading the feature flag rules
+in memory when the application starts, so that you avoid querying a database
+or an external service for applying a feature flag rule and avoid flakiness on
+the result due to intermittent network failures.
+
+Since on the **frontend** you don't control when to update the client software,
+you're left with applying the feature flag rule on the server, and exposing the
+value through an API for maximum dynamicity. This could be in the frontend code
+itself, and fallback to a "just refresh the page"/"just update to the latest
+version" strategy for less dynamic scenarios.
+
+On **mobile** you can't even rely on a "just update to the latest version"
+strategy, since the code for the app could be updated to a new feature and be
+blocked on the store. Those cases aren't recurrent, but you should always assume
+the store will deny updates on critical moments so you don't find yourself with
+no cards to play. That means the only control you actually have is via
+the backend, by parameterizing the runtime of the application using the API. In
+practice, you should always have a feature flag to control any relevant piece of
+code. There is no such thing as "too small code change for a feature flag". What
+you should ask yourself is:
+
+> If the code I'm writing breaks and stays broken for around a month, do I care?
+
+If you're doing an experimental screen, or something that will have a very small
+impact you might answer "no" to the above question. For everything else, the
+answer will be "yes": bug fixes, layout changes, refactoring, new screen,
+filesystem/database changes, *etc*.
+
+## Experiment
+
+An experiment is a feature flag where you care about analytical value of the
+flag, and how it might impact user's behaviour. A feature flag with analytics.
+
+They are also usually medium-lived, being relevant as long as the new code is
+being developed. The most common rule is A/B test.
+
+On the **backend**, an experiment rely on an analytical environment that will
+pick the A/B test groups and distributions, which means those can't be held in
+memory easily. That also means that you'll need a fallback value in case
+fetching the group for a given customer fails.
+
+On the **frontend** and on **mobile** they are no different from feature flags.
+
+## Operational toggle
+
+An operational toggle is like a system-level manual circuit breaker, where you
+turn on/off a feature, fail over the load to a different server, *etc*. They are
+useful switches to have during an incident.
+
+They are usually long-lived, being relevant as long as the code is in
+production. The most common rule is percentages.
+
+They can be feature flags that are promoted to operational toggles on the
+**backend**, or may be purposefully put in place preventively or after a
+postmortem analysis.
+
+On the **frontend** and on **mobile** they are similar to feature flags, where
+the "feature" is being turned on and off, and the client interprets this value
+to show if the "feature" is available or unavailable.
+
+## Best practices
+
+### Prefer dynamic content
+
+Even though feature flags give you more dynamicity, they're still somewhat
+manual: you have to create one for a specific feature and change it by hand.
+
+If you find yourself manually updating a feature flags every other day, or
+tweaking the percentages frequently, consider making it fully dynamic. Try
+using a dataset that is generated automatically, or computing the content on the
+fly.
+
+Say you have a configuration screen with a list of options and sub-options, and
+you're trying to find how to better structure this list. Instead of using a
+feature flag for switching between 3 and 5 options, make it fully dynamic. This
+way you'll be able to perform other tests that you didn't plan, and get more
+flexibility out of it.
+
+### Use the client version to negotiate feature flags
+
+After effectively finishing a feature, the old code that coexisted with the new
+one will be deleted, and all traces of the transition will vanish from the code
+base. However if you just remove the feature flags from the API, all of the old
+versions of clients that relied on that value to show the new feature will
+downgrade to the old feature.
+
+This means that you should avoid deleting client-facing feature flags, and
+retire them instead: use the client version to decide when the feature is
+stable, and return `true` for every client with a version greater or equal to
+that. This way you can stop thinking about the feature flag, and you don't break
+or downgrade clients that didn't upgrade past the transition.
+
+### Beware of many nested feature flags
+
+Nested flags combine exponentially.
+
+Pick strategic entry points or transitions eligible for feature flags, and
+beware of their nesting.
+
+### Include feature flags in the development workflow
+
+Add feature flags to the list of things to think about during whiteboarding, and
+deleting/retiring a feature flags at the end of the development.
+
+### Always rely on a feature flag on the app
+
+Again, there is no such thing as "too small for a feature flag". Too many feature
+flags is a good problem to have, not the opposite. Automate the process of
+creating a feature flag to lower its cost.
diff --git a/src/content/blog/2020/10/20/wrong-interviewing.adoc b/src/content/blog/2020/10/20/wrong-interviewing.adoc
new file mode 100644
index 0000000..9cdfefb
--- /dev/null
+++ b/src/content/blog/2020/10/20/wrong-interviewing.adoc
@@ -0,0 +1,331 @@
+---
+title: How not to interview engineers
+date: 2020-10-20
+updated_at: 2020-10-24
+layout: post
+lang: en
+ref: how-not-to-interview-engineers
+---
+This is a response to Slava's
+"[How to interview engineers][how-to-interview-engineers]" article. I initially
+thought it was a satire, [as have others][poes-law-comment], but he has
+[doubled down on it][slava-on-satire]:
+
+> (...) Some parts are slightly exaggerated for sure, but the essay isn't meant
+> as a joke.
+
+That being true, he completely misses the point on how to improve hiring, and
+proposes a worse alternative on many aspects. It doesn't qualify as provocative,
+it is just wrong.
+
+I was comfortable taking it as a satire, and I would just ignore the whole thing
+if it wasn't (except for the technical memo part), but friends of mine
+considered it to be somewhat reasonable. This is an adapted version of parts of
+the discussions we had, risking becoming a gigantic showcase of
+[Poe's law][poes-law-wiki].
+
+In this piece, I will argue against his view, and propose an alternative
+approach to improve hiring.
+
+It is common to find people saying how broken technical hiring is, as well put
+in words by a phrase on [this comment][hn-satire]:
+
+> Everyone loves to read and write about how developer interviewing is flawed,
+> but no one wants to go out on a limb and make suggestions about how to improve
+> it.
+
+I guess Slava was trying to not fall into this trap, and make a suggestion on how
+to improve instead, which all went terribly wrong.
+
+[how-to-interview-engineers]: https://defmacro.substack.com/p/how-to-interview-engineers
+[poes-law-comment]: https://defmacro.substack.com/p/how-to-interview-engineers/comments#comment-599996
+[slava-on-satire]: https://twitter.com/spakhm/status/1315754730740617216
+[poes-law-wiki]: https://en.wikipedia.org/wiki/Poe%27s_law
+[hn-satire]: https://news.ycombinator.com/item?id=24757511
+
+## What not to do
+
+### Time candidates
+
+Timing the candidate shows up on the "talent" and "judgment" sections, and they
+are both bad ideas for the same reason: programming is not a performance.
+
+What do e-sports, musicians, actors and athletes have in common: performance
+psychologists.
+
+For a pianist, their state of mind during concerts is crucial: they not only
+must be able to deal with stage anxiety, but to become really successful they
+will have to learn how to exploit it. The time window of the concert is what
+people practice thousands of hours for, and it is what defines one's career,
+since how well all the practice went is irrelevant to the nature of the
+profession. Being able to leverage stage anxiety is an actual goal of them.
+
+That is also applicable to athletes, where the execution during a competition
+makes them sink or swim, regardless of how all the training was.
+
+The same cannot be said about composers, though. They are more like book
+writers, where the value is not on very few moments with high adrenaline, but on
+the aggregate over hours, days, weeks, months and years. A composer may have a
+deadline to finish a song in five weeks, but it doesn't really matter if it is
+done on a single night, every morning between 6 and 9, at the very last week, or
+any other way. No rigid time structure applies, only whatever fits best to the
+composer.
+
+Programming is more like composing than doing a concert, which is another way of
+saying that programming is not a performance. People don't practice algorithms
+for months to keep them at their fingertips, so that finally in a single
+afternoon they can sit down and write everything at once in a rigid 4 hours
+window, and launch it immediately after.
+
+Instead software is built iteratively, by making small additions, then
+refactoring the implementation, fixing bugs, writing a lot at once, *etc*.
+all while they get a firmer grasp of the problem, stop to think about it, come
+up with new ideas, *etc*.
+
+Some specifically plan for including spaced pauses, and call it
+"[Hammock Driven Development][hammock-driven-development]", which is just
+artist's "creative idleness" for hackers.
+
+Unless you're hiring for a live coding group, a competitive programming team, or
+a professional live demoer, timing the candidate that way is more harmful than
+useful. This type of timing doesn't find good programmers, it finds performant
+programmers, which isn't the same thing, and you'll end up with people who can
+do great work on small problems but who might be unable to deal with big
+problems, and lose those who can very well handle huge problems, slowly. If you
+are lucky you'll get performant people who can also handle big problems on the
+long term, but maybe not.
+
+An incident is the closest to a "performance" that it gets, and yet it is still
+dramatically different. Surely it is a high stress scenario, but while people
+are trying to find a root cause and solve the problem, only the downtime itself
+is visible to the exterior. It is like being part of the support staff backstage
+during a play: even though execution matters, you're still not on the spot.
+During an incident you're doing debugging in anger rather than live coding.
+
+Although giving a candidate the task to write a "technical memo" has
+potential to get a measure of the written communication skills of someone, doing
+so in a hard time window also misses the point for the same reasons.
+
+[hammock-driven-development]: https://www.youtube.com/watch?v=f84n5oFoZBc
+
+### Pay attention to typing speed
+
+Typing speed is never the bottleneck of a programmer, no matter how great
+they are.
+
+As [Dijkstra said][dijkstra-typing]:
+
+> But programming, when stripped of all its circumstantial irrelevancies, boils
+> down to no more and no less than very effective thinking so as to avoid
+> unmastered complexity, to very vigorous separation of your many different
+> concerns.
+
+In other words, programming is not about typing, it is about thinking.
+
+Otherwise, the way to give those star programmers who can't type fast enough a
+huge productivity boost would be a touch typing course. If they are so
+productive with typing speed being a limitation, imagine what they could
+accomplish if they had razor sharp touch typing skills?
+
+Also, why stop there? A good touch typist can do 90 WPM (words per minute), and
+a great one can do 120 WPM, but with a stenography keyboard they get to 200
+WPM+. That is double the productivity! Why not try
+[speech-to-text][perl-out-loud]? Make them all use [J][j-lang] so they all need
+to type less! How come nobody thought of that?
+
+And if someone couldn't solve the programming puzzle in the given time window,
+but could come back in the following day with an implementation that is not only
+faster, but uses less memory, was simpler to understand and easier to read than
+anybody else? You'd be losing that person too.
+
+[dijkstra-typing]: https://www.cs.utexas.edu/users/EWD/transcriptions/EWD05xx/EWD512.html
+[j-lang]: https://www.jsoftware.com/#/
+[perl-out-loud]: https://www.youtube.com/watch?v=Mz3JeYfBTcY
+
+### IQ
+
+For "building an extraordinary team at a hard technology startup", intelligence
+is not the most important, [determination is][pg-determination].
+
+And talent isn't "IQ specialized for engineers". IQ itself isn't a measure of how
+intelligent someone is. Ever since Alfred Binet with Théodore Simon started to
+formalize what would become IQ tests years later, they already acknowledged
+limitations of the technique for measuring intelligence, which is
+[still true today][scihub-paper].
+
+So having a high IQ tells only how smart people are for a particular aspect of
+intelligence, which is not representative of programming. There are numerous
+aspects of programming that are not covered by IQ measurement: how to name variables
+and functions, how to create models which are compatible with schema evolution,
+how to make the system dynamic for runtime parameterization without making it
+fragile, how to measure and observe performance and availability, how to pick
+between acquiring and paying technical debt, *etc*.
+
+Not to say about everything else that a programmer does that is not purely
+programming. Saying high IQ correlates with great programming is a stretch, at
+best.
+
+[pg-determination]: http://www.paulgraham.com/determination.html
+[scihub-paper]: https://sci-hub.do/https://psycnet.apa.org/doiLanding?doi=10.1037%2F1076-8971.6.1.33
+
+### Ditch HR
+
+Slava tangentially picks on HR, and I will digress on that a bit:
+
+> A good rule of thumb is that if a question could be asked by an intern in HR,
+> it's a non-differential signaling question.
+
+Stretching it, this is a rather snobbish view of HR. Why is it that an intern in
+HR can't make signaling questions? Could the same be said of an intern in
+engineering?
+
+In other words: is the question not signaling because the one
+asking is from HR, or because the one asking is an intern? If the latter, then
+he's just arguing that interns have no place in interviewing, but if the former
+then he was picking on HR.
+
+Extrapolating that, it is common to find people who don't value HR's work, and
+only see them as inferiors doing unpleasant work, and who aren't capable enough
+(or *smart* enough) to learn programming.
+
+This is equivalent to people who work primarily on backend, see others working
+on frontend struggle, and say: "isn't it just building views and showing them on
+the browser? How could it possibly be that hard? I bet I could do it better,
+with 20% of code". As you already know, the answer to it is "well, why don't you
+go do it, then?".
+
+This sense of superiority ignores the fact that HR has actual professionals
+doing actual hard work, not unlike programmers. If HR is inferior and so easy,
+why not automate everything away and get rid of a whole department?
+
+I don't attribute this world view to Slava, this is only an extrapolation of a
+snippet of the article.
+
+### Draconian mistreating of candidates
+
+If I found out that people employed theatrics in my interview so that I could
+feel I've "earned the privilege to work at your company", I would quit.
+
+If your moral compass is so broken that you are comfortable mistreating me while
+I'm a candidate, I immediately assume you will also mistreat me as an employee,
+and that the company is not a good place to work, as
+[evil begets stupidity][evil-begets-stupidity]:
+
+> But the other reason programmers are fussy, I think, is that evil begets
+> stupidity. An organization that wins by exercising power starts to lose the
+> ability to win by doing better work. And it's not fun for a smart person to
+> work in a place where the best ideas aren't the ones that win. I think the
+> reason Google embraced "Don't be evil" so eagerly was not so much to impress
+> the outside world as to inoculate themselves against arrogance.
+
+Paul Graham goes beyond "don't be evil" with a better motto:
+"[be good][pg-be-good]".
+
+Abusing the asymmetric nature of an interview to increase the chance that the
+candidate will accept the offer is, well, abusive. I doubt a solid team can
+actually be built on such poor foundations, surrounded by such evil measures.
+
+And if you really want to give engineers "the measure of whoever they're going
+to be working with", there are plenty of reasonable ways of doing it that don't
+include performing fake interviews.
+
+[pg-be-good]: http://www.paulgraham.com/good.html
+[evil-begets-stupidity]: http://www.paulgraham.com/apple.html
+
+### Personality tests
+
+Personality tests around the world need to be a) translated, b) adapted and c)
+validated. Even though a given test may be applicable and useful in a country,
+this doesn't imply it will work for other countries.
+
+Not only do tests usually come with translation guidelines, but their
+applicability also needs to be validated again after the translation and
+adaptation is done, to see if the test still measures what it is supposed to.
+
+That is also true within the same language. If a test is shown to work in
+England, it may not work in New Zealand, in spite of both speaking English. The
+difference in cultural context is influential to the point of invalidating a
+test altogether.
+
+Regardless of the validity of the proposed "big five" personality test,
+saying "just use attributes x, y and z of this test and you'll be fine" is a
+rough simplification, much like saying "just use Raft for distributed systems,
+after all it has been proven to work" — it shows he throws all of that
+background away.
+
+Even applying personality tests is itself not a trivial task, and
+psychologists do need special training to become able to effectively apply one.
+
+### More cargo culting
+
+He calls the ill-defined "industry standard" to be cargo-culting, but his
+proposal isn't sound enough to not become one.
+
+Even if the ideas were good, they aren't solid enough, or based on solid
+enough things to make them stand out by themselves. Why is it that talent,
+judgment and personality are required to determine the fitness of a good
+candidate? Why not 2, 5, or 20 things? Why those specific 3? Why is talent
+defined like that? Is it just because he found talent to be like that?
+
+Isn't that definitionally also
+[cargo-culting][cargo-culting][^cargo-culting-archive]? Isn't he just repeating
+whatever he found to work for him, without understanding why?
+
+What Feynman proposes is actually the opposite:
+
+> In summary, the idea is to try to give **all** of the information to help others
+> to judge the value of your contribution; not just the information that leads
+> to judgment in one particular direction or another.
+
+What Slava did was just another form of cargo culting, but this was one that he
+believed to work.
+
+[cargo-culting]: http://calteches.library.caltech.edu/51/2/CargoCult.htm
+[^cargo-culting-archive]: [Archived version](https://web.archive.org/web/20201003090303/http://calteches.library.caltech.edu/51/2/CargoCult.htm).
+
+## What to do
+
+I will not give you a list of things that "worked for me, thus they are
+correct". I won't either critique the current "industry standard", nor what I've
+learned from interviewing engineers.
+
+Instead, I'd like to invite you to learn from history, and from what other
+professionals have to teach us.
+
+Programming isn't an odd profession, where everything about it is different from
+anything else. It is just another episode in the "technology" series, which has
+seasons since before recorded history. It may be an episode where things move a
+bit faster, but it is fundamentally the same.
+
+So here is the key idea: what did people do *before* software engineering?
+
+What is hiring like for engineers in other areas? Haven't civil, electrical and
+other types of engineering existed for much, much longer than software
+engineering has? What have those centuries of accumulated experience taught the
+world about technical hiring?
+
+What studies were performed on the different success rate of interviewing
+strategies? What have they done right and what have they done wrong?
+
+What is the purpose of HR? Why do they even exist? Do we need them, and if so,
+what for? What is the value they bring, since everybody insists on building an HR
+department in their companies? Is the existence of HR another form of cargo
+culting?
+
+What is industrial and organizational psychology? What is that field of study?
+What do they specialize in? What have they learned since the discipline
+appeared? What have they done right and wrong over history? What is the current
+academic consensus on that area? What is a hot debate topic in academia on that
+area? What is the current bleeding edge of research? What can they teach us
+about hiring? What can they teach us about technical hiring?
+
+## Conclusion
+
+If all I've said makes me a "no hire" in the proposed framework, I'm really
+glad.
+
+This says less about my programming skills, and more about the employer's world
+view, and I hope not to be fooled into applying for a company that adopts this
+one.
+
+Claiming to be selecting "extraordinary engineers" isn't an excuse to reinvent
+the wheel, poorly.
diff --git a/src/content/blog/2020/11/07/diy-bugs.adoc b/src/content/blog/2020/11/07/diy-bugs.adoc
new file mode 100644
index 0000000..b1dd117
--- /dev/null
+++ b/src/content/blog/2020/11/07/diy-bugs.adoc
@@ -0,0 +1,108 @@
+---
+
+title: DIY an offline bug tracker with text files, Git and email
+
+date: 2020-11-07
+
+updated_at: 2021-08-14
+
+layout: post
+
+lang: en
+
+ref: diy-an-offline-bug-tracker-with-text-files-git-and-email
+
+---
+
+When [push comes to shove][youtube-dl-takedown-notice], the operational aspects
+of governance of a software project matter a lot. And everybody likes to chime
+in with their alternative of how to avoid single points of failure in project
+governance, just like I'm doing right now.
+
+The most valuable assets of a project are:
+
+1. source code
+2. discussions
+3. documentation
+4. builds
+5. tasks and bugs
+
+For **source code**, Git and other DVCS solve that already: everybody gets a
+full copy of the entire source code.
+
+If your code forge is compromised, moving it to a new one takes a couple of
+minutes, if there isn't a secondary remote serving as mirror already. In this
+case, no action is required.
+
+If you're having your **discussions** by email,
+"[taking this archive somewhere else and carrying on is effortless][sourcehut-ml]".
+
+Besides, make sure to back up archives of past discussions so that the history is
+also preserved when this migration happens.
+
+The **documentation** should
+[live inside the repository itself][writethedocs-in-repo][^writethedocs-in-repo],
+so that not only it gets first class treatment, but also gets distributed to
+everybody too. Migrating the code to a new forge already migrates the
+documentation with it.
+
+[^writethedocs-in-repo]: Described as "the ultimate marriage of the two". Starts
+ at time 31:50.
+
+As long as you keep the **builds** vendor neutral, the migration should only
+involve adapting how you call your `tests.sh` from the format that
+`provider-1.yml` uses to the format that `provider-2.yml` accepts.
+It isn't valuable to carry the build history with the project, as this data
+quickly decays in value as weeks and months go by, but for simple text logs
+[using Git notes] may be just enough, and they would be replicated with the rest
+of the repository.
+
+[using Git notes]: {% link _tils/2020-11-30-storing-ci-data-on-git-notes.md %}
+
+But for **tasks and bugs** many rely on a vendor-specific service, where you
+register and manage those issues via a web browser. Some provide an
+[interface for interacting via email][todos-srht-email] or an API for
+[bridging local bugs with vendor-specific services][git-bug-bridges]. But
+they're all layers around the service that disguise the fact that it is a
+central point of failure which, when compromised, would lead to data loss. When
+push comes to shove, you'd lose data.
+
+[youtube-dl-takedown-notice]: https://github.com/github/dmca/blob/master/2020/10/2020-10-23-RIAA.md
+[sourcehut-ml]: https://sourcehut.org/blog/2020-10-29-how-mailing-lists-prevent-censorship/
+[writethedocs-in-repo]: https://podcast.writethedocs.org/2017/01/25/episode-3-trends/
+[todos-srht-email]: https://man.sr.ht/todo.sr.ht/#email-access
+[git-bug-bridges]: https://github.com/MichaelMure/git-bug#bridges
+
+## Alternative: text files, Git and email
+
+Why not do the same as documentation, and move tasks and bugs into the
+repository itself?
+
+It requires no extra tool to be installed, and fits right in the already
+existing workflow for source code and documentation.
+
+I like to keep a [`TODOs.md`] file at the repository top-level, with
+two relevant sections: "tasks" and "bugs". Then when building the documentation
+I'll just [generate an HTML file from it], and [publish] it alongside the static
+website. All that is done on the main branch.
+
+Any issues discussions are done in the mailing list, and a reference to a
+discussion could be added to the ticket itself later on. External contributors
+can file tickets by sending a patch.
+
+The good thing about this solution is that it works for 99% of projects out
+there.
+
+For the other 1%, having Fossil's "[tickets][fossil-tickets]" could be an
+alternative, but you may not want to migrate your project to Fossil to get those
+niceties.
+
+Even though I keep a `TODOs.md` file on the main branch, you can have a `tasks`
+ branch with a `task-n.md` file for each task, or any other way you like.
+
+These tools are familiar enough that you can adjust it to fit your workflow.
+
+[`TODOs.md`]: https://euandre.org/git/remembering/tree/TODOs.md?id=3f727802cb73ab7aa139ca52e729fd106ea916d0
+[generate an HTML file from it]: https://euandre.org/git/remembering/tree/aux/workflow/TODOs.sh?id=3f727802cb73ab7aa139ca52e729fd106ea916d0
+[publish]: https://euandreh.xyz/remembering/TODOs.html
+[fossil-tickets]: https://fossil-scm.org/home/doc/trunk/www/bugtheory.wiki
diff --git a/src/content/blog/2020/11/08/paradigm-shift-review.adoc b/src/content/blog/2020/11/08/paradigm-shift-review.adoc
new file mode 100644
index 0000000..c98c131
--- /dev/null
+++ b/src/content/blog/2020/11/08/paradigm-shift-review.adoc
@@ -0,0 +1,164 @@
+---
+
+title: The Next Paradigm Shift in Programming - video review
+
+date: 2020-11-08
+
+layout: post
+
+lang: en
+
+ref: the-next-paradigm-shift-in-programming-video-review
+
+eu_categories: video review
+
+---
+
+This is a review with comments of
+"[The Next Paradigm Shift in Programming][video-link]", by Richard Feldman.
+
+This video was *strongly* suggested to me by a colleague. I wanted to discuss it
+with her, and when drafting my response I figured I could publish it publicly
+instead.
+
+Before anything else, let me just be clear: I really like the talk, and I think
+Richard is a great public speaker. I've watched several of his talks over the
+years, and I feel I've followed his career at a distance, with much respect.
+This isn't a piece criticizing him personally, and I agree with almost
+everything he said. These are just some comments but also nitpicks on a few
+topics I think he missed, or that I view differently.
+
+[video-link]: https://www.youtube.com/watch?v=6YbK8o9rZfI
+
+## Structured programming
+
+The historical overview at the beginning is very good. In fact, the very video I
+watched previously was about structured programming!
+
+Kevlin Henney on
+"[The Forgotten Art of Structured Programming][structured-programming]" does a
+deep-dive on the topic of structured programming, and how on his view it is
+still hidden in our code, when we do a `continue` or a `break` in some ways.
+Even though it is less common to see an explicit `goto` in code these days, many
+of the original arguments of Dijkstra against explicit `goto`s is applicable to
+other constructs, too.
+
+This is a very mature view, and I like how he goes beyond the
+"don't use `goto`s" heuristic and proposes a much more nuanced understanding
+of what "structured programming" means.
+
+In a few minutes, Richard is able to condense most of the significant bits of
+Kevlin's talk in a didactical way. Good job.
+
+[structured-programming]: https://www.youtube.com/watch?v=SFv8Wm2HdNM
+
+## OOP like a distributed system
+
+Richard extrapolates Alan Kay's original vision of OOP, and he concludes that
+it is more like a distributed system than how people think about OOP these days.
+But he then states that this is a rather bad idea, and we shouldn't pursue it,
+given that distributed systems are known to be hard.
+
+However, his extrapolation isn't really impossible, bad or an absurd. In fact,
+it has been followed through by Erlang. Joe Armstrong used to say that
+"[Erlang might be the only OOP language][erlang-oop]", since it actually adopted
+this paradigm.
+
+But Erlang is a functional language. So this "OOP as a distributed system" view
+is more about designing systems in the large than programs in the small.
+
+There is a switch of levels in this comparison I'm making, as can be done with
+any language or paradigm: you can have a functional-like system that is built
+with an OOP language (like a compiler, that given the same input will produce
+the same output), or an OOP-like system that is built with a functional language
+(Rich Hickey calls it
+"[OOP in the large][langsys]"[^the-language-of-the-system]).
+
+So this jump from in-process paradigm to distributed paradigm is rather a big
+one, and I don't think he can argue that OOP has anything to say about
+software distribution across nodes. You can still have Erlang actors that run
+independently and send messages to each other without a network between them.
+Any OTP application deployed on a single node effectively works like that.
+
+I think he went a bit too far with this extrapolation. Even though I agree it is
+a logical and fair one, it isn't as evidently bad as he painted it. I would be fine
+working with a single-node OTP application and seeing someone call it "a *real*
+OOP program".
+
+[erlang-oop]: https://www.infoq.com/interviews/johnson-armstrong-oop/
+[langsys]: https://www.youtube.com/watch?v=ROor6_NGIWU
+[^the-language-of-the-system]: From 24:05 to 27:45.
+
+## First class immutability
+
+I agree with his view of languages moving towards the functional paradigm.
+But I think you can narrow down the "first-class immutability" feature he points
+out as present on modern functional programming languages to "first-class
+immutable data structures".
+
+I wouldn't categorize a language as "supporting functional programming style"
+without a library for functional data structures in it. By discipline you can
+avoid side-effects, write pure functions as much as possible, and pass
+functions as arguments around in almost every language these days, but if
+changing an element of a vector mutates things in-place, that is still not
+functional programming.
+
+To avoid that, you end up needing to make clones of objects to pass to a
+function, using freezes or other workarounds. All those cases are when the
+underlying mix of OOP and functional programming fail.
+
+There are some languages with third-party libraries that provide functional data
+structures, like [immer][immer] for C++, or [ImmutableJS][immutablejs] for
+JavaScript.
+
+But functional programming is more easily achievable in languages that have them
+built-in, like Erlang, Elm and Clojure.
+
+[immer]: https://sinusoid.es/immer/
+[immutablejs]: https://immutable-js.github.io/immutable-js/
+
+## Managed side-effects
+
+His proposal of adopting managed side-effects as a first-class language concept
+is really intriguing.
+
+This is something you can achieve with a library, like [Redux][redux] for JavaScript or
+[re-frame][re-frame] for Clojure.
+
+I haven't worked with a language with managed side-effects at scale, and I don't
+feel this is a problem with Clojure or Erlang. But is this me finding a flaw in
+his argument or not acknowledging a benefit unknown to me? This is a provocative
+question I ask myself.
+
+Also all FP languages with managed side-effects I know are statically-typed, and
+all dynamically-typed FP languages I know don't have managed side-effects baked in.
+
+[redux]: https://redux.js.org/
+[re-frame]: https://github.com/Day8/re-frame
+
+## What about declarative programming?
+
+In "[Out of the Tar Pit][tar-pit]", B. Moseley and P. Marks go beyond his view
+of functional programming as the basis, and name a possible "functional
+relational programming" as an even better solution. They explicitly call out
+some flaws in most of the modern functional programming languages, and instead
+pick declarative programming as an even better starting paradigm.
+
+If the next paradigm shift is towards functional programming, will the following
+shift be towards declarative programming?
+
+[tar-pit]: http://curtclifton.net/papers/MoseleyMarks06a.pdf
+
+## Conclusion
+
+Beyond all Richard said, I also often hear people bring up functional
+programming when talking about utilizing all cores of a computer, and how FP
+can help with that.
+
+Rich Hickey makes a great case for single-process FP on his famous talk
+"[Simple Made Easy][simple-made-easy]".
+
+[simple-made-easy]: https://www.infoq.com/presentations/Simple-Made-Easy/
+
+<!-- I find this conclusion too short, and it doesn't revisits the main points -->
+<!-- presented on the body of the article. I won't rewrite it now, but it would be an -->
+<!-- improvement to extend it to do so. -->
diff --git a/src/content/blog/2020/11/12/database-parsers-trees.adoc b/src/content/blog/2020/11/12/database-parsers-trees.adoc
new file mode 100644
index 0000000..1870fad
--- /dev/null
+++ b/src/content/blog/2020/11/12/database-parsers-trees.adoc
@@ -0,0 +1,233 @@
+= Durable persistent trees and parser combinators - building a database
+
+date: 2020-11-12
+
+updated_at: 2021-02-09
+
+layout: post
+
+lang: en
+
+ref: durable-persistent-trees-and-parser-combinators-building-a-database
+
+eu_categories: mediator
+
+---
+
+I've received with certain frequency messages from people wanting to know if
+I've made any progress on the database project
+[I've written about]({% link _articles/2020-08-31-the-database-i-wish-i-had.md %}).
+
+There are a few areas where I've made progress, and here's a public post on it.
+
+== Proof-of-concept: DAG log
+
+The main thing I wanted to validate with a concrete implementation was the
+concept of modeling a DAG on a sequence of datoms.
+
+The notion of a *datom* is a rip-off from Datomic, which models data with time
+aware *facts*, which come from RDF. RDF's fact is a triple of
+subject-predicate-object, and Datomic's datoms add a time component to it:
+subject-predicate-object-time, A.K.A. entity-attribute-value-transaction:
+
+```clojure
+[[person :likes "pizza" 0 true]
+ [person :likes "bread" 1 true]
+ [person :likes "pizza" 1 false]]
+```
+
+The above datoms say:
+- at time 0, `person` likes pizza;
+- at time 1, `person` stopped liking pizza, and started to like bread.
+
+Datomic ensures total consistency of this ever growing log by having a single
+writer, the transactor, that will enforce it when writing.
+
+In order to support disconnected clients, I needed a way to allow multiple
+writers, and I chose to do it by making the log not a list, but a
+directed acyclic graph (DAG):
+
+```clojure
+[[person :likes "pizza" 0 true]
+ [0 :parent :db/root 0 true]
+ [person :likes "bread" 1 true]
+ [person :likes "pizza" 1 false]
+ [1 :parent 0 1 true]]
+```
+
+The extra datoms above add more information to build the directionality to the
+log, and instead of a single consistent log, the DAG could have multiple leaves
+that coexist, much like how different Git branches can have different "latest"
+commits.
+
+In order to validate this idea, I started with a Clojure implementation. The
+goal was not to write the actual final code, but to make a proof-of-concept that
+would allow me to test and stretch the idea itself.
+
+This code [already exists][clj-poc], but is yet fairly incomplete:
+
+- the building of the index isn't done yet (with some
+ [commented code][clj-poc-index] on the next step to be implemented)
+- the indexing is extremely inefficient, with [more][clj-poc-o2-0]
+ [than][clj-poc-o2-1] [one][clj-poc-o2-2] occurrence of `O²` functions;
+- no query support yet.
+
+[clj-poc]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n1
+[clj-poc-index]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n295
+[clj-poc-o2-0]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n130
+[clj-poc-o2-1]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n146
+[clj-poc-o2-2]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n253
+
+== Top-down *and* bottom-up
+
+However, as time passed and I started looking at what the final implementation
+would look like, I started to consider keeping the PoC around.
+
+The top-down approach (Clojure PoC) was in fact helping guide me with the
+bottom-up, and I now have "promoted" the Clojure PoC into a "reference
+implementation". It should now be a finished implementation that says what the
+expected behaviour is, and the actual code should match the behaviour.
+
+The good thing about a reference implementation is that it has no performance
+or resource boundaries, so if it ends up being 1000× slower and using 500× more
+memory, it should be fine. The code can also be 10× or 100× simpler, too.
+
+== Top-down: durable persistent trees
+
+In promoting the PoC into a reference implementation, this top-down approach now
+needs to go beyond doing everything in memory, and the index data structure now
+needs to be disk-based.
+
+Roughly speaking, most storage engines out there are based either on B-Trees or
+LSM Trees, or some variations of those.
+
+But when building an immutable database, update-in-place B-Trees aren't an
+option, as they don't accommodate keeping historical views of the tree. LSM
+Trees may seem a better alternative, but their file duplication and compaction
+are ways of deleting old data — precisely the data that is useful for a
+historical view.
+
+I think the thing I'm after is a mix of a Copy-on-Write B-Tree, which would keep
+historical versions with the write IO cost amortization of memtables of LSM
+Trees. I don't know of any B-Tree variant out there that resembles this, so I'll
+call it "Flushing Copy-on-Write B-Tree".
+
+I haven't written any code for this yet, so all I have is a high-level view of
+what it will look like:
+
+1. like Copy-on-Write B-Trees, changing a leaf involves creating a new leaf and
+ building a new path from root to the leaf. The upside is that writes are lock
+ free, and no coordination is needed between readers and writers, ever;
+
+2. the downside is that a single leaf update means at least `H` new nodes that
+ will have to be flushed to disk, where `H` is the height of the tree. To avoid
+ that, the writer creates these nodes exclusively on the in-memory memtable, to
+ avoid flushing to disk on every leaf update;
+
+3. a background job will consolidate the memtable data every time it hits X MB,
+ and persist it to disk, amortizing the cost of the Copy-on-Write B-Tree;
+
+4. readers then will have the extra job of getting the latest relevant
+ disk-resident value and merge it with the memtable data.
+
+The key difference to existing Copy-on-Write B-Trees is that the new trees
+are only periodically written to disk, and the intermediate values are kept in
+memory. Since no node is ever updated, the page utilization is maximum as it
+doesn't need to keep space for future inserts and updates.
+
+And the key difference to existing LSM Trees is that no compaction is run:
+intermediate values are still relevant as the database grows. So this leaves out
+tombstones and value duplication done for write performance.
+
+One can delete intermediate index values to reclaim space, but no data is lost
+on the process, only old B-Tree values. And if the database ever comes back to
+that point (like when doing a historical query), the B-Tree will have to be
+rebuilt from a previous value. After all, the database *is* a set of datoms, and
+everything else is just derived data.
+
+Right now I'm still reading about other data structures that storage engines
+use, and I'll start implementing the "Flushing Copy-on-Write B-Tree" as I learn
+more[^learn-more-db] and mature it more.
+
+[^learn-more-db]: If you are interested in learning more about this too, the
+ very best two resources on this subject are Andy Pavlo's
+ "[Intro to Database Systems](https://www.youtube.com/playlist?list=PLSE8ODhjZXjbohkNBWQs_otTrBTrjyohi)"
+ course and Alex Petrov's "[Database Internals](https://www.databass.dev/)" book.
+
+== Bottom-up: parser combinators and FFI
+
+I chose Rust as it has the best WebAssembly tooling support.
+
+My goal is not to build a Rust database, but a database that happens to be in
+Rust. In order to reach client platforms, the primary API is the FFI one.
+
+I'm not very happy with current tools for exposing Rust code via FFI to the
+external world: they either mix C with C++, which I don't want to do, or provide
+no access to the intermediate representation of the FFI, which would be useful
+for generating binding for any language that speaks FFI.
+
+I like better the path that the author of [cbindgen][cbindgen-crate]
+crate [proposes][rust-ffi]: emitting a data representation of the Rust C API
+(the author calls it a `ffi.json` file), and then building transformers from the
+data representation to the target language. This way you could generate a C API
+*and* the node-ffi bindings for JavaScript automatically from the Rust code.
+
+So the first thing to be done before moving on is an FFI exporter that doesn't
+mix C and C++, and generates said `ffi.json`, and then build a few transformers
+that take this `ffi.json` and generate the language bindings, be it C, C++,
+JavaScript, TypeScript, Kotlin, Swift, Dart, *etc*[^ffi-langs].
+
+[^ffi-langs]: Those are, specifically, the languages I'm more interested on. My
+ goal is supporting client applications, and those languages are the most
+ relevant for doing so: C for GTK, C++ for Qt, JavaScript and TypeScript for
+ Node.js and browser, Kotlin for Android and Swing, Swift for iOS, and Dart
+ for Flutter.
+
+I think the best way to get there is by taking the existing code for cbindgen,
+which uses the [syn][syn-crate] crate to parse the Rust code[^rust-syn], and
+adapt it to emit the metadata.
+
+[^rust-syn]: The fact that syn is an external crate to the Rust compiler points
+ to a big warning: procedural macros are not first class in Rust. They are
+ just like Babel plugins in JavaScript land, with the extra shortcoming that
+ there is no specification for the Rust syntax, unlike JavaScript.
+
+ As flawed as this may be, it seems to be generally acceptable and adopted,
+ which works against building a solid ecosystem for Rust.
+
+ The alternative that rust-ffi implements relies on internals of the Rust
+ compiler, which isn't actually worse, just less common and less accepted.
+
+I've started a fork of cbindgen: ~~x-bindgen~~[^x-bindgen]. Right now it is
+just a copy of cbindgen verbatim, and I plan to remove all C and C++ emitting
+code from it, and add a IR emitting code instead.
+
+[^x-bindgen]: *EDIT*: now archived, the experimentation was fun. I've started to move more towards C, so this effort became deprecated.
+
+When starting working on x-bindgen, I realized I didn't know what to look for in
+a header file, as I haven't written any C code in many years. So as I was
+writing [libedn][libedn-repo], I didn't know how to build a good C API to
+expose. So I tried porting the code to C, and right now I'm working on building
+a *good* C API for a JSON parser using parser combinators:
+~~ParsecC~~ [^parsecc].
+
+[^parsecc]: *EDIT*: now also archived.
+
+After "finishing" ParsecC I'll have a good notion of what a good C API is, and
+I'll have a better direction towards how to expose code from libedn to other
+languages, and work on x-bindgen then.
+
+What both libedn and ParsecC are missing right now are proper error reporting,
+and property-based testing for libedn.
+
+[cbindgen-crate]: https://github.com/eqrion/cbindgen
+[syn-crate]: https://github.com/dtolnay/syn
+[rust-ffi]: https://blog.eqrion.net/future-directions-for-cbindgen/
+[libedn-repo]: https://euandre.org/git/libedn/
+
+== Conclusion
+
+I've learned a lot already, and I feel the journey I'm on is worth going
+through.
+
+If any of those topics interest you, message me to discuss more or contribute!
+Patches welcome!
diff --git a/src/content/blog/2020/11/14/local-first-review.adoc b/src/content/blog/2020/11/14/local-first-review.adoc
new file mode 100644
index 0000000..c24095a
--- /dev/null
+++ b/src/content/blog/2020/11/14/local-first-review.adoc
@@ -0,0 +1,304 @@
+= Local-First Software: You Own Your Data, in spite of the Cloud - article review
+
+date: 2020-11-14
+
+layout: post
+
+lang: en
+
+ref: local-first-software-you-own-your-data-in-spite-of-the-cloud-article-review
+
+eu_categories: presentation,article review
+
+---
+
+*This article is derived from a [presentation][presentation] given at a Papers
+We Love meetup on the same subject.*
+
+This is a review of the article
+"[Local-First Software: You Own Your Data, in spite of the Cloud][article-pdf]",
+by M. Kleppmann, A. Wiggins, P. Van Hardenberg and M. F. McGranaghan.
+
+== Offline-first, local-first
+
+The "local-first" term they use isn't new, and I have used it myself in the past
+to refer to this type of application, where the data lives primarily on the
+client, and there are conflict resolution algorithms that reconcile data created
+on different instances.
+
+Sometimes I see confusion with this idea and "client-side", "offline-friendly",
+"syncable", etc. I have myself used these terms, also.
+
+There exists, however, already the "offline-first" term, which conveys almost
+all of that meaning. In my view, "local-first" doesn't extend "offline-first" in
+any aspect, rather it gives a well-defined meaning to it instead. I could say
+that "local-first" is just "offline-first", but with 7 well-defined ideals
+instead of community best practices.
+
+It is a step forward, and given the number of times I've seen the paper shared
+around I think there's a chance people will prefer saying "local-first" in
+*lieu* of "offline-first" from now on.
+
+[presentation]: {% link _slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides %}
+[article-pdf]: https://martin.kleppmann.com/papers/local-first.pdf
+
+== Software licenses
+
+On a footnote of the 7th ideal ("You Retain Ultimate Ownership and Control"),
+the authors say:
+
+> In our opinion, maintaining control and ownership of data does not mean that
+> the software must necessarily be open source. (...) as long as it does not
+> artificially restrict what users can do with their files.
+
+They give examples of artificial restrictions, like this artificial restriction
+I've come up with:
+
+```bash
+#!/bin/sh
+
+TODAY=$(date +%s)
+LICENSE_EXPIRATION=$(date -d 2020-11-15 +%s)
+
+if [ $TODAY -ge $LICENSE_EXPIRATION ]; then
+ echo 'License expired!'
+ exit 1
+fi
+
+echo $((2 + 2))
+```
+
+Now when using this very useful program:
+
+```bash
+# today
+$ ./useful-adder.sh
+4
+# tomorrow
+$ ./useful-adder.sh
+License expired!
+```
+
+This is obviously an intentional restriction, and it goes against the 5th ideal
+("The Long Now"). This software would only be useful as long as the embedded
+license expiration allowed. Sure you could change the clock on the computer, but
+there are many other ways that this type of intentional restriction is in
+conflict with that ideal.
+
+However, what about unintentional restrictions? What if a software had an equal
+or similar restriction, and stopped working after days pass? Or what if the
+programmer added a constant to make the development simpler, and this led to
+unintentionally restricting the user?
+
+```bash
+# today
+$ useful-program
+# ...useful output...
+
+# tomorrow, with more data
+$ useful-program
+ERROR: Panic! Stack overflow!
+```
+
+Just as easily as I can come up with ways to intentionally restrict users, I can
+do the same for unintentional restrictions. A program can stop working for a
+variety of reasons.
+
+If it stops working due to, say, data growth, what are the options? Reverting to
+an earlier backup, and making it read-only? That isn't really a "Long Now", but
+rather a "Long Now as long as the software keeps working as expected".
+
+The point is: if the software isn't free, "The Long Now" isn't achievable
+without a lot of wishful thinking. Maybe the authors were trying to be more
+friendly towards businesses that don't like free software, but in doing so they've proposed
+a contradiction by reconciling "The Long Now" with proprietary software.
+
+It isn't the same as saying that any free software achieves that ideal,
+either. The license can still be free, but the source code can become
+unavailable due to cloud rot. Or maybe the build is undocumented, or the build
+tools had specific configuration that one has to guess. A piece of free
+software can still fail to achieve "The Long Now". Being free doesn't guarantee
+it, just makes it possible.
+
+A colleague has challenged my view, arguing that the software doesn't really
+need to be free, as long as there is a specification of the file format. This
+way if the software stops working, the format can still be processed by other
+programs. But this doesn't apply in practice: if you have a document that you
+write to, and software stops working, you still want to write to the document.
+An external tool that navigates the content and shows it to you won't allow you
+to keep writing, and when it does that tool is now starting to re-implement the
+software.
+
+An open specification could serve as a blueprint to other implementations,
+making the data format more friendly to reverse-engineering. But the
+re-implementation still has to exist, at which point the original software failed
+to achieve "The Long Now".
+
+It is less bad, but still not quite there yet.
+
+== Denial of existing solutions
+
+When describing "Existing Data Storage and Sharing Models", on a
+footnote[^devil] the authors say:
+
+[^devil]: This is the second aspect that I'm picking on the article from a
+  footnote. I guess the devil really is in the details.
+
+> In principle it is possible to collaborate without a repository service,
+> e.g. by sending patch files by email, but the majority of Git users rely
+> on GitHub.
+
+The authors go to great lengths to talk about usability of cloud apps, and even
+point to research they've done on it, but they've missed learning more from
+local-first solutions that already exist.
+
+Say the automerge CRDT proves to be even more useful than what everybody
+imagined. Say someone builds a local-first repository service using it. How will
+it change anything of the Git/GitHub model? What is different about it that
+prevents people in the future writing a paper saying:
+
+> In principle it is possible to collaborate without a repository service,
+> e.g. by using automerge and platform X,
+> but the majority of Git users rely on GitHub.
+
+How is this any better?
+
+If it is already [possible][git-local-first] to have a local-first development
+workflow, why don't people use it? Is it just fashion, or there's a fundamental
+problem with it? If so, what is it, and how to avoid it?
+
+If sending patches by emails is perfectly possible but out of fashion, why even
+talk about Git/GitHub? Isn't this a problem that people are putting themselves
+in? How can CRDTs possibly prevent people from doing that?
+
+My impression is that the authors envision a better future, where development is
+fully decentralized unlike today, and somehow CRDTs will make that happen. If
+more people think this way, "CRDT" is next in line to the buzzword list that
+solves everything, like "containers", "blockchain" or "machine learning".
+
+Rather than picturing an imaginary service that could be described like
+"GitHub+CRDTs" and people would adopt it, I'd rather better understand why
+people don't do it already, since Git is built to work like that.
+
+[git-local-first]: https://drewdevault.com/2018/07/23/Git-is-already-distributed.html
+
+== Ditching of web applications
+
+The authors put web application in a worse position for building local-first
+application, claiming that:
+
+> (...) the architecture of web apps remains fundamentally server-centric.
+> Offline support is an afterthought in most web apps, and the result is
+> accordingly fragile.
+
+Well, I disagree.
+
+The problem isn't inherent in the web platform, but instead how people use it.
+
+I have myself built offline-first applications, leveraging IndexedDB, App Cache,
+*etc*. I wanted to build an offline-first application on the web, and so I did.
+
+In fact, many people choose [PouchDB][pouchdb] *because* of that, since it is a
+good tool for offline-first web applications. The problem isn't really the
+technology, but how much people want their application to be local-first.
+
+Contrast it with Android [Instant Apps][instant-apps], where applications are
+sent to the phone in small parts. Since this requires an internet connection to
+move from a part of the app bundle to another, a subset of the app isn't
+local-first, despite being an app.
+
+The point isn't the technology, but how people are using it. Local-first web
+applications are perfectly possible, just like non-local-first native
+applications are possible.
+
+[pouchdb]: https://pouchdb.com/
+[instant-apps]: https://developer.android.com/topic/google-play-instant
+
+== Costs are underrated
+
+I think the costs of "old-fashioned apps" over "cloud apps" are underrated,
+mainly regarding storage, and that these costs can vary a lot by application.
+
+Say a person writes online articles for their personal website, and puts
+everything into Git. Since there isn't supposed to be any collaboration, all
+of the relevant ideals of local-first are achieved.
+
+Now another person creates videos instead of articles. They could try keeping
+everything local, but after some time the storage usage fills the entire disk.
+This person's local-first setup would be much more complex, and would cost much
+more on maintenance, backup and storage.
+
+Even though both have similar needs, a local-first video repository is much more
+demanding. So the local-first thinking here isn't "just keep everything local",
+but "how much time and money am I willing to spend to keep everything local".
+
+The convenience of "cloud apps" becomes so attractive that many don't even have
+a local copy of their videos, and rely exclusively on service providers to
+maintain, backup and store their content.
+
+The dial measuring "cloud apps" and "old-fashioned apps" needs to be specific to
+use-cases.
+
+== Real-time collaboration is optional
+
+If I were the one making the list of ideals, I wouldn't focus so much on
+real-time collaboration.
+
+Even though seamless collaboration is desired, it being real-time depends on the
+network being available for that. But ideal 3 states that
+"The Network is Optional", so real-time collaboration is also optional.
+
+The fundamentals of a local-first system should enable real-time collaboration
+when network is available, but shouldn't focus on it.
+
+On many places when discussing applications being offline, it is common for me
+to find people saying that their application works
+"even on a plane, subway or elevator". That is a reflection of when said
+developers have to deal with networks being unavailable.
+
+But this leaves out a big chunk of the world where internet connection is
+intermittent, or only works every other day or only once a week, or stops
+working when it rains, *etc*. For this audience, living without network
+connectivity isn't such a discrete moment in time, but part of every day life. I
+like the fact that the authors acknowledge that.
+
+When discussing "working offline", I'd rather keep this type of person in mind,
+then the subset of people who are offline when on the elevator will naturally be
+included.
+
+== On CRDTs and developer experience
+
+When discussing developer experience, the authors bring up some questions to be
+answered further, like:
+
+> For an app developer, how does the use of a CRDT-based data layer compare to
+> existing storage layers like a SQL database, a filesystem, or CoreData? Is a
+> distributed system harder to write software for?
+
+That is an easy one: yes.
+
+A distributed system *is* harder to write software for, being a distributed
+system.
+
+Adding a large layer of data structures and algorithms will make it more complex
+to write software for, naturally. And if trying to make this layer transparent
+to the programmer, so they can pretend that layer doesn't exist is a bad idea,
+as RPC frameworks have tried, and failed.
+
+See "[A Note on Distributed Computing][note-dist-comp]" for a critique on RPC
+frameworks trying to make the network invisible, which I think also applies in
+equivalence for making the CRDTs layer invisible.
+
+[rmi-wiki]: https://en.wikipedia.org/wiki/Java_remote_method_invocation
+[note-dist-comp]: https://web.archive.org/web/20130116163535/http://labs.oracle.com/techrep/1994/smli_tr-94-29.pdf
+
+## Conclusion
+
+I liked a lot the article, as it took the "offline-first" philosophy and ran
+with it.
+
+But I think the authors' view of adding CRDTs and things becoming local-first is
+a bit too magical.
+
+This particular area is one that I have large interest in, and I wish to see
+more being done on the "local-first" space.
diff --git a/src/content/blog/2021/01/26/remembering-ann.adoc b/src/content/blog/2021/01/26/remembering-ann.adoc
new file mode 100644
index 0000000..0d02384
--- /dev/null
+++ b/src/content/blog/2021/01/26/remembering-ann.adoc
@@ -0,0 +1,190 @@
+---
+
+title: "ANN: remembering - Add memory to dmenu, fzf and similar tools"
+
+date: 2021-01-26
+
+layout: post
+
+lang: en
+
+ref: ann-remembering-add-memory-to-dmenu-fzf-and-similar-tools
+
+---
+
+Today I pushed v0.1.0 of [remembering], a tool to enhance the interactive usability of menu-like tools, such as [dmenu] and [fzf].
+
+## Previous solution
+
+I previously used [yeganesh] to fill this gap, but as I started to rely less on Emacs, I adopted fzf as my go-to tool for doing fuzzy searching on the terminal.
+But I didn't like that fzf always showed the same order of things, when I would only need 3 or 4 commonly used files.
+
+For those who don't know: yeganesh is a wrapper around dmenu that will remember your most used programs and put them on the beginning of the list of executables.
+This is very convenient for interactive prolonged use, as with time the things you usually want are right at the very beginning.
+
+But now I had this thing, yeganesh, that solved this problem for dmenu, but didn't for fzf.
+
+I initially considered patching yeganesh to support it, but I found it more coupled to dmenu than I would desire.
+I'd rather have something that knows nothing about dmenu, fzf or anything, but enhances tools like those in a useful way.
+
+[remembering]: https://euandreh.xyz/remembering/
+[dmenu]: https://tools.suckless.org/dmenu/
+[fzf]: https://github.com/junegunn/fzf
+[yeganesh]: http://dmwit.com/yeganesh/
+
+## Implementation
+
+Other than being decoupled from dmenu, another improvement I thought could be made on top of yeganesh is the programming language choice.
+Instead of Haskell, I went with POSIX sh.
+Sticking to POSIX sh makes it require fewer build-time dependencies. There aren't any, actually. Packaging is made much easier due to that.
+
+The good thing is that the program itself is small enough ([119 lines] on v0.1.0) that POSIX sh does the job just fine, combined with other POSIX utilities such as [getopts], [sort] and [awk].
+
+[119 lines]: https://euandre.org/git/remembering/tree/remembering?id=v0.1.0
+[getopts]: http://www.opengroup.org/onlinepubs/9699919799/utilities/getopts.html
+[sort]: http://www.opengroup.org/onlinepubs/9699919799/utilities/sort.html
+[awk]: http://www.opengroup.org/onlinepubs/9699919799/utilities/awk.html
+
+The behaviour is: given a program that will read from STDIN and write a single entry to STDOUT, `remembering` wraps that program, and rearranges STDIN so that previous choices appear at the beginning.
+
+Where you would do:
+
+```shell
+$ seq 5 | fzf
+
+ 5
+ 4
+ 3
+ 2
+> 1
+ 5/5
+>
+```
+
+And every time get the same order of numbers, now you can write:
+
+```shell
+$ seq 5 | remembering -p seq-fzf -c fzf
+
+ 5
+ 4
+ 3
+ 2
+> 1
+ 5/5
+>
+```
+
+On the first run, everything is the same. If you picked 4 on the previous example, the following run would be different:
+
+```shell
+$ seq 5 | remembering -p seq-fzf -c fzf
+
+ 5
+ 3
+ 2
+ 1
+> 4
+ 5/5
+>
+```
+
+As time passes, the list would adjust based on the frequency of your choices.
+
+I aimed for reusability, so that I could wrap diverse commands with `remembering` and it would be able to work. To accomplish that, a "profile" (the `-p something` part) stores data about different runs separately.
+
+I took the idea of building something small with few dependencies to other places too:
+- the manpages are written in troff directly;
+- the tests are just more POSIX sh files;
+- and a POSIX Makefile to `check` and `install`.
+
+I was aware of the value of sticking to coding to standards, but I had past experience mostly with programming language standards, such as ECMAScript, Common Lisp, Scheme, or with IndexedDB or DOM APIs.
+It felt good to rediscover these nice POSIX tools, which makes me remember of a quote by [Henry Spencer][poor-unix]:
+
+> Those who do not understand Unix are condemned to reinvent it, poorly.
+
+[poor-unix]: https://en.wikipedia.org/wiki/Henry_Spencer#cite_note-3
+
+## Usage examples
+
+Here are some functions I wrote myself that you may find useful:
+
+### Run a command with fzf on `$PWD`
+
+```shell
+f() {
+  profile="f-shell-function-$(pwd | sed -e 's_/_-_g')"
+ file="$(git ls-files | \
+ remembering -p "$profile" \
+      -c "fzf --select-1 --exit-0 --query \"$2\" --preview 'cat {}'")"
+ if [ -n "$file" ]; then
+ # shellcheck disable=2068
+ history -s f $@
+ history -s "$1" "$file"
+ "$1" "$file"
+fi
+}
+```
+
+This way I can run `f vi` or `f vi config` at the root of a repository, and the list of files will always appear on the most used order.
+Adding `pwd` to the profile allows it to not mix data for different repositories.
+
+### Copy password to clipboard
+
+```shell
+choice="$(find "$HOME/.password-store" -type f | \
+ grep -Ev '(.git|.gpg-id)' | \
+ sed -e "s|$HOME/.password-store/||" -e 's/\.gpg$//' | \
+ remembering -p password-store \
+ -c 'dmenu -l 20 -i')"
+
+
+if [ -n "$choice" ]; then
+ pass show "$choice" -c
+fi
+```
+
+Adding the above to a file and binding it to a keyboard shortcut, I can access the contents of my [password store][password-store], with the entries ordered by usage.
+
+[password-store]: https://www.passwordstore.org/
+
+### Replacing yeganesh
+
+Where I previously had:
+
+```shell
+exe=$(yeganesh -x) && exec $exe
+```
+
+Now I have:
+
+```shell
+exe=$(dmenu_path | remembering -p dmenu-exec -c dmenu) && exec $exe
+```
+
+This way, the executables appear on order of usage.
+
+If you don't have `dmenu_path`, you can get just the underlying `stest` tool that looks at the executables available in your `$PATH`. Here's a juicy one-liner to do it:
+
+```shell
+$ wget -O- https://dl.suckless.org/tools/dmenu-5.0.tar.gz | \
+ tar Ozxf - dmenu-5.0/arg.h dmenu-5.0/stest.c | \
+ sed 's|^#include "arg.h"$|// #include "arg.h"|' | \
+ cc -xc - -o stest
+```
+
+With the `stest` utility you'll be able to list executables in your `$PATH` and pipe them to dmenu or something else yourself:
+```shell
+$ (IFS=:; ./stest -flx $PATH;) | sort -u | remembering -p another-dmenu-exec -c dmenu | sh
+```
+
+In fact, the code for `dmenu_path` is almost just like that.
+
+## Conclusion
+
+For my personal use, I've [packaged] `remembering` for GNU Guix and Nix. Packaging it to any other distribution should be trivial, or just downloading the tarball and running `[sudo] make install`.
+
+Patches welcome!
+
+[packaged]: https://euandre.org/git/package-repository/
+[nix-file]: https://euandre.org/git/dotfiles/tree/nixos/not-on-nixpkgs/remembering.nix?id=0831444f745cf908e940407c3e00a61f6152961f
diff --git a/src/content/blog/2021/02/17/fallible.adoc b/src/content/blog/2021/02/17/fallible.adoc
new file mode 100644
index 0000000..8a097f8
--- /dev/null
+++ b/src/content/blog/2021/02/17/fallible.adoc
@@ -0,0 +1,244 @@
+= ANN: fallible - Fault injection library for stress-testing failure scenarios
+
+date: 2021-02-17
+
+updated_at: 2022-03-06
+
+layout: post
+
+lang: en
+
+ref: ann-fallible-fault-injection-library-for-stress-testing-failure-scenarios
+
+---
+
+Yesterday I pushed v0.1.0 of [fallible], a miniscule library for fault-injection
+and stress-testing C programs.
+
+[fallible]: https://euandreh.xyz/fallible/
+
+## *EDIT*
+
+2021-06-12: As of [0.3.0] (and beyond), the macro interface improved and is a bit different from what is presented in this article. If you're interested, I encourage you to take a look at it.
+
+2022-03-06: I've [archived] the project for now. It still needs some maturing before being usable.
+
+[0.3.0]: https://euandreh.xyz/fallible/CHANGELOG.html
+[archived]: https://euandre.org/static/attachments/fallible.tar.gz
+
+## Existing solutions
+
+Writing robust code can be challenging, and tools like static analyzers, fuzzers and friends can help you get there with more certainty.
+As I would try to improve some of my C code and make it more robust, in order to handle system crashes, filled disks, out-of-memory and similar scenarios, I didn't find existing tooling to help me get there as I expected to find.
+I couldn't find existing tools to help me explicitly stress-test those failure scenarios.
+
+Take the "[Writing Robust Programs][gnu-std]" section of the GNU Coding Standards:
+
+[gnu-std]: https://www.gnu.org/prep/standards/standards.html#Semantics
+
+> Check every system call for an error return, unless you know you wish to ignore errors.
+> (...) Check every call to malloc or realloc to see if it returned NULL.
+
+From a robustness standpoint, this is a reasonable stance: if you want to have a robust program that knows how to fail when you're out of memory and `malloc` returns `NULL`, then you ought to check every call to `malloc`.
+
+Take a sample code snippet for clarity:
+
+```c
+void a_function() {
+ char *s1 = malloc(A_NUMBER);
+ strcpy(s1, "some string");
+
+ char *s2 = malloc(A_NUMBER);
+ strcpy(s2, "another string");
+}
+```
+
+At a first glance, this code is unsafe: if any of the calls to `malloc` returns `NULL`, `strcpy` will be given a `NULL` pointer.
+
+My first instinct was to change this code to something like this:
+
+```diff
+@@ -1,7 +1,15 @@
+ void a_function() {
+ char *s1 = malloc(A_NUMBER);
++ if (!s1) {
++ fprintf(stderr, "out of memory, exitting\n");
++ exit(1);
++ }
+ strcpy(s1, "some string");
+
+ char *s2 = malloc(A_NUMBER);
++ if (!s2) {
++ fprintf(stderr, "out of memory, exitting\n");
++ exit(1);
++ }
+ strcpy(s2, "another string");
+ }
+```
+
+As I later found out, there are at least 2 problems with this approach:
+
+1. **it doesn't compose**: this could arguably work if `a_function` was `main`.
+   But if `a_function` lives inside a library, an `exit(1);` is an inelegant way of handling failures, and will catch the top-level `main` consuming the library by surprise;
+2. **it gives up instead of handling failures**: the actual handling goes a bit beyond stopping.
+ What about open file handles, in-memory caches, unflushed bytes, etc.?
+
+If you could force only the second call to `malloc` to fail, [Valgrind] would correctly complain that the program exited with unfreed memory.
+
+[Valgrind]: https://www.valgrind.org/
+
+So the last change to make the best version of the above code is:
+
+```diff
+@@ -1,15 +1,14 @@
+-void a_function() {
++bool a_function() {
+ char *s1 = malloc(A_NUMBER);
+ if (!s1) {
+- fprintf(stderr, "out of memory, exitting\n");
+- exit(1);
++ return false;
+ }
+ strcpy(s1, "some string");
+
+ char *s2 = malloc(A_NUMBER);
+ if (!s2) {
+- fprintf(stderr, "out of memory, exitting\n");
+- exit(1);
++ free(s1);
++ return false;
+ }
+ strcpy(s2, "another string");
+ }
+```
+
+Instead of returning `void`, `a_function` now returns `bool` to indicate whether an error occurred during its execution.
+If `a_function` returned a pointer to something, the return value could be `NULL`, or an `int` that represents an error code.
+
+The code is now a) safe and b) failing gracefully, returning the control to the caller to properly handle the error case.
+
+After seeing similar patterns on well designed APIs, I adopted this practice for my own code, but was still left with manually verifying the correctness and robustness of it.
+
+How could I add assertions around my code that would help me make sure the `free(s1);` exists, before getting an error report?
+How do other people and projects solve this?
+
+From what I could see, either people a) hope for the best, b) write safe code but don't stress-test it or c) write ad-hoc code to stress it.
+
+The most prominent case of c) is SQLite: it has a few wrappers around the familiar `malloc` to do fault injection, check for memory limits, add warnings, create shim layers for other environments, etc.
+All of that, however, is tightly coupled with SQLite itself, and couldn't be easily pulled out for use somewhere else.
+
+When searching for it online, an [interesting thread] caught my attention: fail the call to `malloc` for each time it is called, and when the same stacktrace appears again, allow it to proceed.
+
+[interesting thread]: https://stackoverflow.com/questions/1711170/unit-testing-for-failed-malloc
+
+## Implementation
+
+A working implementation of that already exists: [mallocfail].
+It uses `LD_PRELOAD` to replace `malloc` at run-time, computes the SHA of the stacktrace and fails once for each SHA.
+
+I initially envisioned and started implementing something very similar to mallocfail.
+However I wanted it to go beyond out-of-memory scenarios, and using `LD_PRELOAD` for every possible corner that could fail wasn't a good idea on the long run.
+
+Also, mallocfail won't work together with tools such as Valgrind, which want to do their own override of `malloc` with `LD_PRELOAD`.
+
+I instead went with less automatic things: starting with a `fallible_should_fail(char *filename, int lineno)` function that fails once for each `filename`+`lineno` combination, I created macro wrappers around common functions such as `malloc`:
+
+```c
+void *fallible_malloc(size_t size, const char *const filename, int lineno) {
+#ifdef FALLIBLE
+ if (fallible_should_fail(filename, lineno)) {
+ return NULL;
+ }
+#else
+ (void)filename;
+ (void)lineno;
+#endif
+ return malloc(size);
+}
+
+#define MALLOC(size) fallible_malloc(size, __FILE__, __LINE__)
+```
+
+With this definition, I could replace the calls to `malloc` with `MALLOC` (or any other name that you want to `#define`):
+
+```diff
+--- 3.c 2021-02-17 00:15:38.019706074 -0300
++++ 4.c 2021-02-17 00:44:32.306885590 -0300
+@@ -1,11 +1,11 @@
+ bool a_function() {
+- char *s1 = malloc(A_NUMBER);
++ char *s1 = MALLOC(A_NUMBER);
+ if (!s1) {
+ return false;
+ }
+ strcpy(s1, "some string");
+
+- char *s2 = malloc(A_NUMBER);
++ char *s2 = MALLOC(A_NUMBER);
+ if (!s2) {
+ free(s1);
+ return false;
+```
+
+With this change, if the program gets compiled with the `-DFALLIBLE` flag the fault-injection mechanism will run, and `MALLOC` will fail once for each `filename`+`lineno` combination.
+When the flag is missing, `MALLOC` is a very thin wrapper around `malloc`, which compilers could remove entirely, and the `-lfallible` flags can be omitted.
+
+This applies not only to `malloc` or other `stdlib.h` functions.
+If `a_function` is important or relevant, I could add a wrapper around it too, that calls `fallible_should_fail` to exercise whether its callers are also doing the proper clean-up.
+
+The actual code is just this single function, [`fallible_should_fail`], which ended-up taking only ~40 lines.
+In fact, there are more lines of either Makefile (111), README.md (82) or troff (306) on this first version.
+
+The price for such fine-grained control is that this approach requires more manual work.
+
+[mallocfail]: https://github.com/ralight/mallocfail
+[`fallible_should_fail`]: https://euandre.org/git/fallible/tree/src/fallible.c?id=v0.1.0#n16
+
+## Usage examples
+
+### `MALLOC` from the `README.md`
+
+```c
+// leaky.c
+#include <string.h>
+#include <fallible_alloc.h>
+
+int main() {
+ char *aaa = MALLOC(100);
+ if (!aaa) {
+ return 1;
+ }
+ strcpy(aaa, "a safe use of strcpy");
+
+ char *bbb = MALLOC(100);
+ if (!bbb) {
+ // free(aaa);
+ return 1;
+ }
+ strcpy(bbb, "not unsafe, but aaa is leaking");
+
+ free(bbb);
+ free(aaa);
+ return 0;
+}
+```
+
+Compile with `-DFALLIBLE` and run [`fallible-check.1`][fallible-check]:
+```shell
+$ c99 -DFALLIBLE -o leaky leaky.c -lfallible
+$ fallible-check ./leaky
+Valgrind failed when we did not expect it to:
+(...suppressed output...)
+# exit status is 1
+```
+
+[fallible-check]: https://euandreh.xyz/fallible/fallible-check.1.html
+
+## Conclusion
+
+For my personal use, I'll [package] them for GNU Guix and Nix.
+Packaging it to any other distribution should be trivial, or just downloading the tarball and running `[sudo] make install`.
+
+Patches welcome!
+
+[package]: https://euandre.org/git/package-repository/
diff --git a/src/content/blog/2021/02/17/fallible.tar.gz b/src/content/blog/2021/02/17/fallible.tar.gz
new file mode 100644
index 0000000..7bf2a58
--- /dev/null
+++ b/src/content/blog/2021/02/17/fallible.tar.gz
Binary files differ
diff --git a/src/content/blog/2021/04/29/relational-review.adoc b/src/content/blog/2021/04/29/relational-review.adoc
new file mode 100644
index 0000000..e15b478
--- /dev/null
+++ b/src/content/blog/2021/04/29/relational-review.adoc
@@ -0,0 +1,130 @@
+---
+
+title: A Relational Model of Data for Large Shared Data Banks - article-review
+
+date: 2021-04-29
+
+layout: post
+
+lang: en
+
+ref: a-relational-model-of-data-for-large-shared-data-banks-article-review
+
+---
+
+This is a review of the article "[A Relational Model of Data for Large Shared Data Banks][codd-article]", by E. F. Codd.
+
+[codd-article]: https://www.seas.upenn.edu/~zives/03f/cis550/codd.pdf
+
+## Data Independence
+
+Codd brings the idea of *data independence* as a better approach to use on databases.
+This is in contrast with the existing approaches, namely hierarchical (tree-based) and network-based.
+
+His main argument is that queries in applications shouldn't depend on and be coupled with how the data is represented internally by the database system.
+This key idea is very powerful, and something that we strive for in many other places: decoupling the interface from the implementation.
+
+If the database system has this separation, it can keep the querying interface stable, while having the freedom to change its internal representation at will, for better performance, less storage, etc.
+
+This is true for most modern database systems.
+They can change from B-Trees with leaves containing pointers to data, to B-Trees with leaves containing the raw data, to hash tables.
+All that without changing the query interface, only its performance.
+
+Codd mentions that, from an information representation standpoint, any index is a duplication, but useful for performance.
+
+This data independence also impacts ordering (a *relation* doesn't rely on the insertion order).
+
+## Duplicates
+
+His definition of relational data is a bit different from most modern database systems, namely **no duplicate rows**.
+
+I couldn't find a reason behind this restriction, though.
+For practical purposes, I find it useful to have it.
+
+## Relational Data
+
+In the article, Codd doesn't try to define a language, and today's most popular one is SQL.
+
+However, there is no restriction that says that "SQL database" and "relational database" are synonyms.
+One could have a relational database without using SQL at all, and it would still be a relational one.
+
+The main one that I have in mind, and the reason that led me to reading this paper in the first place, is Datomic.
+
+It uses an [edn]-based representation for datalog queries[^edn-queries], and a particular schema used to represent data.
+
+Even though it looks very weird when coming from SQL, I'd argue that it ticks all the boxes (except for "no duplicates") that define a relational database, since building relations and applying operations on them is possible.
+
+Compare and contrast a contrived example of possible representations of SQL and datalog of the same data:
+
+```sql
+-- create schema
+CREATE TABLE people (
+ id UUID PRIMARY KEY,
+ name TEXT NOT NULL,
+ manager_id UUID,
+ FOREIGN KEY (manager_id) REFERENCES people (id)
+);
+
+-- insert data
+INSERT INTO people (id, name, manager_id) VALUES
+  ('d3f29960-ccf0-44e4-be66-1a1544677441', 'Foo', '076356f4-1a0e-451c-b9c6-a6f56feec941'),
+  ('076356f4-1a0e-451c-b9c6-a6f56feec941', 'Bar', NULL);
+
+-- query data, make a relation
+
+SELECT employees.name AS 'employee-name',
+ managers.name AS 'manager-name'
+FROM people employees
+INNER JOIN people managers ON employees.manager_id = managers.id;
+```
+
+{% raw %}
+```
+;; create schema
+#{ {:db/ident :person/id
+ :db/valueType :db.type/uuid
+ :db/cardinality :db.cardinality/one
+ :db/unique :db.unique/value}
+ {:db/ident :person/name
+ :db/valueType :db.type/string
+ :db/cardinality :db.cardinality/one}
+ {:db/ident :person/manager
+ :db/valueType :db.type/ref
+ :db/cardinality :db.cardinality/one}}
+
+;; insert data
+#{ {:person/id #uuid "d3f29960-ccf0-44e4-be66-1a1544677441"
+ :person/name "Foo"
+ :person/manager [:person/id #uuid "076356f4-1a0e-451c-b9c6-a6f56feec941"]}
+ {:person/id #uuid "076356f4-1a0e-451c-b9c6-a6f56feec941"
+ :person/name "Bar"}}
+
+;; query data, make a relation
+{:find [?employee-name ?manager-name]
+ :where [[?person :person/name ?employee-name]
+ [?person :person/manager ?manager]
+ [?manager :person/name ?manager-name]]}
+```
+{% endraw %}
+
+(forgive any errors on the above SQL and datalog code, I didn't run them to check. Patches welcome!)
+
+This employee example comes from the paper, and both SQL and datalog representations match the paper definition of "relational".
+
+Both "Foo" and "Bar" are employees, and the data is normalized.
+SQL represents data as tables, and Datomic as datoms, but relations could be derived from both, which we could view as:
+
+```
+employee_name | manager_name
+----------------------------
+"Foo" | "Bar"
+```
+
+[^edn-queries]: You can think of it as JSON, but with a Clojure taste.
+[edn]: https://github.com/edn-format/edn
+
+## Conclusion
+
+The article also talks about operators, consistency and normalization, which are now so widespread and well-known that it feels a bit weird seeing someone advocating for it.
+
+I also establish that `relational != SQL`, and other databases such as Datomic are also relational, following Codd's original definition.
diff --git a/src/content/blog/index.adoc b/src/content/blog/index.adoc
new file mode 100644
index 0000000..afd64d4
--- /dev/null
+++ b/src/content/blog/index.adoc
@@ -0,0 +1 @@
+= Blog
diff --git a/src/content/img/atom.svg b/src/content/img/atom.svg
new file mode 100644
index 0000000..37bace2
--- /dev/null
+++ b/src/content/img/atom.svg
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path d="M576 1344q0 80-56 136t-136 56-136-56-56-136 56-136 136-56 136 56 56 136zm512 123q2 28-17 48-18 21-47 21h-135q-25 0-43-16.5t-20-41.5q-22-229-184.5-391.5t-391.5-184.5q-25-2-41.5-20t-16.5-43v-135q0-29 21-47 17-17 43-17h5q160 13 306 80.5t259 181.5q114 113 181.5 259t80.5 306zm512 2q2 27-18 47-18 20-46 20h-143q-26 0-44.5-17.5t-19.5-42.5q-12-215-101-408.5t-231.5-336-336-231.5-408.5-102q-25-1-42.5-19.5t-17.5-43.5v-143q0-28 20-46 18-18 44-18h3q262 13 501.5 120t425.5 294q187 186 294 425.5t120 501.5z"
+ fill="#EA990E" />
+</svg>
diff --git a/src/content/img/envelope.svg b/src/content/img/envelope.svg
new file mode 100644
index 0000000..c2251f4
--- /dev/null
+++ b/src/content/img/envelope.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path d="M1664 1504v-768q-32 36-69 66-268 206-426 338-51 43-83 67t-86.5 48.5-102.5 24.5h-2q-48 0-102.5-24.5t-86.5-48.5-83-67q-158-132-426-338-37-30-69-66v768q0 13 9.5 22.5t22.5 9.5h1472q13 0 22.5-9.5t9.5-22.5zm0-1051v-24.5l-.5-13-3-12.5-5.5-9-9-7.5-14-2.5h-1472q-13 0-22.5 9.5t-9.5 22.5q0 168 147 284 193 152 401 317 6 5 35 29.5t46 37.5 44.5 31.5 50.5 27.5 43 9h2q20 0 43-9t50.5-27.5 44.5-31.5 46-37.5 35-29.5q208-165 401-317 54-43 100.5-115.5t46.5-131.5zm128-37v1088q0 66-47 113t-113 47h-1472q-66 0-113-47t-47-113v-1088q0-66 47-113t113-47h1472q66 0 113 47t47 113z" />
+</svg>
diff --git a/src/content/img/favicon.svg b/src/content/img/favicon.svg
new file mode 100644
index 0000000..ce566b2
--- /dev/null
+++ b/src/content/img/favicon.svg
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16">
+ <path d="M 0 8 L 1 8 L 1 9 L 0 9 L 0 8 Z" />
+ <path d="M 0 13 L 1 13 L 1 14 L 0 14 L 0 13 Z" />
+ <path d="M 1 8 L 2 8 L 2 9 L 1 9 L 1 8 Z" />
+ <path d="M 1 13 L 2 13 L 2 14 L 1 14 L 1 13 Z" />
+ <path d="M 2 8 L 3 8 L 3 9 L 2 9 L 2 8 Z" />
+ <path d="M 2 13 L 3 13 L 3 14 L 2 14 L 2 13 Z" />
+ <path d="M 3 8 L 4 8 L 4 9 L 3 9 L 3 8 Z" />
+ <path d="M 3 13 L 4 13 L 4 14 L 3 14 L 3 13 Z" />
+ <path d="M 4 7 L 5 7 L 5 8 L 4 8 L 4 7 Z" />
+ <path d="M 4 8 L 5 8 L 5 9 L 4 9 L 4 8 Z" />
+ <path d="M 4 13 L 5 13 L 5 14 L 4 14 L 4 13 Z" />
+ <path d="M 5 6 L 6 6 L 6 7 L 5 7 L 5 6 Z" />
+ <path d="M 5 7 L 6 7 L 6 8 L 5 8 L 5 7 Z" />
+ <path d="M 5 13 L 6 13 L 6 14 L 5 14 L 5 13 Z" />
+ <path d="M 6 5 L 7 5 L 7 6 L 6 6 L 6 5 Z" />
+ <path d="M 6 6 L 7 6 L 7 7 L 6 7 L 6 6 Z" />
+ <path d="M 6 14 L 7 14 L 7 15 L 6 15 L 6 14 Z" />
+ <path d="M 7 1 L 8 1 L 8 2 L 7 2 L 7 1 Z" />
+ <path d="M 7 14 L 8 14 L 8 15 L 7 15 L 7 14 Z" />
+ <path d="M 7 15 L 8 15 L 8 16 L 7 16 L 7 15 Z" />
+ <path d="M 7 2 L 8 2 L 8 3 L 7 3 L 7 2 Z" />
+ <path d="M 7 3 L 8 3 L 8 4 L 7 4 L 7 3 Z" />
+ <path d="M 7 4 L 8 4 L 8 5 L 7 5 L 7 4 Z" />
+ <path d="M 7 5 L 8 5 L 8 6 L 7 6 L 7 5 Z" />
+ <path d="M 8 1 L 9 1 L 9 2 L 8 2 L 8 1 Z" />
+ <path d="M 8 15 L 9 15 L 9 16 L 8 16 L 8 15 Z" />
+ <path d="M 9 1 L 10 1 L 10 2 L 9 2 L 9 1 Z" />
+ <path d="M 9 2 L 10 2 L 10 3 L 9 3 L 9 2 Z" />
+ <path d="M 9 6 L 10 6 L 10 7 L 9 7 L 9 6 Z" />
+ <path d="M 9 15 L 10 15 L 10 16 L 9 16 L 9 15 Z" />
+ <path d="M 10 2 L 11 2 L 11 3 L 10 3 L 10 2 Z" />
+ <path d="M 10 3 L 11 3 L 11 4 L 10 4 L 10 3 Z" />
+ <path d="M 10 4 L 11 4 L 11 5 L 10 5 L 10 4 Z" />
+ <path d="M 10 5 L 11 5 L 11 6 L 10 6 L 10 5 Z" />
+ <path d="M 10 6 L 11 6 L 11 7 L 10 7 L 10 6 Z" />
+ <path d="M 11 6 L 12 6 L 12 7 L 11 7 L 11 6 Z" />
+ <path d="M 11 8 L 12 8 L 12 9 L 11 9 L 11 8 Z" />
+ <path d="M 10 15 L 11 15 L 11 16 L 10 16 L 10 15 Z" />
+ <path d="M 11 10 L 12 10 L 12 11 L 11 11 L 11 10 Z" />
+ <path d="M 11 12 L 12 12 L 12 13 L 11 13 L 11 12 Z" />
+ <path d="M 11 14 L 12 14 L 12 15 L 11 15 L 11 14 Z" />
+ <path d="M 11 15 L 12 15 L 12 16 L 11 16 L 11 15 Z" />
+ <path d="M 12 6 L 13 6 L 13 7 L 12 7 L 12 6 Z" />
+ <path d="M 12 8 L 13 8 L 13 9 L 12 9 L 12 8 Z" />
+ <path d="M 12 10 L 13 10 L 13 11 L 12 11 L 12 10 Z" />
+ <path d="M 12 12 L 13 12 L 13 13 L 12 13 L 12 12 Z" />
+ <path d="M 12 14 L 13 14 L 13 15 L 12 15 L 12 14 Z" />
+ <path d="M 13 6 L 14 6 L 14 7 L 13 7 L 13 6 Z" />
+ <path d="M 13 8 L 14 8 L 14 9 L 13 9 L 13 8 Z" />
+ <path d="M 13 10 L 14 10 L 14 11 L 13 11 L 13 10 Z" />
+ <path d="M 13 12 L 14 12 L 14 13 L 13 13 L 13 12 Z" />
+ <path d="M 13 13 L 14 13 L 14 14 L 13 14 L 13 13 Z" />
+ <path d="M 13 14 L 14 14 L 14 15 L 13 15 L 13 14 Z" />
+ <path d="M 14 7 L 15 7 L 15 8 L 14 8 L 14 7 Z" />
+ <path d="M 14 8 L 15 8 L 15 9 L 14 9 L 14 8 Z" />
+ <path d="M 14 9 L 15 9 L 15 10 L 14 10 L 14 9 Z" />
+ <path d="M 14 10 L 15 10 L 15 11 L 14 11 L 14 10 Z" />
+ <path d="M 14 11 L 15 11 L 15 12 L 14 12 L 14 11 Z" />
+ <path d="M 14 12 L 15 12 L 15 13 L 14 13 L 14 12 Z" />
+</svg>
diff --git a/src/content/img/link.svg b/src/content/img/link.svg
new file mode 100644
index 0000000..e5c7050
--- /dev/null
+++ b/src/content/img/link.svg
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg">
+ <path fill-rule="evenodd"
+ d="M7.775 3.275a.75.75 0 001.06 1.06l1.25-1.25a2 2 0 112.83 2.83l-2.5 2.5a2 2 0 01-2.83 0 .75.75 0 00-1.06 1.06 3.5 3.5 0 004.95 0l2.5-2.5a3.5 3.5 0 00-4.95-4.95l-1.25 1.25zm-4.69 9.64a2 2 0 010-2.83l2.5-2.5a2 2 0 012.83 0 .75.75 0 001.06-1.06 3.5 3.5 0 00-4.95 0l-2.5 2.5a3.5 3.5 0 004.95 4.95l1.25-1.25a.75.75 0 00-1.06-1.06l-1.25 1.25a2 2 0 01-2.83 0z" />
+</svg>
diff --git a/src/content/img/lock.svg b/src/content/img/lock.svg
new file mode 100644
index 0000000..1a4a18e
--- /dev/null
+++ b/src/content/img/lock.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path d="M640 768h512v-192q0-106-75-181t-181-75-181 75-75 181v192zm832 96v576q0 40-28 68t-68 28h-960q-40 0-68-28t-28-68v-576q0-40 28-68t68-28h32v-192q0-184 132-316t316-132 316 132 132 316v192h32q40 0 68 28t28 68z" />
+</svg>
diff --git a/src/content/index.adoc b/src/content/index.adoc
new file mode 100644
index 0000000..275f7c4
--- /dev/null
+++ b/src/content/index.adoc
@@ -0,0 +1 @@
+= index
diff --git a/src/content/pastebins/2016/04/05/rpn.adoc b/src/content/pastebins/2016/04/05/rpn.adoc
new file mode 100644
index 0000000..25ca6ba
--- /dev/null
+++ b/src/content/pastebins/2016/04/05/rpn.adoc
@@ -0,0 +1,34 @@
+---
+
+title: RPN macro setup
+
+date: 2016-04-05
+
+layout: post
+
+lang: en
+
+ref: rpn-macro-setup
+
+---
+
+```lisp
+(defmacro rpn (body)
+ (rpn-expander body))
+
+(defun rpn-expander (body)
+ (mapcar (lambda (x)
+ (if (listp x)
+ (rpn-expander x)
+ x))
+ (reverse body)))
+
+(rpn ((2 1 +) 2 *))
+; => 6
+
+#|
+Just a quick stub.
+
+One could easily improve #'RPN-EXPANDER in order to better suit one's needs.
+|#
+```
diff --git a/src/content/pastebins/2018/07/11/nix-pinning.adoc b/src/content/pastebins/2018/07/11/nix-pinning.adoc
new file mode 100644
index 0000000..2d35e09
--- /dev/null
+++ b/src/content/pastebins/2018/07/11/nix-pinning.adoc
@@ -0,0 +1,38 @@
+---
+
+title: Nix pinning
+
+date: 2018-07-11
+
+layout: post
+
+lang: en
+
+eu_categories: nix
+
+ref: nix-pinning
+
+---
+
+```nix
+let
+ # Pin the nixpkgs version
+ stdenv = pkgs.stdenv;
+ pkgsOriginal = import <nixpkgs> {};
+ pkgsSrc = pkgsOriginal.fetchzip {
+ url = "https://github.com/NixOS/nixpkgs/archive/18.03.zip";
+ sha256 = "0hk4y2vkgm1qadpsm4b0q1vxq889jhxzjx3ragybrlwwg54mzp4f";
+ };
+
+ pkgs = import (pkgsSrc) {};
+
+ buildNodeJS = pkgs.callPackage <nixpkgs/pkgs/development/web/nodejs/nodejs.nix> {};
+
+in rec {
+ nodeFromNVMRC = buildNodeJS {
+ version = "8.7.0";
+ sha256 = "16mml3cwjnq7yf9yd67d2dybav3nvbnk89fkixs1wz7fd26d05ss";
+ patches = [];
+ };
+}
+```
diff --git a/src/content/pastebins/2018/07/13/guix-nixos-systemd.adoc b/src/content/pastebins/2018/07/13/guix-nixos-systemd.adoc
new file mode 100644
index 0000000..c2b8b62
--- /dev/null
+++ b/src/content/pastebins/2018/07/13/guix-nixos-systemd.adoc
@@ -0,0 +1,33 @@
+---
+
+title: GNU Guix systemd daemon for NixOS
+
+date: 2018-07-13
+
+layout: post
+
+lang: en
+
+eu_categories: nix,guix
+
+ref: gnu-guix-systemd-daemon-for-nixos
+
+---
+
+```nix
+ # Derived from Guix guix-daemon.service.in
+ # https://git.savannah.gnu.org/cgit/guix.git/tree/etc/guix-daemon.service.in?id=00c86a888488b16ce30634d3a3a9d871ed6734a2
+ systemd.services.guix-daemon = {
+ enable = true;
+ description = "Build daemon for GNU Guix";
+ serviceConfig = {
+ ExecStart = "/var/guix/profiles/per-user/root/guix-profile/bin/guix-daemon --build-users-group=guixbuild";
+ Environment="GUIX_LOCPATH=/root/.guix-profile/lib/locale";
+ RemainAfterExit="yes";
+ StandardOutput="syslog";
+ StandardError="syslog";
+ TaskMax= 8192;
+ };
+ wantedBy = [ "multi-user.target" ];
+ };
+```
diff --git a/src/content/pastebins/2018/07/13/guixbuilder-nixos.adoc b/src/content/pastebins/2018/07/13/guixbuilder-nixos.adoc
new file mode 100644
index 0000000..880d347
--- /dev/null
+++ b/src/content/pastebins/2018/07/13/guixbuilder-nixos.adoc
@@ -0,0 +1,53 @@
+---
+
+title: Guix users in NixOS system configuration
+
+date: 2018-07-13
+
+layout: post
+
+lang: en
+
+eu_categories: nix,guix
+
+ref: guix-users-in-nixos-system-configuration
+
+---
+
+```nix
+ users = {
+ mutableUsers = false;
+
+ extraUsers =
+ let
+ andrehUser = {
+ andreh = {
+ # my custom user config
+ };
+ };
+ # From the Guix manual:
+ # https://www.gnu.org/software/guix/manual/en/html_node/Build-Environment-Setup.html#Build-Environment-Setup
+ buildUser = (i:
+ {
+ "guixbuilder${i}" = { # guixbuilder$i
+ group = "guixbuild"; # -g guixbuild
+ extraGroups = ["guixbuild"]; # -G guixbuild
+ home = "/var/empty"; # -d /var/empty
+ shell = pkgs.nologin; # -s `which nologin`
+ description = "Guix build user ${i}"; # -c "Guix buid user $i"
+ isSystemUser = true; # --system
+ };
+ }
+ );
+ in
+ # merge all users
+ pkgs.lib.fold (str: acc: acc // buildUser str)
+ andrehUser
+ # for i in `seq -w 1 10`
+ (map (pkgs.lib.fixedWidthNumber 2) (builtins.genList (n: n+1) 10));
+
+ extraGroups.guixbuild = {
+ name = "guixbuild";
+ };
+ };
+```
diff --git a/src/content/pastebins/2018/07/13/guixbuilder.adoc b/src/content/pastebins/2018/07/13/guixbuilder.adoc
new file mode 100644
index 0000000..82204a8
--- /dev/null
+++ b/src/content/pastebins/2018/07/13/guixbuilder.adoc
@@ -0,0 +1,26 @@
+---
+
+title: Guix builder user creation commands
+
+date: 2018-07-13
+
+layout: post
+
+lang: en
+
+eu_categories: guix
+
+ref: guix-builder-user-creation-commands
+
+---
+
+```shell
+groupadd --system guixbuild
+for i in `seq -w 1 10`;
+do
+ useradd -g guixbuild -G guixbuild \
+ -d /var/empty -s `which nologin` \
+ -c "Guix build user $i" --system \
+ guixbuilder$i;
+done
+```
diff --git a/src/content/pastebins/2018/07/13/nix-strpad.adoc b/src/content/pastebins/2018/07/13/nix-strpad.adoc
new file mode 100644
index 0000000..359bda5
--- /dev/null
+++ b/src/content/pastebins/2018/07/13/nix-strpad.adoc
@@ -0,0 +1,19 @@
+---
+
+title: Nix string padding
+
+date: 2018-07-13
+
+layout: post
+
+lang: en
+
+eu_categories: nix
+
+ref: nix-string-padding
+
+---
+
+```nix
+padString = (n: if n < 10 then "0" + toString n else toString n)
+```
diff --git a/src/content/pastebins/2018/07/25/nix-exps.adoc b/src/content/pastebins/2018/07/25/nix-exps.adoc
new file mode 100644
index 0000000..23d75b6
--- /dev/null
+++ b/src/content/pastebins/2018/07/25/nix-exps.adoc
@@ -0,0 +1,58 @@
+---
+
+title: Nix exps
+
+date: 2018-07-25
+
+layout: post
+
+lang: en
+
+eu_categories: nix
+
+ref: nix-exps
+
+---
+
+```nix
+let
+ pkgsOriginal = import <nixpkgs> {};
+ pkgsSrc = pkgsOriginal.fetchzip {
+ url = "https://github.com/NixOS/nixpkgs/archive/18.03.zip";
+ sha256 = "0hk4y2vkgm1qadpsm4b0q1vxq889jhxzjx3ragybrlwwg54mzp4f";
+ };
+ pkgs = import (pkgsSrc) {};
+ stdenv = pkgs.stdenv;
+
+ # Taken from:
+ # http://www.cs.yale.edu/homes/lucas.paul/posts/2017-04-10-hakyll-on-nix.html
+ websiteBuilder = pkgs.stdenv.mkDerivation {
+ name = "website-builder";
+ src = ./hakyll;
+ phases = "unpackPhase buildPhase";
+ buildInputs = [
+ (pkgs.haskellPackages.ghcWithPackages (p: with p; [ hakyll ]))
+ ];
+ buildPhase = ''
+ mkdir -p $out/bin
+ ghc -O2 -dynamic --make Main.hs -o $out/bin/generate-site
+ '';
+ };
+in rec {
+ euandrehWebsite = stdenv.mkDerivation rec {
+ name = "euandreh-website";
+ src = ./site;
+ phases = "unpackPhase buildPhase";
+ # version = "0.1";
+ buildInputs = [ websiteBuilder ];
+ buildPhase = ''
+ export LOCALE_ARCHIVE="${pkgs.glibcLocales}/lib/locale/locale-archive";
+ export LANG=en_US.UTF-8
+ generate-site build
+
+ mkdir $out
+ cp -r _site/* $out
+ '';
+ };
+}
+```
diff --git a/src/content/pastebins/2018/07/25/nix-showdrv.adoc b/src/content/pastebins/2018/07/25/nix-showdrv.adoc
new file mode 100644
index 0000000..813965d
--- /dev/null
+++ b/src/content/pastebins/2018/07/25/nix-showdrv.adoc
@@ -0,0 +1,86 @@
+---
+
+title: nix show-derivation sample output
+
+date: 2018-07-25
+
+layout: post
+
+lang: en
+
+eu_categories: nix
+
+ref: nix-show-derivation-sample-output
+
+---
+
+```nix
+$ nix show-derivation /nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.drv
+{
+ "/nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.drv": {
+ "outputs": {
+ "out": {
+ "path": "/nix/store/dc897j29s5pl5mcw064n5b07bydacfm5-combofont-0.2",
+ "hashAlgo": "r:sha1",
+ "hash": "06be9cab7176fe6d99dd773315d9ec5c62f6a71b"
+ }
+ },
+ "inputSrcs": [
+ "/nix/store/b6ill8amfg0gki49zapm4asrrw9zzgz9-builder.sh"
+ ],
+ "inputDrvs": {
+ "/nix/store/3s0crp8826gwvfap6kjjyh9a7wq92awk-stdenv.drv": [
+ "out"
+ ],
+ "/nix/store/fafsh2hx1xxqgm8gwkj3bw3czz6dcvvw-mirrors-list.drv": [
+ "out"
+ ],
+ "/nix/store/qqla9sd8p8qwgl2a1wpn75bwp2vw70mm-bash-4.4-p12.drv": [
+ "out"
+ ],
+ "/nix/store/v8fxvb0wlsa5pmrfawa3dg501mglw43c-curl-7.59.0.drv": [
+ "dev"
+ ]
+ },
+ "platform": "x86_64-linux",
+ "builder": "/nix/store/lw7xaqhakk0i1c631m3cvac3x4lc5gr5-bash-4.4-p12/bin/bash",
+ "args": [
+ "-e",
+ "/nix/store/b6ill8amfg0gki49zapm4asrrw9zzgz9-builder.sh"
+ ],
+ "env": {
+ "buildInputs": "",
+ "builder": "/nix/store/lw7xaqhakk0i1c631m3cvac3x4lc5gr5-bash-4.4-p12/bin/bash",
+ "configureFlags": "",
+ "curlOpts": "",
+ "depsBuildBuild": "",
+ "depsBuildBuildPropagated": "",
+ "depsBuildTarget": "",
+ "depsBuildTargetPropagated": "",
+ "depsHostBuild": "",
+ "depsHostBuildPropagated": "",
+ "depsTargetTarget": "",
+ "depsTargetTargetPropagated": "",
+ "downloadToTemp": "1",
+ "executable": "",
+ "impureEnvVars": "http_proxy https_proxy ftp_proxy all_proxy no_proxy NIX_CURL_FLAGS NIX_HASHED_MIRRORS NIX_CONNECT_TIMEOUT NIX_MIRRORS_apache NIX_MIRRORS_bioc NIX_MIRRORS_bitlbee NIX_MIRRORS_cpan NIX_MIRRORS_debian NIX_MIRRORS_fedora NIX_MIRRORS_gcc NIX_MIRRORS_gentoo NIX_MIRRORS_gnome NIX_MIRRORS_gnu NIX_MIRRORS_gnupg NIX_MIRRORS_hackage NIX_MIRRORS_hashedMirrors NIX_MIRRORS_imagemagick NIX_MIRRORS_kde NIX_MIRRORS_kernel NIX_MIRRORS_maven NIX_MIRRORS_metalab NIX_MIRRORS_mozilla NIX_MIRRORS_mysql NIX_MIRRORS_oldsuse NIX_MIRRORS_openbsd NIX_MIRRORS_opensuse NIX_MIRRORS_postgresql NIX_MIRRORS_pypi NIX_MIRRORS_roy NIX_MIRRORS_sagemath NIX_MIRRORS_samba NIX_MIRRORS_savannah NIX_MIRRORS_sourceforge NIX_MIRRORS_sourceforgejp NIX_MIRRORS_steamrt NIX_MIRRORS_ubuntu NIX_MIRRORS_xfce NIX_MIRRORS_xorg",
+ "mirrorsFile": "/nix/store/36pk3fz566c2zj6bj8qy7gxl1z14xc4f-mirrors-list",
+ "name": "combofont-0.2",
+ "nativeBuildInputs": "/nix/store/hgv54iw72sgpqmzgv30s6gsfc4rd4wzp-curl-7.59.0-dev",
+ "out": "/nix/store/dc897j29s5pl5mcw064n5b07bydacfm5-combofont-0.2",
+ "outputHash": "3fkzcqjwxkciacvpvncnvzknf6mrrgh6",
+ "outputHashAlgo": "sha1",
+ "outputHashMode": "recursive",
+ "postFetch": "mkdir \"$out\";tar -xf $downloadedFile \\\n '--strip-components=0' \\\n -C \"$out\" --anchored --exclude=tlpkg --keep-old-files\n",
+ "preferHashedMirrors": "1",
+ "preferLocalBuild": "1",
+ "propagatedBuildInputs": "",
+ "propagatedNativeBuildInputs": "",
+ "showURLs": "",
+ "stdenv": "/nix/store/i3kgk0nibrbpgmzdwdfi2ym50i8m3lww-stdenv",
+ "system": "x86_64-linux",
+ "urls": "http://146.185.144.154/texlive-2017/combofont.tar.xz http://gateway.ipfs.io/ipfs/QmRLK45EC828vGXv5YDaBsJBj2LjMjjA2ReLVrXsasRzy7/texlive-2017/combofont.tar.xz"
+ }
+ }
+}
+```
diff --git a/src/content/pastebins/2019/06/08/inconsistent-hash.adoc b/src/content/pastebins/2019/06/08/inconsistent-hash.adoc
new file mode 100644
index 0000000..51d8ad3
--- /dev/null
+++ b/src/content/pastebins/2019/06/08/inconsistent-hash.adoc
@@ -0,0 +1,1061 @@
+---
+
+title: Inconsistent hash of buildGoModule
+
+date: 2019-06-08
+
+layout: post
+
+lang: en
+
+eu_categories: nix
+
+ref: inconsistent-hash-of-buildgomodule
+
+---
+
+FIXED: The `<nixpkgs>` was different on different environments.
+See <https://discourse.nixos.org/t/inconsistent-hash-of-buildgomodule/3127/2>.
+
+---
+
+The [commit that made this visible][0].
+
+[0]: https://euandre.org/git/servers/commit?id=6ba76140238b5e3c7009c201f9f80ac86063f438
+
+## Offending derivation:
+
+[Full source code on the repository][1]:
+
+[1]: https://euandre.org/git/servers/tree/default.nix?id=6ba76140238b5e3c7009c201f9f80ac86063f438#n3
+
+```nix
+terraform-godaddy = pkgs.buildGoModule rec {
+ name = "terraform-godaddy-${version}";
+ version = "1.6.4";
+ src = pkgs.fetchFromGitHub {
+ owner = "n3integration";
+ repo = "terraform-godaddy";
+ rev = "v${version}";
+ sha256 = "00blqsan74s53dk9ab4hxi1kzxi46k57dr65dmbiradfa3yz3852";
+ };
+ modSha256 = "0p81wqw2n8vraxk20xwg717582ijwq2k7v5j3n13y4cd5bxd8hhz";
+ postInstall =
+ "mv $out/bin/terraform-godaddy $out/bin/terraform-provider-godaddy";
+};
+```
+
+## Local build:
+
+```shell
+$ nix-build -A terraform-godaddy
+these derivations will be built:
+ /nix/store/3hs274i9qdsg3hsgp05j7i5cqxsvpcqx-terraform-godaddy-1.6.4-go-modules.drv
+ /nix/store/y5961vv6y9c0ps2sbd8xfnpqvk0q7qhq-terraform-godaddy-1.6.4.drv
+building '/nix/store/3hs274i9qdsg3hsgp05j7i5cqxsvpcqx-terraform-godaddy-1.6.4-go-modules.drv'...
+unpacking sources
+unpacking source archive /nix/store/m62ydk4wy6818sysfys0qz20cx5nzj7h-source
+source root is source
+patching sources
+configuring
+building
+go: finding github.com/mitchellh/copystructure v1.0.0
+go: finding github.com/blang/semver v3.5.1+incompatible
+go: finding github.com/posener/complete v1.2.1
+go: finding github.com/apparentlymart/go-cidr v1.0.0
+go: finding github.com/agext/levenshtein v1.2.1
+go: finding github.com/mitchellh/reflectwalk v1.0.0
+go: finding github.com/mitchellh/mapstructure v1.1.2
+go: finding github.com/hashicorp/hil v0.0.0-20170627220502-fa9f258a9250
+go: finding github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d
+go: finding github.com/bgentry/speakeasy v0.1.0
+go: finding github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af
+go: finding github.com/hashicorp/errwrap v1.0.0
+go: finding github.com/hashicorp/hcl2 v0.0.0-20181220012050-6631d7cd0a68
+go: finding google.golang.org/grpc v1.17.0
+go: finding golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9
+go: finding github.com/hashicorp/go-version v1.0.0
+go: finding google.golang.org/appengine v1.4.0
+go: finding golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4
+go: finding honnef.co/go/tools v0.0.0-20180920025451-e3ad64cb4ed3
+go: finding github.com/hashicorp/terraform v0.11.11
+go: finding google.golang.org/genproto v0.0.0-20181221175505-bd9b4fb69e2f
+go: finding github.com/mitchellh/go-wordwrap v1.0.0
+go: finding github.com/hashicorp/go-cleanhttp v0.5.0
+go: finding github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348
+go: finding golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890
+go: finding github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7
+go: finding github.com/kr/pty v1.1.3
+go: finding github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d
+go: finding github.com/aws/aws-sdk-go v1.16.11
+go: finding cloud.google.com/go v0.26.0
+go: finding google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8
+go: finding github.com/sergi/go-diff v1.0.0
+go: finding golang.org/x/sys v0.0.0-20181228144115-9a3f9b0469bb
+go: finding github.com/go-ini/ini v1.40.0
+go: finding github.com/golang/protobuf v1.2.0
+go: finding github.com/satori/go.uuid v1.2.0
+go: finding github.com/mitchellh/cli v1.0.0
+go: finding google.golang.org/appengine v1.1.0
+go: finding honnef.co/go/tools v0.0.0-20180728063816-88497007e858
+go: finding golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f
+go: finding github.com/mitchellh/iochan v1.0.0
+go: finding github.com/mitchellh/go-homedir v1.0.0
+go: finding github.com/spf13/pflag v1.0.2
+go: finding github.com/kr/pretty v0.1.0
+go: finding github.com/go-test/deep v1.0.1
+go: finding github.com/hashicorp/go-multierror v1.0.0
+go: finding github.com/spf13/pflag v1.0.3
+go: finding github.com/onsi/ginkgo v1.7.0
+go: finding github.com/onsi/gomega v1.4.3
+go: finding github.com/zclconf/go-cty v0.0.0-20181218225846-4fe1e489ee06
+go: finding gopkg.in/yaml.v2 v2.2.2
+go: finding github.com/mitchellh/gox v0.4.0
+go: finding github.com/zclconf/go-cty v0.0.0-20181129180422-88fbe721e0f8
+go: finding golang.org/x/crypto v0.0.0-20180816225734-aabede6cba87
+go: finding golang.org/x/net v0.0.0-20181220203305-927f97764cc3
+go: finding golang.org/x/net v0.0.0-20180826012351-8a410e7b638d
+go: finding github.com/google/go-cmp v0.2.0
+go: finding golang.org/x/sys v0.0.0-20180830151530-49385e6e1522
+go: finding github.com/onsi/ginkgo v1.6.0
+go: finding gopkg.in/fsnotify.v1 v1.4.7
+go: finding gopkg.in/yaml.v2 v2.2.1
+go: finding github.com/hashicorp/go-plugin v0.0.0-20181212150838-f444068e8f5a
+go: finding github.com/armon/go-radix v1.0.0
+go: finding golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be
+go: finding github.com/golang/mock v1.1.1
+go: finding github.com/ulikunitz/xz v0.5.5
+go: finding golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52
+go: finding github.com/davecgh/go-spew v1.1.1
+go: finding golang.org/x/net v0.0.0-20180906233101-161cd47e91fd
+go: finding gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405
+go: finding github.com/hpcloud/tail v1.0.0
+go: finding golang.org/x/lint v0.0.0-20181217174547-8f45f776aaf1
+go: finding github.com/mattn/go-colorable v0.0.9
+go: finding google.golang.org/grpc v1.16.0
+go: finding github.com/vmihailenco/msgpack v3.3.3+incompatible
+go: finding github.com/posener/complete v1.1.1
+go: finding github.com/mitchellh/go-testing-interface v1.0.0
+go: finding github.com/golang/protobuf v1.1.0
+go: finding github.com/mattn/go-isatty v0.0.3
+go: finding github.com/kr/text v0.1.0
+go: finding golang.org/x/net v0.0.0-20181106065722-10aee1819953
+go: finding github.com/hashicorp/go-hclog v0.0.0-20181001195459-61d530d6c27f
+go: finding github.com/oklog/run v1.0.0
+go: finding github.com/mitchellh/hashstructure v1.0.0
+go: finding golang.org/x/tools v0.0.0-20181221235234-d00ac6d27372
+go: finding github.com/hashicorp/go-getter v0.0.0-20181213035916-be39683deade
+go: finding github.com/kisielk/gotool v1.0.0
+go: finding howett.net/plist v0.0.0-20181124034731-591f970eefbb
+go: finding github.com/vmihailenco/msgpack v4.0.1+incompatible
+go: finding golang.org/x/sync v0.0.0-20181108010431-42b317875d0f
+go: finding golang.org/x/net v0.0.0-20180724234803-3673e40ba225
+go: finding gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7
+go: finding github.com/fatih/color v1.7.0
+go: finding cloud.google.com/go v0.34.0
+go: finding github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb
+go: finding github.com/hashicorp/hcl v1.0.0
+go: finding github.com/hashicorp/go-uuid v1.0.0
+go: finding github.com/hashicorp/go-multierror v0.0.0-20180717150148-3d5d8f294aa0
+go: finding github.com/mattn/go-isatty v0.0.4
+go: finding github.com/hashicorp/errwrap v0.0.0-20180715044906-d6c0cd880357
+go: finding github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310
+go: finding golang.org/x/net v0.0.0-20180811021610-c39426892332
+go: finding github.com/fsnotify/fsnotify v1.4.7
+go: finding github.com/bsm/go-vlq v0.0.0-20150828105119-ec6e8d4f5f4e
+go: finding github.com/golang/mock v1.2.0
+go: finding golang.org/x/net v0.0.0-20181129055619-fae4c4e3ad76
+go: finding github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3
+go: finding github.com/aws/aws-sdk-go v1.15.78
+go: finding github.com/golang/lint v0.0.0-20180702182130-06c8688daad7
+go: finding golang.org/x/text v0.3.0
+go: finding github.com/pmezard/go-difflib v1.0.0
+go: finding golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc
+go: finding github.com/kr/pty v1.1.1
+go: finding github.com/client9/misspell v0.3.4
+go: finding github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b
+go: finding golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3
+go: finding gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127
+go: finding github.com/jessevdk/go-flags v1.4.0
+go: finding github.com/stretchr/testify v1.2.2
+go: finding github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd
+go: finding golang.org/x/net v0.0.0-20181114220301-adae6a3d119a
+go: finding github.com/apparentlymart/go-textseg v1.0.0
+go: finding golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e
+go: finding github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77
+go: finding google.golang.org/grpc v1.14.0
+go: finding golang.org/x/lint v0.0.0-20180702182130-06c8688daad7
+go: finding github.com/hashicorp/go-safetemp v1.0.0
+go: finding github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8
+installing
+hash mismatch in fixed-output derivation '/nix/store/jgbfkhlsz6bmq724p5cqqcgfyc7l6sdv-terraform-godaddy-1.6.4-go-modules':
+ wanted: sha256:0p81wqw2n8vraxk20xwg717582ijwq2k7v5j3n13y4cd5bxd8hhz
+ got: sha256:10n2dy7q9kk1ly58sw965n6qa8l0nffh8vyd1vslx0gdlyj25xxs
+cannot build derivation '/nix/store/y5961vv6y9c0ps2sbd8xfnpqvk0q7qhq-terraform-godaddy-1.6.4.drv': 1 dependencies couldn't be built
+error: build of '/nix/store/y5961vv6y9c0ps2sbd8xfnpqvk0q7qhq-terraform-godaddy-1.6.4.drv' failed
+```
+
+## Build [on CI](https://builds.sr.ht/~euandreh/job/67836#task-setup-0):
+
+The `setup.sh` script contains a call to `nix-shell` which in turns
+build the same `terraform-godaddy` derivation:
+
+```shell
+$ cd vps/
+$ ./scripts/ci/setup.sh
+warning: Nix search path entry '/nix/var/nix/profiles/per-user/root/channels' does not exist, ignoring
+these derivations will be built:
+ /nix/store/as9r3n55czsdiq82iacs0hq12alxb2m0-remove-references-to.drv
+ /nix/store/fdh1ahjdh3fgsz4qz386klsa9bsqil48-source.drv
+ /nix/store/x7r5kh20ajlnj6vw6fg649w0iypcg1ga-terraform-godaddy-1.6.4-go-modules.drv
+ /nix/store/w4ghinrmpq524k3617ikfc8i42aa0dbb-terraform-godaddy-1.6.4.drv
+these paths will be fetched (868.72 MiB download, 4262.91 MiB unpacked):
+ /nix/store/01aggsi1ndjhnr93gcy8c4s1xbxab8dn-unzip-6.0
+ /nix/store/02nzlzdw0kiici9368jp5s84cpbqxkva-python3.7-certifi-2018.11.29
+ /nix/store/0bdf1xn7p6xzk008yr6cahq3wjlvah5g-terraform-provider-flexibleengine-1.5.0-bin
+ /nix/store/0jl2dhydfh3jbfpkgkrixisqkhj12d4y-libffi-3.2.1-dev
+ /nix/store/0jwyd55g8nfhm25a0bh1j1by6afdriic-perl5.28.2-File-Listing-6.04
+ /nix/store/0rc1jyfbxwffmsphyv2pfnxd6smysc1l-terraform-provider-ansible-0.0.4-bin
+ /nix/store/0w6l8kh3d30kg3nxc8xyi84gmrfxjnns-git-2.21.0
+ /nix/store/0z8i6sq8mg138qnifr1z37y780xkk8hf-terraform-provider-cloudstack-0.2.0-bin
+ /nix/store/15fv1623h1vcn5z0nq42v5rgjirbp5r0-terraform-provider-rancher-1.2.1-bin
+ /nix/store/18rr3rg32imsnfyx6zb6s8lc8qpkdr74-nghttp2-1.38.0-bin
+ /nix/store/1dydqkwswavzkyvr1qr62zmx3nqpmpp4-gnutls-3.6.7
+ /nix/store/1fl7yd9chgswnabbsvva7xvg5ak1q44p-terraform-provider-vault-1.8.0-bin
+ /nix/store/1hml3hx7qlbkv139khazb24jh69nngcd-terraform-provider-bigip-0.12.2-bin
+ /nix/store/1kz91g5mfj271lj5kxz2m1axcs2yqafy-thin-provisioning-tools-0.7.6
+ /nix/store/1wh5wgw6a3w91mk2avvn9ssw32nlw9kd-terraform-provider-openstack-1.18.0-bin
+ /nix/store/206dvjl6595dk40dli12ziv393ww54wl-bzip2-1.0.6.0.1
+ /nix/store/20wmykp8fj2izxdj8lic8ggcfpdid5ka-tzdata-2019a
+ /nix/store/2ar3zk5fjr34ys2dqnsfbb678x6fdlj4-openssh-7.9p1
+ /nix/store/2dfjlvp38xzkyylwpavnh61azi0d168b-binutils-2.31.1
+ /nix/store/2j9jm3jaxfn2g6wxak61wkhmrg6c4nn5-unbound-1.9.1-lib
+ /nix/store/2k46270d0h3gqj1c0wgx8prnj51jqryd-db-5.3.28
+ /nix/store/2lh08897y86kxvyjdd1vlnkg8fz88nkd-terraform-provider-rundeck-0.1.0-bin
+ /nix/store/2xhsrw4ws6kc4x3983wdwwlnim27c6iz-shadow-4.6
+ /nix/store/2yy3pv77rwbxk7b2mpysmiqdzhmgmphg-terraform-provider-http-1.1.1-bin
+ /nix/store/31l04a1yxxdbdpzdp8mpfk96rhj3bg2c-python3.7-netaddr-0.7.19
+ /nix/store/35mdgd1wc67g60azsrghzgn4fjhr5d2r-zfs-user-0.7.13-lib
+ /nix/store/3mgn9jnjzj1rgxclbixk5xa0kkx9xpw3-openssl-1.0.2r-dev
+ /nix/store/3qjz5kfri8sa0dj1213rap75alpqsm2l-terraform-provider-mailgun-0.1.0-bin
+ /nix/store/3s4fr71ykyw54kyyqavd0ba42klg0bhf-libXcursor-1.2.0
+ /nix/store/3xq3w5fgz99rhp3rxfkbp0ahg37mgmly-pango-1.43.0
+ /nix/store/3xzkc4wyadr3vrva2q320axjr6cyb43n-python-2.7.16
+ /nix/store/43i41p1n1sxssmqpf9jp5x4gcy6r2fl6-git-2.21.0
+ /nix/store/479dvd7q6c18l3jl2myhfxmfsjbqjjch-python3.7-dopy-2016-01-04
+ /nix/store/4i1mw6av3d6pr9bqggb4hnv6cykbrhhi-kexec-tools-2.0.19
+ /nix/store/4jw2677fvb11aj1bal9a2iksqz0mk80m-expat-2.2.6
+ /nix/store/4qq5hh1r6sqb0kpxc305rb468s45j4aw-libICE-1.0.9
+ /nix/store/4z62pandn85xhcc5vazmi29cs2yps47b-iproute2-5.0.0
+ /nix/store/50gfgyi2rxi4n25if8cqvlxlh5czl0wd-yajl-2.1.0
+ /nix/store/50rywa1m6asdz1y78a6dpa0xf98vm01v-perl5.28.2-LWP-MediaTypes-6.04
+ /nix/store/50wggbbr0wdg21hrvl4icwlppvk4464b-terraform-provider-opc-1.3.6-bin
+ /nix/store/5b0s7hhp52vq4psmicf8m8y2jr5jsiaz-terraform-provider-ucloud-1.6.0-bin
+ /nix/store/5k67y2lglsnswrya21z51d4h87a081k5-terraform-provider-kubernetes-1.6.2-bin
+ /nix/store/5l3967kll8m6s66zprzwb2p6vf2mh5yd-libtasn1-4.13
+ /nix/store/5lcz7p2xz1zp8iyd9yjmrg1kxw5yygnx-terraform-provider-panos-1.5.1-bin
+ /nix/store/5nkxpwdgpxs97yqh2fxz9y0rm80rc280-terraform-provider-heroku-1.9.0-bin
+ /nix/store/5pjazw71xk4kysxrzacgjl4iai691k25-curl-7.64.1
+ /nix/store/5qnlfx9qncn0fcw6mbfj6j58pz0cv0p3-binutils-wrapper-2.31.1
+ /nix/store/5x1551gw825apcsnwx8gzfnmiapbz8yl-perl5.28.2-IO-HTML-1.001
+ /nix/store/5ydkc9jcaaxlz58dr7gvyhi3gcmafsfy-python3.7-pyparsing-2.3.1
+ /nix/store/5ygnx64lyv5a8pnpmlj7bs8s2dz2hkxd-terraform-provider-spotinst-1.13.2-bin
+ /nix/store/5z3s6zbi98gh8cfliaplnmv15j568c46-terraform-provider-null-2.1.2-bin
+ /nix/store/61shjilahl0d237fg9b3z3chza2lgms4-patchelf-0.9
+ /nix/store/63gjp25l4cmdkl63zy0rcgmsvd2p2p34-terraform-0.11.14
+ /nix/store/63k736kr346ncpzv5yiqiyyyiqpa2h8m-terraform-provider-bitbucket-1.0.0-bin
+ /nix/store/6554dpyahvcs49dmv434aky6bfkmqb30-gnumake-4.2.1
+ /nix/store/69msrhi85iay3cb7c3nksr0s8l0xpsc7-util-linux-2.33.2
+ /nix/store/69vq0a9sqynmz335apm8zgyjdmq34s5j-libX11-1.6.7
+ /nix/store/6b2jabk1scwhhk9bz7wjzycvmkiw419d-libapparmor-2.13.1
+ /nix/store/6brahzfjri338n3fggplfrsmf63mrwnx-terraform-provider-nutanix-1.0.1-bin
+ /nix/store/6bvd29jny80ka8df9prr5hrl5yz7d98k-systemd-239.20190219
+ /nix/store/6hv1yfwyydyg2lzqcllwjb68xl4mrppw-terraform-provider-tencentcloud-1.5.0-bin
+ /nix/store/6hwdmzpspbnb7ix5z6m9h60jyy42kj90-dbus-1.12.12-lib
+ /nix/store/6mz512j183wj7qas2qm6zkrks5k8rh00-gettext-0.19.8.1
+ /nix/store/6whclwjzwg46s0dkxwk1xz8cdcxnkd3y-db-4.8.30
+ /nix/store/715lcljfyp8grxlmaf51pn0n3ml3dwgg-bash-interactive-4.4-p23
+ /nix/store/7256h1y98mmzsckwk2x7i3v3cxmvgrmq-python3.7-pyOpenSSL-19.0.0-dev
+ /nix/store/749qksf79hvn0aprcznd9bwfv550qwh3-go-1.12.1
+ /nix/store/7axz4xwz0vfrdgjyk59xg998bdqbvg5x-terraform-provider-random-2.1.2-bin
+ /nix/store/7b7nbb0w2iwskwhzjhfwrif631h4smia-libpciaccess-0.14
+ /nix/store/7crry947d1xvp1f15c6q089l0gcy5hpc-stdenv-linux
+ /nix/store/7fd40sykaxj6dvya7mvif3f16wrqijr9-terraform-provider-terraform-1.0.2-bin
+ /nix/store/7gwvcm8dc24vnphbx85q1afaxhfhac28-perl5.28.2-HTTP-Cookies-6.04
+ /nix/store/7k3hvg4sfpr6y2bg8b7x9mkb0d2p3scr-terraform-provider-clc-0.1.0-bin
+ /nix/store/7mmn8ri08z48vfj69c2h66f3g349ilq1-mailcap-2.1.48
+ /nix/store/7mqpbfs391s9hbnfzkpgw3inj8mkldr8-terraform-provider-azurerm-1.27.1-bin
+ /nix/store/85hh7apv9n3gganpnnq36zvlwm126mdh-openssl-1.0.2r-bin
+ /nix/store/89wg3f6hk41gxm4n6cikj6r7gr2k7h8j-terraform-provider-nixos-0.0.1-bin
+ /nix/store/8aylwgi9nb6hsgz6620fzsllbc7h2rx1-c-ares-1.15.0
+ /nix/store/8bxvyvd3ky0w5gk3k0lq2fmvj30fbzj8-zfs-user-0.7.13
+ /nix/store/8dl5c7n4555lr6qipki2424652gf8az8-ebtables-2.0.10-4
+ /nix/store/8gn2b5vvlazg608cj1y5l4igp9rckmnq-dejavu-fonts-minimal-2.37
+ /nix/store/8krs5vbid0ic6vvlvjvndvjb815q8hbd-terraform-provider-ns1-1.3.1-bin
+ /nix/store/8ndwixznskf4zbf6h96ww4j8ap4j8067-fribidi-1.0.5
+ /nix/store/8vqr6vbvyzlpsk6q0mnj93sf5j1wr5qa-perl5.28.2-Test-RequiresInternet-0.05
+ /nix/store/8xy69pkisipvdmrpm1nmbi6qa2c6lhn0-bash-interactive-4.4-p23-info
+ /nix/store/8yy3wngrdcpmjyw2ryas1y4wwhbd1356-patch-2.7.6
+ /nix/store/8z5vplmgshflm7yixhp8q7hy11xxxd8a-terraform-provider-elasticsearch-0.6.0-bin
+ /nix/store/91a8wnca647kfw67sk0iykdbyjpr8430-perl5.28.2-libwww-perl-6.38
+ /nix/store/920nr51iw7qaplqjlqrlnql9g5ljq3vp-gdbm-1.18.1
+ /nix/store/93rldbdly90q16lzk7hzilhk4qkdpqfq-keyutils-1.6-lib
+ /nix/store/94nrq9paz335s155x9za8n7kb0q3y211-libgcrypt-1.8.4
+ /nix/store/95rd64vii9j6h31fcr9lba8m8940zfpj-terraform-provider-github-2.0.0-bin
+ /nix/store/97d3r4a7v1nal53x0gv17hrbbcp0rb21-util-linux-2.33.2-bin
+ /nix/store/9dk1gh07pwkvg62rns4k670h54bhfhgh-zlib-1.2.11-dev
+ /nix/store/9fvjgcjn1d0c9476qlr05psvwljwzq59-python3.7-cryptography-2.6.1
+ /nix/store/9gjpg5lsdhgrhi805948c648nhn39l8z-terraform-provider-template-2.1.2-bin
+ /nix/store/9hysgvp7qrfcil4b5qhwdq2vm9hism13-libxcb-1.13.1
+ /nix/store/9pa3p1rqhnvlrngaqsx09766cl1j6zf3-python3.7-httpretty-0.9.6
+ /nix/store/9xb22l3577nznvd6dqqis6ixgmwq9ygh-python3.7-pycparser-2.19
+ /nix/store/9yb9whkdgf3zyy85xac248kwq1wm6qd6-harfbuzz-2.3.1
+ /nix/store/a185xh0jcx7il7hw2gfh0pmvrah3x67y-systemd-239.20190219-lib
+ /nix/store/a2cg0faxbwnicf41vwmw467jw7i9ix46-pinentry-1.1.0
+ /nix/store/a2rr0irv2ssvvphvafgrxy4di0pkkagn-audit-2.8.5
+ /nix/store/ag3fp30cz58ijm2yyy5adp1f3kw814b5-libXcomposite-0.4.5
+ /nix/store/amlnqi4vvmpb9gjmyh1vr5hr2py12ss2-terraform-provider-docker-1.1.1-bin
+ /nix/store/ap4sr1n0wlgmybxbw3pvq8klh8snc3n8-sqlite-3.28.0
+ /nix/store/apcaggi0q3vrb4ha1b07cjxiim2li5ly-gcc-7.4.0
+ /nix/store/apjv9g35sklrab9lzz9r9rq7lnczv2wy-terraform-provider-triton-0.5.1-bin
+ /nix/store/aqgl1dqd6lr7jr9knqsyyq09bm0ibw7s-python3.7-cffi-1.12.3
+ /nix/store/aqp0mrdbhvkm8rl1z0p2rkqnz6pbclhq-libXfixes-5.0.3
+ /nix/store/b2rwzjp56yzd4jg2csx568h2dgj1l3l2-perl5.28.2-Try-Tiny-0.30
+ /nix/store/b2wy5p5bykcnkwz5q1w8qq4qfzr4arc7-python3.7-MarkupSafe-1.1.1
+ /nix/store/b6pdz8g070kbf0rdavjz6rip7sx06r8h-libkrb5-1.17-dev
+ /nix/store/b77nn2r1c8cqpr9prh1ldwr3m6xdrkpa-openldap-2.4.47
+ /nix/store/b7w6bpx5z0ncy35kqxvmpg4lwrnc8jf2-libxml2-2.9.9
+ /nix/store/bd1hz6m8gh9m91hikjhq7aiq2khdkx2r-fontconfig-2.12.6
+ /nix/store/bf26sh99bngrnpzrj7gyz0689b060vak-terraform-provider-skytap-0.9.0-bin
+ /nix/store/bh412cii40qpzc20pzd48dq45jv9xm5a-perl5.28.2-HTTP-Date-6.02
+ /nix/store/bvsihhp4jv61hz6mc17mn1sar03k0i8d-terraform-provider-telefonicaopencloud-1.0.0-bin
+ /nix/store/bxps2h6axpqrjxcmib344a3immy3gvhd-readline-6.3p08
+ /nix/store/byxya0m4656ylf5imvs2v9p2c1av1kjl-terraform-provider-matchbox-0.2.3-bin
+ /nix/store/c7jkmfjhl3jkgnkrhh021vrqry7zplc1-linux-headers-4.19.16
+ /nix/store/caa29d4y2zip0ly9mcc7f4w94blw8k60-lz4-1.9.1
+ /nix/store/cd09qv56inq4gwa89656r4n0lq0vgidw-libXi-1.7.9
+ /nix/store/ch6pz5kfg0bd3sfyf1813cpskg7lidns-python3.7-urllib3-1.24.2
+ /nix/store/ck0lifb2jgkmg6c7frz7fxqwz5fbdnxk-terraform-provider-ultradns-0.1.0-bin
+ /nix/store/cncad2f4lfxfxnwd9lfhjjd89x3anxqr-terraform-provider-yandex-0.5.2-bin
+ /nix/store/csajl6aq80s9v2xbkmlzgfxlilmbzff6-terraform-provider-influxdb-1.1.0-bin
+ /nix/store/d0wcd9mid6067i6va19lwiv29hln6n2j-python3.7-requests-2.21.0
+ /nix/store/d7rh74cch3ybp9r239j5c2c1rb0kx3pa-terraform-provider-chef-0.1.0-bin
+ /nix/store/daizqdqrm7g4favv814hnijmqhay8hs4-dbus-1.12.12
+ /nix/store/di6rrbw1kbdrwxiymq91dgdvp2rvk1xv-dnsmasq-2.80
+ /nix/store/drdzgwhnqjvq4g1aqsyz56c04k6dxnbi-bash-interactive-4.4-p23-doc
+ /nix/store/ds1prvgw3i3ic8c7axyrw4lwm3d0gqab-gcc-wrapper-7.4.0
+ /nix/store/dvsw0fhfzqf4xg0q2idhs02rhwn4k8cv-libXrender-0.9.10
+ /nix/store/dy437h3f5i500gv6znll974c87grzh3l-libXft-2.3.3
+ /nix/store/dy5wi2sqnhbnlpvjr8a0z96id1mq243j-python3.7-six-1.12.0
+ /nix/store/dz9dqcnz8v9cy54x5hax599zjwckp0kd-terraform-provider-gitlab-1.3.0-bin
+ /nix/store/f11valqiyik1ggdlnhg3ibwgrj1imidb-numactl-2.0.12
+ /nix/store/f39sk2aim9xz7dzn7qvqh442xm58v77w-nfs-utils-2.3.3
+ /nix/store/f3rbnn0jhm549mcp7k9ysjcq26j8fvyy-terraform-provider-segment-0.2.0-bin
+ /nix/store/f7zcmzqcavbj7bp1dlfk86f9bkqvk9p3-bridge-utils-1.5
+ /nix/store/f7zh0d0n2dj4dcig903zd5jgb2cpaxf6-terraform-provider-ignition-1.0.1-bin
+ /nix/store/fhgiisqjpzwl8z4fchgc07avg1azmp0r-cyrus-sasl-2.1.27
+ /nix/store/fky41n197z9n737kbih4j7ncnh7cngnz-perl5.28.2-Net-HTTP-6.18
+ /nix/store/fq4765nh9p0si8mh9cnywsq48zr1qc27-terraform-provider-runscope-0.5.0-bin
+ /nix/store/fwvdxglj9asp4f90ihry29n2fm8a6i09-terraform-provider-aws-2.9.0-bin
+ /nix/store/g4qqgmrm254axgndybnpwg7s780bxy1a-numad-0.5
+ /nix/store/g63hwfkd4yjncqh81ndn9vbmghdv41ng-terraform-provider-digitalocean-1.3.0
+ /nix/store/gg469jh0m4dk4b0x6s44ziad69czbv22-python3.7-pycrypto-3.7.3
+ /nix/store/gkpa27fykskx0dd52dca515gd91qhhgf-terraform-provider-local-1.2.2-bin
+ /nix/store/glrnpb3rkribnrjh5gzs24nmvl3m00cg-parted-3.2
+ /nix/store/gn5cd1z252aip0rvds71g9mgfhh6i8p7-fontconfig-2.12.6-lib
+ /nix/store/gsl1dw8ycrdvlzczsl59mkz0qpbwcmz1-iptables-1.8.2
+ /nix/store/gydzhj2y5j1ggbainbilvpxi5glw5hmf-terraform-provider-alicloud-1.41.0-bin
+ /nix/store/h02lb0p8krj1smsrid2n44ak058bbd82-expand-response-params
+ /nix/store/h40ib0qsa07b6ld1pv4x76xx2g7xgik6-terraform-provider-newrelic-1.5.0-bin
+ /nix/store/h8898ysg2s23k6palhxy9a5sbgrvvrcy-nfs-utils-2.3.3-lib
+ /nix/store/hbs2vrw1y8y1qz1hi71jaz0j3pl95qfs-terraform-provider-helm-0.9.1-bin
+ /nix/store/hg863b95fxv9zlk008qjyf87qgyx58h1-libseccomp-2.4.1-lib
+ /nix/store/hl8lzq90qjhq0n710lm5n17lc9i80vsh-terraform-provider-oraclepaas-1.5.2-bin
+ /nix/store/hnvmacd16kzmwcsavzkssrqj2kiryy2p-perl5.28.2-WWW-RobotRules-6.02
+ /nix/store/hpmni5y805q7a07q9sn3nwjk4i2m2jl5-libkrb5-1.17
+ /nix/store/hps5ziw9zq6mcjh9b7naaxawnqymws4m-jasper-2.0.16
+ /nix/store/hy2xn2jxm4wp7j86p08m9xdpxncskdgv-terraform-provider-pagerduty-1.2.1-bin
+ /nix/store/i10riha5s5dgafznk3gwn36fyr3cpxb4-libXinerama-1.1.4
+ /nix/store/i3kh8yq4kgkfn234pnwxnvxbrcgcckc8-curl-7.64.1-dev
+ /nix/store/i652w9gqbmc6k48lz3b02ncv7hpgc7nv-perl5.28.2-HTTP-Message-6.18
+ /nix/store/i8859i082xqnrhzg7h6gz2ylc5wbw5pa-libnl-3.4.0
+ /nix/store/id798ngchr83gc0mmqd3zlviljshjhvb-terraform-provider-nomad-1.3.0-bin
+ /nix/store/im1940h7b6pjlnh38q6lasdn8iybsv4v-python3.7-jmespath-0.9.4
+ /nix/store/in7wgxanbdycb9wpq1j29928gllc0ap6-terraform-provider-profitbricks-1.4.4-bin
+ /nix/store/irqcza91k5smn6f4dyvqzw0zjn50d58f-libssh2-1.8.2
+ /nix/store/isdbs6d2jk75kj0qk4s3prwlwcgkgalf-tf-plugin-env
+ /nix/store/ixycmxkr0wrz3gfxrnrdgcsk4gcyirpv-terraform-provider-azurestack-0.6.0-bin
+ /nix/store/j1px1l6vk39i3chghlwy9222jcjdfdq0-libcap-ng-0.7.9
+ /nix/store/jf6lfawjvwr6ggnd4lhc5w4dp9v3kgh4-libXdmcp-1.1.3
+ /nix/store/jjld4xam968mz645xh4g7i5zrnhsfyp9-terraform-provider-cobbler-1.0.1-bin
+ /nix/store/jn0bddfc3fzhnf5ns4s2khhzclswvzb2-libpcap-1.9.0
+ /nix/store/jsg4bi31drwy614hdkkwf32m4wz3im6g-libassuan-2.5.3
+ /nix/store/jszvy5lyyjbfi7mjr7s9bnbq9cyq858v-terraform-provider-external-1.1.2-bin
+ /nix/store/jvfc6r03a95aignzbfg987kspa04s0md-perl5.28.2-HTTP-Daemon-6.01
+ /nix/store/jwwkky1pn1fw3yiaxmc5k3brb0rqlwvq-graphite2-1.3.6
+ /nix/store/k1xlz5zy7rm2a428byz850c1igc2j1z8-readline-7.0p5
+ /nix/store/k3qhzd0x8a1z6h5kyifnv3axbfs7fy66-terraform-provider-statuscake-0.2.0-bin
+ /nix/store/k4v5havnf7pmcv40xadh8mb7b0nbcgxz-libglvnd-1.0.0
+ /nix/store/k59h7bs9307y7rb0z70vasvj8hd5pcn5-perl5.28.2-HTML-Tagset-3.20
+ /nix/store/k5vljkz5p59nrh50vx5k2790ksqcxjpc-terraform-provider-dme-0.1.0-bin
+ /nix/store/k7g175rls2pk34m23wqhplv8mbnsc0lc-pixman-0.38.4
+ /nix/store/kccb2k5hdjhdyxbxsri9lwwc4z1pvx6z-cups-2.2.11-lib
+ /nix/store/kds827ryxx16rwhrsdn9wnr2pxf5qaxm-terraform-provider-google-2.6.0-bin
+ /nix/store/kh18cbdb9f79gl58axwr8qq6c7bd0bl0-terraform-provider-acme-1.1.2-bin
+ /nix/store/kh2h5rnmm4gvjm8z7w2y511h15i7nhk9-gnum4-1.4.18
+ /nix/store/kmscm0qm9j480wpd1yh42b1g0zc6qbmv-nghttp2-1.38.0-lib
+ /nix/store/kqmg1xxd3vi37bqh7gdvi61bkp7wb9hi-terraform-provider-circonus-0.2.0-bin
+ /nix/store/kvjcl6plvmkm6i2lzd7wrkbiis3b4vhg-terraform-provider-mysql-1.5.1-bin
+ /nix/store/kvy2sz5lvi89lnh4rmw1df4jsnhqf1ay-libnftnl-1.1.2
+ /nix/store/l6ns1zcd18j9708y3agxgi0kihs4zc7i-terraform-provider-softlayer-0.0.1-bin
+ /nix/store/l8zqmzg19i62iz4argyjjr071rid3q9b-terraform-provider-cloudflare-1.13.0-bin
+ /nix/store/l9821zngvlh8bd6mlyzvi1mc754dyhjz-terraform-provider-libvirt-0.5.1-bin
+ /nix/store/lgrhsbfmpf1cjbpig8llxfrfb6xhz7xv-terraform-provider-scaleway-1.9.2-bin
+ /nix/store/lnzh3cjjcbafh6wsivw10wl60g7xplxj-libXrandr-1.5.2
+ /nix/store/lr9yhdbn8a3la69j56cz0vi1qva973dv-kbd-2.0.4
+ /nix/store/lswf09qbkkrqd0rgzaqyrkr44lf78y9x-libXext-1.3.4
+ /nix/store/lvqp39d4hx776nkw3a0qfnvvjmnj49hc-procps-3.3.15
+ /nix/store/m2n4drah6566qlccaabjhnnl4slql3cd-python3.7-pysocks-1.6.8
+ /nix/store/m65jki67b02la5k5r9vgddcp13l32lw5-python3.7-httplib2-0.12.3
+ /nix/store/m6hisb1d7q1c4z0s3icax40gynz4f8fl-gmp-6.1.2
+ /nix/store/mmjbmvw64yl2756y1zvsxk0ic0nhzq2a-libnfnetlink-1.0.1
+ /nix/store/mn0nzy294n07x1b92m9n0rwrv7z1441m-libunistring-0.9.10
+ /nix/store/mp1hzpgp3sa6xac8dc7rldh5rab8lk2w-perl5.28.2-Test-Needs-0.002006
+ /nix/store/mpnl3p6mzm71vci81r0h346jywm6863s-perl5.28.2-Encode-Locale-1.05
+ /nix/store/mrclkdxryhjd6i36hlad6fwahjd14fmg-libpng-apng-1.6.37
+ /nix/store/msa690459q4n9fiq125gsfambbd62qb4-libdaemon-0.14
+ /nix/store/msfkr5yqdxjx5cm24pvn3q1552rsjn8h-libev-4.25
+ /nix/store/mvmjdim7dn589inb8dsjxap08h4ip4h5-terraform-provider-huaweicloud-1.4.0-bin
+ /nix/store/n1dcmv0ii513dhlnllc790vfn8i9j9lj-python3.7-Jinja2-2.10.1
+ /nix/store/n1mrfbzlh3cjm9mfyrp48pybl3sg4717-terraform-provider-vsphere-1.10.0-bin
+ /nix/store/n1y9i0bv0sg8n8759zd6smr2zjyn8jf3-python3.7-paramiko-2.4.2
+ /nix/store/n2mzl8vljdksdqybihdy9mm5v7hm19q5-python3.7-idna-2.8
+ /nix/store/n3rakrhhvi3bb0ffnjs51drmy157p51q-terraform-provider-vcd-2.1.0-bin
+ /nix/store/n6z00dm6a5fdv935v8bv59909ra51xli-npth-1.6
+ /nix/store/n7wdfylfi5wnrjdg4im9v2q9gnl99mmb-terraform-provider-archive-1.2.2-bin
+ /nix/store/nc8x0pwchcc9xiv1nsj9idvpnfvkhh8p-terraform-provider-datadog-1.9.0-bin
+ /nix/store/ndbpc44lv43k7jnb0ip1qwk8f0slx685-bash-interactive-4.4-p23-dev
+ /nix/store/ndjjyr4rqibzkgs8w55bx2idhnckh39p-libidn2-2.1.1a
+ /nix/store/nfn8wgiqf418y444fch4bpf2ay6ca55i-curl-7.64.1-man
+ /nix/store/ni4nc256xs4f5hmhlhybxl8k40fwi5m3-libedit-20190324-3.1
+ /nix/store/np4ikymr5fq5iknjfrwrgqmcsid4dmw9-terraform-provider-consul-2.3.0-bin
+ /nix/store/nq2x9w3rjd5l2yvlv328i19ljar8bdab-libidn-1.35
+ /nix/store/nvyhmkghwxh5f1wiid27vzxa0ddx929p-python3.7-packaging-19.0
+ /nix/store/nwhvl00i2wa4ms26lszk36vwir90jd3x-libvirt-4.10.0
+ /nix/store/p00q64lbln1z9kfgpd2r6qhk0kc7i7w7-terraform-provider-oneandone-1.3.0-bin
+ /nix/store/p19dhs366b9zbbhs61xfw7d77sk9mkjr-terraform-provider-cloudscale-1.1.0-bin
+ /nix/store/p8s6295x84d594sxvzml8rsxqjdghmc5-cairo-1.16.0
+ /nix/store/p9rjwvja55djz5g2qxyc9wzcpmska0ql-terraform-provider-dyn-1.1.0-bin
+ /nix/store/pb6r7dllpfw5cbhpmv2v2kms9a57r4v5-terraform-provider-tls-2.0.1-bin
+ /nix/store/pfd5p3pyfrkwxh42j491kkqgl8n9aa67-perl5.28.2-TermReadKey-2.38
+ /nix/store/pk6r8sd18gmxns8r73qi2yrmzf4f4cp0-terraform-provider-arukas-1.1.0-bin
+ /nix/store/pknq6p5h43zm4r0dgjnfywql04hdv3js-atk-2.32.0
+ /nix/store/pvsfn6d0byl3hfwnyfg21yivyj8iff8s-terraform-provider-opentelekomcloud-1.8.0-bin
+ /nix/store/pvzbhdzqm4i20v3flr5mf7yfs7n2lrvg-python3.7-dnspython-1.16.0
+ /nix/store/pwkjsnbgb4mw0x5r5zh8s7c9wqryhmbl-dmidecode-3.2
+ /nix/store/pwnppsfjfxibhwhf598l7mx31i8154j9-bison-3.3.2
+ /nix/store/q0ndxs1vqdy5r749h5hhhbixgyf5yasx-terraform-provider-opsgenie-0.1.0-bin
+ /nix/store/qbwcp86aslamyhhmf2xx0l5d17dyg2jh-libmnl-1.0.4
+ /nix/store/qd4j58ykdkg9yvy8kvgh0i00gacy0ldm-perl-5.28.2
+ /nix/store/qgr66z24rfbb8cc965rr2sklh38p083n-git-crypt-0.6.0
+ /nix/store/qhad1pgmn3z406pgk345281xb5zjqrkm-libelf-0.8.13
+ /nix/store/qpj9rsal85rc94pizrkwb3c5nkivlfcl-p11-kit-0.23.14
+ /nix/store/qq59cnpcbnp4p054ipbs54fv946r4qr8-python3.7-boto-2.49.0
+ /nix/store/r3x6y48q13qwl9x1wwz37002b7fhyidv-python3.7-asn1crypto-0.24.0
+ /nix/store/r54ql4g0hcxzp15sfjiagd1dmxh4s8n6-python3.7-bcrypt-3.1.6
+ /nix/store/r6mrgd9k1jzzqrhphrg1qgxvgvbka7p8-pcre2-10.33
+ /nix/store/r7lhx3aqyils26h7wbxbgf376c0n4ab5-libssh2-1.8.2-dev
+ /nix/store/rcn9d2q4mpapxf4qd54hkqz32ljhv0rw-util-linux-2.33.2
+ /nix/store/rf104cwz7kaa51s49n4c2aar0jrrj8px-nghttp2-1.38.0-dev
+ /nix/store/ricz15lpkjrasc5cpzp6l60iwlc87wv3-avahi-0.7
+ /nix/store/rj8xd9ajm3wqjz1vfkignlp011fss53q-bzip2-1.0.6.0.1-bin
+ /nix/store/rk78bh2s5yjpmgdhzqlf1hnj6ij0h20n-terraform-provider-tfe-0.8.2-bin
+ /nix/store/rl2z4bb7wkfp0g12ccqffh287qal1109-kmod-26
+ /nix/store/rs21a235ix9v8y4hgazkzi6g1x5dqf7v-terraform-provider-netlify-0.1.0-bin
+ /nix/store/ryinn9xa3g8bn55nj1h54ypnlp9naq6i-stdenv-linux
+ /nix/store/rzfzb501miszas14xq6cr3c04m8kkdrb-terraform-0.11.14-bin
+ /nix/store/s3m5z3wxm94c0bfyjxmqf6i0gf1bpx90-libaio-0.3.110
+ /nix/store/s5f3vpmig33nk4zyk228q55wdydd3pc2-python3-3.7.3
+ /nix/store/s7p4iij8p4hi6bmc2bf3flyf6wa6yzrj-terraform-provider-dns-2.1.1-bin
+ /nix/store/s7rqxrfb631i53dfl90gac35095jyypq-util-linux-2.33.2-bin
+ /nix/store/scdscan356g79qb7cf776gy7src22zbl-terraform-provider-grafana-1.3.0-bin
+ /nix/store/sfrh0r54ykfzv62h17gi8hm6778j7k0l-libyaml-0.2.1
+ /nix/store/shf6d1928fzxcaz6zh0bhcqv3xhvxhjd-terraform-provider-linode-1.6.0-bin
+ /nix/store/skkpbcqavjd8q0zmd94js6nz7pgbvpfl-mirrors-list
+ /nix/store/sm4yylq92rip64wdk3iniy91w48a90ia-openssl-1.0.2r
+ /nix/store/sn2cydjzikl3rws2nfa7pdvayb45brrd-terraform-provider-logicmonitor-1.2.0-bin
+ /nix/store/ssr1y1736h7c6p8vs76iyxwg5h889x7d-terraform-provider-dnsimple-0.1.0-bin
+ /nix/store/sw6n4yz49cz5vm4ggpk2l5j1vngac8j2-terraform-provider-secret-1.0.0-bin
+ /nix/store/sw6vlm5g6r6sivlncz7vh8ps7v7r22aa-terraform-provider-brightbox-1.1.2-bin
+ /nix/store/v31bag67wm17wkdg7zr9yi62c5028y59-libXau-1.0.9
+ /nix/store/v5q3cnkjfy8rfacsjqn1nza93mbczgd5-gdk-pixbuf-2.38.1
+ /nix/store/v69ld4vcgkr4i4giv1nzl4kax9zx1fpa-python3.7-pyasn1-0.4.5
+ /nix/store/vbp6wnr2gyj50nabxgclkbqblmnwcnbg-terraform-provider-azuread-0.3.1-bin
+ /nix/store/vckbx0p1isjvmgjh7ppni3h87imazbzb-libcap-2.27-lib
+ /nix/store/vin4cp4m5af1mxkb2jqqi8xkf98ca2sv-python3.7-ansible-2.7.9
+ /nix/store/vkdh5ibsmzj6p53krnqqz1pv620f42r0-terraform-provider-logentries-1.0.0-bin
+ /nix/store/vqvmd2r9pf9f74jqipbhrn7wksiiy1jf-pcsclite-1.8.25-bin
+ /nix/store/vrrs5p13mykyniglgfdsn8xii9b7s850-terraform-provider-powerdns-0.1.0-bin
+ /nix/store/vzjwz11r1yy02xv07vx2577pqizzx83n-nettle-3.4.1
+ /nix/store/vzs0x1kaliybgk7yr9lrf6ad4x5v1k9y-libjpeg-turbo-2.0.2
+ /nix/store/wbbwikfkc7fbida822a5z9b4xmsnwm3d-python3.7-chardet-3.0.4
+ /nix/store/wccns8l8bav11z3xlhasmnkz383q1k9p-libnetfilter_conntrack-1.0.7
+ /nix/store/wf5nv1gzrx378icqmjgwl2isg7s8ly80-lvm2-2.03.01
+ /nix/store/wh8pg14c3ykmmnd342llbzjigahc54dw-terraform-provider-fastly-0.6.0-bin
+ /nix/store/wi2mn48l130r7wafvj757rvzfkla59if-pm-utils-1.4.1
+ /nix/store/wi4jm555w0rc1daiy2sz9iwrpk6cb2d8-terraform-provider-ibm-0.11.1-bin
+ /nix/store/wkby8pdmg4vpvbl0d85gynh103k2h1ky-nghttp2-1.38.0
+ /nix/store/wv8y2h4w7lxi9x6k8pzh3dxy7i4csfbm-terraform-provider-digitalocean-1.3.0-bin
+ /nix/store/wvccp35z40fj1v8xbz6czrm4bbiaqd45-perl5.28.2-HTTP-Negotiate-6.01
+ /nix/store/wxdxzvaj3mcmf3i9yqwinpcb26iz3pas-perl5.28.2-Test-Fatal-0.014
+ /nix/store/wy2alf2g85wym6i9n279d7a9nianx3is-curl-7.64.1-bin
+ /nix/store/wym0bhrfdx5ndfpx6y39c6j3pf2n6wak-libksba-1.3.5
+ /nix/store/x1idgjd7vs75nj9s3krimbfjvh27n06d-terraform-provider-librato-0.1.0-bin
+ /nix/store/x3rijvjjrch1fjs60lrw9xb7pidp817f-gtk+-2.24.32
+ /nix/store/x6zlzsjsd7m9mjkmxlp15vcay58g6a04-python3.7-pycryptodome-3.7.3
+ /nix/store/x987jpq3yswa2aj51d63vwszfdm3r8ld-terraform-provider-atlas-0.1.1-bin
+ /nix/store/xav1lh5jlgrz7amaz7b5ghrz0c273lji-python3.7-pyOpenSSL-19.0.0
+ /nix/store/xbcc8v50g7h7wqh892q7k22wb1ky8cg7-libevent-2.1.8
+ /nix/store/xbwr4gb4zgjmcrbx82zlcp1jfgcz75ya-python3.7-cryptography-2.6.1-dev
+ /nix/store/xdhh337yhl93x33vzd9davinrbr9x9iz-libmicrohttpd-0.9.63
+ /nix/store/xfgg45by0j5hxi1kdwh8x2pkfd67wwzd-nss-cacert-3.42.1
+ /nix/store/xkv7la24vsyn9n23wc1izcmmp7aifzb3-terraform-provider-packet-2.1.0-bin
+ /nix/store/xmy3chnan9iiag9apm7dd825cmlkiiij-libusb-1.0.22
+ /nix/store/xn8xmzxjx7i8wwfvbiwfgmv9mn4n45dk-terraform-provider-icinga2-0.2.0-bin
+ /nix/store/xs2k8driha83f9k017bkgch8lcl4z7w0-python3.7-ply-3.11
+ /nix/store/xv9pfis6ixrsv7z1jrmgagi25qljvg0d-python3.7-pynacl-1.3.0
+ /nix/store/xxwqa4rwfi97az8a6dl6vhqiyjvmnm9v-libsecret-0.18.8
+ /nix/store/xyx7dm2b6ylcdp38pfwcip9ssx0fdzga-libtool-2.4.6-lib
+ /nix/store/xzzf5h2f02bwjcph28vly4wklp13wb0g-perl5.28.2-HTML-Parser-3.72
+ /nix/store/y1v3g4m9bmmmvmw4z84m5fpmdy42lbr4-terraform-provider-selectel-2.1.0-bin
+ /nix/store/y20bvdwb0s95wa4gzhkkxd1xcc4c8whx-terraform-provider-postgresql-0.3.0-bin
+ /nix/store/y3x0fvlz4a30iajw3vd1rkg45vl3k15c-pcsclite-1.8.25
+ /nix/store/y48is3y65apgyjwlwiyzjay1dw19l19a-dns-root-data-2019-01-11
+ /nix/store/y5gfmqjp68h4fqq8z4p219pimm7ws49j-python3.7-cffi-1.12.3-dev
+ /nix/store/ybj0i3axzbkb4n4c4a5gz5lr3z70v7h3-bash-interactive-4.4-p23-man
+ /nix/store/ybsmcpfglj9fm5kjxyykbnwfjmhxrwfv-terraform-provider-google-beta-2.6.0-bin
+ /nix/store/yf4i32dx953p2dv2agfdyxdwg6ba0l61-python3.7-setuptools-41.0.1
+ /nix/store/yfmvcf5cslq4f9kv0vlmxksdgvick22d-libgpg-error-1.36
+ /nix/store/yh1w64xprl0rxmj8nvxpmikp6x3qvgdb-libffi-3.2.1
+ /nix/store/yi01h84z709wf02id8hsb170z53wvk7r-glibc-2.27-dev
+ /nix/store/yik0kf8fgxjj1rj3s4bsrx1y6smz8rhx-cdrtools-3.02a06
+ /nix/store/yjdcrd1lkzp8c7cawcpy40c4p3ngaw12-terraform-provider-rightscale-1.3.0-bin
+ /nix/store/ymn53mhfkkhs26qw708yv7bd7jmbp636-terraform-provider-nsxt-1.1.0-bin
+ /nix/store/yn7smb316b6d99wjw2v9fgxzbrqnq9jm-terraform-provider-gandi-1.0.0-bin
+ /nix/store/yr6qv6j9qrc03gl7bknw6p1fx1pzk0l9-terraform-provider-hcloud-1.9.0-bin
+ /nix/store/yrmrvha03pvdyi9ww2bi6xjpk5930sf8-glib-2.60.1
+ /nix/store/yvqaj61p81kd4bq3dyw64idqj61rwpka-terraform-provider-hedvig-1.0.3-bin
+ /nix/store/z0sqda5bg0l4p0851nw05h7hii0jj1kr-python3.7-PyYAML-5.1
+ /nix/store/z2darh83lb4rmsfnnyjc0hll51fyvj49-libSM-1.2.3
+ /nix/store/z4vgv1w5cmd6p90grfgr1k6m87fydr3g-terraform-provider-ovh-0.3.0-bin
+ /nix/store/z68464p6aafah8b8xcybkwyhmqdf0jgx-gnupg-2.2.15
+ /nix/store/z7jh25by0vv378gacjd01idi52dj688h-libtiff-4.0.10
+ /nix/store/z7n5a3kwjylsgnc1d271j4dn41m6shpz-libtirpc-1.1.4
+ /nix/store/z9fv0x6vwlx3xkcac3lg6v8g0figkx39-ncurses-6.1-20190112
+ /nix/store/za8dm1xsfslzdk4j434w869bngrygici-perl5.28.2-URI-1.76
+ /nix/store/zaq3w03j96qqhzpcfs9yacwa98sdsmiv-terraform-provider-oci-3.24.1-bin
+ /nix/store/zckvgwnyah69mmwn0g5vr4y85rmzwld9-libsodium-1.0.17
+ /nix/store/zdfchj49phsp0sahcvdfp8ipc0chakg3-terraform-provider-rabbitmq-1.0.0-bin
+ /nix/store/zh1q7yvyaxlcmj3n6g0rrdaq0v73pp90-linux-pam-1.3.1
+ /nix/store/znjkmjz0pgckxxzq0m9d17isnsd9s03q-cracklib-2.9.7
+ /nix/store/zxdnpb673bf27hcqzvssv5629m4x5bjv-freetype-2.10.0
+copying path '/nix/store/drdzgwhnqjvq4g1aqsyz56c04k6dxnbi-bash-interactive-4.4-p23-doc' from 'https://cache.nixos.org'...
+copying path '/nix/store/a2rr0irv2ssvvphvafgrxy4di0pkkagn-audit-2.8.5' from 'https://cache.nixos.org'...
+copying path '/nix/store/8xy69pkisipvdmrpm1nmbi6qa2c6lhn0-bash-interactive-4.4-p23-info' from 'https://cache.nixos.org'...
+copying path '/nix/store/ybj0i3axzbkb4n4c4a5gz5lr3z70v7h3-bash-interactive-4.4-p23-man' from 'https://cache.nixos.org'...
+copying path '/nix/store/2dfjlvp38xzkyylwpavnh61azi0d168b-binutils-2.31.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/f7zcmzqcavbj7bp1dlfk86f9bkqvk9p3-bridge-utils-1.5' from 'https://cache.nixos.org'...
+copying path '/nix/store/206dvjl6595dk40dli12ziv393ww54wl-bzip2-1.0.6.0.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/8aylwgi9nb6hsgz6620fzsllbc7h2rx1-c-ares-1.15.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/rj8xd9ajm3wqjz1vfkignlp011fss53q-bzip2-1.0.6.0.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/znjkmjz0pgckxxzq0m9d17isnsd9s03q-cracklib-2.9.7' from 'https://cache.nixos.org'...
+copying path '/nix/store/nfn8wgiqf418y444fch4bpf2ay6ca55i-curl-7.64.1-man' from 'https://cache.nixos.org'...
+copying path '/nix/store/6whclwjzwg46s0dkxwk1xz8cdcxnkd3y-db-4.8.30' from 'https://cache.nixos.org'...
+copying path '/nix/store/2k46270d0h3gqj1c0wgx8prnj51jqryd-db-5.3.28' from 'https://cache.nixos.org'...
+copying path '/nix/store/8gn2b5vvlazg608cj1y5l4igp9rckmnq-dejavu-fonts-minimal-2.37' from 'https://cache.nixos.org'...
+copying path '/nix/store/pwkjsnbgb4mw0x5r5zh8s7c9wqryhmbl-dmidecode-3.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/y48is3y65apgyjwlwiyzjay1dw19l19a-dns-root-data-2019-01-11' from 'https://cache.nixos.org'...
+copying path '/nix/store/8dl5c7n4555lr6qipki2424652gf8az8-ebtables-2.0.10-4' from 'https://cache.nixos.org'...
+copying path '/nix/store/h02lb0p8krj1smsrid2n44ak058bbd82-expand-response-params' from 'https://cache.nixos.org'...
+copying path '/nix/store/4jw2677fvb11aj1bal9a2iksqz0mk80m-expat-2.2.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/bd1hz6m8gh9m91hikjhq7aiq2khdkx2r-fontconfig-2.12.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/8ndwixznskf4zbf6h96ww4j8ap4j8067-fribidi-1.0.5' from 'https://cache.nixos.org'...
+copying path '/nix/store/920nr51iw7qaplqjlqrlnql9g5ljq3vp-gdbm-1.18.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/m6hisb1d7q1c4z0s3icax40gynz4f8fl-gmp-6.1.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/kh2h5rnmm4gvjm8z7w2y511h15i7nhk9-gnum4-1.4.18' from 'https://cache.nixos.org'...
+copying path '/nix/store/6554dpyahvcs49dmv434aky6bfkmqb30-gnumake-4.2.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/pwnppsfjfxibhwhf598l7mx31i8154j9-bison-3.3.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/jwwkky1pn1fw3yiaxmc5k3brb0rqlwvq-graphite2-1.3.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/6mz512j183wj7qas2qm6zkrks5k8rh00-gettext-0.19.8.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/4i1mw6av3d6pr9bqggb4hnv6cykbrhhi-kexec-tools-2.0.19' from 'https://cache.nixos.org'...
+copying path '/nix/store/93rldbdly90q16lzk7hzilhk4qkdpqfq-keyutils-1.6-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/rl2z4bb7wkfp0g12ccqffh287qal1109-kmod-26' from 'https://cache.nixos.org'...
+copying path '/nix/store/4qq5hh1r6sqb0kpxc305rb468s45j4aw-libICE-1.0.9' from 'https://cache.nixos.org'...
+copying path '/nix/store/v31bag67wm17wkdg7zr9yi62c5028y59-libXau-1.0.9' from 'https://cache.nixos.org'...
+copying path '/nix/store/jf6lfawjvwr6ggnd4lhc5w4dp9v3kgh4-libXdmcp-1.1.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/s3m5z3wxm94c0bfyjxmqf6i0gf1bpx90-libaio-0.3.110' from 'https://cache.nixos.org'...
+copying path '/nix/store/6b2jabk1scwhhk9bz7wjzycvmkiw419d-libapparmor-2.13.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/vckbx0p1isjvmgjh7ppni3h87imazbzb-libcap-2.27-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/j1px1l6vk39i3chghlwy9222jcjdfdq0-libcap-ng-0.7.9' from 'https://cache.nixos.org'...
+copying path '/nix/store/yik0kf8fgxjj1rj3s4bsrx1y6smz8rhx-cdrtools-3.02a06' from 'https://cache.nixos.org'...
+copying path '/nix/store/msa690459q4n9fiq125gsfambbd62qb4-libdaemon-0.14' from 'https://cache.nixos.org'...
+copying path '/nix/store/qhad1pgmn3z406pgk345281xb5zjqrkm-libelf-0.8.13' from 'https://cache.nixos.org'...
+copying path '/nix/store/msfkr5yqdxjx5cm24pvn3q1552rsjn8h-libev-4.25' from 'https://cache.nixos.org'...
+copying path '/nix/store/yh1w64xprl0rxmj8nvxpmikp6x3qvgdb-libffi-3.2.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/yfmvcf5cslq4f9kv0vlmxksdgvick22d-libgpg-error-1.36' from 'https://cache.nixos.org'...
+copying path '/nix/store/0jl2dhydfh3jbfpkgkrixisqkhj12d4y-libffi-3.2.1-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/jsg4bi31drwy614hdkkwf32m4wz3im6g-libassuan-2.5.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/94nrq9paz335s155x9za8n7kb0q3y211-libgcrypt-1.8.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/nq2x9w3rjd5l2yvlv328i19ljar8bdab-libidn-1.35' from 'https://cache.nixos.org'...
+copying path '/nix/store/vzs0x1kaliybgk7yr9lrf6ad4x5v1k9y-libjpeg-turbo-2.0.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/hpmni5y805q7a07q9sn3nwjk4i2m2jl5-libkrb5-1.17' from 'https://cache.nixos.org'...
+copying path '/nix/store/hps5ziw9zq6mcjh9b7naaxawnqymws4m-jasper-2.0.16' from 'https://cache.nixos.org'...
+copying path '/nix/store/b6pdz8g070kbf0rdavjz6rip7sx06r8h-libkrb5-1.17-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/wym0bhrfdx5ndfpx6y39c6j3pf2n6wak-libksba-1.3.5' from 'https://cache.nixos.org'...
+copying path '/nix/store/qbwcp86aslamyhhmf2xx0l5d17dyg2jh-libmnl-1.0.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/mmjbmvw64yl2756y1zvsxk0ic0nhzq2a-libnfnetlink-1.0.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/kvy2sz5lvi89lnh4rmw1df4jsnhqf1ay-libnftnl-1.1.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/wccns8l8bav11z3xlhasmnkz383q1k9p-libnetfilter_conntrack-1.0.7' from 'https://cache.nixos.org'...
+copying path '/nix/store/i8859i082xqnrhzg7h6gz2ylc5wbw5pa-libnl-3.4.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/jn0bddfc3fzhnf5ns4s2khhzclswvzb2-libpcap-1.9.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/7b7nbb0w2iwskwhzjhfwrif631h4smia-libpciaccess-0.14' from 'https://cache.nixos.org'...
+copying path '/nix/store/gsl1dw8ycrdvlzczsl59mkz0qpbwcmz1-iptables-1.8.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/mrclkdxryhjd6i36hlad6fwahjd14fmg-libpng-apng-1.6.37' from 'https://cache.nixos.org'...
+copying path '/nix/store/4z62pandn85xhcc5vazmi29cs2yps47b-iproute2-5.0.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/zxdnpb673bf27hcqzvssv5629m4x5bjv-freetype-2.10.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/hg863b95fxv9zlk008qjyf87qgyx58h1-libseccomp-2.4.1-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/gn5cd1z252aip0rvds71g9mgfhh6i8p7-fontconfig-2.12.6-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/zckvgwnyah69mmwn0g5vr4y85rmzwld9-libsodium-1.0.17' from 'https://cache.nixos.org'...
+copying path '/nix/store/5l3967kll8m6s66zprzwb2p6vf2mh5yd-libtasn1-4.13' from 'https://cache.nixos.org'...
+copying path '/nix/store/z7jh25by0vv378gacjd01idi52dj688h-libtiff-4.0.10' from 'https://cache.nixos.org'...
+copying path '/nix/store/z7n5a3kwjylsgnc1d271j4dn41m6shpz-libtirpc-1.1.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/xyx7dm2b6ylcdp38pfwcip9ssx0fdzga-libtool-2.4.6-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/mn0nzy294n07x1b92m9n0rwrv7z1441m-libunistring-0.9.10' from 'https://cache.nixos.org'...
+copying path '/nix/store/9hysgvp7qrfcil4b5qhwdq2vm9hism13-libxcb-1.13.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/ndjjyr4rqibzkgs8w55bx2idhnckh39p-libidn2-2.1.1a' from 'https://cache.nixos.org'...
+copying path '/nix/store/69vq0a9sqynmz335apm8zgyjdmq34s5j-libX11-1.6.7' from 'https://cache.nixos.org'...
+copying path '/nix/store/b7w6bpx5z0ncy35kqxvmpg4lwrnc8jf2-libxml2-2.9.9' from 'https://cache.nixos.org'...
+copying path '/nix/store/ag3fp30cz58ijm2yyy5adp1f3kw814b5-libXcomposite-0.4.5' from 'https://cache.nixos.org'...
+copying path '/nix/store/lswf09qbkkrqd0rgzaqyrkr44lf78y9x-libXext-1.3.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/aqp0mrdbhvkm8rl1z0p2rkqnz6pbclhq-libXfixes-5.0.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/cd09qv56inq4gwa89656r4n0lq0vgidw-libXi-1.7.9' from 'https://cache.nixos.org'...
+copying path '/nix/store/i10riha5s5dgafznk3gwn36fyr3cpxb4-libXinerama-1.1.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/dvsw0fhfzqf4xg0q2idhs02rhwn4k8cv-libXrender-0.9.10' from 'https://cache.nixos.org'...
+copying path '/nix/store/k4v5havnf7pmcv40xadh8mb7b0nbcgxz-libglvnd-1.0.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/3s4fr71ykyw54kyyqavd0ba42klg0bhf-libXcursor-1.2.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/dy437h3f5i500gv6znll974c87grzh3l-libXft-2.3.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/lnzh3cjjcbafh6wsivw10wl60g7xplxj-libXrandr-1.5.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/sfrh0r54ykfzv62h17gi8hm6778j7k0l-libyaml-0.2.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/c7jkmfjhl3jkgnkrhh021vrqry7zplc1-linux-headers-4.19.16' from 'https://cache.nixos.org'...
+copying path '/nix/store/zh1q7yvyaxlcmj3n6g0rrdaq0v73pp90-linux-pam-1.3.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/yi01h84z709wf02id8hsb170z53wvk7r-glibc-2.27-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/lr9yhdbn8a3la69j56cz0vi1qva973dv-kbd-2.0.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/5qnlfx9qncn0fcw6mbfj6j58pz0cv0p3-binutils-wrapper-2.31.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/apcaggi0q3vrb4ha1b07cjxiim2li5ly-gcc-7.4.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/caa29d4y2zip0ly9mcc7f4w94blw8k60-lz4-1.9.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/ds1prvgw3i3ic8c7axyrw4lwm3d0gqab-gcc-wrapper-7.4.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/7mmn8ri08z48vfj69c2h66f3g349ilq1-mailcap-2.1.48' from 'https://cache.nixos.org'...
+copying path '/nix/store/skkpbcqavjd8q0zmd94js6nz7pgbvpfl-mirrors-list' from 'https://cache.nixos.org'...
+copying path '/nix/store/z9fv0x6vwlx3xkcac3lg6v8g0figkx39-ncurses-6.1-20190112' from 'https://cache.nixos.org'...
+copying path '/nix/store/vzjwz11r1yy02xv07vx2577pqizzx83n-nettle-3.4.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/ni4nc256xs4f5hmhlhybxl8k40fwi5m3-libedit-20190324-3.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/h8898ysg2s23k6palhxy9a5sbgrvvrcy-nfs-utils-2.3.3-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/wkby8pdmg4vpvbl0d85gynh103k2h1ky-nghttp2-1.38.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/kmscm0qm9j480wpd1yh42b1g0zc6qbmv-nghttp2-1.38.0-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/n6z00dm6a5fdv935v8bv59909ra51xli-npth-1.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/xfgg45by0j5hxi1kdwh8x2pkfd67wwzd-nss-cacert-3.42.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/f11valqiyik1ggdlnhg3ibwgrj1imidb-numactl-2.0.12' from 'https://cache.nixos.org'...
+copying path '/nix/store/g4qqgmrm254axgndybnpwg7s780bxy1a-numad-0.5' from 'https://cache.nixos.org'...
+copying path '/nix/store/sm4yylq92rip64wdk3iniy91w48a90ia-openssl-1.0.2r' from 'https://cache.nixos.org'...
+copying path '/nix/store/qpj9rsal85rc94pizrkwb3c5nkivlfcl-p11-kit-0.23.14' from 'https://cache.nixos.org'...
+copying path '/nix/store/fhgiisqjpzwl8z4fchgc07avg1azmp0r-cyrus-sasl-2.1.27' from 'https://cache.nixos.org'...
+copying path '/nix/store/xbcc8v50g7h7wqh892q7k22wb1ky8cg7-libevent-2.1.8' from 'https://cache.nixos.org'...
+copying path '/nix/store/irqcza91k5smn6f4dyvqzw0zjn50d58f-libssh2-1.8.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/18rr3rg32imsnfyx6zb6s8lc8qpkdr74-nghttp2-1.38.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/5pjazw71xk4kysxrzacgjl4iai691k25-curl-7.64.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/r7lhx3aqyils26h7wbxbgf376c0n4ab5-libssh2-1.8.2-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/wy2alf2g85wym6i9n279d7a9nianx3is-curl-7.64.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/rf104cwz7kaa51s49n4c2aar0jrrj8px-nghttp2-1.38.0-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/b77nn2r1c8cqpr9prh1ldwr3m6xdrkpa-openldap-2.4.47' from 'https://cache.nixos.org'...
+copying path '/nix/store/2ar3zk5fjr34ys2dqnsfbb678x6fdlj4-openssh-7.9p1' from 'https://cache.nixos.org'...
+copying path '/nix/store/8yy3wngrdcpmjyw2ryas1y4wwhbd1356-patch-2.7.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/61shjilahl0d237fg9b3z3chza2lgms4-patchelf-0.9' from 'https://cache.nixos.org'...
+copying path '/nix/store/r6mrgd9k1jzzqrhphrg1qgxvgvbka7p8-pcre2-10.33' from 'https://cache.nixos.org'...
+copying path '/nix/store/qd4j58ykdkg9yvy8kvgh0i00gacy0ldm-perl-5.28.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/mpnl3p6mzm71vci81r0h346jywm6863s-perl5.28.2-Encode-Locale-1.05' from 'https://cache.nixos.org'...
+copying path '/nix/store/85hh7apv9n3gganpnnq36zvlwm126mdh-openssl-1.0.2r-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/k59h7bs9307y7rb0z70vasvj8hd5pcn5-perl5.28.2-HTML-Tagset-3.20' from 'https://cache.nixos.org'...
+copying path '/nix/store/3mgn9jnjzj1rgxclbixk5xa0kkx9xpw3-openssl-1.0.2r-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/xzzf5h2f02bwjcph28vly4wklp13wb0g-perl5.28.2-HTML-Parser-3.72' from 'https://cache.nixos.org'...
+copying path '/nix/store/bh412cii40qpzc20pzd48dq45jv9xm5a-perl5.28.2-HTTP-Date-6.02' from 'https://cache.nixos.org'...
+copying path '/nix/store/5x1551gw825apcsnwx8gzfnmiapbz8yl-perl5.28.2-IO-HTML-1.001' from 'https://cache.nixos.org'...
+copying path '/nix/store/0jwyd55g8nfhm25a0bh1j1by6afdriic-perl5.28.2-File-Listing-6.04' from 'https://cache.nixos.org'...
+copying path '/nix/store/50rywa1m6asdz1y78a6dpa0xf98vm01v-perl5.28.2-LWP-MediaTypes-6.04' from 'https://cache.nixos.org'...
+copying path '/nix/store/pfd5p3pyfrkwxh42j491kkqgl8n9aa67-perl5.28.2-TermReadKey-2.38' from 'https://cache.nixos.org'...
+copying path '/nix/store/mp1hzpgp3sa6xac8dc7rldh5rab8lk2w-perl5.28.2-Test-Needs-0.002006' from 'https://cache.nixos.org'...
+copying path '/nix/store/8vqr6vbvyzlpsk6q0mnj93sf5j1wr5qa-perl5.28.2-Test-RequiresInternet-0.05' from 'https://cache.nixos.org'...
+copying path '/nix/store/b2rwzjp56yzd4jg2csx568h2dgj1l3l2-perl5.28.2-Try-Tiny-0.30' from 'https://cache.nixos.org'...
+copying path '/nix/store/za8dm1xsfslzdk4j434w869bngrygici-perl5.28.2-URI-1.76' from 'https://cache.nixos.org'...
+copying path '/nix/store/wxdxzvaj3mcmf3i9yqwinpcb26iz3pas-perl5.28.2-Test-Fatal-0.014' from 'https://cache.nixos.org'...
+copying path '/nix/store/i652w9gqbmc6k48lz3b02ncv7hpgc7nv-perl5.28.2-HTTP-Message-6.18' from 'https://cache.nixos.org'...
+copying path '/nix/store/fky41n197z9n737kbih4j7ncnh7cngnz-perl5.28.2-Net-HTTP-6.18' from 'https://cache.nixos.org'...
+copying path '/nix/store/7gwvcm8dc24vnphbx85q1afaxhfhac28-perl5.28.2-HTTP-Cookies-6.04' from 'https://cache.nixos.org'...
+copying path '/nix/store/jvfc6r03a95aignzbfg987kspa04s0md-perl5.28.2-HTTP-Daemon-6.01' from 'https://cache.nixos.org'...
+copying path '/nix/store/wvccp35z40fj1v8xbz6czrm4bbiaqd45-perl5.28.2-HTTP-Negotiate-6.01' from 'https://cache.nixos.org'...
+copying path '/nix/store/hnvmacd16kzmwcsavzkssrqj2kiryy2p-perl5.28.2-WWW-RobotRules-6.02' from 'https://cache.nixos.org'...
+copying path '/nix/store/k7g175rls2pk34m23wqhplv8mbnsc0lc-pixman-0.38.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/91a8wnca647kfw67sk0iykdbyjpr8430-perl5.28.2-libwww-perl-6.38' from 'https://cache.nixos.org'...
+copying path '/nix/store/xav1lh5jlgrz7amaz7b5ghrz0c273lji-python3.7-pyOpenSSL-19.0.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/43i41p1n1sxssmqpf9jp5x4gcy6r2fl6-git-2.21.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/bxps2h6axpqrjxcmib344a3immy3gvhd-readline-6.3p08' from 'https://cache.nixos.org'...
+copying path '/nix/store/k1xlz5zy7rm2a428byz850c1igc2j1z8-readline-7.0p5' from 'https://cache.nixos.org'...
+copying path '/nix/store/2xhsrw4ws6kc4x3983wdwwlnim27c6iz-shadow-4.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/715lcljfyp8grxlmaf51pn0n3ml3dwgg-bash-interactive-4.4-p23' from 'https://cache.nixos.org'...
+copying path '/nix/store/ap4sr1n0wlgmybxbw3pvq8klh8snc3n8-sqlite-3.28.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/ndbpc44lv43k7jnb0ip1qwk8f0slx685-bash-interactive-4.4-p23-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/3xzkc4wyadr3vrva2q320axjr6cyb43n-python-2.7.16' from 'https://cache.nixos.org'...
+copying path '/nix/store/s5f3vpmig33nk4zyk228q55wdydd3pc2-python3-3.7.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/0w6l8kh3d30kg3nxc8xyi84gmrfxjnns-git-2.21.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/aqgl1dqd6lr7jr9knqsyyq09bm0ibw7s-python3.7-cffi-1.12.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/9fvjgcjn1d0c9476qlr05psvwljwzq59-python3.7-cryptography-2.6.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/yf4i32dx953p2dv2agfdyxdwg6ba0l61-python3.7-setuptools-41.0.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/7crry947d1xvp1f15c6q089l0gcy5hpc-stdenv-linux' from 'https://cache.nixos.org'...
+copying path '/nix/store/b2wy5p5bykcnkwz5q1w8qq4qfzr4arc7-python3.7-MarkupSafe-1.1.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/z0sqda5bg0l4p0851nw05h7hii0jj1kr-python3.7-PyYAML-5.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/n1dcmv0ii513dhlnllc790vfn8i9j9lj-python3.7-Jinja2-2.10.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/r3x6y48q13qwl9x1wwz37002b7fhyidv-python3.7-asn1crypto-0.24.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/02nzlzdw0kiici9368jp5s84cpbqxkva-python3.7-certifi-2018.11.29' from 'https://cache.nixos.org'...
+copying path '/nix/store/wbbwikfkc7fbida822a5z9b4xmsnwm3d-python3.7-chardet-3.0.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/pvzbhdzqm4i20v3flr5mf7yfs7n2lrvg-python3.7-dnspython-1.16.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/m65jki67b02la5k5r9vgddcp13l32lw5-python3.7-httplib2-0.12.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/n2mzl8vljdksdqybihdy9mm5v7hm19q5-python3.7-idna-2.8' from 'https://cache.nixos.org'...
+copying path '/nix/store/31l04a1yxxdbdpzdp8mpfk96rhj3bg2c-python3.7-netaddr-0.7.19' from 'https://cache.nixos.org'...
+copying path '/nix/store/xs2k8driha83f9k017bkgch8lcl4z7w0-python3.7-ply-3.11' from 'https://cache.nixos.org'...
+copying path '/nix/store/v69ld4vcgkr4i4giv1nzl4kax9zx1fpa-python3.7-pyasn1-0.4.5' from 'https://cache.nixos.org'...
+copying path '/nix/store/im1940h7b6pjlnh38q6lasdn8iybsv4v-python3.7-jmespath-0.9.4' from 'https://cache.nixos.org'...
+copying path '/nix/store/9xb22l3577nznvd6dqqis6ixgmwq9ygh-python3.7-pycparser-2.19' from 'https://cache.nixos.org'...
+copying path '/nix/store/x6zlzsjsd7m9mjkmxlp15vcay58g6a04-python3.7-pycryptodome-3.7.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/y5gfmqjp68h4fqq8z4p219pimm7ws49j-python3.7-cffi-1.12.3-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/gg469jh0m4dk4b0x6s44ziad69czbv22-python3.7-pycrypto-3.7.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/5ydkc9jcaaxlz58dr7gvyhi3gcmafsfy-python3.7-pyparsing-2.3.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/m2n4drah6566qlccaabjhnnl4slql3cd-python3.7-pysocks-1.6.8' from 'https://cache.nixos.org'...
+copying path '/nix/store/dy5wi2sqnhbnlpvjr8a0z96id1mq243j-python3.7-six-1.12.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/ryinn9xa3g8bn55nj1h54ypnlp9naq6i-stdenv-linux' from 'https://cache.nixos.org'...
+copying path '/nix/store/r54ql4g0hcxzp15sfjiagd1dmxh4s8n6-python3.7-bcrypt-3.1.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/9pa3p1rqhnvlrngaqsx09766cl1j6zf3-python3.7-httpretty-0.9.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/nvyhmkghwxh5f1wiid27vzxa0ddx929p-python3.7-packaging-19.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/xv9pfis6ixrsv7z1jrmgagi25qljvg0d-python3.7-pynacl-1.3.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/xbwr4gb4zgjmcrbx82zlcp1jfgcz75ya-python3.7-cryptography-2.6.1-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/1kz91g5mfj271lj5kxz2m1axcs2yqafy-thin-provisioning-tools-0.7.6' from 'https://cache.nixos.org'...
+copying path '/nix/store/n1y9i0bv0sg8n8759zd6smr2zjyn8jf3-python3.7-paramiko-2.4.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/7256h1y98mmzsckwk2x7i3v3cxmvgrmq-python3.7-pyOpenSSL-19.0.0-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/20wmykp8fj2izxdj8lic8ggcfpdid5ka-tzdata-2019a' from 'https://cache.nixos.org'...
+copying path '/nix/store/ch6pz5kfg0bd3sfyf1813cpskg7lidns-python3.7-urllib3-1.24.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/749qksf79hvn0aprcznd9bwfv550qwh3-go-1.12.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/d0wcd9mid6067i6va19lwiv29hln6n2j-python3.7-requests-2.21.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/rzfzb501miszas14xq6cr3c04m8kkdrb-terraform-0.11.14-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/qq59cnpcbnp4p054ipbs54fv946r4qr8-python3.7-boto-2.49.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/479dvd7q6c18l3jl2myhfxmfsjbqjjch-python3.7-dopy-2016-01-04' from 'https://cache.nixos.org'...
+copying path '/nix/store/kh18cbdb9f79gl58axwr8qq6c7bd0bl0-terraform-provider-acme-1.1.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/vin4cp4m5af1mxkb2jqqi8xkf98ca2sv-python3.7-ansible-2.7.9' from 'https://cache.nixos.org'...
+copying path '/nix/store/gydzhj2y5j1ggbainbilvpxi5glw5hmf-terraform-provider-alicloud-1.41.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/0rc1jyfbxwffmsphyv2pfnxd6smysc1l-terraform-provider-ansible-0.0.4-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/n7wdfylfi5wnrjdg4im9v2q9gnl99mmb-terraform-provider-archive-1.2.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/pk6r8sd18gmxns8r73qi2yrmzf4f4cp0-terraform-provider-arukas-1.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/x987jpq3yswa2aj51d63vwszfdm3r8ld-terraform-provider-atlas-0.1.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/fwvdxglj9asp4f90ihry29n2fm8a6i09-terraform-provider-aws-2.9.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/vbp6wnr2gyj50nabxgclkbqblmnwcnbg-terraform-provider-azuread-0.3.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/7mqpbfs391s9hbnfzkpgw3inj8mkldr8-terraform-provider-azurerm-1.27.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/ixycmxkr0wrz3gfxrnrdgcsk4gcyirpv-terraform-provider-azurestack-0.6.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/1hml3hx7qlbkv139khazb24jh69nngcd-terraform-provider-bigip-0.12.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/63k736kr346ncpzv5yiqiyyyiqpa2h8m-terraform-provider-bitbucket-1.0.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/sw6vlm5g6r6sivlncz7vh8ps7v7r22aa-terraform-provider-brightbox-1.1.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/d7rh74cch3ybp9r239j5c2c1rb0kx3pa-terraform-provider-chef-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/kqmg1xxd3vi37bqh7gdvi61bkp7wb9hi-terraform-provider-circonus-0.2.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/7k3hvg4sfpr6y2bg8b7x9mkb0d2p3scr-terraform-provider-clc-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/l8zqmzg19i62iz4argyjjr071rid3q9b-terraform-provider-cloudflare-1.13.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/p19dhs366b9zbbhs61xfw7d77sk9mkjr-terraform-provider-cloudscale-1.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/0z8i6sq8mg138qnifr1z37y780xkk8hf-terraform-provider-cloudstack-0.2.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/jjld4xam968mz645xh4g7i5zrnhsfyp9-terraform-provider-cobbler-1.0.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/np4ikymr5fq5iknjfrwrgqmcsid4dmw9-terraform-provider-consul-2.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/nc8x0pwchcc9xiv1nsj9idvpnfvkhh8p-terraform-provider-datadog-1.9.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/wv8y2h4w7lxi9x6k8pzh3dxy7i4csfbm-terraform-provider-digitalocean-1.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/k5vljkz5p59nrh50vx5k2790ksqcxjpc-terraform-provider-dme-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/g63hwfkd4yjncqh81ndn9vbmghdv41ng-terraform-provider-digitalocean-1.3.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/s7p4iij8p4hi6bmc2bf3flyf6wa6yzrj-terraform-provider-dns-2.1.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/ssr1y1736h7c6p8vs76iyxwg5h889x7d-terraform-provider-dnsimple-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/amlnqi4vvmpb9gjmyh1vr5hr2py12ss2-terraform-provider-docker-1.1.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/p9rjwvja55djz5g2qxyc9wzcpmska0ql-terraform-provider-dyn-1.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/8z5vplmgshflm7yixhp8q7hy11xxxd8a-terraform-provider-elasticsearch-0.6.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/jszvy5lyyjbfi7mjr7s9bnbq9cyq858v-terraform-provider-external-1.1.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/wh8pg14c3ykmmnd342llbzjigahc54dw-terraform-provider-fastly-0.6.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/0bdf1xn7p6xzk008yr6cahq3wjlvah5g-terraform-provider-flexibleengine-1.5.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/yn7smb316b6d99wjw2v9fgxzbrqnq9jm-terraform-provider-gandi-1.0.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/95rd64vii9j6h31fcr9lba8m8940zfpj-terraform-provider-github-2.0.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/dz9dqcnz8v9cy54x5hax599zjwckp0kd-terraform-provider-gitlab-1.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/kds827ryxx16rwhrsdn9wnr2pxf5qaxm-terraform-provider-google-2.6.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/ybsmcpfglj9fm5kjxyykbnwfjmhxrwfv-terraform-provider-google-beta-2.6.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/scdscan356g79qb7cf776gy7src22zbl-terraform-provider-grafana-1.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/yr6qv6j9qrc03gl7bknw6p1fx1pzk0l9-terraform-provider-hcloud-1.9.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/yvqaj61p81kd4bq3dyw64idqj61rwpka-terraform-provider-hedvig-1.0.3-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/hbs2vrw1y8y1qz1hi71jaz0j3pl95qfs-terraform-provider-helm-0.9.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/5nkxpwdgpxs97yqh2fxz9y0rm80rc280-terraform-provider-heroku-1.9.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/2yy3pv77rwbxk7b2mpysmiqdzhmgmphg-terraform-provider-http-1.1.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/mvmjdim7dn589inb8dsjxap08h4ip4h5-terraform-provider-huaweicloud-1.4.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/wi4jm555w0rc1daiy2sz9iwrpk6cb2d8-terraform-provider-ibm-0.11.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/xn8xmzxjx7i8wwfvbiwfgmv9mn4n45dk-terraform-provider-icinga2-0.2.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/f7zh0d0n2dj4dcig903zd5jgb2cpaxf6-terraform-provider-ignition-1.0.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/csajl6aq80s9v2xbkmlzgfxlilmbzff6-terraform-provider-influxdb-1.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/5k67y2lglsnswrya21z51d4h87a081k5-terraform-provider-kubernetes-1.6.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/x1idgjd7vs75nj9s3krimbfjvh27n06d-terraform-provider-librato-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/shf6d1928fzxcaz6zh0bhcqv3xhvxhjd-terraform-provider-linode-1.6.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/gkpa27fykskx0dd52dca515gd91qhhgf-terraform-provider-local-1.2.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/vkdh5ibsmzj6p53krnqqz1pv620f42r0-terraform-provider-logentries-1.0.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/sn2cydjzikl3rws2nfa7pdvayb45brrd-terraform-provider-logicmonitor-1.2.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/3qjz5kfri8sa0dj1213rap75alpqsm2l-terraform-provider-mailgun-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/byxya0m4656ylf5imvs2v9p2c1av1kjl-terraform-provider-matchbox-0.2.3-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/kvjcl6plvmkm6i2lzd7wrkbiis3b4vhg-terraform-provider-mysql-1.5.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/rs21a235ix9v8y4hgazkzi6g1x5dqf7v-terraform-provider-netlify-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/h40ib0qsa07b6ld1pv4x76xx2g7xgik6-terraform-provider-newrelic-1.5.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/89wg3f6hk41gxm4n6cikj6r7gr2k7h8j-terraform-provider-nixos-0.0.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/id798ngchr83gc0mmqd3zlviljshjhvb-terraform-provider-nomad-1.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/8krs5vbid0ic6vvlvjvndvjb815q8hbd-terraform-provider-ns1-1.3.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/ymn53mhfkkhs26qw708yv7bd7jmbp636-terraform-provider-nsxt-1.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/5z3s6zbi98gh8cfliaplnmv15j568c46-terraform-provider-null-2.1.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/6brahzfjri338n3fggplfrsmf63mrwnx-terraform-provider-nutanix-1.0.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/zaq3w03j96qqhzpcfs9yacwa98sdsmiv-terraform-provider-oci-3.24.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/p00q64lbln1z9kfgpd2r6qhk0kc7i7w7-terraform-provider-oneandone-1.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/50wggbbr0wdg21hrvl4icwlppvk4464b-terraform-provider-opc-1.3.6-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/1wh5wgw6a3w91mk2avvn9ssw32nlw9kd-terraform-provider-openstack-1.18.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/pvsfn6d0byl3hfwnyfg21yivyj8iff8s-terraform-provider-opentelekomcloud-1.8.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/q0ndxs1vqdy5r749h5hhhbixgyf5yasx-terraform-provider-opsgenie-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/hl8lzq90qjhq0n710lm5n17lc9i80vsh-terraform-provider-oraclepaas-1.5.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/z4vgv1w5cmd6p90grfgr1k6m87fydr3g-terraform-provider-ovh-0.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/xkv7la24vsyn9n23wc1izcmmp7aifzb3-terraform-provider-packet-2.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/hy2xn2jxm4wp7j86p08m9xdpxncskdgv-terraform-provider-pagerduty-1.2.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/5lcz7p2xz1zp8iyd9yjmrg1kxw5yygnx-terraform-provider-panos-1.5.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/y20bvdwb0s95wa4gzhkkxd1xcc4c8whx-terraform-provider-postgresql-0.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/vrrs5p13mykyniglgfdsn8xii9b7s850-terraform-provider-powerdns-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/in7wgxanbdycb9wpq1j29928gllc0ap6-terraform-provider-profitbricks-1.4.4-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/zdfchj49phsp0sahcvdfp8ipc0chakg3-terraform-provider-rabbitmq-1.0.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/15fv1623h1vcn5z0nq42v5rgjirbp5r0-terraform-provider-rancher-1.2.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/7axz4xwz0vfrdgjyk59xg998bdqbvg5x-terraform-provider-random-2.1.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/yjdcrd1lkzp8c7cawcpy40c4p3ngaw12-terraform-provider-rightscale-1.3.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/2lh08897y86kxvyjdd1vlnkg8fz88nkd-terraform-provider-rundeck-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/fq4765nh9p0si8mh9cnywsq48zr1qc27-terraform-provider-runscope-0.5.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/lgrhsbfmpf1cjbpig8llxfrfb6xhz7xv-terraform-provider-scaleway-1.9.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/sw6n4yz49cz5vm4ggpk2l5j1vngac8j2-terraform-provider-secret-1.0.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/f3rbnn0jhm549mcp7k9ysjcq26j8fvyy-terraform-provider-segment-0.2.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/y1v3g4m9bmmmvmw4z84m5fpmdy42lbr4-terraform-provider-selectel-2.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/bf26sh99bngrnpzrj7gyz0689b060vak-terraform-provider-skytap-0.9.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/l6ns1zcd18j9708y3agxgi0kihs4zc7i-terraform-provider-softlayer-0.0.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/5ygnx64lyv5a8pnpmlj7bs8s2dz2hkxd-terraform-provider-spotinst-1.13.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/k3qhzd0x8a1z6h5kyifnv3axbfs7fy66-terraform-provider-statuscake-0.2.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/bvsihhp4jv61hz6mc17mn1sar03k0i8d-terraform-provider-telefonicaopencloud-1.0.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/9gjpg5lsdhgrhi805948c648nhn39l8z-terraform-provider-template-2.1.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/6hv1yfwyydyg2lzqcllwjb68xl4mrppw-terraform-provider-tencentcloud-1.5.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/7fd40sykaxj6dvya7mvif3f16wrqijr9-terraform-provider-terraform-1.0.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/rk78bh2s5yjpmgdhzqlf1hnj6ij0h20n-terraform-provider-tfe-0.8.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/pb6r7dllpfw5cbhpmv2v2kms9a57r4v5-terraform-provider-tls-2.0.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/apjv9g35sklrab9lzz9r9rq7lnczv2wy-terraform-provider-triton-0.5.1-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/5b0s7hhp52vq4psmicf8m8y2jr5jsiaz-terraform-provider-ucloud-1.6.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/ck0lifb2jgkmg6c7frz7fxqwz5fbdnxk-terraform-provider-ultradns-0.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/1fl7yd9chgswnabbsvva7xvg5ak1q44p-terraform-provider-vault-1.8.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/n3rakrhhvi3bb0ffnjs51drmy157p51q-terraform-provider-vcd-2.1.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/n1mrfbzlh3cjm9mfyrp48pybl3sg4717-terraform-provider-vsphere-1.10.0-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/cncad2f4lfxfxnwd9lfhjjd89x3anxqr-terraform-provider-yandex-0.5.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/2j9jm3jaxfn2g6wxak61wkhmrg6c4nn5-unbound-1.9.1-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/01aggsi1ndjhnr93gcy8c4s1xbxab8dn-unzip-6.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/1dydqkwswavzkyvr1qr62zmx3nqpmpp4-gnutls-3.6.7' from 'https://cache.nixos.org'...
+copying path '/nix/store/69msrhi85iay3cb7c3nksr0s8l0xpsc7-util-linux-2.33.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/xdhh337yhl93x33vzd9davinrbr9x9iz-libmicrohttpd-0.9.63' from 'https://cache.nixos.org'...
+copying path '/nix/store/rcn9d2q4mpapxf4qd54hkqz32ljhv0rw-util-linux-2.33.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/50gfgyi2rxi4n25if8cqvlxlh5czl0wd-yajl-2.1.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/yrmrvha03pvdyi9ww2bi6xjpk5930sf8-glib-2.60.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/z2darh83lb4rmsfnnyjc0hll51fyvj49-libSM-1.2.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/pknq6p5h43zm4r0dgjnfywql04hdv3js-atk-2.32.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/p8s6295x84d594sxvzml8rsxqjdghmc5-cairo-1.16.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/v5q3cnkjfy8rfacsjqn1nza93mbczgd5-gdk-pixbuf-2.38.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/9yb9whkdgf3zyy85xac248kwq1wm6qd6-harfbuzz-2.3.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/xxwqa4rwfi97az8a6dl6vhqiyjvmnm9v-libsecret-0.18.8' from 'https://cache.nixos.org'...
+copying path '/nix/store/3xq3w5fgz99rhp3rxfkbp0ahg37mgmly-pango-1.43.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/a185xh0jcx7il7hw2gfh0pmvrah3x67y-systemd-239.20190219-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/s7rqxrfb631i53dfl90gac35095jyypq-util-linux-2.33.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/6hwdmzpspbnb7ix5z6m9h60jyy42kj90-dbus-1.12.12-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/xmy3chnan9iiag9apm7dd825cmlkiiij-libusb-1.0.22' from 'https://cache.nixos.org'...
+copying path '/nix/store/ricz15lpkjrasc5cpzp6l60iwlc87wv3-avahi-0.7' from 'https://cache.nixos.org'...
+copying path '/nix/store/di6rrbw1kbdrwxiymq91dgdvp2rvk1xv-dnsmasq-2.80' from 'https://cache.nixos.org'...
+copying path '/nix/store/kccb2k5hdjhdyxbxsri9lwwc4z1pvx6z-cups-2.2.11-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/vqvmd2r9pf9f74jqipbhrn7wksiiy1jf-pcsclite-1.8.25-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/x3rijvjjrch1fjs60lrw9xb7pidp817f-gtk+-2.24.32' from 'https://cache.nixos.org'...
+copying path '/nix/store/y3x0fvlz4a30iajw3vd1rkg45vl3k15c-pcsclite-1.8.25' from 'https://cache.nixos.org'...
+copying path '/nix/store/a2cg0faxbwnicf41vwmw467jw7i9ix46-pinentry-1.1.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/lvqp39d4hx776nkw3a0qfnvvjmnj49hc-procps-3.3.15' from 'https://cache.nixos.org'...
+copying path '/nix/store/z68464p6aafah8b8xcybkwyhmqdf0jgx-gnupg-2.2.15' from 'https://cache.nixos.org'...
+copying path '/nix/store/6bvd29jny80ka8df9prr5hrl5yz7d98k-systemd-239.20190219' from 'https://cache.nixos.org'...
+copying path '/nix/store/qgr66z24rfbb8cc965rr2sklh38p083n-git-crypt-0.6.0' from 'https://cache.nixos.org'...
+copying path '/nix/store/daizqdqrm7g4favv814hnijmqhay8hs4-dbus-1.12.12' from 'https://cache.nixos.org'...
+copying path '/nix/store/wf5nv1gzrx378icqmjgwl2isg7s8ly80-lvm2-2.03.01' from 'https://cache.nixos.org'...
+copying path '/nix/store/97d3r4a7v1nal53x0gv17hrbbcp0rb21-util-linux-2.33.2-bin' from 'https://cache.nixos.org'...
+copying path '/nix/store/glrnpb3rkribnrjh5gzs24nmvl3m00cg-parted-3.2' from 'https://cache.nixos.org'...
+copying path '/nix/store/f39sk2aim9xz7dzn7qvqh442xm58v77w-nfs-utils-2.3.3' from 'https://cache.nixos.org'...
+copying path '/nix/store/wi2mn48l130r7wafvj757rvzfkla59if-pm-utils-1.4.1' from 'https://cache.nixos.org'...
+copying path '/nix/store/35mdgd1wc67g60azsrghzgn4fjhr5d2r-zfs-user-0.7.13-lib' from 'https://cache.nixos.org'...
+copying path '/nix/store/9dk1gh07pwkvg62rns4k670h54bhfhgh-zlib-1.2.11-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/8bxvyvd3ky0w5gk3k0lq2fmvj30fbzj8-zfs-user-0.7.13' from 'https://cache.nixos.org'...
+copying path '/nix/store/i3kh8yq4kgkfn234pnwxnvxbrcgcckc8-curl-7.64.1-dev' from 'https://cache.nixos.org'...
+copying path '/nix/store/nwhvl00i2wa4ms26lszk36vwir90jd3x-libvirt-4.10.0' from 'https://cache.nixos.org'...
+building '/nix/store/as9r3n55czsdiq82iacs0hq12alxb2m0-remove-references-to.drv'...
+copying path '/nix/store/l9821zngvlh8bd6mlyzvi1mc754dyhjz-terraform-provider-libvirt-0.5.1-bin' from 'https://cache.nixos.org'...
+building '/nix/store/fdh1ahjdh3fgsz4qz386klsa9bsqil48-source.drv'...
+
+trying https://github.com/n3integration/terraform-godaddy/archive/v1.6.4.tar.gz
+ % Total % Received % Xferd Average Speed Time Time Time Current
+ Dload Upload Total Spent Left Speed
+100 139 0 139 0 0 863 0 --:--:-- --:--:-- --:--:-- 858
+100 19326 0 19326 0 0 59282 0 --:--:-- --:--:-- --:--:-- 59282
+unpacking source archive /build/v1.6.4.tar.gz
+copying path '/nix/store/isdbs6d2jk75kj0qk4s3prwlwcgkgalf-tf-plugin-env' from 'https://cache.nixos.org'...
+building '/nix/store/x7r5kh20ajlnj6vw6fg649w0iypcg1ga-terraform-godaddy-1.6.4-go-modules.drv'...
+unpacking sources
+unpacking source archive /nix/store/m62ydk4wy6818sysfys0qz20cx5nzj7h-source
+source root is source
+patching sources
+configuring
+building
+go: finding github.com/mitchellh/gox v0.4.0
+go: finding github.com/hashicorp/go-hclog v0.0.0-20181001195459-61d530d6c27f
+go: finding github.com/hashicorp/go-getter v0.0.0-20181213035916-be39683deade
+go: finding github.com/mitchellh/go-testing-interface v1.0.0
+go: finding github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af
+go: finding github.com/agext/levenshtein v1.2.1
+go: finding github.com/apparentlymart/go-cidr v1.0.0
+go: finding github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d
+go: finding github.com/hashicorp/hil v0.0.0-20170627220502-fa9f258a9250
+go: finding github.com/posener/complete v1.2.1
+go: finding github.com/mitchellh/copystructure v1.0.0
+go: finding github.com/hashicorp/errwrap v1.0.0
+go: finding github.com/hashicorp/hcl2 v0.0.0-20181220012050-6631d7cd0a68
+go: finding github.com/hashicorp/go-version v1.0.0
+go: finding github.com/mitchellh/reflectwalk v1.0.0
+go: finding golang.org/x/net v0.0.0-20181220203305-927f97764cc3
+go: finding golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4
+go: finding golang.org/x/tools v0.0.0-20181221235234-d00ac6d27372
+go: finding github.com/golang/protobuf v1.2.0
+go: finding github.com/hashicorp/go-multierror v1.0.0
+go: finding github.com/mitchellh/go-homedir v1.0.0
+go: finding github.com/apparentlymart/go-textseg v1.0.0
+go: finding github.com/hashicorp/go-uuid v1.0.0
+go: finding github.com/satori/go.uuid v1.2.0
+go: finding github.com/mitchellh/cli v1.0.0
+go: finding github.com/mitchellh/mapstructure v1.1.2
+go: finding golang.org/x/lint v0.0.0-20181217174547-8f45f776aaf1
+go: finding golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9
+go: finding google.golang.org/appengine v1.4.0
+go: finding golang.org/x/net v0.0.0-20181114220301-adae6a3d119a
+go: finding golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc
+go: finding honnef.co/go/tools v0.0.0-20180920025451-e3ad64cb4ed3
+go: finding github.com/hashicorp/terraform v0.11.11
+go: finding github.com/aws/aws-sdk-go v1.15.78
+go: finding github.com/onsi/ginkgo v1.7.0
+go: finding github.com/kr/pty v1.1.3
+go: finding github.com/mattn/go-isatty v0.0.3
+go: finding github.com/onsi/gomega v1.4.3
+go: finding github.com/bgentry/speakeasy v0.1.0
+go: finding gopkg.in/yaml.v2 v2.2.2
+go: finding gopkg.in/yaml.v2 v2.2.1
+go: finding github.com/fatih/color v1.7.0
+go: finding github.com/zclconf/go-cty v0.0.0-20181129180422-88fbe721e0f8
+go: finding github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7
+go: finding golang.org/x/text v0.3.0
+go: finding github.com/sergi/go-diff v1.0.0
+go: finding github.com/armon/go-radix v1.0.0
+go: finding github.com/fsnotify/fsnotify v1.4.7
+go: finding golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e
+go: finding golang.org/x/net v0.0.0-20181129055619-fae4c4e3ad76
+go: finding github.com/kr/pretty v0.1.0
+go: finding github.com/hashicorp/go-cleanhttp v0.5.0
+go: finding github.com/mattn/go-isatty v0.0.4
+go: finding gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127
+go: finding github.com/go-test/deep v1.0.1
+go: finding cloud.google.com/go v0.34.0
+go: finding github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348
+go: finding howett.net/plist v0.0.0-20181124034731-591f970eefbb
+go: finding github.com/hashicorp/errwrap v0.0.0-20180715044906-d6c0cd880357
+go: finding golang.org/x/crypto v0.0.0-20180816225734-aabede6cba87
+go: finding golang.org/x/net v0.0.0-20180724234803-3673e40ba225
+go: finding github.com/mattn/go-colorable v0.0.9
+go: finding github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d
+go: finding github.com/go-ini/ini v1.40.0
+go: finding github.com/mitchellh/iochan v1.0.0
+go: finding golang.org/x/sys v0.0.0-20181228144115-9a3f9b0469bb
+go: finding github.com/jessevdk/go-flags v1.4.0
+go: finding github.com/posener/complete v1.1.1
+go: finding github.com/spf13/pflag v1.0.3
+go: finding github.com/stretchr/testify v1.2.2
+go: finding golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890
+go: finding github.com/davecgh/go-spew v1.1.1
+go: finding golang.org/x/net v0.0.0-20180906233101-161cd47e91fd
+go: finding golang.org/x/sync v0.0.0-20181108010431-42b317875d0f
+go: finding github.com/bsm/go-vlq v0.0.0-20150828105119-ec6e8d4f5f4e
+go: finding gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7
+go: finding github.com/mitchellh/go-wordwrap v1.0.0
+go: finding github.com/ulikunitz/xz v0.5.5
+go: finding github.com/hashicorp/hcl v1.0.0
+go: finding github.com/hashicorp/go-multierror v0.0.0-20180717150148-3d5d8f294aa0
+go: finding gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405
+go: finding golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f
+go: finding github.com/google/go-cmp v0.2.0
+go: finding github.com/golang/mock v1.2.0
+go: finding gopkg.in/fsnotify.v1 v1.4.7
+go: finding github.com/onsi/ginkgo v1.6.0
+go: finding github.com/golang/protobuf v1.1.0
+go: finding github.com/aws/aws-sdk-go v1.16.11
+go: finding github.com/hpcloud/tail v1.0.0
+go: finding google.golang.org/grpc v1.17.0
+go: finding github.com/blang/semver v3.5.1+incompatible
+go: finding github.com/vmihailenco/msgpack v3.3.3+incompatible
+go: finding github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310
+go: finding golang.org/x/net v0.0.0-20180811021610-c39426892332
+go: finding github.com/zclconf/go-cty v0.0.0-20181218225846-4fe1e489ee06
+go: finding github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8
+go: finding github.com/hashicorp/go-plugin v0.0.0-20181212150838-f444068e8f5a
+go: finding github.com/pmezard/go-difflib v1.0.0
+go: finding github.com/spf13/pflag v1.0.2
+go: finding github.com/hashicorp/go-safetemp v1.0.0
+go: finding github.com/vmihailenco/msgpack v4.0.1+incompatible
+go: finding google.golang.org/genproto v0.0.0-20181221175505-bd9b4fb69e2f
+go: finding golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52
+go: finding github.com/kr/text v0.1.0
+go: finding golang.org/x/net v0.0.0-20180826012351-8a410e7b638d
+go: finding golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3
+go: finding github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3
+go: finding github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b
+go: finding github.com/golang/mock v1.1.1
+go: finding cloud.google.com/go v0.26.0
+go: finding github.com/oklog/run v1.0.0
+go: finding golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be
+go: finding google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8
+go: finding github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd
+go: finding google.golang.org/grpc v1.14.0
+go: finding github.com/client9/misspell v0.3.4
+go: finding github.com/kr/pty v1.1.1
+go: finding google.golang.org/appengine v1.1.0
+go: finding honnef.co/go/tools v0.0.0-20180728063816-88497007e858
+go: finding golang.org/x/sys v0.0.0-20180830151530-49385e6e1522
+go: finding github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb
+go: finding github.com/kisielk/gotool v1.0.0
+go: finding github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77
+go: finding github.com/mitchellh/hashstructure v1.0.0
+go: finding golang.org/x/net v0.0.0-20181106065722-10aee1819953
+go: finding google.golang.org/grpc v1.16.0
+go: finding golang.org/x/lint v0.0.0-20180702182130-06c8688daad7
+go: finding github.com/golang/lint v0.0.0-20180702182130-06c8688daad7
+installing
+hash mismatch in fixed-output derivation '/nix/store/q8y0mzjl78hfhazjgq2sc84i7dp9wnh0-terraform-godaddy-1.6.4-go-modules':
+ wanted: sha256:10n2dy7q9kk1ly58sw965n6qa8l0nffh8vyd1vslx0gdlyj25xxs
+ got: sha256:0p81wqw2n8vraxk20xwg717582ijwq2k7v5j3n13y4cd5bxd8hhz
+cannot build derivation '/nix/store/w4ghinrmpq524k3617ikfc8i42aa0dbb-terraform-godaddy-1.6.4.drv': 1 dependencies couldn't be built
+copying path '/nix/store/63gjp25l4cmdkl63zy0rcgmsvd2p2p34-terraform-0.11.14' from 'https://cache.nixos.org'...
+error: build of '/nix/store/9drkn1qxkkcrz5g3413lpmbc2xysa582-terraform-0.11.14.drv', '/nix/store/w4ghinrmpq524k3617ikfc8i42aa0dbb-terraform-godaddy-1.6.4.drv' failed
+```
diff --git a/src/content/pastebins/2019/12/29/raku-tuple-type.adoc b/src/content/pastebins/2019/12/29/raku-tuple-type.adoc
new file mode 100644
index 0000000..3d5ff34
--- /dev/null
+++ b/src/content/pastebins/2019/12/29/raku-tuple-type.adoc
@@ -0,0 +1,37 @@
+---
+
+title: Raku tuple type annotation
+
+date: 2019-12-29
+
+layout: post
+
+lang: en
+
+ref: raku-tuple-type-annotation
+
+---
+
+```perl
+# Single Str return value: this works
+sub f1(Str $in --> Str) {
+ $in;
+}
+
+# Tuple of Str as return value: this works
+sub f2(Str $in) {
+ ($in, $in);
+}
+
+# Tuple of Str as return value with type annotation: this doesn't works
+sub f2(Str $in --> (Str, Str)) {
+ ($in, $in);
+}
+```
+
+Error log is:
+
+```perl
+===SORRY!=== Error while compiling /path/to/my/file
+Malformed return value
+```
diff --git a/src/content/pastebins/2020/01/04/guix-import-failure.adoc b/src/content/pastebins/2020/01/04/guix-import-failure.adoc
new file mode 100644
index 0000000..3388a8d
--- /dev/null
+++ b/src/content/pastebins/2020/01/04/guix-import-failure.adoc
@@ -0,0 +1,47 @@
+---
+
+title: Failure on Guix TeX Live importer
+
+date: 2020-01-04
+
+layout: post
+
+lang: en
+
+eu_categories: guix
+
+ref: failure-on-guix-tex-live-importer
+
+---
+
+```shell
+$ guix import texlive fontspec
+redirection vers « https://ctan.org/xml/1.2/pkg/fontspec »...
+Backtrace:
+ 11 (primitive-load "/home/andreh/.config/guix/current/bin/…")
+In guix/ui.scm:
+ 1806:12 10 (run-guix-command _ . _)
+In guix/scripts/import.scm:
+ 116:11 9 (guix-import . _)
+In guix/scripts/import/texlive.scm:
+ 91:19 8 (guix-import-texlive . _)
+In guix/memoization.scm:
+ 98:0 7 (_ #<hash-table 7fe80e6c1480 0/31> ("fontspec" "latex") _)
+In unknown file:
+ 6 (_ #<procedure 7fe80e6e4de0 at guix/memoization.scm:17…> …)
+In guix/store.scm:
+ 625:10 5 (call-with-store #<procedure 7fe80e714a60 at guix/impor…>)
+In guix/import/texlive.scm:
+ 148:23 4 (_ #<store-connection 256.99 7fe811f3c960>)
+In guix/utils.scm:
+ 664:8 3 (call-with-temporary-directory #<procedure 7fe80cac1b40…>)
+In guix/svn-download.scm:
+ 160:14 2 (_ "/tmp/guix-directory.WtLohP")
+In guix/build/svn.scm:
+ 39:2 1 (svn-fetch _ _ _ #:svn-command _ #:recursive? _ # _ # _)
+In guix/build/utils.scm:
+ 652:6 0 (invoke _ . _)
+
+guix/build/utils.scm:652:6: In procedure invoke:
+Throw to key `srfi-34' with args `(#<condition &invoke-error [program: "svn" arguments: ("export" "--non-interactive" "--trust-server-cert" "-r" "49435" "svn://www.tug.org/texlive/tags/texlive-2018.2/Master/texmf-dist/source/latex/fontspec" "/tmp/guix-directory.WtLohP") exit-status: 1 term-signal: #f stop-signal: #f] 7fe80d229c80>)'.
+```
diff --git a/src/content/pastebins/2020/02/14/guix-shebang.adoc b/src/content/pastebins/2020/02/14/guix-shebang.adoc
new file mode 100644
index 0000000..67d504d
--- /dev/null
+++ b/src/content/pastebins/2020/02/14/guix-shebang.adoc
@@ -0,0 +1,23 @@
+---
+
+title: Guix shebang
+
+date: 2020-02-14
+
+layout: post
+
+lang: en
+
+eu_categories: guix
+
+ref: guix-shebang
+
+---
+
+```shell
+#!/usr/bin/env -S guix environment --ad-hoc bash -- bash
+set -Eeuo pipefail
+cd "$(dirname "${BASH_SOURCE[0]}")"
+
+pwd
+```
diff --git a/src/content/pastebins/2020/11/27/guix-build-local.adoc b/src/content/pastebins/2020/11/27/guix-build-local.adoc
new file mode 100644
index 0000000..350d50f
--- /dev/null
+++ b/src/content/pastebins/2020/11/27/guix-build-local.adoc
@@ -0,0 +1,60 @@
+---
+
+title: Guix build local module
+
+date: 2020-11-27
+
+layout: post
+
+lang: en
+
+eu_categories: guix
+
+ref: guix-build-local-module
+
+---
+
+FIXED: rename `name` on line 9 of the first snippet, and use `"my-hello"` instead of `"hello"`.
+
+---
+
+Inside a file named `build.scm`:
+```scheme
+(define-module (build)
+ #:use-module (guix packages)
+ #:use-module (guix download)
+ #:use-module (guix build-system gnu)
+ #:use-module (guix licenses))
+
+(define-public my-hello
+ (package
+ (name "hello")
+ (version "2.10")
+ (source (origin
+ (method url-fetch)
+ (uri (string-append "mirror://gnu/hello/hello-" version
+ ".tar.gz"))
+ (sha256
+ (base32
+ "0ssi1wpaf7plaswqqjwigppsg5fyh99vdlb9kzl7c9lng89ndq1i"))))
+ (build-system gnu-build-system)
+ (synopsis "")
+ (description "")
+ (home-page "")
+ (license gpl3+)))
+
+```
+
+A plain build command didn't work:
+
+```shell
+$ guix build -L. my-hello
+guix build: error: my-hello : paquet inconnu
+```
+
+But with an eval expression it did:
+
+```shell
+$ guix build -L. -e '(@ (build) my-hello)'
+# works
+```
diff --git a/src/content/pastebins/2020/12/15/guix-pack-fail.adoc b/src/content/pastebins/2020/12/15/guix-pack-fail.adoc
new file mode 100644
index 0000000..2834f90
--- /dev/null
+++ b/src/content/pastebins/2020/12/15/guix-pack-fail.adoc
@@ -0,0 +1,96 @@
+---
+
+title: Failure with relocatable Guix pack tarball
+
+date: 2020-12-15
+
+layout: post
+
+lang: en
+
+eu_categories: guix
+
+ref: failure-with-relocatable-guix-pack-tarball
+
+---
+
+FIXED: Use `GUIX_PROFILE= source etc/profile`
+
+---
+
+The example from the [blog post][guix-tarball-article] fails.
+
+[guix-tarball-article]: https://guix.gnu.org/blog/2018/tarballs-the-ultimate-container-image-format/
+
+```shell
+$ tar xf `guix pack --relocatable -S /bin=bin -S /etc=etc guile gnutls guile-json`
+$ source etc/profile
+$ bin/guile -c '(use-modules (json))'
+guile: warning: failed to install locale
+Backtrace:
+In ice-9/boot-9.scm:
+ 1736:10 13 (with-exception-handler _ _ #:unwind? _ # _)
+In unknown file:
+ 12 (apply-smob/0 #<thunk 7f9d240ca740>)
+In ice-9/boot-9.scm:
+ 718:2 11 (call-with-prompt ("prompt") #<procedure 7f9d240db740 ?> ?)
+In ice-9/eval.scm:
+ 619:8 10 (_ #(#(#<directory (guile-user) 7f9d23d00f00>)))
+In ice-9/command-line.scm:
+ 185:18 9 (_ #<input: string 7f9d23cfaa10>)
+In unknown file:
+ 8 (eval (use-modules (json)) #<directory (guile-user) 7f9?>)
+In ice-9/eval.scm:
+ 721:20 7 (primitive-eval (use-modules (json)))
+In ice-9/psyntax.scm:
+ 1241:36 6 (expand-top-sequence ((use-modules (json))) _ _ #f _ _ _)
+ 1233:19 5 (parse _ (("placeholder" placeholder)) ((top) #(# # ?)) ?)
+ 285:10 4 (parse _ (("placeholder" placeholder)) (()) _ c&e (eval) ?)
+In ice-9/boot-9.scm:
+ 3898:20 3 (process-use-modules _)
+ 222:17 2 (map1 (((json))))
+ 3899:31 1 (_ ((json)))
+ 3300:6 0 (resolve-interface (json) #:select _ #:hide _ #:prefix _ ?)
+
+ice-9/boot-9.scm:3300:6: In procedure resolve-interface:
+no code for module (json)
+$ bin/guile -c '(use-modules (gnutls))'
+guile: warning: failed to install locale
+Backtrace:
+In ice-9/boot-9.scm:
+ 1736:10 13 (with-exception-handler _ _ #:unwind? _ # _)
+In unknown file:
+ 12 (apply-smob/0 #<thunk 7f7fe607a7c0>)
+In ice-9/boot-9.scm:
+ 718:2 11 (call-with-prompt ("prompt") #<procedure 7f7fe6085940 ?> ?)
+In ice-9/eval.scm:
+ 619:8 10 (_ #(#(#<directory (guile-user) 7f7fe5ca8f00>)))
+In ice-9/command-line.scm:
+ 185:18 9 (_ #<input: string 7f7fe5ca2a10>)
+In unknown file:
+ 8 (eval (use-modules (gnutls)) #<directory (guile-user) 7?>)
+In ice-9/eval.scm:
+ 721:20 7 (primitive-eval (use-modules (gnutls)))
+In ice-9/psyntax.scm:
+ 1241:36 6 (expand-top-sequence ((use-modules (gnutls))) _ _ #f _ _ ?)
+ 1233:19 5 (parse _ (("placeholder" placeholder)) ((top) #(# # ?)) ?)
+ 285:10 4 (parse _ (("placeholder" placeholder)) (()) _ c&e (eval) ?)
+In ice-9/boot-9.scm:
+ 3898:20 3 (process-use-modules _)
+ 222:17 2 (map1 (((gnutls))))
+ 3899:31 1 (_ ((gnutls)))
+ 3300:6 0 (resolve-interface (gnutls) #:select _ #:hide _ #:prefix ?)
+
+ice-9/boot-9.scm:3300:6: In procedure resolve-interface:
+no code for module (gnutls)
+```
+
+My Guix version is fairly recent:
+```shell
+$ guix describe
+Génération 83 14 déc. 2020 00:28:16 (actuelle)
+ guix 41807eb
+ URL du dépôt : https://git.savannah.gnu.org/git/guix.git
+ branche: master
+ commit : 41807eb5329299b8c45cd49356a4ead01ce0d469
+```
diff --git a/src/content/pastebins/2021/04/03/naive-slugify-js.adoc b/src/content/pastebins/2021/04/03/naive-slugify-js.adoc
new file mode 100644
index 0000000..f765495
--- /dev/null
+++ b/src/content/pastebins/2021/04/03/naive-slugify-js.adoc
@@ -0,0 +1,40 @@
+---
+
+title: JavaScript naive slugify
+
+date: 2021-04-03
+
+updated_at: 2021-08-15
+
+layout: post
+
+lang: en
+
+ref: javascript-naive-slugify
+
+---
+
+```javascript
+const s = "Pézão: açaí, saci-pererê.";
+
+const slugify = s =>
+ s
+ .toLowerCase()
+ .replaceAll(":", "")
+ .replaceAll(".", "")
+ .replaceAll(",", "")
+ .replaceAll("-", "")
+ .replaceAll("á", "a")
+ .replaceAll("ã", "a")
+ .replaceAll("à", "a")
+ .replaceAll("é", "e")
+ .replaceAll("ê", "e")
+ .replaceAll("í", "i")
+ .replaceAll("ó", "o")
+ .replaceAll("ô", "o")
+ .replaceAll("ú", "u")
+ .replaceAll("ü", "u")
+ .replaceAll("ç", "c");
+
+console.log(slugify(s));
+```
diff --git a/src/content/pastebins/2021/06/08/reading-session-pt1.adoc b/src/content/pastebins/2021/06/08/reading-session-pt1.adoc
new file mode 100644
index 0000000..b97ef08
--- /dev/null
+++ b/src/content/pastebins/2021/06/08/reading-session-pt1.adoc
@@ -0,0 +1,77 @@
+---
+
+title: Debit Reading Session - SICP solutions pt.1
+
+date: 2021-06-08
+
+layout: post
+
+lang: en
+
+ref: debit-reading-session-sicp-solutions-pt-1
+
+---
+
+```scheme
+;; 1.41
+(define (double f)
+ (lambda (x)
+ (f (f x))))
+
+
+:;; 1.42
+(define (compose f g)
+ (lambda (x)
+ (f (g x))))
+
+
+;;; 1.43
+(define (repeated f n)
+ (if (= 1 n)
+ identity
+ (comp (repeated f (dec n)))))
+
+
+;;; 2.27
+(define (map-tree node-fn leaf-fn tree)
+ (cond
+ ((null? tree) tree)
+ ((not (pair? tree)) (leaf-fn tree))
+ (else
+ (node-fn
+ (cons (map-tree node-fn leaf-fn (car tree))
+ (map-tree node-fn leaf-fn (cdr tree)))))))
+
+(define (map-nodes f tree)
+ (map-tree f identity tree))
+
+(define (deep-reverse x)
+ (map-nodes reverse x))
+
+
+;;; 2.28
+(define (flatten tree)
+ (define (rec acc t)
+ (cond
+ ((null? t) acc)
+ ((not (pair? t)) (cons t acc))
+ (else
+ (rec (rec (cdr t) acc)
+ (car t)))))
+ (rec nil tree))
+
+
+;;; 2.30
+(define (square-tree tree)
+ (map-leaves square tree))
+
+
+;;; 2.31
+(define square-tree map-leaves) ; ha!
+
+
+;;; 2.32
+TODO
+```
+
+FYI: I just typed those in, I didn't test them yet.
diff --git a/src/content/pastebins/2021/06/22/curl-wget.adoc b/src/content/pastebins/2021/06/22/curl-wget.adoc
new file mode 100644
index 0000000..1030c7b
--- /dev/null
+++ b/src/content/pastebins/2021/06/22/curl-wget.adoc
@@ -0,0 +1,102 @@
+---
+
+title: "cloc: curl and wget"
+
+date: 2021-06-22
+
+layout: post
+
+lang: en
+
+ref: cloc-curl-and-wget
+
+---
+
+`curl`:
+
+```shell
+$ pushd `mktemp -d`
+/tmp/tmp.AZkwvk7azD ~/
+$ git clone git://github.com/curl/curl .
+Clonage dans '.'...
+remote: Enumerating objects: 167029, done.
+remote: Counting objects: 100% (925/925), done.
+remote: Compressing objects: 100% (372/372), done.
+remote: Total 167029 (delta 590), reused 818 (delta 548), pack-reused 166104
+Réception d'objets: 100% (167029/167029), 75.63 Mio | 9.33 Mio/s, fait.
+Résolution des deltas: 100% (131415/131415), fait.
+$ cloc .
+ 3386 text files.
+ 3342 unique files.
+ 2084 files ignored.
+
+github.com/AlDanial/cloc v 1.90 T=1.34 s (973.7 files/s, 260104.4 lines/s)
+------------------------------------------------------------------------------------
+Language files blank comment code
+------------------------------------------------------------------------------------
+C 535 25645 36361 135318
+XML 23 21 20 45997
+m4 29 1526 1976 16972
+Perl 56 2611 4010 15411
+C/C++ Header 223 4178 10109 13794
+Markdown 53 2784 0 7038
+Visual Studio Solution 27 0 21 5049
+D 242 398 0 3549
+CMake 34 754 1288 3056
+DOS Batch 7 293 370 1554
+YAML 18 115 171 1493
+make 21 296 660 1440
+Bourne Shell 22 326 633 1136
+Pascal 2 228 0 634
+Python 4 196 221 628
+Visual Basic Script 1 30 60 341
+C++ 3 58 69 169
+Gencat NLS 1 2 0 130
+TNSDL 1 3 0 113
+Windows Resource File 2 17 47 110
+Bourne Again Shell 1 17 44 97
+Protocol Buffers 1 2 0 28
+diff 1 0 0 11
+Lisp 1 1 23 7
+TOML 1 0 0 3
+------------------------------------------------------------------------------------
+SUM: 1309 39501 56083 254078
+------------------------------------------------------------------------------------
+```
+
+`wget`:
+
+```shell
+$ pushd `mktemp -d`
+/tmp/tmp.NX0udlJMiz ~/
+$ git clone git://git.savannah.gnu.org/wget.git .
+Clonage dans '.'...
+remote: Counting objects: 52248, done.
+remote: Compressing objects: 100% (18430/18430), done.
+remote: Total 52248 (delta 23879), reused 52248 (delta 23879)
+Réception d'objets: 100% (52248/52248), 13.11 Mio | 6.18 Mio/s, fait.
+Résolution des deltas: 100% (23879/23879), fait.
+$ cloc .
+ 12210 text files.
+ 11629 unique files.
+ 11876 files ignored.
+
+github.com/AlDanial/cloc v 1.90 T=1.26 s (270.4 files/s, 61357.4 lines/s)
+--------------------------------------------------------------------------------
+Language files blank comment code
+--------------------------------------------------------------------------------
+C 53 6596 8955 34084
+Perl 106 1832 870 7415
+Python 105 1481 2374 5318
+C/C++ Header 43 704 1153 1486
+Bourne Shell 11 308 311 1278
+m4 3 172 183 940
+make 9 136 172 522
+YAML 3 27 13 515
+Bourne Again Shell 6 78 89 274
+Markdown 2 37 0 113
+lex 1 29 65 73
+--------------------------------------------------------------------------------
+SUM: 342 11400 14185 52018
+--------------------------------------------------------------------------------
+```
diff --git a/src/content/pastebins/2021/08/11/h1-spacing.adoc b/src/content/pastebins/2021/08/11/h1-spacing.adoc
new file mode 100644
index 0000000..9a00ece
--- /dev/null
+++ b/src/content/pastebins/2021/08/11/h1-spacing.adoc
@@ -0,0 +1,96 @@
+---
+
+title: Spaces around h1 tags
+
+date: 2021-08-11
+
+updated_at: 2021-08-15
+
+layout: post
+
+lang: en
+
+ref: spaces-around-h1-tags
+
+---
+
+*EDIT*: Apparently, the behaviour below is consistent between Firefox and
+Chromium for links, but not for `<h1>`.
+My conclusion is that the `<h1>` behaviour is a Firefox quirk, but the `<a>` is
+expected.
+
+---
+
+The HTML below has selectable extra spaces after `<h1>` tags:
+
+```html
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <title>Spaces around h1 tags</title>
+ </head>
+ <body>
+ <main>
+ <h1>
+ With spaces around when selecting this heading
+ </h1>
+ <h1>Without spaces around</h1>
+ <p>
+ Is this expected behaviour?
+ </p>
+ </main>
+ </body>
+</html>
+```
+
+The rendered output is:
+
+<h1>
+ With spaces around when selecting this heading
+</h1>
+<h1>Without spaces around</h1>
+<p>
+ Is this expected behaviour?
+</p>
+
+---
+
+The same with links:
+
+```html
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1" />
+ <title>Spaces after a tags</title>
+ </head>
+ <body>
+ <main>
+ <p>
+ <a href="#">
+ With extra underlined space
+ </a>
+ </p>
+ <p>
+ <a href="#">Without extra underlined space</a>
+ </p>
+ </main>
+ </body>
+</html>
+```
+
+The rendered output is:
+
+<p>
+ <a href="#">
+ With extra underlined space
+ </a>
+ after the link.
+</p>
+<p>
+ <a href="#">Without extra underlined space</a>
+ after the link.
+</p>
diff --git a/src/content/pastebins/2021/09/02/sicp-3-19.adoc b/src/content/pastebins/2021/09/02/sicp-3-19.adoc
new file mode 100644
index 0000000..75ee346
--- /dev/null
+++ b/src/content/pastebins/2021/09/02/sicp-3-19.adoc
@@ -0,0 +1,42 @@
+---
+
+title: SICP exercise 3.19
+
+date: 2021-09-02
+
+layout: post
+
+lang: en
+
+ref: sicp-exercise-3-19
+
+---
+
+```scheme
+(define (cycle? l)
+ (define (rec l x)
+ (cond
+ ((null? x) false)
+ ((eq? l x) true)
+ (true (rec l (cdr x)))))
+ (rec l (cdr l)))
+```
+
+Sample interactive session:
+
+```scheme
+scheme@(guile-user)> (define true #t)
+scheme@(guile-user)> (define false #f)
+scheme@(guile-user)>
+(define (cycle? l)
+ (define (rec l x)
+ (cond
+ ((null? x) false)
+ ((eq? l x) true)
+ (true (rec l (cdr x)))))
+ (rec l (cdr l)))
+scheme@(guile-user)> (cycle? '(1 2 3))
+$9 = #f
+scheme@(guile-user)> (cycle? (make-cycle '(1 2 3)))
+$10 = #t
+```
diff --git a/src/content/pastebins/2021/09/03/sicp-persistent-queue.adoc b/src/content/pastebins/2021/09/03/sicp-persistent-queue.adoc
new file mode 100644
index 0000000..8cf7ea2
--- /dev/null
+++ b/src/content/pastebins/2021/09/03/sicp-persistent-queue.adoc
@@ -0,0 +1,85 @@
+---
+
+title: SICP persistent amortized O(1) queue
+
+date: 2021-09-03
+
+layout: post
+
+lang: en
+
+ref: sicp-persistent-amortized-o1-queue
+
+---
+
+```scheme
+(define (queue)
+ (cons '()
+ '()))
+
+(define (enqueue x q)
+ (cons (car q)
+ (cons x (cdr q))))
+
+(define (flush q)
+ (cons (reverse (cdr q))
+ '()))
+
+(define (dequeue q)
+ (if (null? (car q))
+ (dequeue (flush q))
+ (cons (caar q)
+ (cons (cdar q)
+ (cdr q)))))
+
+(define (empty? q)
+ (and (null? (car q))
+ (null? (cdr q))))
+
+(define (peek q)
+ (car (dequeue q)))
+
+(define (print-queue q)
+ (define (rec l leading-space?)
+ (when (not (null? l))
+ (when leading-space?
+ (display " "))
+ (display (car l))
+ (rec (cdr l) #t)))
+
+ (display "#q(")
+ (rec (car q) false)
+ (rec (reverse (cdr q)) (not (null? (car q))))
+ (display ")")
+ (newline))
+```
+
+Sample interactive session:
+```scheme
+scheme@(guile-user)> (define true #t)
+scheme@(guile-user)> (define false #f)
+scheme@(guile-user)> (define q (queue))
+scheme@(guile-user)> (print-queue q)
+#q()
+scheme@(guile-user)> (print-queue (enqueue 'a q))
+#q(a)
+scheme@(guile-user)> (print-queue q)
+#q()
+scheme@(guile-user)> (set! q (enqueue 'a q))
+scheme@(guile-user)> (print-queue q)
+#q(a)
+scheme@(guile-user)> (set! q (enqueue 'e (enqueue 'd (enqueue 'c (enqueue 'b q)))))
+scheme@(guile-user)> (print-queue q)
+#q(e d c b a)
+scheme@(guile-user)> (peek q)
+$28 = a
+scheme@(guile-user)> (define ret (dequeue q))
+scheme@(guile-user)> (define value (car ret))
+scheme@(guile-user)> (set! q (cdr ret))
+scheme@(guile-user)> value
+$29 = a
+scheme@(guile-user)> (print-queue q)
+#q(b c d e)
+scheme@(guile-user)> (print-queue (cdr (dequeue (cdr (dequeue (enqueue 'g (enqueue 'f q)))))))
+#q(d e f g)
+```
diff --git a/src/content/pastebins/2022/07/14/git-cleanup.adoc b/src/content/pastebins/2022/07/14/git-cleanup.adoc
new file mode 100644
index 0000000..52cd17f
--- /dev/null
+++ b/src/content/pastebins/2022/07/14/git-cleanup.adoc
@@ -0,0 +1,70 @@
+---
+
+title: git-cleanup command
+
+date: 2022-07-14
+
+layout: post
+
+lang: en
+
+ref: git-cleanup-command
+
+---
+
+```
+#!/bin/sh
+set -eu
+
+usage() {
+ cat <<-'EOF'
+ Usage:
+ git cleanup
+ git cleanup -h
+ EOF
+}
+
+help() {
+ cat <<-'EOF'
+
+ Options:
+ -h, --help show this message
+ EOF
+}
+
+for flag in "$@"; do
+ case "$flag" in
+ --)
+ break
+ ;;
+ --help)
+ usage
+ help
+ exit
+ ;;
+ *)
+ ;;
+ esac
+done
+
+while getopts 'h' flag; do
+ case "$flag" in
+ h)
+ usage
+ help
+ exit
+ ;;
+ *)
+ usage >&2
+ exit 2
+ ;;
+ esac
+done
+shift $((OPTIND - 1))
+
+
+
+git branch --merged |
+ grep -v -e '^\*' -e '^. main$' |
+ xargs git branch -d
+```
diff --git a/src/content/pastebins/2023/07/22/funcallable-amop.adoc b/src/content/pastebins/2023/07/22/funcallable-amop.adoc
new file mode 100644
index 0000000..37c79fe
--- /dev/null
+++ b/src/content/pastebins/2023/07/22/funcallable-amop.adoc
@@ -0,0 +1,43 @@
+---
+
+title: Funcallable AMOP
+
+date: 2023-07-22
+
+layout: post
+
+lang: en
+
+ref: funcallable-amop
+
+---
+
+
+Using `macrolet` to allow a `funcallable-standard-class` to be invoked without
+using `funcall` directly, and let the macroexpansion do that instead:
+
+```
+#!/usr/bin/env li
+
+(asdf:load-system :closer-mop)
+
+(defclass constructor ()
+ ((name :initarg :name :accessor constructor-name))
+ (:metaclass closer-mop:funcallable-standard-class))
+
+(defmethod initialize-instance :after ((c constructor) &key)
+ (with-slots (name) c
+ (closer-mop:set-funcallable-instance-function
+ c
+ (lambda (x)
+ (format t "~s: ~s - ~s~%" name :funcalled x)))))
+
+(let ((c (make-instance 'constructor :name "the-name")))
+ (funcall c 1))
+
+(let ((c (make-instance 'constructor :name "the-name")))
+ (macrolet ((c (&body body)
+ `(funcall c ,@body)))
+ (funcall c 2)
+ (c 3)))
+```
diff --git a/src/content/pastebins/index.adoc b/src/content/pastebins/index.adoc
new file mode 100644
index 0000000..433a2c1
--- /dev/null
+++ b/src/content/pastebins/index.adoc
@@ -0,0 +1 @@
+= Pastebins
diff --git a/src/content/podcasts/2020/12/19/test-entry.adoc b/src/content/podcasts/2020/12/19/test-entry.adoc
new file mode 100644
index 0000000..3ec8811
--- /dev/null
+++ b/src/content/podcasts/2020/12/19/test-entry.adoc
@@ -0,0 +1,103 @@
+= A test entry
+
+audio: true
+
+
+After.
+
+A link to [home][home].
+
+Another link to home: https://euandre.org
+
+[home]: https://euandre.org
+
+A code block:
+
+```shell
+$ l
+total 372K
+drwxr-xr-x 23 andreh users 4,0K déc. 19 10:44 ./
+drwxr-xr-x 30 andreh users 4,0K déc. 14 17:28 ../
+-rw-r--r-- 1 andreh users 565 déc. 15 17:24 about.md
+-rw-r--r-- 1 andreh users 330 déc. 19 10:04 aja.md
+-rw-r--r-- 1 andreh users 125 déc. 19 10:04 aja-par-categorie.md
+-rw-r--r-- 1 andreh users 496 déc. 19 10:04 a-propos.md
+drwxr-xr-x 2 andreh users 4,0K déc. 16 04:36 _articles/
+-rw-r--r-- 1 andreh users 136 déc. 15 17:04 articles-by-category.md
+-rw-r--r-- 1 andreh users 139 déc. 19 10:04 articles-par-categorie.md
+-rw-r--r-- 1 andreh users 137 déc. 19 10:04 artigos-por-categoria.md
+-rw-r--r-- 1 andreh users 140 déc. 19 10:04 artikoloj-lau-kategorio.md
+-rw-r--r-- 1 andreh users 508 oct. 23 09:35 .build.yml
+-rw-r--r-- 1 andreh users 9,0K déc. 19 10:09 _config.yml
+-rw-r--r-- 1 andreh users 34K déc. 18 22:48 COPYING
+-rw-r--r-- 1 andreh users 2,4K déc. 19 09:28 default.nix
+-rw-r--r-- 1 andreh users 41 déc. 4 13:43 description
+-rw-r--r-- 1 andreh users 134 déc. 19 10:04 diapositives.md
+-rw-r--r-- 1 andreh users 139 déc. 19 10:04 diapositives-par-categorie.md
+drwxr-xr-x 2 andreh users 4,0K nov. 15 20:01 en/
+drwxr-xr-x 2 andreh users 4,0K nov. 15 20:08 eo/
+-rw-r--r-- 1 andreh users 159 déc. 19 10:04 episodes-de-podcast-par-categorie.md
+-rw-r--r-- 1 andreh users 159 déc. 19 10:04 episodios-do-podcast-por-categoria.md
+-rw-r--r-- 1 andreh users 130 déc. 19 10:04 eslaides.md
+-rw-r--r-- 1 andreh users 134 déc. 19 10:04 eslaides-por-categoria.md
+-rw-r--r-- 1 andreh users 1,2K nov. 15 20:01 favicon.ico
+drwxr-xr-x 2 andreh users 4,0K nov. 15 20:08 fr/
+-rw-r--r-- 1 andreh users 54 déc. 6 13:57 Gemfile
+-rw-r--r-- 1 andreh users 1,6K déc. 6 13:57 Gemfile.lock
+drwxr-xr-x 11 andreh users 4,0K déc. 19 10:44 .git/
+-rw-r--r-- 1 andreh users 75 oct. 16 07:05 .gitignore
+-rw-r--r-- 1 andreh users 100 oct. 14 06:42 .gitmodules
+-rw-r--r-- 1 andreh users 317 déc. 19 10:04 hea.md
+-rw-r--r-- 1 andreh users 124 déc. 19 10:04 hea-por-categoria.md
+-rw-r--r-- 1 andreh users 125 déc. 19 10:04 hml-lau-kategorio.md
+-rw-r--r-- 1 andreh users 309 déc. 19 10:04 hml.md
+-rw-r--r-- 1 andreh users 27 nov. 14 13:51 .ignore
+drwxr-xr-x 2 andreh users 4,0K déc. 19 10:44 _includes/
+-rw-r--r-- 1 andreh users 123 nov. 15 20:01 index.md
+drwxr-xr-x 3 andreh users 4,0K déc. 19 03:02 .jekyll-cache/
+drwxr-xr-x 2 andreh users 4,0K déc. 19 09:29 _layouts/
+drwxr-xr-x 5 andreh users 4,0K nov. 15 20:07 locale/
+-rw-r--r-- 1 andreh users 136 déc. 19 10:04 lumbildoj-lau-kategorio.md
+-rw-r--r-- 1 andreh users 131 déc. 19 10:04 lumbildoj.md
+drwxr-xr-x 2 andreh users 4,0K nov. 4 17:26 nix/
+drwxr-xr-x 2 andreh users 4,0K déc. 19 03:20 _pastebins/
+-rw-r--r-- 1 andreh users 139 déc. 15 17:08 pastebins-by-category.md
+-rw-r--r-- 1 andreh users 140 nov. 15 20:01 pastebins.en.md
+-rw-r--r-- 1 andreh users 140 déc. 19 10:04 pastebins.eo.md
+-rw-r--r-- 1 andreh users 140 déc. 19 10:04 pastebins.fr.md
+-rw-r--r-- 1 andreh users 142 déc. 19 10:04 pastebins-lau-kategorio.md
+-rw-r--r-- 1 andreh users 142 déc. 19 10:04 pastebins-par-categorie.md
+-rw-r--r-- 1 andreh users 141 déc. 19 10:04 pastebins-por-categoria.md
+-rw-r--r-- 1 andreh users 140 déc. 19 10:04 pastebins.pt.md
+drwxr-xr-x 2 andreh users 4,0K déc. 19 09:04 _plugins/
+-rw-r--r-- 1 andreh users 134 déc. 19 09:29 podcast.en.md
+-rw-r--r-- 1 andreh users 152 déc. 19 09:29 podcast-episodes-by-category.md
+-rw-r--r-- 1 andreh users 134 déc. 19 10:04 podcast.fr.md
+-rw-r--r-- 1 andreh users 134 déc. 19 10:04 podcast.pt.md
+drwxr-xr-x 2 andreh users 4,0K déc. 19 10:45 _podcasts/
+-rw-r--r-- 1 andreh users 151 déc. 19 10:04 podkastajoj-lau-kategorio.md
+-rw-r--r-- 1 andreh users 135 déc. 19 10:04 podkasto.md
+-rw-r--r-- 1 andreh users 471 déc. 19 10:04 pri.md
+drwxr-xr-x 2 andreh users 4,0K nov. 15 20:08 pt/
+-rw-r--r-- 1 andreh users 3,8K nov. 15 20:01 public-key.txt
+-rw-r--r-- 1 andreh users 58 oct. 23 09:54 README
+drwxr-xr-x 3 andreh users 4,0K déc. 19 09:00 resources/
+lrwxrwxrwx 1 andreh users 54 déc. 19 10:44 result -> /nix/store/czw8d9gcim51k76ixcgmdpi9kpvysc9d-publish.sh/
+drwxr-xr-x 3 andreh users 4,0K déc. 19 10:35 scripts/
+-rw-r--r-- 1 andreh users 29 juil. 23 18:37 shell.nix
+drwxr-xr-x 17 andreh users 4,0K déc. 19 10:35 _site/
+-rw-r--r-- 1 andreh users 3,9K déc. 19 10:29 site.json
+-rw-r--r-- 1 andreh users 1,9K nov. 15 20:01 sitemap.xml
+drwxr-xr-x 2 andreh users 4,0K déc. 16 04:36 _slides/
+-rw-r--r-- 1 andreh users 130 déc. 15 17:08 slides-by-category.md
+-rw-r--r-- 1 andreh users 25 nov. 15 20:01 slides.css
+-rw-r--r-- 1 andreh users 128 nov. 15 20:01 slides.md
+-rw-r--r-- 1 andreh users 471 déc. 19 10:04 sobre.md
+drwxr-xr-x 2 andreh users 4,0K déc. 19 09:04 static/
+-rw-r--r-- 1 andreh users 2,4K déc. 15 16:06 styles.css
+-rwxr-xr-x 1 andreh users 265 nov. 6 10:16 tests.sh
+-rw-r--r-- 1 andreh users 122 déc. 15 17:41 til-by-category.md
+-rw-r--r-- 1 andreh users 265 nov. 15 20:01 til.md
+drwxr-xr-x 2 andreh users 4,0K déc. 16 04:03 _tils/
+drwxr-xr-x 3 andreh users 4,0K oct. 10 09:20 vendor/
+```
diff --git a/src/content/podcasts/2020/12/19/test-entry.flac b/src/content/podcasts/2020/12/19/test-entry.flac
new file mode 100644
index 0000000..786ab59
--- /dev/null
+++ b/src/content/podcasts/2020/12/19/test-entry.flac
Binary files differ
diff --git a/src/content/podcasts/index.adoc b/src/content/podcasts/index.adoc
new file mode 100644
index 0000000..1f17da7
--- /dev/null
+++ b/src/content/podcasts/index.adoc
@@ -0,0 +1 @@
+= Podcasts
diff --git a/src/content/pt/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md b/src/content/pt/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md
new file mode 100644
index 0000000..2e7fc32
--- /dev/null
+++ b/src/content/pt/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md
@@ -0,0 +1,45 @@
+---
+
+title: Nome de arquivo com timestamp simplificado
+
+date: 2020-08-12
+
+updated_at: 2020-11-04
+
+layout: post
+
+lang: pt
+
+ref: simple-filename-timestamp
+
+eu_categories: shell
+
+---
+
+Quando vou escrever um post no Jekyll ou criar um arquivo de log com a data no
+nome, eu normalmente engasgo para achar um jeito direto de fazer isso. Há uma
+solução simples: `date -I`.
+
+```shell
+./meu-programa.sh > meu-programa.$(date -I).log
+cp template-de-post.md _posts/$(date -I)-slug-do-post.md
+```
+
+Usar essa ferramenta padrão do GNU/Linux permite que você simplesmente escreva
+`touch $(date -I).md` para criar um arquivo `2020-08-12.md`.
+
+Eu sempre tinha que parar para reler o `man date` ou buscar na internet de novo
+e de novo como fazer isso, e depois de sempre chegar no mesmo resultado ficou
+claro para mim que `date -I` quanto `date -Is` (`s` de segundos) são as
+respostas que eu estou procurando 95% do tempo:
+
+```shell
+# dentro do meu-programa.sh
+echo "Programa começou em $(date -Is)"
+# saída é:
+# Programa começou em 2020-08-12T09:15:16-03:00
+```
+
+Ambos os formatos de data são hierárquicos, com intervalos de tempo maior à
+esquerda. Isso significa que você pode facilmente ordená-los (e até usar TAB
+para completar) sem esforço ou ferramenta extra.
diff --git a/src/content/screencasts/2021/02/07/autoqemu.adoc b/src/content/screencasts/2021/02/07/autoqemu.adoc
new file mode 100644
index 0000000..e295a71
--- /dev/null
+++ b/src/content/screencasts/2021/02/07/autoqemu.adoc
@@ -0,0 +1,42 @@
+= AutoQEMU - automate installation and SSH setup of ISO OS images
+
+video: true
+
+After reading begriffs "[Tips for stable and portable software]", the
+"Begriffs Buildfarm?" section caught my attention, as this is something I would
+be interested in.
+
+After emailing the author, a [public thread] began on the subject.
+
+As we discussed how it could be done, I decided to experiment with the idea of
+automating the setup of virtual environments with QEMU.
+
+This screencast is a simple demo of automating the installation of
+Alpine Linux 3.12.3 standard x86_64 with AutoQEMU[^AutoQEMU], which is nothing
+more than POSIX sh, [expect] scripts and Makefiles glued together.
+
+[^AutoQEMU]: The solution was a little too brittle to scale, and some
+distributions proved to be particularly problematic. I've [archived] my
+progress if you're interested in what I've done, and maybe wish to continue.
+
+As of this writing, I just worked on it for 2~3 days, so everything is still
+pretty ad-hoc.
+
+The commands from the screencast were[^script-command]:
+
+[^script-command]: Only now, writing again what I ran on the screencast I thought that I should have tried something like [script](https://www.man7.org/linux/man-pages/man1/script.1.html). Maybe next time (thanks [klaatu](https://gnuworldorder.info/) for the tip!).
+
+```shell
+pushd `mktemp -d`
+git clone https://euandre.org/git/autoqemu .
+make
+make install PREFIX=$HOME/.local
+autoqemu ssh alpine
+```
+
+It assumes that `$HOME/.local/bin` is in `$PATH`.
+
+[Tips for stable and portable software]: https://begriffs.com/posts/2020-08-31-portable-stable-software.html
+[public thread]: https://talk.begriffs.com/pipermail/friends/2021-February/001263.html
+[archived]: https://euandre.org/static/attachments/autoqemu.tar.gz
+[expect]: https://core.tcl-lang.org/expect/index
diff --git a/src/content/screencasts/2021/02/07/autoqemu.tar.gz b/src/content/screencasts/2021/02/07/autoqemu.tar.gz
new file mode 100644
index 0000000..3022f14
--- /dev/null
+++ b/src/content/screencasts/2021/02/07/autoqemu.tar.gz
Binary files differ
diff --git a/src/content/screencasts/2021/02/07/autoqemu.webm b/src/content/screencasts/2021/02/07/autoqemu.webm
new file mode 100644
index 0000000..f553efb
--- /dev/null
+++ b/src/content/screencasts/2021/02/07/autoqemu.webm
Binary files differ
diff --git a/src/content/screencasts/index.adoc b/src/content/screencasts/index.adoc
new file mode 100644
index 0000000..083adac
--- /dev/null
+++ b/src/content/screencasts/index.adoc
@@ -0,0 +1 @@
+= Screencasts
diff --git a/src/content/slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides b/src/content/slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides
new file mode 100644
index 0000000..22770e6
--- /dev/null
+++ b/src/content/slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides
@@ -0,0 +1,343 @@
+---
+
+title: Rollout, feature flag, experiment, operational toggle
+
+date: 2020-10-19
+
+layout: slides
+
+lang: en
+
+ref: rollout-feature-flag-experiment-operational-toggle
+
+article: _articles/2020-10-19-feature-flags-differences-between-backend-frontend-and-mobile.md
+
+---
+
+# Rollout, feature flag, experiment, operational toggle
+Different use cases for **backend**, **frontend** and **mobile**
+
+---
+
+"Feature flags" tend to come up when talking about **continuous deployment**
+
+???
+
+I'm using "quotes" because I'm mixing up different meanings of "rollout"
+
+---
+
+# CI
+continuous integration
+
+# CD
+continuous delivery
+
+# CD
+**continuous deployment**
+
+???
+
+Background: build vocabulary, why are feature flags related to CD
+
+CI solves: manual integration of long-lived branches
+
+CD solves: automation of deployment process
+
+CD solves: releases as frequent as possible
+
+That's where the "GoCD" name comes from
+
+---
+
+# Types:
+1. rollout
+2. feature flag
+3. experiment
+4. operational toggle
+
+---
+
+# rollout
+## For *rolling out* a new version of software
+
+**Short-lived** using **percentages**
+
+- a [new deployment of k8s][k8s]
+- new [APK released to the Play Store][apk]
+
+[k8s]: https://kubernetes.io/docs/concepts/workloads/controllers/deployment/#creating-a-deployment
+[apk]: https://support.google.com/googleplay/android-developer/answer/6346149?hl=en
+
+???
+
+Relevant as long as the new code is deployed
+
+---
+
+# feature flag
+## For turning a feature *on* or *off*
+
+**Medium-lived** using **allow list**, **A/B test**, **percentage**,
+**app version**, *etc*.
+
+- `:new-chargeback-flow`
+- `:new-debit-card-activation-screen`
+
+???
+
+Relevant as long as the new code is being developed
+
+---
+
+# experiment
+## For analyzing behaviour
+
+**Medium-lived** using **allow list** and **A/B test**
+
+- `:debit-withdrawal-test`
+
+---
+
+# operational toggle
+## For disabling features in `#crash`-like situations
+
+**Long-lived** using **percentage**
+
+- `:bank-barcode-payment`
+- `:savings-bank-barcode-query-provider`
+
+???
+
+Lives for as long as the code is in production.
+
+It feels like a system-level circuit breaker.
+
+---
+
+We now know about the types
+
+## But they have different relevance for **backend**, **frontend** and **mobile**
+
+---
+
+# backend
+
+1. **rollout**: k8s blue/green, canary and ~`common-rollout`~ `common-xp`
+2. **feature flag**: ~`common-rollout`~ `common-xp` and datasets
+3. **experiment**: `common-xp`
+4. **operational toggle**: ~`common-rollout`~ `common-xp`
+
+???
+
+This is a bit why common-rollout isn't called *common-feature-flag*: it was
+initially designed with backend usage of mostly *rollouts* in mind, and just a
+bit *feature flags*.
+
+Avoid using configuration for doing operational toggles: it is less dynamic, so
+it defeats the purpose.
+
+---
+
+# frontend
+
+1. **rollout**: CDN and page refreshes
+2. **feature flag**: percentages and maybe IPs (no `:customer/id` on the website)
+3. **experiment**: via dynamic backend control
+4. **operational toggle**: via dynamic backend control
+
+---
+
+# mobile
+
+1. **rollout**: app stores
+2. **feature flag**: via dynamic backend control
+3. **experiment**: via dynamic backend control
+4. **operational toggle**: via dynamic backend control
+
+---
+
+Key differentiator is
+## How much **control** we have over the **environment**
+
+---
+
+## **backend**
+
+# Full control
+🎉
+
+???
+
+Can edit, update and even delete rollouts as desired.
+
+Mix and match at will!
+
+---
+
+## **frontend**
+
+# Partial control
+
+When choose when to make a new version available
+
+???
+
+We can control when a new version is available, partially when someone will
+upgrade it.
+
+But it is easy to fallback to "reload the page and try again".
+
+---
+
+## **mobile**
+
+# Very limited control
+
+- app stores can restrict updates (worse for iOS)
+- customers still have to download new versions
+
+---
+
+# Costs
+
+- more complex code
+- compatibility with old app versions
+- nesting is exponential
+
+---
+
+# Benefits
+
+- dynamicity
+
+---
+
+## Weighting costs × benefits
+
+The less control we have, the more we value dynamicity
+
+---
+
+## Weighting costs × benefits
+
+- backend: sometimes worth the cost
+- frontend: almost always worth cost
+- mobile: **always** worth cost
+
+---
+
+# Best practices
+
+---
+
+## Dynamic content > feature flag
+
+Always true for **mobile**, almost always for **frontend**
+
+---
+
+## Use `:include-list` for named groups
+
+Always true for **backend**, **frontend** and **mobile**
+
+{% raw %}
+```clojure [2-3]
+{:rules
+ #{{:type :include-list
+ :content {:filename "debit-team-members.txt"}}}}
+```
+{% endraw %}
+
+---
+
+## Always use `:app-version`
+
+only for **mobile**
+
+{% raw %}
+```clojure [2]
+{:rules
+ #{{:type :app-version
+ :content {:min-version #{{:platform :android
+ :code 1000000}
+ {:platform :ios
+ :code 2000000}}}}}}
+```
+{% endraw %}
+
+---
+
+## Extend ~`common-rollout`~ `common-xp` if required
+
+That's how `:include-list`, `:app-version`, *etc.* were born
+
+---
+
+## Beware of many nested feature flags
+
+True for **backend**, **frontend** and **mobile**
+
+???
+
+Exponential growth of combinations
+
+---
+
+## Don't delete app-facing feature flags
+
+True for **mobile**
+
+???
+
+This could break old app versions, only do this intentionally
+
+We don't have (yet) a strategy for dealing with LTS of the app, and we just say:
+"we'll support every app version out there".
+
+---
+
+## Include a feature flag on the whiteboarding phase
+
+---
+
+## Include deleting/retiring the feature flag at the end
+
+---
+
+## Avoid renaming a feature flag
+
+Use `:app-version` with `:min-version` instead
+
+---
+
+# And most importantly...
+
+---
+
+# ***Always*** rely on a feature flag on the app
+
+Never do a hot fix, avoid expedited releases at all costs
+
+???
+
+The app is where we have less control, so the feature flag is how we get some of
+that control back.
+
+This doesn't mean you'll need 1 feature flag per PR
+
+There's not such thing as:
+"This is such a small thing, it doesn't need a feature flag"
+
+You should ask yourself:
+"It this crashes the app, am I OK with waiting for the next release train?"
+
+---
+
+## Thank you!
+
+References:
+
+1. "[Feature Toggles (aka Feature Flags)](https://martinfowler.com/articles/feature-toggles.html)", by Pete Hodgson
+1. "[Continuous integration vs. continuous delivery vs. continuous deployment](https://www.atlassian.com/continuous-delivery/principles/continuous-integration-vs-delivery-vs-deployment)", by Sten Pittet
+1. [Accelerate](https://itrevolution.com/book/accelerate/), by N. Forsgren, J. Humble and G. Kim
diff --git a/src/content/slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides b/src/content/slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides
new file mode 100644
index 0000000..33fc239
--- /dev/null
+++ b/src/content/slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides
@@ -0,0 +1,266 @@
+---
+
+title: 'On "local-first": beyond the CRDT silver bullet'
+
+date: 2020-11-14
+
+layout: slides
+
+lang: en
+
+ref: on-local-first-beyond-the-crdt-silver-bullet
+
+article: _articles/2020-11-14-local-first-software-you-own-your-data-in-spite-of-the-cloud-article-review.md
+
+---
+
+# On local-first
+
+Beyond the CRDT silver bullet
+
+---
+
+# Part 1
+
+Exposition
+
+---
+
+## "cloud apps" vs "old-fashioned apps"
+
+---
+
+## Target
+
+- documents
+- files
+- personal data repositories
+
+Not: banking services, e-commerce, social networking, ride-sharing, *etc*.
+
+---
+
+## 7 Ideals for local-first software
+
+---
+
+### 1 - No Spinners: Your Work at Your Fingertips
+
+---
+
+### 2 - Your Work Is Not Trapped on One Device
+
+---
+
+### 3 - The Network Is Optional
+
+---
+
+### 4 - Seamless Collaboration with Your Colleagues
+
+---
+
+### 5 - The Long Now
+
+---
+
+### 6 - Security and Privacy by Default
+
+---
+
+### 7 - You Retain Ultimate Ownership and Control
+
+---
+
+## Towards a Better Future
+
+CRDTs (Conflict-free Replicated Data Types) as a Foundational Technology
+
+---
+
+### Use case
+
+```
+# in node A and node B
+s = "Hello, World"
+
+# in node A
+s = "Hello, Alice"
+
+# in node B
+s = "Hello, Bob"
+```
+
+How to reconcile those?
+- `Hello, ABloibce`
+- `Hello, AliceBob`
+- `Hello, BobAlice`
+- `Hello, Alice`
+- `Hello, Bob`
+
+---
+
+Existing CRDTs differ:
+- performance
+- storage
+- compression
+- metadata overhead
+
+---
+
+Hint towards the "automerge" CRDT
+
+---
+
+*show comparison table, page 9*
+
+---
+
+# Part 2
+
+Critique
+
+---
+
+### Software license
+
+> In our opinion, maintaining control and ownership of data does not mean that
+> the software must necessarily be open source.
+
+---
+
+#### Example 1 - intentional restriction
+
+```bash
+#!/bin/sh
+
+TODAY=$(date +%s)
+LICENSE_EXPIRATION=$(date -d 2020-10-27 +%s)
+
+if [ $TODAY -ge $LICENSE_EXPIRATION ]; then
+ echo 'License expired!'
+ exit 1
+fi
+
+echo $((2 + 2))
+```
+
+```bash
+# today
+$ ./useful-adder.sh
+4
+# tomorrow
+$ ./useful-adder.sh
+License expired!
+```
+
+---
+
+#### Example 2 - unintentional restriction
+
+```bash
+# today
+$ useful-program
+# ...useful output...
+
+# tomorrow, with more data
+$ useful-program
+ERROR: Panic! Stack overflow!
+```
+---
+
+### local-first **requires** free software
+
+Otherwise "The Long Now" (ideal nº5) is lost
+
+---
+
+### Denial of existing solutions
+
+> In principle it is possible to collaborate without a repository service,
+> e.g. by sending patch files by email, but the majority of Git users rely
+> on GitHub.
+
+Solution: either GitHub+CRDTs or `git` **`send-email`**
+
+---
+
+### Plain text formats
+
+> Git is highly optimized for code and similar line-based text file
+
+It even pulls software to the plain text direction, e.g.:
+- delivery-templates
+- `common-core.protocols.config`
+
+Why not exploit that more?
+
+---
+
+### Ditching of web applications
+
+> The architecture of web apps remains fundamentally server-centric
+
+Disagree. Contrast [PouchDB][pouchdb] with Android [Instant Apps][instant-apps]
+
+[pouchdb]: https://pouchdb.com/
+[instant-apps]: https://developer.android.com/topic/google-play-instant
+
+???
+
+Talk on dynamic content
+
+---
+
+### Costs are underrated
+
+- storage
+- backups
+- maintenance
+
+Example: blog vs vlog
+
+---
+
+### Real-time collaboration a bit overrated
+
+It is only possible on the presence of reliable, medium-quality network
+connection
+
+> X also works when inside an elevator, subway or plane!
+
+<!-- 🤦‍ -->
+
+---
+
+### On CRDTs and developer experience
+
+> For an app developer, how does the use of a CRDT-based data layer compare to
+> existing storage layers like a SQL database, a filesystem, or CoreData? Is a
+> distributed system harder to write software for?
+
+Yes.
+
+See "[A Note on Distributed Computing][note-dist-comp]"
+
+[note-dist-comp]: https://web.archive.org/web/20130116163535/http://labs.oracle.com/techrep/1994/smli_tr-94-29.pdf
+
+---
+
+## Conclusion
+
+Why this is a "paper I love": it took offline-first and ran with it.
+
+But a pinch of CRDT won't make the world local-first.
+
+The tricky part is the end of the sentence: "**in spite of the Cloud**".
+
+---
+
+## Thank you!
+
+References:
+
+1. "[Local-First Software: You Own Your Data, in spite of the Cloud](https://martin.kleppmann.com/papers/local-first.pdf)", by M. Kleppmann, A. Wiggins, P. Van Hardenberg and M. F. McGranaghan
+1. [The Morning Paper](https://blog.acolyer.org/2019/11/20/local-first-software/) article
+1. "[A Note on Distributed Computing](https://web.archive.org/web/20130116163535/http://labs.oracle.com/techrep/1994/smli_tr-94-29.pdf)", by J. Waldo, G. Wyant, A. Wollrath and S Kendall
diff --git a/src/content/tils/2020/08/12/filename-timestamp.adoc b/src/content/tils/2020/08/12/filename-timestamp.adoc
new file mode 100644
index 0000000..7495fc9
--- /dev/null
+++ b/src/content/tils/2020/08/12/filename-timestamp.adoc
@@ -0,0 +1,44 @@
+---
+
+title: Simple filename timestamp
+
+date: 2020-08-12
+
+updated_at:
+
+layout: post
+
+lang: en
+
+ref: simple-filename-timestamp
+
+eu_categories: shell
+
+---
+
+When writing Jekyll posts or creating log files with dates on them, I usually
+struggle with finding a direct way of accomplishing that. There's a simple
+solution: `date -I`.
+
+```shell
+./my-program.sh > my-program.$(date -I).log
+cp post-template.md _posts/$(date -I)-post-slug.md
+```
+
+Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to readily
+create a `2020-08-12.md` file.
+
+I always had to read `man date` or search the web over and over, and after doing
+this repeatedly it became clear that both `date -I` and `date -Is` (`s` here
+stands for seconds) are the thing that I'm looking for 95% of the time:
+
+```shell
+# inside my-program.sh
+echo "Program started at $(date -Is)"
+# output is:
+# Program started at 2020-08-12T09:04:58-03:00
+```
+
+Both date formats are hierarchical, having the bigger time intervals to the
+left. This means that you can easily sort them (and even tab-complete them) with
+no extra effort or tool required.
diff --git a/src/content/tils/2020/08/13/code-jekyll.adoc b/src/content/tils/2020/08/13/code-jekyll.adoc
new file mode 100644
index 0000000..6566928
--- /dev/null
+++ b/src/content/tils/2020/08/13/code-jekyll.adoc
@@ -0,0 +1,155 @@
+---
+title: Anchor headers and code lines in Jekyll
+date: 2020-08-13
+layout: post
+lang: en
+ref: anchor-headers-and-code-lines-in-jekyll
+---
+The default Jekyll toolbox ([Jekyll][0], [kramdown][1] and [rouge][2]) doesn't
+provide with a configuration option to add anchors to headers and code blocks.
+
+[0]: https://jekyllrb.com/
+[1]: https://kramdown.gettalong.org/
+[2]: http://rouge.jneen.net/
+
+The best way I found of doing this is by creating a simple Jekyll plugin, more
+specifically, a [Jekyll hook][3]. These allow you to jump in to the Jekyll build
+and add a processing stage before of after Jekyll performs something.
+
+[3]: https://jekyllrb.com/docs/plugins/hooks/
+
+All you have to do is add the code to `_plugins/my-jekyll-plugin-code.rb`, and
+Jekyll knows to pick it up and call your code on the appropriate time.
+
+## Anchor on headers
+
+Since I wanted to add anchors to headers in all documents, this Jekyll hook
+works on `:documents` after they have been transformed into HTML, the
+`:post_render` phase:
+
+```ruby
+Jekyll::Hooks.register :documents, :post_render do |doc|
+ if doc.output_ext == ".html"
+ doc.output =
+ doc.output.gsub(
+ /<h([1-6])(.*?)id="([\w-]+)"(.*?)>(.*?)<\/h[1-6]>/,
+ '<a href="#\3"><h\1\2id="\3"\4>\5</h\1></a>'
+ )
+ end
+end
+```
+
+I've derived my implementations from two "official"[^official] hooks,
+[jemoji][4] and [jekyll-mentions][5].
+
+[4]: https://github.com/jekyll/jemoji
+[5]: https://github.com/jekyll/jekyll-mentions
+[^official]: I don't know how official they are, I just assumed it because they
+ live in the same organization inside GitHub that Jekyll does.
+
+All I did was to wrap the header tag inside an `<a>`, and set the `href` of that
+`<a>` to the existing id of the header. Before the hook the HTML looks like:
+
+```html
+...some unmodified text...
+<h2 id="my-header">
+ My header
+</h2>
+...more unmodified text...
+```
+
+And after the hook should turn that into:
+
+```html
+...some unmodified text...
+<a href="#my-header">
+ <h2 id="my-header">
+ My header
+ </h2>
+</a>
+...more unmodified text...
+```
+
+The used regexp tries to match only h1-h6 tags, and keep the rest of the HTML
+attributes untouched, since this isn't a general HTML parser, but the generated HTML
+is somewhat under your control. Use at your own risk because
+[you shouldn't parse HTML with regexps][6]. Also I used this strategy in my
+environment, where no other plugins are installed. I haven't considered how this
+approach may conflict with other Jekyll plugins.
+
+[6]: https://stackoverflow.com/questions/1732348/regex-match-open-tags-except-xhtml-self-contained-tags/1732454#1732454
+
+In the new anchor tag you can add your custom CSS class to style it as you wish.
+
+## Anchor on code blocks
+
+Adding anchors to code blocks needs a little bit of extra work, because line
+numbers themselves don't have preexisting ids, so we need to generate them
+without duplications between multiple code blocks in the same page.
+
+Similarly, this Jekyll hook also works on `:documents` in the `:post_render`
+phase:
+
+```ruby
+PREFIX = '<pre class="lineno">'
+POSTFIX = '</pre>'
+Jekyll::Hooks.register :documents, :post_render do |doc|
+ if doc.output_ext == ".html"
+ code_block_counter = 1
+ doc.output = doc.output.gsub(/<pre class="lineno">[\n0-9]+<\/pre>/) do |match|
+ line_numbers = match
+ .gsub(/<pre class="lineno">([\n0-9]+)<\/pre>/, '\1')
+ .split("\n")
+
+ anchored_line_numbers_array = line_numbers.map do |n|
+ id = "B#{code_block_counter}-L#{n}"
+ "<a id=\"#{id}\" href=\"##{id}\">#{n}</a>"
+ end
+ code_block_counter += 1
+
+ PREFIX + anchored_line_numbers_array.join("\n") + POSTFIX
+ end
+ end
+end
+```
+
+This solution assumes the default Jekyll toolbox with code line numbers turned
+on in `_config.yml`:
+
+```yaml
+kramdown:
+ syntax_highlighter_opts:
+ span:
+ line_numbers: false
+ block:
+ line_numbers: true
+```
+
+The anchors go from B1-L1 to BN-LN, using the `code_block_counter` to track
+which code block we're in and don't duplicate anchor ids. Before the hook the
+HTML looks like:
+
+```html
+...some unmodified text...
+<pre class="lineno">1
+2
+3
+4
+5
+</pre>
+...more unmodified text...
+```
+
+And after the hook should turn that into:
+
+```html
+...some unmodified text...
+<pre class="lineno"><a id="B1-L1" href="#B1-L1">1</a>
+<a id="B1-L2" href="#B1-L2">2</a>
+<a id="B1-L3" href="#B1-L3">3</a>
+<a id="B1-L4" href="#B1-L4">4</a>
+<a id="B1-L5" href="#B1-L5">5</a></pre>
+...more unmodified text...
+```
+
+Happy writing :)
diff --git a/src/content/tils/2020/08/14/browse-git.adoc b/src/content/tils/2020/08/14/browse-git.adoc
new file mode 100644
index 0000000..d06f0c1
--- /dev/null
+++ b/src/content/tils/2020/08/14/browse-git.adoc
@@ -0,0 +1,84 @@
+---
+
+title: Browse a git repository at a specific commit
+
+date: 2020-08-14
+
+layout: post
+
+lang: en
+
+ref: browse-a-git-repository-at-a-specific-commit
+
+eu_categories: git
+
+---
+
+I commonly use tools like `git log` together with `git show` when inspecting
+past changes in a repository:
+
+```shell
+git log
+# search for a the commit I'm looking for
+git show <my-commit>
+# see the diff for the commit
+```
+
+But I also wanted to not only be able to look at the diff of a specific commit,
+but to browse the whole repository at that specific commit.
+
+I used to accomplish it the "brute force" way: clone the whole repository in
+another folder and checkout the commit there:
+
+```shell
+git clone <original-repo> /tmp/tmp-repo-clone
+cd /tmp-repo-clone
+git checkout <my-commit>
+```
+
+But git itself allows we to specific the directory of the checkout by using the
+`--work-tree` global git flag. This is what `man git` says about it:
+
+```txt
+--work-tree=<path>
+ Set the path to the working tree. It can be an absolute path or a path relative to the current working
+ directory. This can also be controlled by setting the GIT_WORK_TREE environment variable and the
+ core.worktree configuration variable (see core.worktree in git-config(1) for a more detailed
+ discussion).
+```
+
+So it allows us to set the desired path of the working tree. So if we want to
+copy the contents of the current working tree into `copy/`:
+
+```shell
+mkdir copy
+git --work-tree=copy/ checkout .
+```
+
+After that `copy/` will contain a replica of the code in HEAD. But to checkout a
+specific, we need some extra parameters:
+
+```shell
+git --work-tree=<dir> checkout <my-commit> -- .
+```
+
+There's an extra `-- .` at the end, which initially looks like we're sending
+Morse signals to git, but we're actually saying to `git-checkout` which
+sub directory of `<my-commit>` we want to look at. Which means we can do
+something like:
+
+```shell
+git --work-tree=<dir> checkout <my-commit> -- src/
+```
+
+And with that `<dir>` will only contain what was inside `src/` at `<commit>`.
+
+After any of those checkouts, you have to `git reset .` to reset your current
+staging area back to what it was before the checkout.
+
+
+## References
+
+1. [GIT: Checkout to a specific folder][0] (StackOverflow)
+
+[0]: https://stackoverflow.com/a/16493707
diff --git a/src/content/tils/2020/08/16/git-search.adoc b/src/content/tils/2020/08/16/git-search.adoc
new file mode 100644
index 0000000..f3ae6f0
--- /dev/null
+++ b/src/content/tils/2020/08/16/git-search.adoc
@@ -0,0 +1,59 @@
+---
+
+title: Search in git
+
+date: 2020-08-16
+
+layout: post
+
+lang: en
+
+ref: search-in-git
+
+eu_categories: git
+
+---
+
+Here's a useful trio to know about to help you search things in git:
+
+1. `git show <commit>`
+2. `git log --grep='<regexp>'`
+3. `git grep '<regexp>' [commit]`
+
+## 1. `git show <commit>`
+
+Show a specific commit and it's diff:
+
+```shell
+git show
+# shows the latest commit
+git show <commit>
+# shows an specific <commit>
+git show v1.2
+# shows commit tagged with v1.2
+```
+
+## 2. `git log --grep='<regexp>'`
+
+Search through the commit messages:
+
+```shell
+git log --grep='refactor'
+```
+
+## 3. `git grep '<regexp>' [commit]`
+
+Search content in git history:
+
+```shell
+git grep 'TODO'
+# search the repository for the "TODO" string
+git grep 'TODO' $(git rev-list --all)
+# search the whole history for "TODO" string
+```
+
+And if you find an occurrence of the regexp in a specific commit and you want to
+browse the repository in that point in time, you can
+[use git checkout for that][0].
+
+[0]: {% link _tils/2020-08-14-browse-a-git-repository-at-a-specific-commit.md %}
diff --git a/src/content/tils/2020/08/28/grep-online.adoc b/src/content/tils/2020/08/28/grep-online.adoc
new file mode 100644
index 0000000..8b3b63f
--- /dev/null
+++ b/src/content/tils/2020/08/28/grep-online.adoc
@@ -0,0 +1,139 @@
+---
+
+title: Grep online repositories
+
+date: 2020-08-28
+
+layout: post
+
+lang: en
+
+ref: grep-online-repositories
+
+eu_categories: git
+
+---
+
+I often find interesting source code repositories online that I want to grep for
+some pattern but I can't, because either:
+
+- the repository is on [cgit][cgit] or a similar code repository that doesn't
+ allow search in files, or;
+- the search function is really bad, and doesn't allow me to use regular
+  expressions for searching patterns in the code.
+
+[cgit]: https://git.zx2c4.com/cgit/
+
+Here's a simple script that allows you to overcome that problem easily:
+
+```shell
+#!/usr/bin/env bash
+set -eu
+
+end="\033[0m"
+red="\033[0;31m"
+red() { echo -e "${red}${1}${end}"; }
+
+usage() {
+ red "Missing argument $1.\n"
+ cat <<EOF
+Usage:
+ $0 <REGEX_PATTERN> <REPOSITORY_URL>
+
+ Arguments:
+ REGEX_PATTERN Regular expression that "git grep" can search
+ REPOSITORY_URL URL address that "git clone" can download the repository from
+
+Examples:
+ Searching "make get-git" in cgit repository:
+ git search 'make get-git' https://git.zx2c4.com/cgit/
+ git search 'make get-git' https://git.zx2c4.com/cgit/ -- \$(git rev-list --all)
+EOF
+ exit 2
+}
+
+
+REGEX_PATTERN="${1:-}"
+REPOSITORY_URL="${2:-}"
+[[ -z "${REGEX_PATTERN}" ]] && usage 'REGEX_PATTERN'
+[[ -z "${REPOSITORY_URL}" ]] && usage 'REPOSITORY_URL'
+
+mkdir -p /tmp/git-search
+DIRNAME="$(echo "${REPOSITORY_URL%/}" | rev | cut -d/ -f1 | rev)"
+if [[ ! -d "/tmp/git-search/${DIRNAME}" ]]; then
+ git clone "${REPOSITORY_URL}" "/tmp/git-search/${DIRNAME}"
+fi
+pushd "/tmp/git-search/${DIRNAME}"
+
+shift 3 || shift 2 # when "--" is missing
+git grep "${REGEX_PATTERN}" "${@}"
+```
+
+It is a wrapper around `git grep` that downloads the repository when missing.
+Save in a file called `git-search`, make the file executable and add it to your
+path.
+
+Overview:
+
+- *lines 1~2*:
+
+ Bash shebang and the `set -eu` options to exit on error or undefined
+ variables.
+
+- *lines 4~30*:
+
+  Usage text to be printed when providing fewer arguments than expected.
+
+- *line 33*:
+
+ Extract the repository name from the URL, removing trailing slashes.
+
+- *lines 34~37*:
+
+ Download the repository when missing and go to the folder.
+
+- *line 39*:
+
+ Make the variable `$@` contain the rest of the unused arguments.
+
+- *line 40*:
+
+ Perform `git grep`, forwarding the remaining arguments from `$@`.
+
+Example output:
+```shell
+$ git search 'make get-git' https://git.zx2c4.com/cgit/
+Clonage dans '/tmp/git-search/cgit'...
+remote: Enumerating objects: 542, done.
+remote: Counting objects: 100% (542/542), done.
+remote: Compressing objects: 100% (101/101), done.
+warning: object 51dd1eff1edc663674df9ab85d2786a40f7ae3a5: gitmodulesParse: could not parse gitmodules blob
+remote: Total 7063 (delta 496), reused 446 (delta 441), pack-reused 6521
+Réception d'objets: 100% (7063/7063), 8.69 Mio | 5.39 Mio/s, fait.
+Résolution des deltas: 100% (5047/5047), fait.
+/tmp/git-search/cgit ~/dev/libre/songbooks/docs
+README: $ make get-git
+
+$ git search 'make get-git' https://git.zx2c4.com/cgit/
+/tmp/git-search/cgit ~/dev/libre/songbooks/docs
+README: $ make get-git
+```
+
+Subsequent greps on the same repository are faster because no download is needed.
+
+When no argument is provided, it prints the usage text:
+```shell
+$ git search
+Missing argument REGEX_PATTERN.
+
+Usage:
+ /home/andreh/dev/libre/dotfiles/scripts/ad-hoc/git-search <REGEX_PATTERN> <REPOSITORY_URL>
+
+ Arguments:
+ REGEX_PATTERN Regular expression that "git grep" can search
+ REPOSITORY_URL URL address that "git clone" can download the repository from
+
+Examples:
+ Searching "make get-git" in cgit repository:
+ git search 'make get-git' https://git.zx2c4.com/cgit/
+ git search 'make get-git' https://git.zx2c4.com/cgit/ -- $(git rev-list --all)
+```
diff --git a/src/content/tils/2020/09/04/email-cli-fun-profit.adoc b/src/content/tils/2020/09/04/email-cli-fun-profit.adoc
new file mode 100644
index 0000000..320f3ab
--- /dev/null
+++ b/src/content/tils/2020/09/04/email-cli-fun-profit.adoc
@@ -0,0 +1,80 @@
+---
+title: Send emails using the command line for fun and profit!
+date: 2020-09-04
+layout: post
+lang: en
+ref: send-emails-using-the-command-line-for-fun-and-profit
+---
+Here are a few reasons why:
+
+1. send yourself and other people notification of cronjobs, scripts runs, CI
+ jobs, *etc.*
+
+2. leverage the POSIX pipe `|`, and pipe emails away!
+
+3. because you can.
+
+Reason 3 is the fun part, reasons 1 and 2 are the profit part.
+
+First [install and configure SSMTP][ssmtp] for using, say, Gmail as the email
+server:
+
+```shell
+# file /etc/ssmtp/ssmtp.conf
+FromLineOverride=YES
+MailHub=smtp.gmail.com:587
+UseSTARTTLS=YES
+UseTLS=YES
+rewriteDomain=gmail.com
+root=username@gmail.com
+AuthUser=username
+AuthPass=password
+```
+
+Now install [GNU Mailutils][gnu-mailutils] (`sudo apt-get install mailutils` or the
+equivalent on your OS), and send yourself your first email:
+
+```shell
+echo body | mail -aFrom:email@example.com email@example.com -s subject
+```
+
+And that's about it, you've got mail. Here are some more places where it might
+be applicable:
+
+```shell
+# report a backup cronjob, attaching logs
+set -e
+
+finish() {
+ status=$?
+ if [[ $status = 0 ]]; then
+ STATUS="SUCCESS (status $status)"
+ else
+ STATUS="FAILURE (status $status)"
+ fi
+
+ mail user@example.com \
+ -s "Backup job report on $(hostname): ${STATUS}" \
+ --content-type 'text/plain; charset=utf-8' \
+ -A"$LOG_FILE" <<< 'The log report is in the attachment.'
+}
+trap finish EXIT
+
+do-long-backup-cmd-here
+```
+
+```
+# share the output of a cmd with someone
+some-program | mail someone@example.com -s "The weird logs that I was talking about"
+```
+
+...and so on.
+
+You may consider adding an `alias mail='mail -aFrom:email@example.com'` so you
+don't keep re-entering the "From: " part.
+
+Send yourself some emails to see it working!
+
+[ssmtp]: https://wiki.archlinux.org/index.php/SSMTP
+[gnu-mailutils]: https://mailutils.org/
+[forwarding-wiki-section]: https://wiki.archlinux.org/index.php/SSMTP#Forward_to_a_Gmail_mail_server
diff --git a/src/content/tils/2020/09/05/oldschool-pr.adoc b/src/content/tils/2020/09/05/oldschool-pr.adoc
new file mode 100644
index 0000000..5b4e445
--- /dev/null
+++ b/src/content/tils/2020/09/05/oldschool-pr.adoc
@@ -0,0 +1,118 @@
+---
+
+title: Pull requests with Git, the old school way
+
+date: 2020-09-05
+
+layout: post
+
+lang: en
+
+ref: pull-requests-with-git-the-old-school-way
+
+eu_categories: git
+
+---
+It might be news to you, as it was to me, that "pull requests" that you can
+create on a Git hosting provider's web UI[^pr-webui] like
+GitLab/Bitbucket/GitHub actually come from Git itself: `git request-pull`.
+
+[^pr-webui]: And maybe even using the Git hosting provider's API from the
+ command line!
+
+At the very core, they accomplish the same thing: both the original and the web
+UI ones are ways for you to request the project maintainers to pull in your
+changes from your fork. It's like saying: "hi there, I did some changes on my
+clone of the repository, what do you think about bringing those in?".
+
+The only difference is that you're working with only Git itself, so you're not
+tied to any Git hosting provider: you can send pull requests across them
+transparently! You could even use your own [cgit][cgit] installation. No need to
+be locked in by any of them, putting the "D" back in "DVCS": it's a
+**distributed** version control system.
+
+[cgit]: https://git.zx2c4.com/cgit/
+
+## `git request-pull` introduction
+
+Here's the raw output of a `git request-pull`:
+
+```shell
+$ git request-pull HEAD public-origin
+The following changes since commit 302c9f2f035c0360acd4e13142428c100a10d43f:
+
+ db post: Add link to email exchange (2020-09-03 21:23:55 -0300)
+
+are available in the Git repository at:
+
+ https://euandre.org/git/euandre.org/
+
+for you to fetch changes up to 524c646cdac4153e54f2163e280176adbc4873fa:
+
+ db post: better pinpoint sqlite unsuitability (2020-09-03 22:08:56 -0300)
+
+----------------------------------------------------------------
+EuAndreh (1):
+ db post: better pinpoint sqlite unsuitability
+
+ _posts/2020-08-31-the-database-i-wish-i-had.md | 12 ++++++------
+ 1 file changed, 6 insertions(+), 6 deletions(-)
+```
+
+That very first line is saying: "create me a pull request with only a single
+commit, defined by `HEAD`, and use the URL defined by `public-origin`".
+
+Here's a pitfall: you may try using your `origin` remote at first where I put
+`public-origin`, but that is many times pointing to something like
+`git@example.com`, or `git.example.com:repo.git` (check that with
+`git remote -v | grep origin`). On both cases those are addresses available for
+interaction via SSH, and it would be better if your pull requests used an
+address ready for public consumption.
+
+A simple solution for that is for you to add the `public-origin` alias as the
+HTTPS alternative to the SSH version:
+
+```shell
+$ git remote add public-origin https://example.com/user/repo
+```
+
+Every Git hosting provider exposes repositories via HTTPS.
+
+Experiment it yourself, and get acquainted with the CLI.
+
+## Delivering decentralized pull requests
+
+Now that you can create the content of a pull request, you can just
+[deliver it][cli-email] to the interested parties' email:
+
+```shell
+# send a PR with your last commit to the author's email
+git request-pull HEAD public-origin | mail author@example.com -s "PR: Add thing to repo"
+
+# send a PR with your last 5 commits to the project's mailing
+# list, including the patch
+git request-pull -p HEAD~5 public-origin | \
+ mail list@example.com -s "PR: Add another thing to repo"
+
+# send every commit that is new in "other-branch"
+git request-pull master public-origin other-branch | \
+  mail list@example.com -s 'PR: All commits from my "other-branch"'
+```
+
+[cli-email]: {% link _tils/2020-09-04-send-emails-using-the-command-line-for-fun-and-profit.md %}
+
+## Conclusion
+
+In practice, I've never used or seen anyone use pull requests this way:
+everybody is just [sending patches via email][decentralized-git].
+
+If you stop to think about this model, the problem of "Git hosting providers
+becoming too centralized" is a non-issue, and "Git federation" proposals are
+less attractive than they may sound initially.
+
+Using Git this way is not scary or so weird as the first impression may suggest.
+It is actually how Git was designed to be used.
+
+Check `git help request-pull` for more info.
+
+[decentralized-git]: https://drewdevault.com/2018/07/23/Git-is-already-distributed.html
diff --git a/src/content/tils/2020/10/11/search-git-history.adoc b/src/content/tils/2020/10/11/search-git-history.adoc
new file mode 100644
index 0000000..251abe9
--- /dev/null
+++ b/src/content/tils/2020/10/11/search-git-history.adoc
@@ -0,0 +1,41 @@
+---
+
+title: Search changes to a filename pattern in Git history
+
+date: 2020-10-11
+
+layout: post
+
+lang: en
+
+ref: search-changes-to-a-filename-pattern-in-git-history
+
+eu_categories: git
+
+---
+
+This is [yet][git-til-1] [another][git-til-2] ["search in Git"][git-til-3] TIL
+entry. You could say that Git has an unintuitive CLI, or that it is very
+powerful.
+
+I wanted to search for an old file that I knew was in the
+history of the repository, but was deleted some time ago. So I didn't really
+remember the name, only bits of it.
+
+I immediately went to the list of TILs I had written on searching in Git, but
+it wasn't readily obvious how to do it, so here it goes:
+
+```shell
+git log -- *pattern*
+```
+
+You could add globs before the pattern to match things on any directory, and add
+our `-p` friend to promptly see the diffs:
+
+```shell
+git log -p -- **/*pattern*
+```
+
+[git-til-1]: {% link _tils/2020-08-14-browse-a-git-repository-at-a-specific-commit.md %}
+[git-til-2]: {% link _tils/2020-08-16-search-in-git.md %}
+[git-til-3]: {% link _tils/2020-08-28-grep-online-repositories.md %}
diff --git a/src/content/tils/2020/11/08/find-broken-symlink.adoc b/src/content/tils/2020/11/08/find-broken-symlink.adoc
new file mode 100644
index 0000000..bc97fc6
--- /dev/null
+++ b/src/content/tils/2020/11/08/find-broken-symlink.adoc
@@ -0,0 +1,36 @@
+---
+
+title: Find broken symlinks with "find"
+
+date: 2020-11-08
+
+layout: post
+
+lang: en
+
+ref: find-broken-symlinks-with-find
+
+eu_categories: shell
+
+---
+
+The `find` command knows how to show broken symlinks:
+
+```shell
+find . -xtype l
+```
+
+This was useful to me when combined with [Git Annex][git-annex]. Its
+[`wanted`][git-annex-wanted] option allows you to have a "sparse" checkout of
+the content, and save space by not having to copy every annexed file locally:
+
+```shell
+git annex wanted . 'exclude=Music/* and exclude=Videos/*'
+```
+
+You can `find` any broken symlinks outside those directories by querying with
+Git Annex itself, but `find . -xtype l` works on other places too, where broken
+symlinks might be a problem.
+
+[git-annex]: https://git-annex.branchable.com/
+[git-annex-wanted]: https://git-annex.branchable.com/git-annex-wanted/
diff --git a/src/content/tils/2020/11/12/diy-nix-bash-ci.adoc b/src/content/tils/2020/11/12/diy-nix-bash-ci.adoc
new file mode 100644
index 0000000..3336482
--- /dev/null
+++ b/src/content/tils/2020/11/12/diy-nix-bash-ci.adoc
@@ -0,0 +1,74 @@
+---
+
+title: DIY bare bones CI server with Bash and Nix
+
+date: 2020-11-12 3
+
+layout: post
+
+lang: en
+
+ref: diy-bare-bones-ci-server-with-bash-and-nix
+
+eu_categories: ci
+
+---
+
+With a server with Nix installed (no need for NixOS), you can leverage its build
+isolation for running CI jobs by adding a [post-receive][post-receive] Git hook
+to the server.
+
+In most of my project I like to keep a `test` attribute which runs the test with
+`nix-build -A test`. This way, a post-receive hook could look like:
+
+```shell
+#!/usr/bin/env bash
+set -Eeuo pipefail
+set -x
+
+LOGS_DIR="/data/static/ci-logs/libedn"
+mkdir -p "$LOGS_DIR"
+LOGFILE="${LOGS_DIR}/$(date -Is)-$(git rev-parse master).log"
+exec &> >(tee -a "${LOGFILE}")
+
+unset GIT_DIR
+CLONE="$(mktemp -d)"
+git clone . "$CLONE"
+pushd "$CLONE"
+
+finish() {
+ printf "\n\n>>> exit status was %s\n" "$?"
+}
+trap finish EXIT
+
+nix-build -A test
+```
+
+We initially (lines #5 to #8) create a log file, named after *when* the run is
+running and for *which* commit it is running for. The `exec` and `tee` combo
+allows the output of the script to go both to `stdout` *and* the log file. This
+makes the logs output show up when you do a `git push`.
+
+Lines #10 to #13 create a fresh clone of the repository and line #20 runs the
+test command.
+
+After using a similar post-receive hook for a while, I now even generate a
+simple HTML file to make the logs available ([example project][ci-logs])
+through the browser.
+
+[post-receive]: https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
+[ci-logs]: https://euandreh.xyz/remembering/ci.html
+
+## Upsides
+
+No vendor lock-in, as all you need is a server with Nix installed.
+
+And if you pin the Nixpkgs version you're using, this very simple setup yields
+extremely sandboxed runs on a very hermetic environment.
+
+## Downsides
+
+Besides the many missing shiny features of this very simplistic CI, `nix-build`
+can be very resource intensive. Specifically, it consumes too much memory. So if
+it has to download too many things, or the build closure gets too big, the
+server might very well run out of memory.
diff --git a/src/content/tils/2020/11/12/git-bisect-automation.adoc b/src/content/tils/2020/11/12/git-bisect-automation.adoc
new file mode 100644
index 0000000..9c34b2a
--- /dev/null
+++ b/src/content/tils/2020/11/12/git-bisect-automation.adoc
@@ -0,0 +1,35 @@
+---
+
+title: Git bisect automation
+
+date: 2020-11-12 2
+
+layout: post
+
+lang: en
+
+ref: git-bisect-automation
+
+eu_categories: git
+
+---
+
+It is good to have a standardized way to run builds and tests on the repository
+of a project, so that you can find when a bug was introduced by using
+`git bisect run`.
+
+I've already been in the situation when a bug was introduced and I didn't know
+how it even was occurring, and running Git bisect over hundreds of commits to
+pinpoint the failing commit was very empowering:
+
+```
+$ GOOD_COMMIT_SHA=e1fd0a817d192c5a5df72dd7422e36558fa78e46
+$ git bisect start HEAD $GOOD_COMMIT_SHA
+$ git bisect run sh -c './build.sh && ./run-failing-case.sh'
+```
+
+Git will then do a binary search between the commits, and run the commands you
+provide it with to find the failing commit.
+
+Instead of being afraid of doing a bisect, you should instead leverage it, and
+make Git help you dig through the history of the repository to find the bad code.
diff --git a/src/content/tils/2020/11/12/useful-bashvars.adoc b/src/content/tils/2020/11/12/useful-bashvars.adoc
new file mode 100644
index 0000000..33a072e
--- /dev/null
+++ b/src/content/tils/2020/11/12/useful-bashvars.adoc
@@ -0,0 +1,72 @@
+---
+
+title: Useful Bash variables
+
+date: 2020-11-12 1
+
+layout: post
+
+lang: en
+
+ref: useful-bash-variables
+
+eu_categories: shell
+
+---
+
+[GNU Bash][gnu-bash] has a few two letter variables that may be useful when
+typing on the terminal.
+
+[gnu-bash]: https://www.gnu.org/software/bash/
+
+## `!!`: the text of the last command
+
+The [`!!` variable][previous-command] refers to the previous command, and I find
+useful when following chains for symlinks:
+
+[previous-command]: https://www.gnu.org/software/bash/manual/bash.html#Event-Designators
+
+```shell
+$ which git
+/run/current-system/sw/bin/git
+$ readlink $(!!)
+readlink $(which git)
+/nix/store/5bgr1xpm4m0r72h9049jbbhagxdyrnyb-git-2.28.0/bin/git
+```
+
+It is also useful when you forget to prefix `sudo` to a command that requires
+it:
+
+```shell
+$ requires-sudo.sh
+requires-sudo.sh: Permission denied
+$ sudo !!
+sudo ./requires-sudo.sh
+# all good
+```
+
+Bash prints the command expansion before executing it, so it is better for you
+to follow along what it is doing.
+
+## `$_`: most recent parameter
+
+The [`$_` variable][recent-parameter] will give you the most recent parameter
+you provided to the previous command, which can save you typing sometimes:
+
+```shell
+# instead of...
+$ mkdir -p a/b/c/d/
+$ cd a/b/c/d/
+
+# ...you can:
+$ mkdir -p a/b/c/d/
+$ cd $_
+```
+
+[recent-parameter]: https://www.gnu.org/software/bash/manual/bash.html#Special-Parameters
+
+## Conclusion
+
+I wouldn't use those in a script, as it would make the script harder to read; I
+find them to be useful shortcuts that are handy when typing at the interactive
+terminal.
diff --git a/src/content/tils/2020/11/14/gpodder-media.adoc b/src/content/tils/2020/11/14/gpodder-media.adoc
new file mode 100644
index 0000000..a74b225
--- /dev/null
+++ b/src/content/tils/2020/11/14/gpodder-media.adoc
@@ -0,0 +1,33 @@
+---
+
+title: gPodder as a media subscription manager
+
+date: 2020-11-14
+
+layout: post
+
+lang: en
+
+ref: gpodder-as-a-media-subscription-manager
+
+---
+
+As we [re-discover][rss] the value of Atom/RSS feeds, most useful feed clients I
+know of don't support media, specifically audio and video.
+
+[gPodder][gpodder] does.
+
+It is mostly known as a desktop podcatcher. But the thing about podcasts is that
+the feed is provided through an RSS/Atom feed. So you can just use gPodder as
+your media feed client, where you have control of what you look at.
+
+Most audio and video providers I know of offer an RSS/Atom view of their content,
+so you can, say, treat any YouTube channel like a feed on its own.
+
+gPodder will then manage your feeds, watched/unwatched, queue downloads, etc.
+
+It seems obvious now, but it was a big finding for me. If it got you interested, I
+recommend you giving gPodder a try.
+
+[rss]: https://www.charlieharrington.com/unexpected-useless-and-urgent
+[gpodder]: https://gpodder.github.io/
diff --git a/src/content/tils/2020/11/30/git-notes-ci.adoc b/src/content/tils/2020/11/30/git-notes-ci.adoc
new file mode 100644
index 0000000..f8dd063
--- /dev/null
+++ b/src/content/tils/2020/11/30/git-notes-ci.adoc
@@ -0,0 +1,122 @@
+---
+
+title: Storing CI data on Git notes
+
+date: 2020-11-30
+
+layout: post
+
+lang: en
+
+ref: storing-ci-data-on-git-notes
+
+eu_categories: git,ci
+
+---
+
+Extending the bare bones CI server I've [talked about before][previous-article],
+divoplade on Freenode suggested storing CI artifacts on [Git notes][git-notes],
+such as tarballs, binaries, logs, *etc*.
+
+I've written a small script that will put log files and CI job data on Git notes,
+and make it visible on the porcelain log. It is a simple extension of the
+previous article:
+
+```shell
+#!/usr/bin/env bash
+set -Eeuo pipefail
+set -x
+
+PREFIX='/srv/ci/vps'
+mkdir -p "$PREFIX"
+read -r _ SHA _ # oldrev newrev refname
+FILENAME="$(date -Is)-$SHA.log"
+LOGFILE="$PREFIX/$FILENAME"
+exec &> >(tee -a "$LOGFILE")
+
+echo "Starting CI job at: $(date -Is)"
+
+finish() {
+ STATUS="$?"
+ printf "\n\n>>> exit status was %s\n" "$STATUS"
+ echo "Finishing CI job at: $(date -Is)"
+ popd
+ NOTE=$(cat <<EOF
+See CI logs with:
+ git notes --ref=refs/notes/ci-logs show $SHA
+ git notes --ref=refs/notes/ci-data show $SHA
+EOF
+)
+ git notes --ref=refs/notes/ci-data add -f -m "$STATUS $FILENAME"
+ git notes --ref=refs/notes/ci-logs add -f -F "$LOGFILE"
+ git notes add -f -m "$NOTE"
+ printf "\n\n>>> CI logs added as Git note."
+}
+trap finish EXIT
+
+unset GIT_DIR
+CLONE="$(mktemp -d)"
+git clone . "$CLONE"
+pushd "$CLONE"
+git config --global user.email git@euandre.org
+git config --global user.name 'EuAndreh CI'
+
+./container make check site
+./container make publish
+```
+
+The important part is in the `finish()` function:
+- #25 stores the exit status and the generated filename separated by spaces;
+- #26 adds the log file in a note using the `refs/notes/ci-logs` ref;
+- #27 it adds a note to the commit saying how to see the logs.
+
+A commit now has an attached note, and shows it whenever you look at it:
+
+```diff
+$ git show 87c57133abd8be5d7cc46afbf107f59b26066575
+commit 87c57133abd8be5d7cc46afbf107f59b26066575
+Author: EuAndreh <eu@euandre.org>
+Date: Wed Feb 24 21:58:28 2021 -0300
+
+ vps/machines.scm: Change path to cronjob files
+
+Notes:
+ See CI logs with:
+ git notes --ref=refs/notes/ci-logs show 87c57133abd8be5d7cc46afbf107f59b26066575
+ git notes --ref=refs/notes/ci-data show 87c57133abd8be5d7cc46afbf107f59b26066575
+
+diff --git a/servers/vps/machines.scm b/servers/vps/machines.scm
+index d1830ca..a4ccde7 100644
+--- a/servers/vps/machines.scm
++++ b/servers/vps/machines.scm
+@@ -262,8 +262,8 @@ pki " mail-domain " key \"" (tls-priv-for mail-domain) "\""))
+ (service mcron-service-type
+ (mcron-configuration
+ (jobs
+- (list #~(job "30 1 * * 1" "guix gc -d")
+- #~(job "30 0 * * *" "/var/lib/euandreh/backup.sh")))))
++ (list #~(job "30 1 * * 1" "/opt/bin/gc.sh")
++ #~(job "30 0 * * *" "/opt/bin/backup.sh")))))
+ (service dhcp-client-service-type)
+ #;
+ (service opensmtpd-service-type
+```
+
+Other tools such as [cgit][cgit] will also show notes on the web interface:
+<https://euandre.org/git/servers/commit?id=87c57133abd8be5d7cc46afbf107f59b26066575>.
+
+You can go even further: since cgit can serve raw blob directly, you can even
+serve such artifacts (log files, release artifacts, binaries) from cgit itself:
+
+```shell
+$ SHA="$(git notes --ref=refs/notes/ci-logs list 87c57133abd8be5d7cc46afbf107f59b26066575)"
+$ echo "https://euandre.org/git/servers/blob?id=$SHA"
+https://euandre.org/git/servers/blob?id=1707a97bae24e3864fe7943f8dda6d01c294fb5c
+```
+
+And like that you'll have cgit serving the artifacts for you:
+<https://euandre.org/git/servers/blob?id=1707a97bae24e3864fe7943f8dda6d01c294fb5c>.
+
+[previous-article]: {% link _tils/2020-11-12-diy-bare-bones-ci-server-with-bash-and-nix.md %}
+[git-notes]: https://git-scm.com/docs/git-notes
+[cgit]: https://git.zx2c4.com/cgit/
diff --git a/src/content/tils/2020/12/15/shellcheck-repo.adoc b/src/content/tils/2020/12/15/shellcheck-repo.adoc
new file mode 100644
index 0000000..71d10a3
--- /dev/null
+++ b/src/content/tils/2020/12/15/shellcheck-repo.adoc
@@ -0,0 +1,171 @@
+---
+
+title: 'Awk snippet: ShellCheck all scripts in a repository'
+
+date: 2020-12-15
+
+updated_at: 2020-12-16
+
+layout: post
+
+lang: en
+
+ref: awk-snippet-shellcheck-all-scripts-in-a-repository
+
+eu_categories: shell
+
+---
+
+Inspired by Fred Herbert's "[Awk in 20 Minutes][awk-20min]", here's a problem I
+just solved with a line of Awk: run ShellCheck in all scripts of a repository.
+
+In my repositories I usually have Bash and POSIX scripts, which I want to keep
+tidy with [ShellCheck][shellcheck]. Here's the first version of
+`assert-shellcheck.sh`:
+
+```shell
+#!/bin/sh -eux
+
+find . -type f -name '*.sh' -print0 | xargs -0 shellcheck
+```
+
+This is the type of script that I copy around to all repositories, and I want it
+to be capable of working on any repository, without requiring a list of files to
+run ShellCheck on.
+
+This first version worked fine, as all my scripts had the '.sh' ending. But I
+recently added some scripts without any extension, so `assert-shellcheck.sh`
+called for a second version. The first attempt was to try grepping the shebang
+line:
+
+```shell
+$ grep '^#!/' assert-shellcheck.sh
+#!/usr/sh
+```
+
+Good, we have a grep pattern on the first try. Let's try to find all the
+matching files:
+
+```shell
+$ find . -type f | xargs grep -l '^#!/'
+./TODOs.org
+./.git/hooks/pre-commit.sample
+./.git/hooks/pre-push.sample
+./.git/hooks/pre-merge-commit.sample
+./.git/hooks/fsmonitor-watchman.sample
+./.git/hooks/pre-applypatch.sample
+./.git/hooks/pre-push
+./.git/hooks/prepare-commit-msg.sample
+./.git/hooks/commit-msg.sample
+./.git/hooks/post-update.sample
+./.git/hooks/pre-receive.sample
+./.git/hooks/applypatch-msg.sample
+./.git/hooks/pre-rebase.sample
+./.git/hooks/update.sample
+./build-aux/with-guile-env.in
+./build-aux/test-driver
+./build-aux/missing
+./build-aux/install-sh
+./build-aux/install-sh~
+./bootstrap
+./scripts/assert-todos.sh
+./scripts/songbooks
+./scripts/compile-readme.sh
+./scripts/ci-build.sh
+./scripts/generate-tasks-and-bugs.sh
+./scripts/songbooks.in
+./scripts/with-container.sh
+./scripts/assert-shellcheck.sh
+```
+
+This approach has a problem, though: it includes files ignored by Git, such as
+`build-aux/install-sh~`, and even goes into the `.git/` directory and finds
+sample hooks in `.git/hooks/*`.
+
+To list the files that Git is tracking we'll try `git ls-files`:
+
+```shell
+$ git ls-files | xargs grep -l '^#!/'
+TODOs.org
+bootstrap
+build-aux/with-guile-env.in
+old/scripts/assert-docs-spelling.sh
+old/scripts/build-site.sh
+old/scripts/builder.bats.sh
+scripts/assert-shellcheck.sh
+scripts/assert-todos.sh
+scripts/ci-build.sh
+scripts/compile-readme.sh
+scripts/generate-tasks-and-bugs.sh
+scripts/songbooks.in
+scripts/with-container.sh
+```
+
+It looks to be almost there, but the `TODOs.org` entry shows a flaw in it: grep
+is looking for a `'^#!/'` pattern on any part of the file. In my case,
+`TODOs.org` had a snippet in the middle of the file where a line started with
+`#!/bin/sh`.
+
+So what we actually want is to match the **first** line against the pattern. We
+could loop through each file, get the first line with `head -n 1` and grep
+against that, but this is starting to look messy. I bet there is another way of
+doing it concisely...
+
+Let's try Awk. I need a way to select the line numbers to replace `head -n 1`,
+and to stop processing the file if the pattern matches. A quick search points me
+to using `FNR` for the former, and `{ nextfile }` for the latter. Let's try it:
+
+```shell
+$ git ls-files | xargs awk 'FNR>1 { nextfile } /^#!\// { print FILENAME; nextfile }'
+bootstrap
+build-aux/with-guile-env.in
+old/scripts/assert-docs-spelling.sh
+old/scripts/build-site.sh
+old/scripts/builder.bats.sh
+scripts/assert-shellcheck.sh
+scripts/assert-todos.sh
+scripts/ci-build.sh
+scripts/compile-readme.sh
+scripts/generate-tasks-and-bugs.sh
+scripts/songbooks.in
+scripts/with-container.sh
+```
+
+Great! Only `TODOs.org` is missing, but the script is much better: instead of
+matching against any part of the file that may have a shebang-like line, we only
+look for the first. Let's put it back into the `assert-shellcheck.sh` file and
+use `NUL` for separators to accommodate files with spaces in the name:
+
+```
+#!/usr/sh -eux
+
+git ls-files -z | \
+ xargs -0 awk 'FNR>1 { nextfile } /^#!\// { print FILENAME; nextfile }' | \
+ xargs shellcheck
+```
+
+This is where I've stopped, but I imagine a likely improvement: match against
+only `#!/bin/sh` and `#!/usr/bin/env bash` shebangs (the ones I use most), to
+avoid running ShellCheck on Perl files, or other shebangs.
+
+Also when reviewing the text of this article, I found that `{ nextfile }` is a
+GNU Awk extension. It would be an improvement if `assert-shellcheck.sh` relied
+on the POSIX subset of Awk for working correctly.
+
+## *Update*
+
+After publishing, I could remove `{ nextfile }` and even make the script
+simpler:
+
+```shell
+#!/usr/sh -eux
+
+git ls-files -z | \
+ xargs -0 awk 'FNR==1 && /^#!\// { print FILENAME }' | \
+ xargs shellcheck
+```
+
+Now both the shell and Awk usage are POSIX compatible.
+
+[awk-20min]: https://ferd.ca/awk-in-20-minutes.html
+[shellcheck]: https://www.shellcheck.net/
diff --git a/src/content/tils/2020/12/29/svg.adoc b/src/content/tils/2020/12/29/svg.adoc
new file mode 100644
index 0000000..54cca9a
--- /dev/null
+++ b/src/content/tils/2020/12/29/svg.adoc
@@ -0,0 +1,134 @@
+---
+
+title: SVG favicon
+
+date: 2020-12-29
+
+updated_at: 2021-01-12
+
+layout: post
+
+lang: en
+
+ref: svg-favicon
+
+---
+
+I've wanted to change this website's favicon from a plain `.ico` file to a
+proper SVG. The problem I was trying to solve was to reuse the same image on
+other places, such as avatars.
+
+Generating a PNG from the existing 16x16 icon was possible but bad: the final
+image was blurry. Converting the `.ico` to an SVG was possible, but sub-optimal:
+tools try to guess some vector paths, and the final SVG didn't match the
+original.
+
+Instead I used a tool to draw the "vector pixels" as black squares, and after
+getting the final result I manually cleaned-up the generated XML:
+
+```xml
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16">
+ <path d="M 0 8 L 1 8 L 1 9 L 0 9 L 0 8 Z" />
+ <path d="M 0 13 L 1 13 L 1 14 L 0 14 L 0 13 Z" />
+ <path d="M 1 8 L 2 8 L 2 9 L 1 9 L 1 8 Z" />
+ <path d="M 1 13 L 2 13 L 2 14 L 1 14 L 1 13 Z" />
+ <path d="M 2 8 L 3 8 L 3 9 L 2 9 L 2 8 Z" />
+ <path d="M 2 13 L 3 13 L 3 14 L 2 14 L 2 13 Z" />
+ <path d="M 3 8 L 4 8 L 4 9 L 3 9 L 3 8 Z" />
+ <path d="M 3 13 L 4 13 L 4 14 L 3 14 L 3 13 Z" />
+ <path d="M 4 7 L 5 7 L 5 8 L 4 8 L 4 7 Z" />
+ <path d="M 4 8 L 5 8 L 5 9 L 4 9 L 4 8 Z" />
+ <path d="M 4 13 L 5 13 L 5 14 L 4 14 L 4 13 Z" />
+ <path d="M 5 6 L 6 6 L 6 7 L 5 7 L 5 6 Z" />
+ <path d="M 5 7 L 6 7 L 6 8 L 5 8 L 5 7 Z" />
+ <path d="M 5 13 L 6 13 L 6 14 L 5 14 L 5 13 Z" />
+ <path d="M 6 5 L 7 5 L 7 6 L 6 6 L 6 5 Z" />
+ <path d="M 6 6 L 7 6 L 7 7 L 6 7 L 6 6 Z" />
+ <path d="M 6 14 L 7 14 L 7 15 L 6 15 L 6 14 Z" />
+ <path d="M 7 1 L 8 1 L 8 2 L 7 2 L 7 1 Z" />
+ <path d="M 7 14 L 8 14 L 8 15 L 7 15 L 7 14 Z" />
+ <path d="M 7 15 L 8 15 L 8 16 L 7 16 L 7 15 Z" />
+ <path d="M 7 2 L 8 2 L 8 3 L 7 3 L 7 2 Z" />
+ <path d="M 7 3 L 8 3 L 8 4 L 7 4 L 7 3 Z" />
+ <path d="M 7 4 L 8 4 L 8 5 L 7 5 L 7 4 Z" />
+ <path d="M 7 5 L 8 5 L 8 6 L 7 6 L 7 5 Z" />
+ <path d="M 8 1 L 9 1 L 9 2 L 8 2 L 8 1 Z" />
+ <path d="M 8 15 L 9 15 L 9 16 L 8 16 L 8 15 Z" />
+ <path d="M 9 1 L 10 1 L 10 2 L 9 2 L 9 1 Z" />
+ <path d="M 9 2 L 10 2 L 10 3 L 9 3 L 9 2 Z" />
+ <path d="M 9 6 L 10 6 L 10 7 L 9 7 L 9 6 Z" />
+ <path d="M 9 15 L 10 15 L 10 16 L 9 16 L 9 15 Z" />
+ <path d="M 10 2 L 11 2 L 11 3 L 10 3 L 10 2 Z" />
+ <path d="M 10 3 L 11 3 L 11 4 L 10 4 L 10 3 Z" />
+ <path d="M 10 4 L 11 4 L 11 5 L 10 5 L 10 4 Z" />
+ <path d="M 10 5 L 11 5 L 11 6 L 10 6 L 10 5 Z" />
+ <path d="M 10 6 L 11 6 L 11 7 L 10 7 L 10 6 Z" />
+ <path d="M 11 6 L 12 6 L 12 7 L 11 7 L 11 6 Z" />
+ <path d="M 11 8 L 12 8 L 12 9 L 11 9 L 11 8 Z" />
+ <path d="M 10 15 L 11 15 L 11 16 L 10 16 L 10 15 Z" />
+ <path d="M 11 10 L 12 10 L 12 11 L 11 11 L 11 10 Z" />
+ <path d="M 11 12 L 12 12 L 12 13 L 11 13 L 11 12 Z" />
+ <path d="M 11 14 L 12 14 L 12 15 L 11 15 L 11 14 Z" />
+ <path d="M 11 15 L 12 15 L 12 16 L 11 16 L 11 15 Z" />
+ <path d="M 12 6 L 13 6 L 13 7 L 12 7 L 12 6 Z" />
+ <path d="M 12 8 L 13 8 L 13 9 L 12 9 L 12 8 Z" />
+ <path d="M 12 10 L 13 10 L 13 11 L 12 11 L 12 10 Z" />
+ <path d="M 12 12 L 13 12 L 13 13 L 12 13 L 12 12 Z" />
+ <path d="M 12 14 L 13 14 L 13 15 L 12 15 L 12 14 Z" />
+ <path d="M 13 6 L 14 6 L 14 7 L 13 7 L 13 6 Z" />
+ <path d="M 13 8 L 14 8 L 14 9 L 13 9 L 13 8 Z" />
+ <path d="M 13 10 L 14 10 L 14 11 L 13 11 L 13 10 Z" />
+ <path d="M 13 12 L 14 12 L 14 13 L 13 13 L 13 12 Z" />
+ <path d="M 13 13 L 14 13 L 14 14 L 13 14 L 13 13 Z" />
+ <path d="M 13 14 L 14 14 L 14 15 L 13 15 L 13 14 Z" />
+ <path d="M 14 7 L 15 7 L 15 8 L 14 8 L 14 7 Z" />
+ <path d="M 14 8 L 15 8 L 15 9 L 14 9 L 14 8 Z" />
+ <path d="M 14 9 L 15 9 L 15 10 L 14 10 L 14 9 Z" />
+ <path d="M 14 10 L 15 10 L 15 11 L 14 11 L 14 10 Z" />
+ <path d="M 14 11 L 15 11 L 15 12 L 14 12 L 14 11 Z" />
+ <path d="M 14 12 L 15 12 L 15 13 L 14 13 L 14 12 Z" />
+</svg>
+```
+
+The good thing about this new favicon
+(at [`/static/lord-favicon.svg`](/static/lord-favicon.svg)) is that
+a) it is simple enough that I feel
+comfortable editing it manually and b) it is an SVG, which means I can generate
+any desired size.
+
+With the new favicon file, I now had to add to the templates' `<head>` a
+`<link>` to this icon:
+```html
+<head>
+ <meta charset="UTF-8" />
+ <link rel="icon" type="image/svg+xml" href="/static/favicon.svg">
+ ...
+```
+
+Still missing is a bitmap image for places that can't handle vector images. I
+used a Jekyll generator to create a PNG from the existing SVG:
+
+```ruby
+module Jekyll
+ class FaviconGenerator < Generator
+ safe true
+ priority :high
+
+ SIZE = 420
+
+ def generate(site)
+ svg = 'static/favicon.svg'
+ png = 'static/favicon.png'
+ unless File.exist? png then
+ puts "Missing '#{png}', generating..."
+ puts `inkscape -o #{png} -w #{SIZE} -h #{SIZE} #{svg}`
+ end
+ end
+ end
+end
+```
+
+I had to increase the priority of the generator so that it would run before
+other places that would use a `{% link /static/lord-favicon.png %}`, otherwise
+the file would be considered missing.
diff --git a/src/content/tils/2021/01/12/curl-awk-emails.adoc b/src/content/tils/2021/01/12/curl-awk-emails.adoc
new file mode 100644
index 0000000..880ddf1
--- /dev/null
+++ b/src/content/tils/2021/01/12/curl-awk-emails.adoc
@@ -0,0 +1,142 @@
+---
+
+title: 'Awk snippet: send email to multiple recipients with cURL'
+
+date: 2021-01-12
+
+layout: post
+
+lang: en
+
+ref: awk-snippet-send-email-to-multiple-recipients-with-curl
+
+---
+
+As I experiment with [Neomutt][neomutt], I wanted to keep being able to enqueue emails for sending later like my previous setup, so that I didn't rely on having an internet connection.
+
+My requirements for the `sendmail` command were:
+1. store the email in a file, and send it later.
+1. send from different addresses, using different SMTP servers;
+
+I couldn't find an MTA that could accomplish that, but I was able to quickly write a solution.
+
+The first part was the easiest: store the email in a file:
+
+```shell
+# ~/.config/mutt/muttrc:
+set sendmail=~/bin/enqueue-email.sh
+
+# ~/bin/enqueue-email.sh:
+#!/bin/sh -eu
+
+cat - > "$HOME/mbsync/my-queued-emails/$(date -Is)"
+```
+
+Now that I had the email file stored locally, I needed a program to send the email from the file, so that I could create a cronjob like:
+
+```shell
+for f in ~/mbsync/my-queued-emails/*; do
+ ~/bin/dispatch-email.sh "$f" && rm "$f"
+done
+```
+
+The `dispatch-email.sh` would have to look at the `From: ` header and decide which SMTP server to use.
+As I [found out][curl-email] that [curl][curl] supports SMTP and is able to send emails, this is what I ended up with:
+
+```shell
+#!/bin/sh -eu
+
+F="$1"
+
+rcpt="$(awk '
+ match($0, /^(To|Cc|Bcc): (.*)$/, m) {
+ split(m[2], tos, ",")
+ for (i in tos) {
+ print "--mail-rcpt " tos[i]
+ }
+ }
+' "$F")"
+
+if grep -qE '^From: .*<addr@server1\.org>$' "$F"; then
+ curl \
+ -s \
+ --url smtp://smtp.server1.org:587 \
+ --ssl-reqd \
+ --mail-from addr@server1.org \
+ $rcpt \
+ --user 'addr@server1.org:my-long-and-secure-passphrase' \
+ --upload-file "$F"
+elif grep -qE '^From: .*<addr@server2\.org>$' "$F"; then
+ curl \
+ -s \
+ --url smtp://smtp.server2.org:587 \
+ --ssl-reqd \
+ --mail-from addr@server2.org \
+ $rcpt \
+ --user 'addr@server2.org:my-long-and-secure-passphrase' \
+ --upload-file "$F"
+else
+ echo 'Bad "From: " address'
+ exit 1
+fi
+```
+
+Most of curl flags used are self-explanatory, except for `$rcpt`.
+
+curl connects to the SMTP server, but doesn't set the recipient address by looking at the message.
+My solution was to generate the curl flags, store them in `$rcpt` and use it unquoted to leverage shell word splitting.
+
+To me, the most interesting part was building the `$rcpt` flags.
+My first instinct was to try grep, but it can't print only the capture groups of a regex match.
+As I started to turn towards sed, I envisioned needing something else to loop over the sed output, and I then moved to Awk.
+
+In the short Awk snippet, 3 things were new to me: the `match(...)`, `split(...)` and `for () {}`.
+The only other function I have ever used was `gsub(...)`, but these new ones felt similar enough that I could almost guess their behaviour and arguments.
+`match(...)` stores the matches of a regex on the given array positionally, and `split(...)` stores the chunks in the given array.
+
+I even did it incrementally:
+
+```shell
+$ H='To: to@example.com, to2@example.com\nCc: cc@example.com, cc2@example.com\nBcc: bcc@example.com,bcc2@example.com\n'
+$ printf "$H" | awk '/^To: .*$/ { print $0 }'
+To: to@example.com, to2@example.com
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { print m }'
+awk: ligne de commande:1: (FILENAME=- FNR=1) fatal : tentative d'utilisation du tableau « m » dans un contexte scalaire
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { print m[0] }'
+To: to@example.com, to2@example.com
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { print m[1] }'
+to@example.com, to2@example.com
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { split(m[1], tos, " "); print tos }'
+awk: ligne de commande:1: (FILENAME=- FNR=1) fatal : tentative d'utilisation du tableau « tos » dans un contexte scalaire
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { split(m[1], tos, " "); print tos[0] }'
+
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { split(m[1], tos, " "); print tos[1] }'
+to@example.com,
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { split(m[1], tos, " "); print tos[2] }'
+to2@example.com
+$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { split(m[1], tos, " "); print tos[3] }'
+
+```
+
+(This isn't the verbatim interactive session, but a cleaned version to make it more readable.)
+
+At this point, I realized I needed a for loop over the `tos` array, and I moved the Awk snippet into the `~/bin/dispatch-email.sh`.
+I liked the final thing:
+
+```awk
+match($0, /^(To|Cc|Bcc): (.*)$/, m) {
+ split(m[2], tos, ",")
+ for (i in tos) {
+ print "--mail-rcpt " tos[i]
+ }
+}
+```
+
+As I learn more about Awk, I feel that it is too undervalued, as many people turn to Perl or other programming languages when Awk suffices.
+The advantage is pretty clear: writing programs that run on any POSIX system, without extra dependencies required.
+Note, though, that the three-argument `match(...)` used above is a GNU Awk extension, and isn't part of the POSIX subset of Awk.
+
+Coding to the standards is underrated.
+
+[neomutt]: https://neomutt.org/
+[curl-email]: https://blog.edmdesigner.com/send-email-from-linux-command-line/
+[curl]: https://curl.se/
diff --git a/src/content/tils/2021/01/17/posix-shebang.adoc b/src/content/tils/2021/01/17/posix-shebang.adoc
new file mode 100644
index 0000000..5f5b897
--- /dev/null
+++ b/src/content/tils/2021/01/17/posix-shebang.adoc
@@ -0,0 +1,55 @@
+= POSIX sh and shebangs
+
+date: 2021-01-17
+
+layout: post
+
+lang: en
+
+ref: posix-sh-and-shebangs
+
+---
+
+As I [keep moving][posix-awk-0] [towards POSIX][posix-awk-1], I'm on the process of migrating all my Bash scripts to POSIX sh.
+
+As I dropped `[[`, arrays and other Bashisms, I was left staring at the first line of every script, wondering what to do: what is the POSIX sh equivalent of `#!/usr/bin/env bash`?
+I already knew that POSIX says nothing about shebangs, and that the portable way to call a POSIX sh script is `sh script.sh`, but I didn't know what to do with that first line.
+
+What I had previously was:
+```shell
+#!/usr/bin/env bash
+set -Eeuo pipefail
+cd "$(dirname "${BASH_SOURCE[0]}")"
+```
+
+Obviously, the `$BASH_SOURCE` would be gone, and I would have to adapt some of my scripts to not rely on the script location.
+The `-E` and `-o pipefail` options were also gone, and would be replaced by nothing.
+
+I converted all of them to:
+```shell
+#!/bin/sh -eu
+```
+
+I moved the `-eu` options to the shebang line itself, striving for conciseness.
+But as I changed callers from `./script.sh` to `sh script.sh`, things started to fail.
+Some tests that should fail reported errors, but didn't return 1.
+
+My first reaction was to revert back to `./script.sh`, but the POSIX bug I caught is a strong strain, and when I went back to it, I figured that the callers were missing some flags.
+Specifically, `sh -eu script.sh`.
+
+Then it clicked: when running with `sh script.sh`, the shebang line with the sh options is ignored, as it is a comment!
+
+Which means that the shebang most friendly with POSIX is:
+
+```shell
+#!/bin/sh
+set -eu
+```
+
+1. when running via `./script.sh`, if the system has an executable at `/bin/sh`, it will be used to run the script;
+2. when running via `sh script.sh`, the sh options aren't ignored as previously.
+
+TIL.
+
+[posix-awk-0]: {% link _tils/2020-12-15-awk-snippet-shellcheck-all-scripts-in-a-repository.md %}
+[posix-awk-1]: {% link _tils/2021-01-12-awk-snippet-send-email-to-multiple-recipients-with-curl.md %}
diff --git a/src/content/tils/2021/04/24/cl-generic-precedence.adoc b/src/content/tils/2021/04/24/cl-generic-precedence.adoc
new file mode 100644
index 0000000..8051232
--- /dev/null
+++ b/src/content/tils/2021/04/24/cl-generic-precedence.adoc
@@ -0,0 +1,137 @@
+---
+
+title: Common Lisp argument precedence order parameterization of a generic function
+
+date: 2021-04-24 2
+
+layout: post
+
+lang: en
+
+ref: common-lisp-argument-precedence-order-parameterization-of-a-generic-function
+
+---
+
+When CLOS dispatches a method, it picks the most specific method definition to the argument list:
+
+```lisp
+
+* (defgeneric a-fn (x))
+#<STANDARD-GENERIC-FUNCTION A-FN (0) {5815ACB9}>
+
+* (defmethod a-fn (x) :default-method)
+#<STANDARD-METHOD A-FN (T) {581DB535}>
+
+* (defmethod a-fn ((x number)) :a-number)
+#<STANDARD-METHOD A-FN (NUMBER) {58241645}>
+
+* (defmethod a-fn ((x (eql 1))) :number-1)
+#<STANDARD-METHOD A-FN ((EQL 1)) {582A7D75}>
+
+* (a-fn nil)
+:DEFAULT-METHOD
+
+* (a-fn "1")
+:DEFAULT-METHOD
+
+* (a-fn 0)
+:A-NUMBER
+
+* (a-fn 1)
+:NUMBER-1
+```
+
+CLOS uses a similar logic when choosing the method from parent classes, when multiple ones are available:
+
+```lisp
+* (defclass class-a () ())
+
+#<STANDARD-CLASS CLASS-A {583E0B25}>
+* (defclass class-b () ())
+
+#<STANDARD-CLASS CLASS-B {583E7F6D}>
+* (defgeneric another-fn (obj))
+
+#<STANDARD-GENERIC-FUNCTION ANOTHER-FN (0) {583DA749}>
+* (defmethod another-fn ((obj class-a)) :class-a)
+; Compiling LAMBDA (.PV-CELL. .NEXT-METHOD-CALL. OBJ):
+; Compiling Top-Level Form:
+
+#<STANDARD-METHOD ANOTHER-FN (CLASS-A) {584523C5}>
+* (defmethod another-fn ((obj class-b)) :class-b)
+; Compiling LAMBDA (.PV-CELL. .NEXT-METHOD-CALL. OBJ):
+; Compiling Top-Level Form:
+
+#<STANDARD-METHOD ANOTHER-FN (CLASS-B) {584B8895}>
+```
+
+Given the above definitions, when inheriting from `class-a` and `class-b`, the order of inheritance matters:
+
+```lisp
+* (defclass class-a-coming-first (class-a class-b) ())
+#<STANDARD-CLASS CLASS-A-COMING-FIRST {584BE6AD}>
+
+* (defclass class-b-coming-first (class-b class-a) ())
+#<STANDARD-CLASS CLASS-B-COMING-FIRST {584C744D}>
+
+* (another-fn (make-instance 'class-a-coming-first))
+:CLASS-A
+
+* (another-fn (make-instance 'class-b-coming-first))
+:CLASS-B
+```
+
+Combining the order of inheritance with generic functions with multiple arguments, CLOS has to make a choice of how to pick a method given two competing definitions, and its default strategy is prioritizing from left to right:
+
+```lisp
+* (defgeneric yet-another-fn (obj1 obj2))
+#<STANDARD-GENERIC-FUNCTION YET-ANOTHER-FN (0) {584D9EC9}>
+
+* (defmethod yet-another-fn ((obj1 class-a) obj2) :first-arg-specialized)
+#<STANDARD-METHOD YET-ANOTHER-FN (CLASS-A T) {5854269D}>
+
+* (defmethod yet-another-fn (obj1 (obj2 class-b)) :second-arg-specialized)
+#<STANDARD-METHOD YET-ANOTHER-FN (T CLASS-B) {585AAAAD}>
+
+* (yet-another-fn (make-instance 'class-a) (make-instance 'class-b))
+:FIRST-ARG-SPECIALIZED
+```
+
+CLOS has to make a choice between the first and the second definition of `yet-another-fn`, but its choice is just a heuristic.
+What if we want the choice to be based on the second argument, instead of the first?
+
+For that, we use the `:argument-precedence-order` option when declaring a generic function:
+
+```lisp
+* (defgeneric yet-another-fn (obj1 obj2) (:argument-precedence-order obj2 obj1))
+#<STANDARD-GENERIC-FUNCTION YET-ANOTHER-FN (2) {584D9EC9}>
+
+* (yet-another-fn (make-instance 'class-a) (make-instance 'class-b))
+:SECOND-ARG-SPECIALIZED
+```
+
+I liked that the `:argument-precedence-order` option exists.
+We shouldn't have to change the arguments from `(obj1 obj2)` to `(obj2 obj1)` just to make CLOS pick the method that we want.
+We can configure its default behaviour if desired, and keep the order of arguments however it best fits the generic function.
+
+## Comparison with Clojure
+
+Clojure has an equivalent, when using `defmulti`.
+
+When declaring a multi-method with `defmulti`, we must define the dispatch function, which Clojure uses to pick the method definition.
+Since the dispatch function is required, there is no need for a default behaviour, such as left-to-right.
+
+## Conclusion
+
+Making the argument precedence order configurable for generic functions but not for class definitions makes a lot of sense.
+
+When declaring a class, we can choose the precedence order, and that is about it.
+But when defining a generic function, the order of arguments is more important to the function semantics, and the argument precedence being left-to-right is just the default behaviour.
+
+One shouldn't change the order of arguments of a generic function for the sake of tailoring it to the CLOS priority ranking algorithm, but doing it for a class definition is just fine.
+
+TIL.
+
+## References
+
+1. [Object-Oriented Programming in Common Lisp: A Programmer's Guide to CLOS](https://en.wikipedia.org/wiki/Object-Oriented_Programming_in_Common_Lisp), by Sonja E. Keene
diff --git a/src/content/tils/2021/04/24/clojure-autocurry.adoc b/src/content/tils/2021/04/24/clojure-autocurry.adoc
new file mode 100644
index 0000000..c1e277f
--- /dev/null
+++ b/src/content/tils/2021/04/24/clojure-autocurry.adoc
@@ -0,0 +1,135 @@
+---
+
+title: Clojure auto curry
+
+date: 2021-04-24 1
+
+updated_at: 2021-04-27
+
+layout: post
+
+lang: en
+
+ref: clojure-auto-curry
+
+---
+
+Here's a simple macro defined by [Loretta He][lorettahe] to create Clojure functions that are curried on all arguments, relying on Clojure's multi-arity support:
+
+```clojure
+(defmacro defcurry
+ [name args & body]
+ (let [partials (map (fn [n]
+ `(~(subvec args 0 n) (partial ~name ~@(take n args))))
+ (range 1 (count args)))]
+ `(defn ~name
+ (~args ~@body)
+ ~@partials)))
+```
+
+A naive `add` definition, alongside its usage and macroexpansion:
+
+```clojure
+user=> (defcurry add
+ [a b c d e]
+ (+ 1 2 3 4 5))
+#'user/add
+
+user=> (add 1)
+#object[clojure.core$partial$fn__5857 0x2c708440 "clojure.core$partial$fn__5857@2c708440"]
+
+user=> (add 1 2 3 4)
+#object[clojure.core$partial$fn__5863 0xf4c0e4e "clojure.core$partial$fn__5863@f4c0e4e"]
+
+user=> ((add 1) 2 3 4 5)
+15
+
+user=> (((add 1) 2 3) 4 5)
+15
+
+user=> (use 'clojure.pprint)
+nil
+
+user=> (pprint
+ (macroexpand
+ '(defcurry add
+ [a b c d e]
+ (+ 1 2 3 4 5))))
+(def
+ add
+ (clojure.core/fn
+ ([a b c d e] (+ 1 2 3 4 5))
+ ([a] (clojure.core/partial add a))
+ ([a b] (clojure.core/partial add a b))
+ ([a b c] (clojure.core/partial add a b c))
+ ([a b c d] (clojure.core/partial add a b c d))))
+nil
+```
+
+This simplistic `defcurry` definition doesn't support optional parameters, multi-arity, `&` rest arguments, docstrings, etc., but it could certainly evolve to do so.
+
+I like how `defcurry` is so short, and abdicates the responsibility of doing the multi-arity logic to Clojure's built-in multi-arity support.
+Simple and elegant.
+
+Same Clojure as before, now with auto-currying via macros.
+
+[lorettahe]: http://lorettahe.github.io/clojure/2016/09/22/clojure-auto-curry
+
+## Comparison with Common Lisp
+
+My attempt at writing an equivalent for Common Lisp gives me:
+
+```lisp
+(defun partial (fn &rest args)
+ (lambda (&rest args2)
+ (apply fn (append args args2))))
+
+(defun curry-n (n func)
+ (cond ((< n 0) (error "Too many arguments"))
+ ((zerop n) (funcall func))
+ (t (lambda (&rest rest)
+ (curry-n (- n (length rest))
+ (apply #'partial func rest))))))
+
+(defmacro defcurry (name args &body body)
+ `(defun ,name (&rest rest)
+ (let ((func (lambda ,args ,@body)))
+ (curry-n (- ,(length args) (length rest))
+ (apply #'partial func rest)))))
+```
+
+Without built-in multi-arity support, we have to do more work, like tracking the number of arguments consumed so far.
+We also have to write `#'partial` ourselves.
+That is, without depending on any library, sticking to ANSI Common Lisp.
+
+The usage is pretty similar:
+
+```lisp
+* (defcurry add (a b c d e)
+ (+ a b c d e))
+ADD
+
+* (add 1)
+#<FUNCTION (LAMBDA (&REST REST) :IN CURRY-N) {100216419B}>
+
+* (funcall (add 1) 2 3 4)
+#<FUNCTION (LAMBDA (&REST REST) :IN CURRY-N) {100216537B}>
+
+* (funcall (add 1) 2 3 4 5)
+15
+
+* (funcall (funcall (add 1) 2 3) 4 5)
+15
+
+* (macroexpand-1
+ '(defcurry add (a b c d e)
+ (+ a b c d e)))
+(DEFUN ADD (&REST REST)
+ (LET ((FUNC (LAMBDA (A B C D E) (+ A B C D E))))
+ (CURRY-N (- 5 (LENGTH REST)) (APPLY #'PARTIAL FUNC REST))))
+T
+```
+
+This also requires `funcall`s, since we return a `lambda` that doesn't live in the function namespace.
+
+Like the Clojure one, it doesn't support optional parameters, `&rest` rest arguments, docstrings, etc., but it also could evolve to do so.
diff --git a/src/content/tils/2021/04/24/scm-nif.adoc b/src/content/tils/2021/04/24/scm-nif.adoc
new file mode 100644
index 0000000..f53451b
--- /dev/null
+++ b/src/content/tils/2021/04/24/scm-nif.adoc
@@ -0,0 +1,63 @@
+---
+
+title: Three-way conditional for number signs on Lisp
+
+date: 2021-04-24 3
+
+updated_at: 2021-08-14
+
+layout: post
+
+lang: en
+
+ref: three-way-conditional-for-number-signs-on-lisp
+
+---
+
+A useful macro from Paul Graham's [On Lisp][on-lisp] book:
+
+```lisp
+(defmacro nif (expr pos zero neg)
+ (let ((g (gensym)))
+ `(let ((,g ,expr))
+ (cond ((plusp ,g) ,pos)
+ ((zerop ,g) ,zero)
+ (t ,neg)))))
+```
+
+After I looked at this macro, I started seeing opportunities to use it in many places, and yet I didn't see anyone else using it.
+
+The latest example I can think of is section 1.3.3 of [Structure and Interpretation of Computer Programs][sicp], which I was reading recently:
+
+```scheme
+(define (search f neg-point pos-point)
+ (let ((midpoint (average neg-point pos-point)))
+    (if (close-enough? neg-point pos-point)
+ midpoint
+ (let ((test-value (f midpoint)))
+ (cond ((positive? test-value)
+ (search f neg-point midpoint))
+ ((negative? test-value)
+ (search f midpoint pos-point))
+ (else midpoint))))))
+```
+
+Not that the book should introduce such macro this early, but I couldn't avoid feeling bothered by not using the `nif` macro, which could even remove the need for the intermediate `test-value` variable:
+
+```scheme
+(define (search f neg-point pos-point)
+ (let ((midpoint (average neg-point pos-point)))
+    (if (close-enough? neg-point pos-point)
+ midpoint
+ (nif (f midpoint)
+ (search f neg-point midpoint)
+        midpoint
+ (search f midpoint pos-point)))))
+```
+
+It also avoids `cond`'s extra clunky parentheses for grouping, which is unnecessary but built-in.
+
+As a macro, I personally feel it tilts the balance towards expressiveness despite its extra cognitive load toll.
+
+[on-lisp]: http://www.paulgraham.com/onlisptext.html
+[sicp]: https://mitpress.mit.edu/sites/default/files/sicp/index.html
diff --git a/src/content/tils/2021/07/23/git-tls-gpg.adoc b/src/content/tils/2021/07/23/git-tls-gpg.adoc
new file mode 100644
index 0000000..fd42c1c
--- /dev/null
+++ b/src/content/tils/2021/07/23/git-tls-gpg.adoc
@@ -0,0 +1,56 @@
+---
+
+title: GPG verification of Git repositories without TLS
+
+date: 2021-07-23
+
+layout: post
+
+lang: en
+
+ref: gpg-verification-of-git-repositories-without-tls
+
+---
+
+For online Git repositories that use the [Git Protocol] for serving code, you
+can use GPG to handle authentication, if you have the committer's public
+key.
+
+Here's how I'd verify that I've cloned an authentic version of
+[remembering][remembering][^not-available]:
+
+[^not-available]: Funnily enough, not available anymore via the Git Protocol, now only with HTTPS.
+
+```shell
+$ wget -qO- https://euandre.org/public.asc | gpg --import -
+gpg: clef 81F90EC3CD356060 : « EuAndreh <eu@euandre.org> » n'est pas modifiée
+gpg: Quantité totale traitée : 1
+gpg: non modifiées : 1
+$ pushd `mktemp -d`
+$ git clone git://euandreh.xyz/remembering .
+$ git verify-commit HEAD
+gpg: Signature faite le dim. 27 juin 2021 16:50:21 -03
+gpg: avec la clef RSA 5BDAE9B8B2F6C6BCBB0D6CE581F90EC3CD356060
+gpg: Bonne signature de « EuAndreh <eu@euandre.org> » [ultime]
+```
+
+On the first line we import the public key (funnily enough, available via
+HTTPS), and after cloning the code via the insecure `git://` protocol, we use
+`git verify-commit` to check the signature.
+
+The verification is successful, and we can see that the public key from the
+signature matches the fingerprint of the imported one. However
+`git verify-commit` doesn't have an option to check which public key you want
+to verify the commit against. Which means that if a MITM attack happens, the
+attacker could very easily serve a malicious repository with signed commits,
+and you'd have to verify the public key by yourself. That would need to happen
+for subsequent fetches, too.
+
+Even though this is possible, it is not very convenient, and certainly very
+brittle. Despite the fact that the Git Protocol is much faster, it being harder
+to make secure is a big downside.
+
+
+
+[Git Protocol]: https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols#_the_git_protocol
+[remembering]: https://euandreh.xyz/remembering/
diff --git a/src/content/tils/2021/08/11/js-bigint-reviver.adoc b/src/content/tils/2021/08/11/js-bigint-reviver.adoc
new file mode 100644
index 0000000..d71174d
--- /dev/null
+++ b/src/content/tils/2021/08/11/js-bigint-reviver.adoc
@@ -0,0 +1,100 @@
+---
+
+title: Encoding and decoding JavaScript BigInt values with reviver
+
+date: 2021-08-11
+
+updated_at: 2021-08-13
+
+layout: post
+
+lang: en
+
+ref: encoding-and-decoding-javascript-bigint-values-with-reviver
+
+---
+
+`JSON.parse()` accepts a second parameter: a [`reviver()` function][reviver].
+It is a function that can be used to transform the `JSON` values as they're
+being parsed.
+
+[reviver]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter
+
+As it turns out, when combined with JavaScript's [`BigInt`] type, you can parse
+and encode JavaScript `BigInt` numbers via JSON:
+
+[`BigInt`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt
+
+```javascript
+const bigIntReviver = (_, value) =>
+ typeof value === "string" && value.match(/^-?[0-9]+n$/)
+ ? BigInt(value.slice(0, value.length - 1))
+ : value;
+```
+
+I chose to interpret strings that contain only numbers and an ending `n` suffix
+as `BigInt` values, similar to how JavaScript interprets `123` (a number)
+differently from `123n` (a `bigint`).
+
+We do those checks before constructing the `BigInt` to avoid throwing needless
+exceptions and catching them on the parsing function, as this could easily
+become a bottleneck when parsing large JSON values.
+
+In order to do the full roundtrip, we now only need the `toJSON()` counterpart:
+
+```javascript
+BigInt.prototype.toJSON = function() {
+ return this.toString() + "n";
+};
+```
+
+With both `bigIntReviver` and `toJSON` defined, we can now successfully parse
+and encode JavaScript objects with `BigInt` values transparently:
+
+```javascript
+const s = `[
+ null,
+ true,
+ false,
+ -1,
+ 3.14,
+ "a string",
+ { "a-number": "-123" },
+ { "a-bigint": "-123n" }
+]`;
+
+const parsed = JSON.parse(s, bigIntReviver);
+const s2 = JSON.stringify(parsed);
+
+console.log(parsed);
+console.log(s2);
+
+console.log(typeof parsed[6]["a-number"])
+console.log(typeof parsed[7]["a-bigint"])
+```
+
+The output of the above is:
+
+```
+[
+ null,
+ true,
+ false,
+ -1,
+ 3.14,
+ 'a string',
+ { 'a-number': '-123' },
+ { 'a-bigint': -123n }
+]
+[null,true,false,-1,3.14,"a string",{"a-number":"-123"},{"a-bigint":"-123n"}]
+string
+bigint
+```
+
+If you're on a web browser, you can probably try copying and pasting the above
+code on the console right now, as is.
+
+Even though [`JSON`] doesn't include `BigInt` numbers, encoding and decoding them
+as strings is quite trivial on JavaScript.
+
+[`JSON`]: https://datatracker.ietf.org/doc/html/rfc8259
diff --git a/src/content/tils/index.adoc b/src/content/tils/index.adoc
new file mode 100644
index 0000000..4ae3b92
--- /dev/null
+++ b/src/content/tils/index.adoc
@@ -0,0 +1 @@
+= TIL
diff --git a/src/headers.txt b/src/headers.txt
new file mode 100644
index 0000000..994b8b8
--- /dev/null
+++ b/src/headers.txt
@@ -0,0 +1 @@
+about.html About
diff --git a/src/pages/root b/src/pages/root
new file mode 120000
index 0000000..efcdaa6
--- /dev/null
+++ b/src/pages/root
@@ -0,0 +1 @@
+../content \ No newline at end of file