summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore56
-rw-r--r--COPYING661
-rw-r--r--Makefile491
-rw-r--r--README3
-rw-r--r--TODOs.md360
-rw-r--r--_articles/2018-08-01-verifying-npm-ci-reproducibility.md148
-rw-r--r--_articles/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md274
-rw-r--r--_articles/2019-06-02-using-nixos-as-an-stateless-workstation.md152
-rw-r--r--_articles/2020-08-31-the-database-i-wish-i-had.md295
-rw-r--r--_articles/2020-10-19-feature-flags-differences-between-backend-frontend-and-mobile.md305
-rw-r--r--_articles/2020-10-20-how-not-to-interview-engineers.md331
-rw-r--r--_articles/2020-11-07-diy-an-offline-bug-tracker-with-text-files-git-and-email.md108
-rw-r--r--_articles/2020-11-08-the-next-paradigm-shift-in-programming-video-review.md164
-rw-r--r--_articles/2020-11-12-durable-persistent-trees-and-parser-combinators-building-a-database.md235
-rw-r--r--_articles/2021-01-26-ann-remembering-add-memory-to-dmenu-fzf-and-similar-tools.md190
-rw-r--r--_articles/2021-02-17-ann-fallible-fault-injection-library-for-stress-testing-failure-scenarios.md246
-rw-r--r--_articles/2021-04-29-a-relational-model-of-data-for-large-shared-data-banks-article-review.md130
-rw-r--r--_pastebins/2018-07-13-nix-string-padding.md19
-rw-r--r--_pastebins/2020-02-14-guix-shebang.md23
-rw-r--r--_screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.md56
-rw-r--r--_slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides343
-rw-r--r--_slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides266
-rw-r--r--_tils/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md45
-rw-r--r--_tils/2020-10-11-search-changes-to-a-filename-pattern-in-git-history.md41
-rw-r--r--_tils/2020-11-08-find-broken-symlinks-with-find.md36
-rw-r--r--_tils/2020-11-12-useful-bash-variables.md72
-rw-r--r--_tils/2020-11-14-gpodder-as-a-media-subscription-manager.md33
-rw-r--r--_tils/2021-01-17-posix-sh-and-shebangs.md57
-rw-r--r--deps.mk1182
-rw-r--r--description1
-rw-r--r--long-description1
-rw-r--r--meta.capim11
-rwxr-xr-xmkdeps.sh166
-rw-r--r--music/choro-da-saudade.ly233
-rw-r--r--music/common.ly.include15
-rw-r--r--music/dengoso.ly310
-rw-r--r--music/guitar.ly.include33
-rw-r--r--music/marcha-dos-marinheiros.ly78
-rw-r--r--po/de.po161
-rw-r--r--po/en.po171
-rw-r--r--po/eo.po161
-rw-r--r--po/es.po161
-rw-r--r--po/euandre.org.pot161
-rw-r--r--po/fr.po161
-rw-r--r--po/note.txt5
-rw-r--r--po/po4a.cfg12
-rw-r--r--po/pt.po190
-rwxr-xr-xsrc/bin/pb60
l---------src/collections/en/blog1
l---------src/collections/en/pastebin1
l---------src/collections/en/podcast1
l---------src/collections/en/screencast1
l---------src/collections/en/til1
l---------src/collections/pt/hea1
-rw-r--r--src/content/.well-known/security.txt4
-rw-r--r--src/content/en/about.adoc11
-rw-r--r--src/content/en/blog/2018/07/17/guix-nixos.adoc (renamed from _articles/2018-07-17-running-guix-on-nixos.md)111
-rw-r--r--src/content/en/blog/2018/08/01/npm-ci-reproducibility.adoc147
-rw-r--r--src/content/en/blog/2018/12/21/ytdl-subs.adoc279
-rw-r--r--src/content/en/blog/2019/06/02/nixos-stateless-workstation.adoc146
-rw-r--r--src/content/en/blog/2020/08/10/guix-srht.adoc (renamed from _articles/2020-08-10-guix-inside-sourcehut-builds-sr-ht-ci.md)88
-rw-r--r--src/content/en/blog/2020/08/31/database-i-wish-i-had.adoc299
-rw-r--r--src/content/en/blog/2020/10/05/cargo2nix-demo.tar.gzbin0 -> 59565 bytes
-rw-r--r--src/content/en/blog/2020/10/05/cargo2nix.adoc (renamed from _articles/2020-10-05-cargo2nix-dramatically-simpler-rust-in-nix.md)52
-rw-r--r--src/content/en/blog/2020/10/05/cargo2nix.tar.gzbin0 -> 53327 bytes
-rw-r--r--src/content/en/blog/2020/10/05/swift2nix-demo.tar.gzbin0 -> 61691 bytes
-rw-r--r--src/content/en/blog/2020/10/05/swift2nix.adoc (renamed from _articles/2020-10-05-swift2nix-run-swift-inside-nix-builds.md)159
-rw-r--r--src/content/en/blog/2020/10/05/swift2nix.tar.gzbin0 -> 57917 bytes
-rw-r--r--src/content/en/blog/2020/10/19/feature-flags.adoc306
-rw-r--r--src/content/en/blog/2020/10/20/wrong-interviewing.adoc340
-rw-r--r--src/content/en/blog/2020/11/07/diy-bugs.adoc93
-rw-r--r--src/content/en/blog/2020/11/08/paradigm-shift-review.adoc154
-rw-r--r--src/content/en/blog/2020/11/12/database-parsers-trees.adoc226
-rw-r--r--src/content/en/blog/2020/11/14/local-first-review.adoc (renamed from _articles/2020-11-14-local-first-software-you-own-your-data-in-spite-of-the-cloud-article-review.md)239
-rw-r--r--src/content/en/blog/2021/01/26/remembering-ann.adoc216
-rw-r--r--src/content/en/blog/2021/02/17/fallible.adoc285
-rw-r--r--src/content/en/blog/2021/02/17/fallible.tar.gzbin0 -> 1915439 bytes
-rw-r--r--src/content/en/blog/2021/04/29/relational-review.adoc144
-rw-r--r--src/content/en/blog/categories.adoc2
-rw-r--r--src/content/en/blog/index.adoc1
-rw-r--r--src/content/en/index.adoc1
-rw-r--r--src/content/en/pastebin/2016/04/05/rpn.adoc (renamed from _pastebins/2016-04-05-rpn-macro-setup.md)19
-rw-r--r--src/content/en/pastebin/2018/07/11/nix-pinning.adoc (renamed from _pastebins/2018-07-11-nix-pinning.md)22
-rw-r--r--src/content/en/pastebin/2018/07/13/guix-nixos-systemd.adoc (renamed from _pastebins/2018-07-13-gnu-guix-systemd-daemon-for-nixos.md)23
-rw-r--r--src/content/en/pastebin/2018/07/13/guixbuilder-nixos.adoc (renamed from _pastebins/2018-07-13-guix-users-in-nixos-system-configuration.md)23
-rw-r--r--src/content/en/pastebin/2018/07/13/guixbuilder.adoc (renamed from _pastebins/2018-07-13-guix-builder-user-creation-commands.md)22
-rw-r--r--src/content/en/pastebin/2018/07/13/nix-strpad.adoc8
-rw-r--r--src/content/en/pastebin/2018/07/25/nix-exps.adoc (renamed from _pastebins/2018-07-25-nix-exps.md)22
-rw-r--r--src/content/en/pastebin/2018/07/25/nix-showdrv.adoc (renamed from _pastebins/2018-07-25-nix-show-derivation-sample-output.md)23
-rw-r--r--src/content/en/pastebin/2019/06/08/inconsistent-hash.adoc (renamed from _pastebins/2019-06-08-inconsistent-hash-of-buildgomodule.md)56
-rw-r--r--src/content/en/pastebin/2019/12/29/raku-tuple-type.adoc (renamed from _pastebins/2019-12-29-raku-tuple-type-annotation.md)25
-rw-r--r--src/content/en/pastebin/2020/01/04/guix-import-failure.adoc (renamed from _pastebins/2020-01-04-failure-on-guix-tex-live-importer.md)22
-rw-r--r--src/content/en/pastebin/2020/02/14/guix-shebang.adoc11
-rw-r--r--src/content/en/pastebin/2020/11/27/guix-build-local.adoc (renamed from _pastebins/2020-11-27-guix-build-local-module.md)39
-rw-r--r--src/content/en/pastebin/2020/12/15/guix-pack-fail.adoc (renamed from _pastebins/2020-12-15-failure-with-relocatable-guix-pack-tarball.md)34
-rw-r--r--src/content/en/pastebin/2021/04/03/naive-slugify-js.adoc (renamed from _pastebins/2021-04-03-javascript-naive-slugify.md)22
-rw-r--r--src/content/en/pastebin/2021/06/08/reading-session-pt1.adoc (renamed from _pastebins/2021-06-08-debit-reading-session-sicp-solutions-pt-1.md)19
-rw-r--r--src/content/en/pastebin/2021/06/22/curl-wget.adoc (renamed from _pastebins/2021-06-22-cloc-curl-and-wget.md)24
-rw-r--r--src/content/en/pastebin/2021/08/11/h1-spacing.adoc (renamed from _pastebins/2021-08-11-spaces-around-h1-tags.md)63
-rw-r--r--src/content/en/pastebin/2021/09/02/sicp-3-19.adoc (renamed from _pastebins/2021-09-02-sicp-exercise-3-19.md)25
-rw-r--r--src/content/en/pastebin/2021/09/03/sicp-persistent-queue.adoc (renamed from _pastebins/2021-09-03-sicp-persistent-amortized-o1-queue.md)37
-rw-r--r--src/content/en/pastebin/2022/07/14/git-cleanup.adoc (renamed from _pastebins/2022-07-14-git-cleanup-command.md)19
-rw-r--r--src/content/en/pastebin/2023/07/22/funcallable-amop.adoc (renamed from _pastebins/2023-07-22-funcallable-amop.md)20
-rw-r--r--src/content/en/pastebin/categories.adoc2
-rw-r--r--src/content/en/pastebin/index.adoc1
-rw-r--r--src/content/en/podcast/2020/12/19/test-entry.adoc (renamed from _podcasts/2020-12-19-a-test-entry.md)26
-rw-r--r--src/content/en/podcast/2020/12/19/test-entry.flac (renamed from resources/podcasts/2020-12-19-a-test-entry.flac)bin462864 -> 462864 bytes
-rw-r--r--src/content/en/podcast/2020/12/19/test-entry.oggbin0 -> 97060 bytes
-rw-r--r--src/content/en/podcast/categories.adoc2
-rw-r--r--src/content/en/podcast/index.adoc1
-rw-r--r--src/content/en/screencast/2021/02/07/autoqemu.adoc53
-rw-r--r--src/content/en/screencast/2021/02/07/autoqemu.tar.gzbin0 -> 506213 bytes
-rw-r--r--src/content/en/screencast/2021/02/07/autoqemu.webm (renamed from resources/screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.webm)bin12103021 -> 12103021 bytes
-rw-r--r--src/content/en/screencast/categories.adoc2
-rw-r--r--src/content/en/screencast/index.adoc1
-rw-r--r--src/content/en/slide/2020/10/19/feature-flags.adoc230
-rw-r--r--src/content/en/slide/2020/11/14/local-first-hype.adoc204
-rw-r--r--src/content/en/til/2020/08/12/filename-timestamp.adoc (renamed from _tils/2020-08-12-simple-filename-timestamp.md)39
-rw-r--r--src/content/en/til/2020/08/13/code-jekyll.adoc (renamed from _tils/2020-08-13-anchor-headers-and-code-lines-in-jekyll.md)102
-rw-r--r--src/content/en/til/2020/08/14/browse-git.adoc (renamed from _tils/2020-08-14-browse-a-git-repository-at-a-specific-commit.md)68
-rw-r--r--src/content/en/til/2020/08/16/git-search.adoc (renamed from _tils/2020-08-16-search-in-git.md)52
-rw-r--r--src/content/en/til/2020/08/28/grep-online.adoc (renamed from _tils/2020-08-28-grep-online-repositories.md)84
-rw-r--r--src/content/en/til/2020/09/04/cli-email-fun-profit.adoc (renamed from _tils/2020-09-04-send-emails-using-the-command-line-for-fun-and-profit.md)52
-rw-r--r--src/content/en/til/2020/09/05/oldschool-pr.adoc (renamed from _tils/2020-09-05-pull-requests-with-git-the-old-school-way.md)78
-rw-r--r--src/content/en/til/2020/10/11/search-git-history.adoc29
-rw-r--r--src/content/en/til/2020/11/08/find-broken-symlink.adoc25
-rw-r--r--src/content/en/til/2020/11/12/diy-nix-bash-ci.adoc (renamed from _tils/2020-11-12-diy-bare-bones-ci-server-with-bash-and-nix.md)47
-rw-r--r--src/content/en/til/2020/11/12/git-bisect-automation.adoc (renamed from _tils/2020-11-12-git-bisect-automation.md)26
-rw-r--r--src/content/en/til/2020/11/12/useful-bashvars.adoc61
-rw-r--r--src/content/en/til/2020/11/14/gpodder-media.adoc21
-rw-r--r--src/content/en/til/2020/11/30/git-notes-ci.adoc (renamed from _tils/2020-11-30-storing-ci-data-on-git-notes.md)64
-rw-r--r--src/content/en/til/2020/12/15/shellcheck-repo.adoc (renamed from _tils/2020-12-15-awk-snippet-shellcheck-all-scripts-in-a-repository.md)94
-rw-r--r--src/content/en/til/2020/12/29/svg.adoc (renamed from _tils/2020-12-29-svg-favicon.md)51
-rw-r--r--src/content/en/til/2021/01/12/curl-awk-emails.adoc (renamed from _tils/2021-01-12-awk-snippet-send-email-to-multiple-recipients-with-curl.md)100
-rw-r--r--src/content/en/til/2021/01/17/posix-shebang.adoc58
-rw-r--r--src/content/en/til/2021/04/24/cl-generic-precedence.adoc (renamed from _tils/2021-04-24-common-lisp-argument-precedence-order-parameterization-of-a-generic-function.md)98
-rw-r--r--src/content/en/til/2021/04/24/clojure-autocurry.adoc (renamed from _tils/2021-04-24-clojure-auto-curry.md)68
-rw-r--r--src/content/en/til/2021/04/24/scm-nif.adoc (renamed from _tils/2021-04-24-three-way-conditional-for-number-signs-on-lisp.md)56
-rw-r--r--src/content/en/til/2021/07/23/git-tls-gpg.adoc (renamed from _tils/2021-07-23-gpg-verification-of-git-repositories-without-tls.md)43
-rw-r--r--src/content/en/til/2021/08/11/js-bigint-reviver.adoc (renamed from _tils/2021-08-11-encoding-and-decoding-javascript-bigint-values-with-reviver.md)83
-rw-r--r--src/content/en/til/categories.adoc2
-rw-r--r--src/content/en/til/index.adoc7
-rw-r--r--src/content/favicon.icobin0 -> 1150 bytes
-rw-r--r--src/content/favicon.pngbin0 -> 103 bytes
-rw-r--r--src/content/img/atom.svg6
-rw-r--r--src/content/img/envelope/dark.svg6
-rw-r--r--src/content/img/envelope/light.svg6
-rw-r--r--src/content/img/favicon.svg (renamed from static/lord-favicon.svg)0
-rw-r--r--src/content/img/link/dark.svg7
-rw-r--r--src/content/img/link/light.svg7
-rw-r--r--src/content/img/lock/dark.svg6
-rw-r--r--src/content/img/lock/light.svg6
-rw-r--r--src/content/img/logo/dark.svg62
-rw-r--r--src/content/img/logo/light.svg62
-rw-r--r--src/content/pt/hea/2020/08/12/arquivo-datado.adoc29
-rw-r--r--src/content/pt/hea/categorias.adoc6
-rw-r--r--src/content/pt/hea/index.adoc11
-rw-r--r--src/content/pt/sobre.adoc12
-rw-r--r--src/content/public.asc86
-rw-r--r--src/content/public.asc.txt99
l---------src/content/s1
l---------src/content/security.txt1
-rw-r--r--src/content/style.css95
-rw-r--r--src/headers/de.txt13
-rw-r--r--src/headers/en.txt13
-rw-r--r--src/headers/eo.txt13
-rw-r--r--src/headers/es.txt13
-rw-r--r--src/headers/fr.txt13
-rw-r--r--src/headers/pt.txt13
-rw-r--r--src/headers/ref.txt13
-rw-r--r--src/linkonly-dirs.txt5
-rw-r--r--src/names/categories/de.txt1
-rw-r--r--src/names/categories/en.txt1
-rw-r--r--src/names/categories/eo.txt1
-rw-r--r--src/names/categories/es.txt1
-rw-r--r--src/names/categories/fr.txt1
-rw-r--r--src/names/categories/pt.txt1
-rw-r--r--src/names/categories/ref.txt1
-rw-r--r--src/names/category/de.txt1
-rw-r--r--src/names/category/en.txt1
-rw-r--r--src/names/category/eo.txt1
-rw-r--r--src/names/category/es.txt1
-rw-r--r--src/names/category/fr.txt1
-rw-r--r--src/names/category/pt.txt1
-rw-r--r--src/names/category/ref.txt1
l---------src/pages/en1
l---------src/pages/pt1
l---------src/slides/en1
-rw-r--r--src/static.conf11
-rw-r--r--src/symlinks.txt74
-rw-r--r--static/atom.svg5
-rw-r--r--static/attachments/apollo-server-demo.tar.gzbin22681600 -> 0 bytes
-rw-r--r--static/attachments/autoqemu.tar.gzbin808960 -> 0 bytes
-rw-r--r--static/attachments/cargo2nix-demo.tar.gzbin174080 -> 0 bytes
-rw-r--r--static/attachments/cargo2nix.tar.gzbin143360 -> 0 bytes
-rw-r--r--static/attachments/fallible.tar.gzbin3174400 -> 0 bytes
-rw-r--r--static/attachments/swift2nix-demo.tar.gzbin174080 -> 0 bytes
-rw-r--r--static/attachments/swift2nix.tar.gzbin143360 -> 0 bytes
-rw-r--r--static/envelope.svg4
-rw-r--r--static/link.svg5
-rw-r--r--static/lock.svg4
201 files changed, 8127 insertions, 6827 deletions
diff --git a/.gitignore b/.gitignore
index 34ed78d..874fc80 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,35 +1,23 @@
-/generated.mk
-
-# Nix
-/result*
-/tmp/
-
-# Jekyll
-/.bundle/
-/_site/
-/public/
-/.jekyll-cache/
-/images/graphviz/
-
-# Generated resources
-*.ogg
+*.htmlbody
+*.embedded-config
+*.conf
+*.snippets
+*.links
+*.caslinks
+*.html
+*.indexentry
+*.feedentry
+*.mapentry
+*.sortdata
+*.categorydata
+*.txt
+*.xml
+*.htmlheader
+*.htmlfooter
+*.htmllisting
+*.ps
+*.pdf
+*.gz
+*.sentinel
*.torrent
-*.checksum
-/static/lord-favicon.png
-/music/*.pdf
-/music/*.midi
-/static/lord-favicon.ico
-/favicon.ico
-
-/drafts/
-
-/deps.mk
-/files.mk
-/torrent-files.txt
-
-/TODOs.html
-
-# JEKYLL_COMPAT
-/src/content/
-
-/logs/
+/src/content/.gitignore
diff --git a/COPYING b/COPYING
deleted file mode 100644
index be3f7b2..0000000
--- a/COPYING
+++ /dev/null
@@ -1,661 +0,0 @@
- GNU AFFERO GENERAL PUBLIC LICENSE
- Version 3, 19 November 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU Affero General Public License is a free, copyleft license for
-software and other kinds of works, specifically designed to ensure
-cooperation with the community in the case of network server software.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-our General Public Licenses are intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- Developers that use our General Public Licenses protect your rights
-with two steps: (1) assert copyright on the software, and (2) offer
-you this License which gives you legal permission to copy, distribute
-and/or modify the software.
-
- A secondary benefit of defending all users' freedom is that
-improvements made in alternate versions of the program, if they
-receive widespread use, become available for other developers to
-incorporate. Many developers of free software are heartened and
-encouraged by the resulting cooperation. However, in the case of
-software used on network servers, this result may fail to come about.
-The GNU General Public License permits making a modified version and
-letting the public access it on a server without ever releasing its
-source code to the public.
-
- The GNU Affero General Public License is designed specifically to
-ensure that, in such cases, the modified source code becomes available
-to the community. It requires the operator of a network server to
-provide the source code of the modified version running there to the
-users of that server. Therefore, public use of a modified version, on
-a publicly accessible server, gives the public access to the source
-code of the modified version.
-
- An older license, called the Affero General Public License and
-published by Affero, was designed to accomplish similar goals. This is
-a different license, not a version of the Affero GPL, but Affero has
-released a new version of the Affero GPL which permits relicensing under
-this license.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU Affero General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Remote Network Interaction; Use with the GNU General Public License.
-
- Notwithstanding any other provision of this License, if you modify the
-Program, your modified version must prominently offer all users
-interacting with it remotely through a computer network (if your version
-supports such interaction) an opportunity to receive the Corresponding
-Source of your version by providing access to the Corresponding Source
-from a network server at no charge, through some standard or customary
-means of facilitating copying of software. This Corresponding Source
-shall include the Corresponding Source for any work covered by version 3
-of the GNU General Public License that is incorporated pursuant to the
-following paragraph.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the work with which it is combined will remain governed by version
-3 of the GNU General Public License.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU Affero General Public License from time to time. Such new versions
-will be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU Affero General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU Affero General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU Affero General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU Affero General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU Affero General Public License for more details.
-
- You should have received a copy of the GNU Affero General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If your software can interact with users remotely through a computer
-network, you should also make sure that it provides a way for users to
-get its source. For example, if your program is a web application, its
-interface could display a "Source" link that leads users to an archive
-of the code. There are many ways you could offer source, and different
-solutions will be better for different programs; see section 13 for the
-specific requirements.
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU AGPL, see
-<https://www.gnu.org/licenses/>.
diff --git a/Makefile b/Makefile
index 24c3e6c..518ac92 100644
--- a/Makefile
+++ b/Makefile
@@ -1,16 +1,487 @@
.POSIX:
-.DEFAULT:
- $(MAKE) generated.mk
- $(MAKE) -f dynamic.mk $<
+DATE = 1970-01-01
+VERSION = 0.1.0
+NAME = euandre.org
+NAME_UC = $(NAME)
+LANGUAGES = en
+## Installation prefix. Defaults to "/usr".
+PREFIX = /usr
+BINDIR = $(PREFIX)/bin
+LIBDIR = $(PREFIX)/lib
+INCLUDEDIR = $(PREFIX)/include
+SRCDIR = $(PREFIX)/src/$(NAME)
+SHAREDIR = $(PREFIX)/share
+LOCALEDIR = $(SHAREDIR)/locale
+MANDIR = $(SHAREDIR)/man
+DOCDIR = $(SHAREDIR)/doc/$(NAME)
+HTMLDIR = $(SHAREDIR)/html/$(NAME)
+EXEC = ./
+## Where to store the installation. Empty by default.
+DESTDIR =
+LDLIBS =
+PUBURL = public.asc.txt
+FFMFLAGS = -y -hide_banner -loglevel warning
+BASEURL = /
-all: generated.mk
- $(MAKE) -f dynamic.mk all
-generated.mk: ALWAYS JEKYLL_COMPAT
- sh src/development/dynmake.sh > $@
+.SUFFIXES:
+.SUFFIXES: .adoc .conf .snippets .indexentry .feedentry .mapentry .sortdata .xml
+.SUFFIXES: .htmlbody .htmlheader .htmlfooter .htmllisting .html .links .caslinks
+.SUFFIXES: .txt .categorydata .gz .torrent .flac .ogg .ps .pdf .sentinel
-ALWAYS:
+.adoc.conf:
+ mkwb conf src/global.conf $< > $@
+
+.adoc.htmlbody:
+ mkwb htmlbody $< > $@
+
+.htmlbody.html:
+ mkwb html $< > $@
+
+.conf.htmlheader:
+ mkwb html -H $< > $@
+
+.conf.htmlfooter:
+ mkwb html -F $< > $@
+
+.adoc.snippets:
+ mkwb snippets $< > $@
+
+.conf.indexentry:
+ mkwb indexentry $< > $@
+
+.htmlbody.feedentry:
+ mkwb feedentry $< > $@
+
+.conf.mapentry:
+ mkwb mapentry $< > $@
+
+.conf.sortdata:
+ mkwb sortdata $< > $@
+
+.conf.categorydata:
+ mkwb categorydata $< > $@
+
+.adoc.links:
+ mkwb links $< > $@
+
+.links.caslinks:
+ grep -Ev '^(link|image):' $< | xargs -I_ sh -c '\
+ printf "%s\n" "_" | sha256sum | \
+ printf "%s\t%s\n" "`cut -d" " -f1`" "_"' > $@
+
+.flac.ogg:
+ ffmpeg $(FFMFLAGS) -i $< -ar 48000 -vn -c:a libvorbis -b:a 320k $@
+
+.adoc.ps:
+ eslaides < $< > $@
+
+.ps.pdf:
+ ps2pdf - < $< > $@
+
+
+
+all:
+include deps.mk
+
+
+listings.adoc = $(categories.adoc) $(indexes.adoc)
+sources.adoc = $(articles.adoc) $(listings.adoc) $(pages.adoc)
+sources.htmlbody = $(sources.adoc:.adoc=.htmlbody)
+sources.html = $(sources.adoc:.adoc=.html)
+sources.snippets = $(sources.adoc:.adoc=.snippets)
+sources.snippets.gz = $(sources.adoc:.adoc=.snippets.gz)
+sources.conf = $(sources.adoc:.adoc=.conf)
+sources.links = $(sources.adoc:.adoc=.links)
+sources.caslinks = $(sources.adoc:.adoc=.caslinks)
+sources.mapentry = $(sources.adoc:.adoc=.mapentry)
+articles.indexentry = $(articles.adoc:.adoc=.indexentry)
+articles.feedentry = $(articles.adoc:.adoc=.feedentry)
+articles.sortdata = $(articles.adoc:.adoc=.sortdata)
+articles.categorydata = $(articles.adoc:.adoc=.categorydata)
+listings.htmlheader = $(listings.adoc:.adoc=.htmlheader)
+listings.htmlfooter = $(listings.adoc:.adoc=.htmlfooter)
+listings.html = $(listings.adoc:.adoc=.html)
+indexes.htmllisting = $(indexes.adoc:.adoc=.htmllisting)
+categories.htmllisting = $(categories.adoc:.adoc=.htmllisting)
+categories.txt = $(categories.adoc:.adoc=.txt)
+categories.xml = $(categories.adoc:.adoc=.xml)
+categories.xml.gz = $(categories.adoc:.adoc=.xml.gz)
+sources.media.torrent = $(sources.media:=.torrent)
+slides.ps = $(slides.adoc:.adoc=.ps)
+slides.pdf = $(slides.adoc:.adoc=.pdf)
+
+sources = \
+ $(sources.adoc) \
+ $(sources.extras) \
+ $(images.svg) \
+ src/content/favicon.ico \
+ src/content/favicon.png \
+ src/content/style.css \
+ src/content/$(PUBURL) \
+
+dynamic-contents = \
+ $(sources.html) \
+ $(slides.pdf) \
+ $(feeds.xml) \
+ $(sources.media.torrent) \
+ src/content/sitemap.xml \
+
+static-contents = \
+ $(sources.extras) \
+ $(images.svg) \
+ src/content/favicon.ico \
+ src/content/favicon.png \
+ src/content/style.css \
+ src/content/$(PUBURL) \
+ src/content/.well-known/security.txt \
+
+dynamic-contents.gz = $(dynamic-contents:=.gz)
+
+static-contents.gz = \
+ $(images.svg:=.gz) \
+ src/content/favicon.ico.gz \
+ src/content/style.css.gz \
+ src/content/$(PUBURL).gz \
+ src/content/.well-known/security.txt.gz \
+
+contents.gz = \
+ $(dynamic-contents.gz) \
+ $(static-contents.gz) \
+
+contents = \
+ $(dynamic-contents) \
+ $(static-contents) \
+
+all-filelists = \
+ $(sources.snippets) \
+ $(sources.snippets.gz) \
+ $(categories.xml) \
+ $(categories.xml.gz) \
+
+all-contents = \
+ $(contents) \
+ $(contents.gz) \
+
+captured-assets = \
+ src/content/$(PUBURL) \
+ src/content/favicon.ico \
+ src/content/favicon.png \
+
+captured-assets.sentinel = $(captured-assets:=.sentinel)
+
+
+derived-assets = \
+ $(dynamic-contents) \
+ $(contents.gz) \
+ $(sources.html) \
+ $(sources.htmlbody) \
+ $(sources.snippets) \
+ $(sources.snippets.gz) \
+ $(sources.conf) \
+ $(sources.links) \
+ $(sources.caslinks) \
+ $(sources.mapentry) \
+ src/dyn.conf \
+ src/base.conf \
+ src/global.conf \
+ $(articles.indexentry) \
+ $(articles.feedentry) \
+ $(articles.sortdata) \
+ $(articles.categorydata) \
+ $(listings.htmlheader) \
+ $(listings.htmlfooter) \
+ $(listings.html) \
+ $(indexes.htmllisting) \
+ $(categories.htmllisting) \
+ $(categories.txt) \
+ $(categories.xml) \
+ $(categories.xml.gz) \
+ $(slides.ps) \
+ $(slides.pdf) \
+ email.txt \
+ baseurl.txt \
+ fingerprint.txt \
+ expiry.txt \
+ expiry-epoch.txt \
+ now.txt \
+ src/content/.well-known/security.txt \
+ src/all-contents.txt \
+ src/all-filelists.txt \
+ src/all-symlinks.txt \
+ src/install.txt \
+ src/sort-expected.txt \
+ src/sort-given.txt \
+ src/sources.txt \
+ install.txt \
+ sources.txt \
+ src/content/.gitignore \
+ $(captured-assets.sentinel) \
+
+side-assets = \
+ src/collections/*/*/*/*/*/*.html.*.txt \
+ src/collections/*/*/*/*/*/*.txt.gz \
+ src/collections/*/*/index.html.*.txt \
+ src/collections/*/*/sortdata.txt \
+ src/collections/*/*/feed.*.xml \
+ src/collections/*/*/feed.*.xml.gz \
+ src/collections/*/*/*.sortdata \
+ src/pages/*/*.html.*.txt \
+ src/content/.well-known/ \
+ `cat src/all-symlinks.txt 2>/dev/null` \
+ `cat src/linkonly-dirs.txt 2>/dev/null` \
+
+
+
+## Default target. Builds all artifacts required for testing
+## and installation.
+all: $(derived-assets)
+all: $(captured-assets)
+
+
+$(derived-assets): Makefile deps.mk
+$(sources.conf): src/global.conf
+
+
+src/content/.gitignore: src/symlinks.txt
+ cd src/content/ && mkwb symlinks ../symlinks.txt > $(@F)
+
+src/dyn.conf: email.txt baseurl.txt fingerprint.txt
+ printf "export url_pre='%s'\n" "`cat baseurl.txt`" > $@
+ printf "export email='%s'\n" "`cat email.txt`" >> $@
+ printf "export publickey='%s'\n" "`cat fingerprint.txt`" >> $@
+ printf "export publickey_url='$(PUBURL)'\n" >> $@
+ printf 'export sourcecode_url="$$url_pre/git/$(NAME)"\n' >> $@
+
+src/base.conf: src/dyn.conf src/static.conf
+ cat src/dyn.conf src/static.conf > $@
+
+src/global.conf: src/base.conf
+ mkwb conf -G src/base.conf > $@
+
+$(listings.html):
+ cat $*.htmlheader $*.htmlbody $*.htmllisting $*.htmlfooter > $@
+
+$(indexes.htmllisting):
+ mkwb indexbody $*.conf > $@
+
+$(categories.htmllisting):
+ mkwb categoriesbody $*.conf > $@
+
+$(categories.txt): src/global.conf
+ mkwb categories src/global.conf $(@D) > $@
+
+$(categories.xml):
+ for f in `cat $*.txt`; do \
+ c="`printf '%s\n' "$$f" | cut -d. -f2`"; \
+ mkwb feed src/global.conf "$$f" > $(@D)/feed."$$c".xml; \
+ printf '%s\n' $(@D)/feed."$$c".xml; \
+ done > $@
+
+$(feeds.xml):
+ mkwb feed src/global.conf $(@D)/sortdata.txt > $@
+
+$(contents.gz):
+ gzip -9fk $*
+ touch $@
+
+$(sources.snippets.gz) $(categories.xml.gz):
+ if [ -s $* ]; then gzip -9fk `cat $*`; fi
+ sed 's/$$/.gz/' $* > $@
+
+src/content/$(PUBURL).gz: src/content/$(PUBURL).sentinel
+src/content/$(PUBURL).sentinel: email.txt
+ gpg --export --armour "`cat email.txt`" | ifne ifnew $*
+ touch $@
+
+src/content/favicon.ico.gz: src/content/favicon.ico.sentinel
+src/content/favicon.ico.sentinel: src/content/img/favicon.svg
+ convert src/content/img/favicon.svg -strip ico:- | ifnew $*
+ touch $@
+
+src/content/favicon.png.sentinel: src/content/img/favicon.svg
+ convert src/content/img/favicon.svg -strip png:- | ifnew $*
+ touch $@
+
+$(sources.media.torrent):
+ F="`printf '%s\n' $* | cut -d/ -f3-`" && \
+ mktorrent -xfd -n $(*F) -o $@ -w "https://$(NAME)$(BASEURL)$${F}" $*
+
+src/content/sitemap.xml.gz: src/content/sitemap.xml
+src/content/sitemap.xml: $(sources.mapentry)
+ mkwb sitemap $(sources.mapentry) > $@
+
+email.txt: meta.capim
+ cat meta.capim | awk '$$1 == ":email" && $$0=$$2' | tr -d '"' > $@
-JEKYLL_COMPAT:
- sh src/development/JEKYLL_COMPAT/copy-content.sh
+baseurl.txt: meta.capim
+ cat meta.capim | awk '$$1 == ":baseurl" && $$0=$$2' | tr -d '"' > $@
+
+fingerprint.txt: src/content/$(PUBURL)
+ gpg --always-trust --no-keyring --show-key --with-colons \
+ src/content/$(PUBURL) | \
+ awk -F: '/^pub:/ { print $$5 }' > $@
+
+expiry.txt: src/content/$(PUBURL)
+ gpg --always-trust --no-keyring --show-key --with-colons \
+ src/content/$(PUBURL) | \
+ awk -F: '/^pub:/ { print $$7 }' | \
+ xargs -I% date -Is -d@% > $@
+
+expiry-epoch.txt: expiry.txt
+ date -d "`cat expiry.txt`" '+%s' > $@
+
+now.txt:
+ now > $@
+
+src/content/.well-known/security.txt.gz: src/content/.well-known/security.txt
+src/content/.well-known/security.txt: email.txt baseurl.txt expiry.txt
+ mkdir -p $(@D)
+ printf 'Contact: mailto:%s\n' "`cat email.txt`" > $@
+ printf 'Expires: %s\n' "`cat expiry.txt`" >> $@
+ printf 'Encryption: %s/$(PUBURL)\n' "`cat baseurl.txt`" >> $@
+ printf 'Preferred-Languages: en, pt, fr, eo, es, de\n' >> $@
+
+src/sources.txt:
+ printf '%s\n' $(sources) > $@
+
+src/all-contents.txt:
+ printf '%s\n' $(all-contents) > $@
+
+src/all-filelists.txt: src/all-symlinks.txt
+ printf '%s\n' $(all-filelists) src/all-symlinks.txt > $@
+
+src/all-symlinks.txt: src/content/.gitignore
+ sed 's|^|src/content|' src/content/.gitignore > $@
+
+src/install.txt: src/all-contents.txt src/all-filelists.txt $(all-filelists)
+ cat src/all-contents.txt `cat src/all-filelists.txt` > $@
+
+sources.txt: src/sources.txt
+install.txt: src/install.txt
+sources.txt install.txt:
+ sed 's|^src/content/||' src/$(@F) > $@
+
+
+
+src/sort-expected.txt:
+ dirname $(articles.adoc) | env LANG=POSIX.UTF-8 sort | uniq -c | \
+ awk '{ printf "%s\t%s\n", $$2, $$1 }' > $@
+
+src/sort-given.txt: $(sources.conf) src/sort-expected.txt
+ awk '{ \
+ "grep \"^export sort=\" " $$1 "/*.conf | wc -l" | getline cnt; \
+ printf "%s\t%s\n", $$1, cnt; \
+ }' src/sort-expected.txt > $@
+
+check-unit-sorting: src/sort-expected.txt src/sort-given.txt
+ diff -U10 src/sort-expected.txt src/sort-given.txt
+
+
+.SUFFIXES: .updatedat-check
+sources.updatedat-check = $(sources.adoc:.adoc=.updatedat-check)
+$(sources.updatedat-check):
+ . ./$*.conf && if [ -n "$$updatedat_epoch" ] && \
+ [ "$$updatedat_epoch" -le "$$date_epoch" ]; then exit 3; fi
+
+check-unit-updatedat: $(sources.updatedat-check)
+
+
+.SUFFIXES: .links-internal-check
+sources.links-internal-check = $(sources.adoc:.adoc=.links-internal-check)
+$(sources.links-internal-check): $(sources.html) $(slides.pdf)
+ grep -E '^(link|image):' $*.links | cut -d: -f2- | \
+ xargs -I% test -e $(*D)/%
+
+check-unit-links-internal: $(sources.links-internal-check)
+
+
+check-unit-links-external:
+
+
+symlink-deps = \
+ $(sources.html) \
+ $(feeds.xml) \
+ $(categories.xml) \
+ src/content/.well-known/security.txt \
+ $(sources.media.torrent) \
+
+check-unit-links-symlinks: src/all-symlinks.txt $(symlink-deps)
+ find `cat src/all-symlinks.txt` | xargs -n1 test -e
+
+
+check-unit-links: check-unit-links-internal check-unit-links-external
+check-unit-links: check-unit-links-symlinks
+
+
+MAXSIZE = 52428800 # from spec: https://www.sitemaps.org/protocol.html
+check-unit-sitemap-size: src/content/sitemap.xml
+ test "`stat --printf='%s' src/content/sitemap.xml`" -le $(MAXSIZE)
+
+check-unit-sitemap-count: src/content/sitemap.xml
+ test "`grep -cF '</url>' src/content/sitemap.xml`" -le 50000
+
+check-unit-sitemap: check-unit-sitemap-count check-unit-sitemap-size
+
+
+check-unit-expiry: expiry-epoch.txt now.txt
+ test "`cat expiry-epoch.txt`" -gt "`cat now.txt`"
+
+
+check-unit: check-unit-sorting check-unit-updatedat check-unit-links
+check-unit: check-unit-sitemap check-unit-expiry
+
+
+integration-tests = \
+
+.PRECIOUS: $(integration-tests)
+$(integration-tests): ALWAYS
+ sh $@
+
+check-integration: $(integration-tests)
+
+
+## Run all tests. Each test suite is isolated, so that a parallel
+## build can run tests at the same time. The required artifacts
+## are created if missing.
+check: check-unit check-integration
+
+
+
+i18n:
+ po4a po/po4a.cfg
+
+
+
+## Remove *all* derived artifacts produced during the build.
+## A dedicated test asserts that this is always true.
+clean:
+ rm -rf $(derived-assets) $(side-assets)
+
+
+## Installs into $(DESTDIR)$(PREFIX). Its dependency target
+## ensures that all installable artifacts are crafted beforehand.
+install: all
+ rsync --mkpath -a --files-from=install.txt src/content/ \
+ '$(DESTDIR)$(HTMLDIR)'
+ rsync --mkpath -a --files-from=sources.txt src/content/ \
+ '$(DESTDIR)$(SRCDIR)'
+
+## Uninstalls from $(DESTDIR)$(PREFIX). This is a perfect mirror
+## of the "install" target, and removes *all* that was installed.
+## A dedicated test asserts that this is always true.
+uninstall:
+ rm -rf \
+ '$(DESTDIR)$(SRCDIR)' \
+ '$(DESTDIR)$(HTMLDIR)' \
+
+
+
+PORT = 3333
+## Run file server for local installed static files.
+run:
+ serve -n -p $(PORT) -d '$(DESTDIR)$(HTMLDIR)'
+
+
+ALWAYS:
diff --git a/README b/README
deleted file mode 100644
index ffa9f06..0000000
--- a/README
+++ /dev/null
@@ -1,3 +0,0 @@
-My personal website. See it live in:
-
-https://euandre.org
diff --git a/TODOs.md b/TODOs.md
index 72abe17..1797ebc 100644
--- a/TODOs.md
+++ b/TODOs.md
@@ -3,87 +3,12 @@
## TODO Hand pick trackers for the generated torrents {#td-5cab66be-9f82-4d25-de87-fc6392c3b26f}
- TODO in 2022-06-04
-## TODO Consider using `.DEFAULT` to move all of `Makefile` into `dynamic.mk` {#td-330c7f45-2c2d-6066-b1a0-4357f7b09259}
-- TODO in 2022-06-01
-
----
-
-As in:
-
-```
-.DEFAULT:
- $(MAKE) -f dynamic.mk $<
-```
-
-## TODO Use Opus over Ogg for podcasts {#td-2de2f57c-0773-530f-f6cf-70836080f46f}
-- TODO in 2022-06-01
-
-## TODO Remove absolute path with `/`? Use `base-url`? {#td-c409cad6-6660-81d8-b7ad-a0d61329ef3c}
-- TODO in 2022-06-01
-
-## TODO When doing a deploy, cleanup public/ to ensure there is a rebuild {#td-f1ee23c0-b10a-a044-8351-e3cef4cb9d8c}
-- TODO in 2022-05-02
-
-## TODO Remove torrent files {#td-61f75b7a-aeab-91bf-d927-7766253faec7}
-- TODO in 2022-04-20
-
----
-
-Require less software: both `mktorrent` and online TURN/STUN servers.
-
 ## TODO Assert code blocks stick to at most 80 columns {#td-75506021-884a-af77-ffbe-448816f70cc5}
- TODO in 2022-04-20
## TODO Detect linkrot {#td-dba2ff82-59b3-dd17-06a9-edbc9cffb264}
- TODO in 2022-04-13
-## TODO Revisit favicon references without links in articles {#td-3996432b-c5b8-a003-db82-c684035f58d5}
-- TODO in 2022-04-12
-
-## TODO Add link to public inbox in homepage {#td-a81fc81f-9d58-101f-fa4f-2ce4cab63173}
-- TODO in 2022-04-11
-
-## TODO Revisit `README.md` {#td-181014da-abbd-fbc3-ed42-a2c2d89f12a7}
-- TODO in 2022-03-25
-
-## TODO Rename pkgs.{nologin => shadow} {#td-db77b6f1-3044-e457-1672-5cf4314b47b2}
-- TODO in 2022-03-14
-
-## TODO Decouble title from URL {#td-233e5dde-b13e-a795-cc73-c031bd56c03c}
-- TODO in 2022-03-06
-
-## DOING Add `security.txt` {#td-a84b65c4-686c-c098-aa4b-cfba06a84825}
-- DOING in 2022-04-12
-
- Add the `.well-known/security.txt` file, with a `security.txt` symlink.
-
- Done in
- [`764b6f4d312ed6a68d8591447bbcc1ba1e13f272`](https://euandre.org/git/euandre.org/commit/?id=764b6f4d312ed6a68d8591447bbcc1ba1e13f272).
-- TODO in 2022-03-06
-
----
-
-Add check to ensure that its `Expires` field is in sync with the refreshed GPG
-key, and that this file in `euandre.org` is also in sync with `euandreh.xyz` and
-other domains.
-
-Probably just:
-```
-Contact: mailto:eu@euandre.org
-Expires: 2022-07-12T03:00:00.000Z
-Encryption: https://euandre.org/public-key.txt
-Preferred-Languages: en, pt, fr, eo, es
-```
-
-## TODO Replace Jekyll with a Makefile {#td-e795e852-a836-edae-0f95-48b28096ea9a}
-- TODO in 2022-03-06
-
----
-
-Consider using [m4] for templating.
-
-[m4]: http://mbreen.com/m4.html
-
## TODO Test robustness and responsiveness of HTML/CSS {#td-c76a7c83-20d2-a2cc-b48a-8499c6ca5b69}
- TODO in 2022-03-06
@@ -94,23 +19,6 @@ As described in <https://news.ycombinator.com/item?id=29519391>.
## TODO Serve over Gemini and/or Gopher too {#td-2fde23a6-1389-30fa-34ac-7d6750abf08a}
- TODO in 2022-03-06
-## TODO Recover `mktorrent` flags, in `Makefile.dynamic` {#td-d5370885-16f3-acf8-7e7a-a65d794242ee}
-- TODO in 2022-01-16
-
-## TODO FIXME {#td-ca1035f1-dadf-1f10-c371-d4f2bca682cb}
-- TODO in 2022-01-16
-
----
-
-The repository itself is about to be embedded under
-`resources/attachments/`.
-
-## TODO Cleanup `.gitignore` {#td-f52abfa9-1fbc-bcc6-048b-d4d770f571fc}
-- TODO in 2022-01-16
-
-## TODO Consider removing PNG and ICO favicons {#td-8876fc14-7f24-ebb1-4e60-adad79442005}
-- TODO in 2022-01-10
-
## TODO Send patch to upstream `bmake` for bug on `$*` macro {#td-ca621da2-2e94-ae6f-0e3d-db744c00a656}
- TODO in 2021-11-21
@@ -151,30 +59,6 @@ cp a/b.x a/b.y
This impacts the upcoming `lilypond -o $name` command in the Makefile.
-
-## TODO Move content into `src/content/`, `bin/` into `src/bin/` {#td-f9b510c0-c2c5-638e-e00f-3bbd35de31d9}
-- TODO in 2022-01-10
-
-## TODO `resources/` -> `artifacts/` {#td-d985f05d-3ecb-fc43-986b-b7089d17515d}
-- TODO in 2021-10-12
-
----
-
-`assets/`?
-
-## TODO `locale/` -> `po/` {#td-d282e753-9b2c-cd86-e64b-9148c5e84baf}
-- TODO in 2021-10-12
-
-## TODO Verify SVG now with size render better {#td-324de4da-0385-e832-0e4e-9f757efc9795}
-- TODO in 2021-10-12
-
-## TODO Use PostScript instead of Reveal.js {#td-6449803d-5774-b345-fde8-ad734af43e36}
-- TODO in 2021-10-12
-
----
-
-<i>À la</i> <https://github.com/jroimartin/ssg/>.
-
## TODO Remove `alt` from some icons {#td-d01a98aa-ef1e-cb21-8cba-7325d98b3db0}
- TODO in 2021-10-12
@@ -185,70 +69,15 @@ This impacts the upcoming `lilypond -o $name` command in the Makefile.
<https://jigsaw.w3.org/css-validator/>
-## TODO Reduce size of container image {#td-826dd92f-ba91-b7b4-1fcc-e5240f920e99}
-- TODO in 2021-10-12
-
-## TODO Enable directory listing on the server {#td-5a13db85-6ff1-ddde-b9cc-a0ddd2de5333}
-- TODO in 2021-10-12
-
-## TODO Setup CSS dark mode {#td-d85f64b1-24c2-efe5-f214-c4e5a0b9ea31}
-- TODO in 2021-10-12
-
----
-
-```css
-:root {
- --color: black;
- --background-color: white;
-}
-
-@media(prefers-color-scheme: dark) {
- :root {
- --color: white;
- --background-color: black;
- }
-}
-```
-
-## TODO Remove dependency on Jekyll {#td-94511264-e301-be51-23e2-cf960dcd603d}
-- TODO in 2021-10-02
-
----
-
-Possibly also remove usage of markdown.
-Either stick to CommonMark, or switch to plain HTML, or maybe asciidoc (once it
-gets its specification).
-
## TODO Add JID alongside email {#task-089dca19-14e2-e1c1-6a47-9af6ab8eb42a}
- TODO in 2021-09-05
-## TODO Use `<time>` tag for dates? {#task-a5ea943a-24f0-ac01-85b6-83490fe0e868}
-- TODO in 2021-08-14
-
-## TODO Manage translations in `_config.yml` and `_includes/public-inbox.html` with gettext {#task-041e3208-a497-f29c-23cd-9068b2c1d86e}
-- TODO in 2021-07-20
-
-## TODO Use `POSIX` over `C.UTF-8` in `$LANG` {#task-a98cd8d2-8376-0f4c-7cc1-6cb26434ac54}
-- TODO in 2021-06-22
-
-## TODO Always add assets generated by plugins to `site.static_files` {#task-3cf42559-3713-02c6-bba3-af5bac5512a7}
-- TODO in 2021-06-13
-
-## TODO Publish site on CI instead of my local machine {#task-6a3a99ec-dd86-b8b3-b1eb-f9b9a4298f3a}
-- TODO in 2021-06-07
-
## TODO Generate Opus for podcast {#task-93510453-111f-9b1f-575f-ca8c7c05883c}
- TODO in 2021-06-07
-## TODO Generate PDFs for slides {#task-d2ef646f-6232-d82f-dd77-507c6981ee80}
-- TODO in 2021-06-07
-
## TODO Podcast metadata: copy GNU World Order {#task-7b0aeb8d-282b-7ebf-4c5f-65c6cac1f0a1}
- TODO in 2021-06-07
-## TODO Publish over Gemini {#task-319021df-ebdb-40cd-8330-329e8d01b747}
-- TODO in 2021-02-22
-
## TODO Improve CSS of navigation bar {#task-ddd3795b-fd89-4d68-a78b-6adcb4702f92}
- TODO in 2021-02-21
@@ -256,7 +85,7 @@ gets its specification).
It doesn't resize well for small screens, and links are too small and close to each other.
-## TODO Screnncasts: Use scriptreplay and asciinema {#task-98a68a93-1294-4283-a78f-9b7f1150c729}
+## TODO Screencasts: Use scriptreplay and asciinema {#task-98a68a93-1294-4283-a78f-9b7f1150c729}
- TODO in 2021-02-11
## TODO Add hunspell dictionaries {#task-67783646-ef33-488b-a1d2-3ecd0b12dbb3}
@@ -265,84 +94,9 @@ It doesn't resize well for small screens, and links are too small and close to e
## TODO Shrink size of links at the end of slides {#task-7a53af07-57bf-4684-97c5-af43279848c0}
- TODO in 2021-02-11
-## TODO Improve workflow when adding media/breaking build with Git Annex symlink {#task-d3300610-6c8e-4681-8ccb-a516b0f57d2a}
-- TODO in 2021-02-07
-
-## TODO Add "view source" link to pages {#task-335bf01c-5549-4a5a-9ad1-7e5b68f12cb6}
-- TODO in 2021-02-05
-
----
-
-Point to the source code on the Git repository.
-
-## TODO Update "remembering" link {#task-b60b2559-e1e1-4f60-8e20-b919ebf1da43}
-- TODO in 2021-01-26
-
-## TODO Move to LilyPond 2.22.0 {#task-34d239aa-8984-4b34-9c6e-1cc2ca8d49fc}
-- TODO in 2021-01-12
-
-## DONE `dengoso.ly` [9/9] {#task-95ac1eec-1a56-47cb-b7ad-8cadf5737323}
-- DONE in 2021-01-03
-- TODO in 2021-01-01
-
----
-
-- [X] use "<quarter> = 100" over "Andante moderato for `meter`
-- [X] turn `cWithSlash` into a function
-- [X] move `pestana=` into shared `pestana.ly`
-- [X] ~~use fret-number over roman string on `pestana #"II" { }`~~
-- [X] `6 to D` on beginning of the song
-- [X] share notes between voices
-- [X] add string markers
-- [X] pestana over rests (measure 42)
-- [X] handle optional sharp on measure 77
-
-## TODO `choro-da-saudade.ly` [0/2] {#task-04afbdc0-4e2c-4176-b7c3-0c04087c09a8}
-- TODO in 2021-01-03
-
----
-
-- [ ] Make `D.S. al Coda` appear in MIDI
-
- ```lilypond
- bar ".."^markup { D.S. al Coda }
- cadenzaOn
- stopStaff
- once override TextScript.extra-offset = #'(0 . -3)
- <>^markup { D.S. al Coda }
- repeat unfold 3 {
- s1
- bar ""
- }
- cadenzaOff
- startStaff
- ```
-
-## TODO `marcha-dos-marinheiros.ly` {#task-bbc19f19-ca4f-4cbf-bc4d-e119ad06deef}
-- TODO in 2021-01-03
-
-## DONE Embed LilyPond source code in PDF and audio {#task-340da973-622c-45d6-8d3c-7db839586517}
-- DONE in 2021-01-03
-
- Done in
- [`bff2b07922272fd4c8f9ee0a7129d0a7c134dd38`](https://euandre.org/git/euandre.org/commit/?id=bff2b07922272fd4c8f9ee0a7129d0a7c134dd38).
-- TODO in 2021-01-03
-
----
-
-Use the `embed-source-code` options:
-
-```scheme
-#(ly:set-option 'embed-source-code #t)
-```
-
## TODO Add torrent for audio in `music-listing.html` {#task-aa314475-6a19-48fe-a752-7c479ad26be2}
- TODO in 2021-01-03
----
-
-Torrent for PDF, too?
-
## TODO Investigate the ICE STUN/TURN servers used by WebTorrent {#task-c7e75b41-d86e-423b-b4a1-7d193c3a6ef1}
- TODO in 2021-01-07
@@ -351,33 +105,7 @@ Torrent for PDF, too?
Depending on the provider, maybe host my own, like
[coturn](https://github.com/coturn/coturn).
-## DONE Generate `favicon.ico` from `favicon.svg` {#task-720e79b4-9e38-41c6-9958-cdadd67d2298}
-- DONE in 2021-01-03
-
- Done in
- [`433428cf6b4ef8a653f349e0a0d1fa0d10aab1e7`](https://euandre.org/git/euandre.org/commit/?id=bff2b07922272fd4c8f9ee0a7129d0a7c134dd38).
-- TODO in 2021-01-01
-
-## DONE Move `assert-content.sh` to Ruby {#task-a7b6b371-100c-48f4-a448-bfa39f88efce}
-- DONE in 2020-12-24
-- TODO in 2020-12-23
-
----
-
-Asserts will be faster since things will be already in memory.
-
-Should the generator for the torrents put the torrent files in the
-source tree? Are torrents source files or build artifacts?
-
## DONE Add page to listen an watch files with WebTorrent {#task-e5d2be9d-6471-40ba-a2d3-c7bc482bfaba}
-- DONE in 2020-12-24
-- TODO in 2020-12-23
-
-## DONE Add "web" Git Annex remote support {#task-c41d11b0-4235-4e8c-8e2b-bbbec7ee5c0b}
-- DONE in 2020-12-27
-
- Done in
- [`56da1a73ac0c211fbb14447b9175660d6757e795`](https://euandre.org/git/euandre.org/commit/?id=56da1a73ac0c211fbb14447b9175660d6757e795).
- TODO in 2020-12-23
## DONE Use SVG favicon {#task-29f1bded-3a56-410b-933d-e6a11f47656a}
@@ -396,10 +124,6 @@ $ inkscape -o icon-48x48.png -w 48 -h 48 icon.svg
$ convert icon-48x48.png favicon.ico
```
-## DONE Create screencasts collection {#task-fb8e4ad8-7c8b-4b62-a4f2-5f16f84ec7e6}
-- DONE in 2020-12-24
-- TODO in 2020-12-24
-
## DONE Add webtorrent-hybrid daemon {#task-1a2f5619-5d55-449f-a401-8c87afd5bea9}
- DONE in 2020-12-28
@@ -408,34 +132,6 @@ $ convert icon-48x48.png favicon.ico
will only upload to other TCP nodes.
- TODO in 2020-12-24
-## DONE Simplify `link-listing.html` by not requiring `entries` {#task-28dd5b16-1e32-42d4-960e-27a8725fe0b9}
-- DONE in 2020-12-24
-- TODO in 2020-12-24
-
-## DONE Use `.mkv` over `.webm` for embedding subtitles {#task-6587304c-ee0d-4f40-b046-48de94d00629}
-- DONE in 2020-12-28
-
- I didn't find the issues with web browsers easier to deal with
- using WebM files. I'll keep MKV and investigate further later.
-
- See also
- [`#task-9d75fe3a-b7e5-4cc5-9300-1054c7e981c0`](#task-9d75fe3a-b7e5-4cc5-9300-1054c7e981c0).
-
-- CANCELLED in 2020-12-28
-
- WebM is more web friendly than MKV. Right now there is no support
- for subtitles, only translated screencasts.
-
-- DONE in 2020-12-24
-- TODO in 2020-12-24
-
-## DONE Make media path the same as the show notes path {#task-7535169e-715e-409b-88c3-78c4bb482d95}
-- DONE in 2020-12-28
-
- Done in
- [`4d7a8207ee0931a157904cc58255d1950fa46178`](https://euandre.org/git/euandre.org/commit/?id=4d7a8207ee0931a157904cc58255d1950fa46178).
-- TODO in 2020-12-27
-
## DONE Assert media files have metadata {#task-8d8a9202-97d4-4c43-b042-15fd5c95da65}
- DONE in 2020-12-28
@@ -478,35 +174,17 @@ Ressurect the WebM file when experimenting with:
$ git revert 2beab5441b4fcddc849cfc44b99547d49593691d
```
-# Bugs
-
-# Improvements
-
-## TODO `dengoso.ly` [0/1] {#improvement-fd989cc2-b638-4943-8109-b8e3cac0aae1}
-- TODO in 2021-01-03
+## TODO Add test for duplicate IDs in the same HTML page {#b9661c53-6699-6e52-0541-77dd9a9face8}
+- TODO in 2025-04-16
---
-- [ ] measures 21~24: map previous notes, removing fingering notation instead of repeating notes
-
-## TODO Music sources {#improvement-610ed926-cc5e-418a-b046-27f4c4862d0d}
-- TODO in 2021-01-04
+# Bugs
-[Barrios](https://imslp.org/wiki/Category:Barrios_Mangor%C3%A9,_Agust%C3%ADn)
-(public domain),
-[Villa-Lobos](https://imslp.org/wiki/Category:Villa-Lobos,_Heitor)
-(public domain in 2030) and Garoto (public domain in 2026)
+# Improvements
# Questions
-## DONE Why insist on having `.ogg` files, but not the same for video? {#question-8203fbce-be76-4cc3-a62a-15f91bf22051}
-- DONE in 2020-12-24
-- TODO in 2020-12-24
-
----
-
-Because of subtitle embedding.
-
# Resources
 ## For bitmap images compression:
@@ -516,42 +194,20 @@ Because of subtitle embedding.
# Scratch
-Test editing files with ":set wm=80"
-Update static/attachments/autoqemu.tar.gz and other files
-mktorrent
-
 Future-proof website (and also later software):
-- remove Jekyll
-- remove markdown, or embed markdown process, and write in HTML directly (and maybe commit the HTML)
- commit directly most derived data, such as torrent files, ogg media, SVG from graphviz input, etc., so that the absence of those tools don't impede the generation of existing pages of the website.
-Move brinquedoteca out of jekyll?
-
-
FIXMEs:
-- DIY:
- - CommonMark parser
- - envsubst, with errors on undefined variables
- - `date -d` isn't POSIX: simple replacement
+- `date -d` isn't POSIX: simple replacement
- translation not coming from english (like pt -> fr)
-- add linkchecker
-- how to use tmp fd over tmpfile?
-- po4a for markdown...
-- render slides
- shrink CSS
-- build makemake output in parallel
-- remove the need for makemake: it is the slowest step, and sometimes breaks the
- Makefile graph
-
-
Mobile-friendly tables:
https://many.pw/
-https://github.com/MichaelHu/markdown-slides/blob/v2.0/src/markdown.y
-https://github.com/MichaelHu/markdown-slides/blob/v2.0/src/markdown.lex
-
https://news.ycombinator.com/item?id=36739920
CSS from https://news.ycombinator.com/item?id=32972004
+
+add link to archived versions
diff --git a/_articles/2018-08-01-verifying-npm-ci-reproducibility.md b/_articles/2018-08-01-verifying-npm-ci-reproducibility.md
deleted file mode 100644
index f896c6c..0000000
--- a/_articles/2018-08-01-verifying-npm-ci-reproducibility.md
+++ /dev/null
@@ -1,148 +0,0 @@
----
-title: Verifying "npm ci" reproducibility
-date: 2018-08-01
-layout: post
-lang: en
-ref: verifying-npm-ci-reproducibility
-updated_at: 2019-05-22
----
-When [npm@5](https://blog.npmjs.org/post/161081169345/v500) came bringing
-[package-locks](https://docs.npmjs.com/files/package-locks) with it, I was
-confused about the benefits it provided, since running `npm install` more than
-once could resolve all the dependencies again and yield yet another fresh
-`package-lock.json` file. The message saying "you should add this file to
-version control" left me hesitant on what to do[^package-lock-message].
-
-However the [addition of `npm ci`](https://blog.npmjs.org/post/171556855892/introducing-npm-ci-for-faster-more-reliable)
-filled this gap: it's a stricter variation of `npm install` which
-guarantees that "[subsequent installs are able to generate identical trees](https://docs.npmjs.com/files/package-lock.json)". But are they
-really identical? I could see that I didn't have the same problems of
-different installation outputs, but I didn't know for **sure** if it
-was really identical.
-
-## Computing the hash of a directory's content
-
-I quickly searched for a way to check for the hash signature of an
-entire directory tree, but I couldn't find one. I've made a poor
-man's [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree)
-implementation using `sha256sum` and a few piped commands at the
-terminal:
-
-```bash
-merkle-tree () {
- dirname="${1-.}"
- pushd "$dirname"
- find . -type f | \
- sort | \
- xargs -I{} sha256sum "{}" | \
- sha256sum | \
- awk '{print $1}'
- popd
-}
-```
-
-Going through it line by line:
-
-- #1 we define a Bash function called `merkle-tree`;
-- #2 it accepts a single argument: the directory to compute the
- merkle tree from. If nothing is given, it runs on the current
- directory (`.`);
-- #3 we go to the directory, so we don't get different prefixes in
- `find`'s output (like `../a/b`);
-- #4 we get all files from the directory tree. Since we're using
- `sha256sum` to compute the hash of the file contents, we need to
- filter out folders from it;
-- #5 we need to sort the output, since different file systems and
- `find` implementations may return files in different orders;
-- #6 we use `xargs` to compute the hash of each file individually
- through `sha256sum`. Since a file may contain spaces we need to
- escape it with quotes;
-- #7 we compute the hash of the combined hashes. Since `sha256sum`
- output is formatted like `<hash> <filename>`, it produces a
- different final hash if a file ever changes name without changing
- it's content;
-- #8 we get the final hash output, excluding the `<filename>` (which
- is `-` in this case, aka `stdin`).
-
-### Positive points:
-
-1. ignore timestamp: running more than once on different installation
- yields the same hash;
-2. the name of the file is included in the final hash computation.
-
-### Limitations:
-
-1. it ignores empty folders from the hash computation;
-2. the implementation's only goal is to represent using a digest
- whether the content of a given directory is the same or not. Leaf
- presence checking is obviously missing from it.
-
-### Testing locally with sample data
-
-```bash
-mkdir /tmp/merkle-tree-test/
-cd /tmp/merkle-tree-test/
-mkdir -p a/b/ a/c/ d/
-echo "one" > a/b/one.txt
-echo "two" > a/c/two.txt
-echo "three" > d/three.txt
-merkle-tree . # output is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
-merkle-tree . # output still is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
-echo "four" > d/four.txt
-merkle-tree . # output is now b5464b958969ed81815641ace96b33f7fd52c20db71a7fccc45a36b3a2ae4d4c
-rm d/four.txt
-merkle-tree . # output back to be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
-echo "hidden-five" > a/b/one.txt
-merkle-tree . # output changed 471fae0d074947e4955e9ac53e95b56e4bc08d263d89d82003fb58a0ffba66f5
-```
-
-It seems to work for this simple test case.
-
-You can try copying and pasting it to verify the hash signatures.
-
-## Using `merkle-tree` to check the output of `npm ci`
-
-*I've done all of the following using Node.js v8.11.3 and npm@6.1.0.*
-
-In this test case I'll take the main repo of
-[Lerna](https://lernajs.io/)[^lerna-package-lock]:
-
-```bash
-cd /tmp/
-git clone https://github.com/lerna/lerna.git
-cd lerna/
-git checkout 57ff865c0839df75dbe1974971d7310f235e1109
-npm ci
-merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
-rm -rf node_modules/
-npm ci
-merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
-npm ci # test if it also works with an existing node_modules/ folder
-merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
-```
-
-Good job `npm ci` :)
-
-#6 and #9 take some time to run (21 seconds in my machine), but this
-specific use case isn't performance sensitive. The slowest step is
-computing the hash of each individual file.
-
-## Conclusion
-
-`npm ci` really "generates identical trees".
-
-I'm not aware of any other existing solution for verifying the hash
-signature of a directory. If you know any I'd
-[like to know](mailto:{{ site.author.email }}).
-
-## *Edit*
-
-2019-05-22: Fix spelling.
-
-[^package-lock-message]: The
- [documentation](https://docs.npmjs.com/cli/install#description) claims `npm
- install` is driven by the existing `package-lock.json`, but that's actually
- [a little bit tricky](https://github.com/npm/npm/issues/17979#issuecomment-332701215).
-
-[^lerna-package-lock]: Finding a big known repo that actually committed the
- `package-lock.json` file was harder than I expected.
diff --git a/_articles/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md b/_articles/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md
deleted file mode 100644
index 183c624..0000000
--- a/_articles/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md
+++ /dev/null
@@ -1,274 +0,0 @@
----
-title: Using "youtube-dl" to manage YouTube subscriptions
-date: 2018-12-21
-layout: post
-lang: en
-ref: using-youtube-dl-to-manage-youtube-subscriptions
----
-I've recently read the
-[announcement](https://www.reddit.com/r/DataHoarder/comments/9sg8q5/i_built_a_selfhosted_youtube_subscription_manager/)
-of a very nice [self-hosted YouTube subscription
-manager](https://github.com/chibicitiberiu/ytsm). I haven't used
-YouTube's built-in subscriptions for a while now, and haven't missed
-it at all. When I saw the announcement, I considered writing about the
-solution I've built on top of [youtube-dl](https://youtube-dl.org/).
-
-## Background: the problem with YouTube
-
-In many ways, I agree with [André Staltz's view on data ownership and
-privacy](https://staltz.com/what-happens-when-you-block-internet-giants.html):
-
-> I started with the basic premise that "I want to be in control of my
-> data". Sometimes that meant choosing when to interact with an internet
-> giant and how much I feel like revealing to them. Most of times it
-> meant not interacting with them at all. I don't want to let them be in
-> full control of how much they can know about me. I don't want to be in
-> autopilot mode. (...) Which leads us to YouTube. While I was able to
-> find alternatives to Gmail (Fastmail), Calendar (Fastmail), Translate
-> (Yandex Translate), *etc.* YouTube remains as the most indispensable
-> Google-owned web service. It is really really hard to avoid consuming
-> YouTube content. It was probably the smartest startup acquisition
-> ever. My privacy-oriented alternative is to watch YouTube videos
-> through Tor, which is technically feasible but not polite to use the
-> Tor bandwidth for these purposes. I'm still scratching my head with
-> this issue.
-
-Even though I don't use most alternative services he mentions, I do
-watch videos from YouTube. But I also feel uncomfortable logging in to
-YouTube with a Google account, watching videos, creating playlists and
-similar things.
-
-Using the mobile app is worse: you can't even block ads in there.
-You're in less control on what you share with YouTube and Google.
-
-## youtube-dl
-
-youtube-dl is a command-line tool for downloading videos, from YouTube
-and [many other sites](https://rg3.github.io/youtube-dl/supportedsites.html):
-
-```shell
-$ youtube-dl https://www.youtube.com/watch?v=rnMYZnY3uLA
-[youtube] rnMYZnY3uLA: Downloading webpage
-[youtube] rnMYZnY3uLA: Downloading video info webpage
-[download] Destination: A Origem da Vida _ Nerdologia-rnMYZnY3uLA.mp4
-[download] 100% of 32.11MiB in 00:12
-```
-
-It can be used to download individual videos as showed above, but it
-also has some interesting flags that we can use:
-
-- `--output`: use a custom template to create the name of the
- downloaded file;
-- `--download-archive`: use a text file for recording and remembering
- which videos were already downloaded;
-- `--prefer-free-formats`: prefer free video formats, like `webm`,
- `ogv` and Matroska `mkv`;
-- `--playlist-end`: how many videos to download from a "playlist" (a
- channel, a user or an actual playlist);
-- `--write-description`: write the video description to a
- `.description` file, useful for accessing links and extra content.
-
-Putting it all together:
-
-```shell
-$ youtube-dl "https://www.youtube.com/channel/UClu474HMt895mVxZdlIHXEA" \
- --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
- --prefer-free-formats \
- --playlist-end 20 \
- --write-description \
- --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
-```
-
-This will download the latest 20 videos from the selected channel, and
-write down the video IDs in the `youtube-dl-seen.conf` file. Running it
-immediately after one more time won't have any effect.
-
-If the channel posts one more video, running the same command again will
-download only the last video, since the other 19 were already
-downloaded.
-
-With this basic setup you have a minimal subscription system at work,
-and you can create some functions to help you manage that:
-
-```shell
-#!/bin/sh
-
-export DEFAULT_PLAYLIST_END=15
-
-download() {
- youtube-dl "$1" \
- --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
- --prefer-free-formats \
- --playlist-end $2 \
- --write-description \
- --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
-}
-export -f download
-
-
-download_user() {
- download "https://www.youtube.com/user/$1" ${2-$DEFAULT_PLAYLIST_END}
-}
-export -f download_user
-
-
-download_channel() {
- download "https://www.youtube.com/channel/$1" ${2-$DEFAULT_PLAYLIST_END}
-}
-export -f download_channel
-
-
-download_playlist() {
- download "https://www.youtube.com/playlist?list=$1" ${2-$DEFAULT_PLAYLIST_END}
-}
-export -f download_playlist
-```
-
-With these functions, you now can have a subscription fetching script to
-download the latest videos from your favorite channels:
-
-```shell
-#!/bin/sh
-
-download_user ClojureTV 15
-download_channel "UCmEClzCBDx-vrt0GuSKBd9g" 100
-download_playlist "PLqG7fA3EaMRPzL5jzd83tWcjCUH9ZUsbX" 15
-```
-
-Now, whenever you want to watch the latest videos, just run the above
-script and you'll get all of them in your local machine.
-
-## Tradeoffs
-
-### I've made it for myself, with my use case in mind
-
-1. Offline
-
- My internet speed it somewhat reasonable[^internet-speed], but it is really
- unstable. Either at work or at home, it's not uncommon to loose internet
- access for 2 minutes 3~5 times every day, and stay completely offline for a
- couple of hours once every week.
-
- Working through the hassle of keeping a playlist on disk has payed
- off many, many times. Sometimes I even not notice when the
- connection drops for some minutes, because I'm watching a video and
- working on some document, all on my local computer.
-
- There's also no quality adjustment for YouTube's web player, I
- always pick the higher quality and it doesn't change during the
- video. For some types of content, like a podcast with some tiny
- visual resources, this doesn't change much. For other types of
- content, like a keynote presentation with text written on the
- slides, watching on 144p isn't really an option.
-
- If the internet connection drops during the video download,
- youtube-dl will resume from where it stopped.
-
- This is an offline first benefit that I really like, and works well
- for me.
-
-2. Sync the "seen" file
-
- I already have a running instance of Nextcloud, so just dumping the
- `youtube-dl-seen.conf` file inside Nextcloud was a no-brainer.
-
- You could try putting it in a dedicated git repository, and wrap the
- script with an autocommit after every run. If you ever had a merge
- conflict, you'd simply accept all changes and then run:
-
- ```shell
- $ uniq youtube-dl-seen.conf > youtube-dl-seen.conf
- ```
-
- to tidy up the file.
-
-3. Doesn't work on mobile
-
- My primary device that I use everyday is my laptop, not my phone. It
- works well for me this way.
-
- Also, it's harder to add ad-blockers to mobile phones, and most
- mobile software still depends on Google's and Apple's blessing.
-
- If you wish, you can sync the videos to the SD card periodically,
- but that's a bit of extra manual work.
-
-### The Good
-
-1. Better privacy
-
- We don't even have to configure the ad-blocker to keep ads and
- trackers away!
-
- YouTube still has your IP address, so using a VPN is always a good
- idea. However, a timing analysis would be able to identify you
- (considering the current implementation).
-
-2. No need to self-host
-
- There's no host that needs maintenance. Everything runs locally.
-
- As long as you keep youtube-dl itself up to date and sync your
- "seen" file, there's little extra work to do.
-
-3. Track your subscriptions with git
-
- After creating a `subscriptions.sh` executable that downloads all
- the videos, you can add it to git and use it to track metadata about
- your subscriptions.
-
-### The Bad
-
-1. Maximum playlist size is your disk size
-
- This is a good thing for getting a realistic view on your actual
- "watch later" list. However I've run out of disk space many
- times, and now I need to be more aware of how much is left.
-
-### The Ugly
-
-We can only avoid all the bad parts of YouTube with youtube-dl as long
-as YouTube keeps the videos public and programmatically accessible. If
-YouTube ever blocks that we'd loose the ability to consume content this
-way, but also loose confidence on considering YouTube a healthy
-repository of videos on the internet.
-
-## Going beyond
-
-Since you're running everything locally, here are some possibilities to
-be explored:
-
-### A playlist that is too long for being downloaded all at once
-
-You can wrap the `download_playlist` function (let's call the wrapper
-`inc_download`) and instead of passing it a fixed number to the
-`--playlist-end` parameter, you can store the `$n` in a folder
-(something like `$HOME/.yt-db/$PLAYLIST_ID`) and increment it by `$step`
-every time you run `inc_download`.
-
-This way you can incrementally download videos from a huge playlist
-without filling your disk with gigabytes of content all at once.
-
-### Multiple computer scenario
-
-The `download_playlist` function could be aware of the specific machine
-that it is running on and apply specific policies depending on the
-machine: always download everything; only download videos that aren't
-present anywhere else; *etc.*
-
-## Conclusion
-
-youtube-dl is a great tool to keep at hand. It covers a really large
-range of video websites and works robustly.
-
-Feel free to copy and modify this code, and
-[send me](mailto:{{ site.author.email }}) suggestions of improvements or related
-content.
-
-## *Edit*
-
-2019-05-22: Fix spelling.
-
-[^internet-speed]: Considering how expensive it is and the many ways it could be
- better, but also how much it has improved over the last years, I say it's
- reasonable.
diff --git a/_articles/2019-06-02-using-nixos-as-an-stateless-workstation.md b/_articles/2019-06-02-using-nixos-as-an-stateless-workstation.md
deleted file mode 100644
index d78c1d5..0000000
--- a/_articles/2019-06-02-using-nixos-as-an-stateless-workstation.md
+++ /dev/null
@@ -1,152 +0,0 @@
----
-
-title: Using NixOS as an stateless workstation
-
-date: 2019-06-02
-
-layout: post
-
-lang: en
-
-ref: using-nixos-as-an-stateless-workstation
-
----
-
-Last week[^last-week] I changed back to an old[^old-computer] Samsung laptop, and installed
-[NixOS](https://nixos.org/) on it.
-
-After using NixOS on another laptop for around two years, I wanted to
-verify how reproducible my desktop environment was, and how far
-NixOS can actually go on recreating my whole OS from my configuration
-files and personal data. I gravitated towards NixOS after trying (and
-failing) to create an `install.sh` script that would imperatively
-install and configure my whole OS using apt-get. When I found a
-GNU/Linux distribution that was built on top of the idea of
-declaratively specifying the whole OS I was automatically convinced[^convinced-by-declarative-aspect].
-
-I was impressed. Even though I've been experiencing the benefits of Nix
-isolation daily, I always felt skeptical that something would be
-missing, because the devil is always on the details. But the result was
-much better than expected!
-
-There were only 2 missing configurations:
-
-1. tap-to-click on the touchpad wasn't enabled by default;
-2. the default theme from the gnome-terminal is "Black on white"
- instead of "White on black".
-
-That's all.
-
-I haven't checked if I can configure those in NixOS GNOME module, but I
-guess both are scriptable and could be set in a fictional `setup.sh`
-run.
-
-This makes me really happy, actually. More happy than I anticipated.
-
-Having such a powerful declarative OS makes me feel like my data is the
-really important stuff (as it should be), and I can interact with it on
-any workstation. All I need is an internet connection and a few hours to
-download everything. It feels like my physical workstation and the
-installed OS are serving me and my data, instead of me feeling as
-hostage to the specific OS configuration at the moment. Having a few
-backup copies of everything important extends such peacefulness.
-
-After this positive experience with recreating my OS from simple Nix
-expressions, I started to wonder how far I could go with this, and
-started considering other areas of improvements:
-
-### First run on a fresh NixOS installation
-
-Right now the initial setup relies on non-declarative manual tasks, like
-decrypting some credentials, or manually downloading **this** git
-repository with specific configurations before **that** one.
-
-I wonder what some areas of improvements are on this topic, and if
-investing on it is worth it (both time-wise and happiness-wise).
-
-### Emacs
-
-Right now I'm using the [Spacemacs](http://spacemacs.org/), which is a
-community package curation and configuration on top of
-[Emacs](https://www.gnu.org/software/emacs/).
-
-Spacemacs does support the notion of
-[layers](http://spacemacs.org/doc/LAYERS.html), which you can
-declaratively specify and let Spacemacs do the rest.
-
-However this solution isn't nearly as robust as Nix: being purely
-functional, Nix does describe everything required to build a derivation,
-and knows how to do so. Spacemacs is closer to more traditional package
-managers: even though the layers list is declarative, the installation
-is still very much imperative. I've had trouble with Spacemacs not
-behaving the same on different computers, both with identical
-configurations, only brought to convergence back again after a
-`git clean -fdx` inside `~/.emacs.d/`.
-
-The ideal solution would be managing Emacs packages with Nix itself.
-After a quick search I did find that [there is support for Emacs
-packages in
-Nix](https://nixos.org/nixos/manual/index.html#module-services-emacs-adding-packages).
-So far I was only aware of [Guix support for Emacs packages](https://www.gnu.org/software/guix/manual/en/html_node/Application-Setup.html#Emacs-Packages).
-
-This isn't a trivial change because Spacemacs does include extra
-curation and configuration on top of Emacs packages. I'm not sure the
-best way to improve this right now.
-
-### myrepos
-
-I'm using [myrepos](https://myrepos.branchable.com/) to manage all my
-git repositories, and the general rule I apply is to add any repository
-specific configuration in myrepos' `checkout` phase:
-
-```shell
-# sample ~/.mrconfig file snippet
-[dev/guix/guix]
-checkout =
- git clone https://git.savannah.gnu.org/git/guix.git guix
- cd guix/
- git config sendemail.to guix-patches@gnu.org
-```
-
-This way when I clone this repo again the email sending is already
-pre-configured.
-
-This works well enough, but the solution is too imperative, and my
-`checkout` phases tend to become brittle over time if not enough care is
-taken.
-
-### GNU Stow
-
-For my home profile and personal configuration I already have a few
-dozens of symlinks that I manage manually. This has worked so far, but
-the solution is sometimes fragile and [not declarative at all][symlinks]. I
-wonder if something like [GNU Stow][stow] can help me simplify this.
-
-[symlinks]: https://euandre.org/git/dotfiles/tree/bash/symlinks.sh?id=316939aa215181b1d22b69e94241eef757add98d
-[stow]: https://www.gnu.org/software/stow/
-
-## Conclusion
-
-I'm really satisfied with NixOS, and I intend to keep using it. If what
-I've said interests you, maybe try tinkering with the [Nix package
-manager](https://nixos.org/nix/) (not the whole NixOS) on your current
-distribution (it can live alongside any other package manager).
-
-If you have experience with declarative Emacs package management, GNU
-Stow or any similar tool, *etc.*,
-[I'd like some tips](mailto:{{ site.author.email }}). If you don't have any
-experience at all, I'd still love to hear from you.
-
-[^last-week]: "Last week" as of the start of this writing, so around the end of
- May 2019.
-
-[^old-computer]: I was using a 32GB RAM, i7 and 250GB SSD Samsung laptop. The
- switch was back to a 8GB RAM, i5 and 500GB HDD Dell laptop. The biggest
- difference I noticed was on faster memory, both RAM availability and the
- disk speed, but I had 250GB less local storage space.
-
-[^convinced-by-declarative-aspect]: The declarative configuration aspect is
- something that I now completely take for granted, and wouldn't consider
- using something which isn't declarative. A good metric to show this is me
- realising that I can't pinpoint the moment when I decided to switch to
- NixOS. It's like I had a distant past when this wasn't true.
diff --git a/_articles/2020-08-31-the-database-i-wish-i-had.md b/_articles/2020-08-31-the-database-i-wish-i-had.md
deleted file mode 100644
index 7d127c1..0000000
--- a/_articles/2020-08-31-the-database-i-wish-i-had.md
+++ /dev/null
@@ -1,295 +0,0 @@
----
-title: The database I wish I had
-date: 2020-08-31
-updated_at: 2020-09-03
-layout: post
-lang: en
-ref: the-database-i-wish-i-had
-eu_categories: mediator
----
-
-I watched the talk
-"[Platform as a Reflection of Values: Joyent, Node.js and beyond][platform-values]"
-by Bryan Cantrill, and I think he was able to put into words something I already
-felt for some time: if there's no piece of software out there that reflects your
-values, it's time for you to build that software[^talk-time].
-
-[platform-values]: https://vimeo.com/230142234
-[^talk-time]: At the very end, at time 29:49. When talking about the draft of
- this article with a friend, he noted that Bryan O'Sullivan (a different
- Bryan) says a similar thing on his talk
- "[Running a startup on Haskell](https://www.youtube.com/watch?v=ZR3Jirqk6W8)",
- at time 4:15.
-
-I kind of agree with what he said, because this is already happening to me. I
-long for a database with a certain set of values, and for a few years I was just
-waiting for someone to finally write it. After watching his talk, Bryan is
-saying to me: "time to stop waiting, and start writing it yourself".
-
-So let me try to give an overview of such database, and go over its values.
-
-## Overview
-
-I want a database that allows me to create decentralized client-side
-applications that can sync data.
-
-The best one-line description I can give right now is:
-
-> It's sort of like PouchDB, Git, Datomic, SQLite and Mentat.
-
-A more descriptive version could be:
-
-> An embedded, immutable, syncable relational database.
-
-Let's go over what I mean by each of those aspects one by one.
-
-### Embedded
-
-I think the server-side database landscape is diverse and mature enough for
-my needs (even though I end up choosing SQLite most of the time), and what I'm
-after is a database to be embedded on client-side applications itself, be it
-desktop, browser, mobile, *etc.*
-
-The purpose of such database is not to keep some local cache of data in case of
-lost connectivity: we have good solutions for that already. It should serve as
-the source of truth, and allow the application to work on top of it.
-
-[**SQLite**][sqlite] is a great example of that: it is a very powerful
-relational database that runs [almost anywhere][sqlite-whentouse]. What I miss
-from it that SQLite doesn't provide is the ability to run it on the browser:
-even though you could compile it to WebAssembly, ~~it assumes a POSIX filesystem
-that would have to be emulated~~[^posix-sqlite].
-
-[sqlite]: https://sqlite.org/index.html
-[sqlite-whentouse]: https://sqlite.org/whentouse.html
-[^posix-sqlite]: It was [pointed out to me](https://news.ycombinator.com/item?id=24338881)
- that SQLite doesn't assume the existence of a POSIX filesystem, as I wrongly
- stated. Thanks for the correction.
-
- This makes me consider it as a storage backend all by itself. I
- initially considered having an SQLite storage backend as one implementation
- of the POSIX filesystem storage API that I mentioned. My goal was to rely on
- it so I could validate the correctness of the actual implementation, given
- SQLite's robustness.
-
- However it may be even better to just use SQLite, and get an ACID backend
- without recreating a big part of SQLite from scratch. In fact, both Datomic
- and PouchDB didn't create a storage backend for themselves, they just
- plugged on what already existed and already worked. I'm beginning to think
- that it would be wiser to just do the same, and drop entirely the from
- scratch implementation that I mentioned.
-
- That's not to say that adding an IndexedDB compatibility layer to SQLite
- would be enough to make it fit the other requirements I mention on this
- page. SQLite still is an implementation of an update-in-place, SQL,
- table-oriented database. It is probably true that cherry-picking the
- relevant parts of SQLite (like storage access, consistency, crash recovery,
- parser generator, *etc.*) and leaving out the unwanted parts (SQL, tables,
- threading, *etc.*) would be better than including the full SQLite stack, but
- that's simply an optimization. Both could even coexist, if desired.
-
- SQLite would have to be treated similarly to how Datomic treats SQL
- databases: instead of having a table for each entity, spread attributes
- over the tables, *etc.*, it treats SQL databases as a key-value storage so it
- doesn't have to re-implement interacting with the disk that other databases
- do well.
-
- The tables would contain blocks of binary data, so there isn't a difference
- on how the SQLite storage backend behaves and how the IndexedDB storage
- backend behaves, much like how Datomic works the same regardless of the
- storage backend, same for PouchDB.
-
- I welcome corrections on what I said above, too.
-
-[**PouchDB**][pouchdb] is another great example: it's a full reimplementation of
-[CouchDB][couchdb] that targets JavaScript environments, mainly the browser and
-Node.js. However I want a tool that can be deployed anywhere, and not limit its
-applications to places that already have a JavaScript runtime environment, or
-force the developer to bundle a JavaScript runtime environment with their
-application. This is true for GTK+ applications, command line programs, Android
-apps, *etc.*
-
-[pouchdb]: https://pouchdb.com/
-[couchdb]: https://couchdb.apache.org/
-
-[**Mentat**][mentat] was an interesting project, but its reliance on SQLite
-makes it inherit most of the downsides (and benefits too) of SQLite itself.
-
-[mentat]: https://github.com/mozilla/mentat
-
-Having such a requirement imposes a different approach to storage: we have to
-decouple the knowledge about the intricacies of storage from the usage of
-storage itself, so that a module (say query processing) can access storage
-through an API without needing to know about its implementation. This allows
-the database to target a POSIX filesystems storage API and an IndexedDB storage
-API, and make the rest of the code agnostic about storage. PouchDB has such
-mechanism (called [adapters][pouchdb-adapters]) and Datomic has them too (called
-[storage services][datomic-storage-services]).
-
-[pouchdb-adapters]: https://pouchdb.com/adapters.html
-[datomic-storage-services]: https://docs.datomic.com/on-prem/storage.html
-
-This would allow the database to adapt to where it is embedded: when targeting
-the browser the IndexedDB storage API would provide the persistence layer
-that the database requires, and similarly the POSIX filesystem storage API would
-provide the persistence layer when targeting POSIX systems (like desktops,
-mobile, *etc.*).
-
-But there's also an extra restriction that comes from being embedded: it
-needs to provide an embeddable artifact, most likely a binary library object
-that exposes a C compatible FFI, similar to
-[how SQLite does][sqlite-amalgamation]. Bundling a full runtime environment is
-possible, but doesn't make it a compelling solution for embedding. This rules
-out most languages, and leaves us with C, Rust, Zig, and similar options that
-can target POSIX systems and WebAssembly.
-
-[sqlite-amalgamation]: https://www.sqlite.org/amalgamation.html
-
-### Immutable
-
-Being immutable means that only new information is added, no in-place update
-ever happens, and nothing is ever deleted.
-
-Having an immutable database presents us with similar trade-offs found in
-persistent data structures, like lack of coordination when doing reads, caches
-being always coherent, and more usage of space.
-
-[**Datomic**][datomic] is the go to database example of this: it will only add
-information (datoms) and allows you to query them in a multitude of ways. Stuart
-Halloway calls it "accumulate-only" over "append-only"[^accumulate-only]:
-
-> It's accumulate-only, it is not append-only. So append-only, most people when
-> they say that they're implying something physical about what happens.
-
-[datomic]: https://www.datomic.com/
-[^accumulate-only]: Video "[Day of Datomic Part 2](https://vimeo.com/116315075)"
- on Datomic's information model, at time 12:28.
-
-Also a database can be append-only and overwrite existing information with new
-information, by doing clean-ups of "stale" data. I prefer to adopt the
-"accumulate-only" naming and approach.
-
-[**Git**][git] is another example of this: new commits are always added on top
-of the previous data, and it grows by adding commits instead of replacing
-existing ones.
-
-[git]: https://git-scm.com/
-
-Git repositories can only grow in size, and that is not only an acceptable
-condition, but also one of the reasons to use it.
-
-All this means that no in-place updates happens on data, and the database will
-be much more concerned about how compact and efficiently it stores data than how
-fast it does writes to disk. Being embedded, the storage limitation is either a)
-how much storage the device has or b) how much storage was designed for the
-application to consume. So even though the database could theoretically operate
-with hundreds of TBs, a browser page or mobile application wouldn't have access
-to this amount of storage. SQLite even [says][sqlite-limits] that it does
-support approximately 280 TBs of data, but those limits are untested.
-
-The upside of keeping everything is that you can have historical views of your
-data, which is very powerful. This also means that applications should turn this
-off when not relevant[^no-history].
-
-[sqlite-limits]: https://sqlite.org/limits.html
-[^no-history]: Similar to
- [Datomic's `:db/noHistory`](https://docs.datomic.com/cloud/best.html#nohistory-for-high-churn).
-
-### Syncable
-
-This is a frequent topic when talking about offline-first solutions. When
-building applications that:
-
-- can fully work offline,
-- store data,
-- propagate that data to other application instances,
-
-then you'll need a conflict resolution strategy to handle all the situations
-where different application instances disagree. Those application instances
-could be a desktop and a browser version of the same application, or the same
-mobile app in different devices.
-
-A three-way merge seems to be the best approach, on top of which you could add
-application specific conflict resolution functions, like:
-
-- pick the change with higher timestamp;
-- if one change is a delete, pick it;
-- present the diff on the screen and allow the user to merge them.
-
-Some databases try to make this "easy", by choosing a strategy for you, but I've
-found that different applications require different conflict resolution
-strategies. Instead, the database should leave this up to the user to decide,
-and provide tools for them to do it.
-
-[**Three-way merges in version control**][3-way-merge] are the best example,
-performing automatic merges when possible and asking the user to resolve
-conflicts when they appear.
-
-The unit of conflict for a version control system is a line of text. The
-database equivalent would probably be a single attribute, not a full entity or a
-full row.
-
-Making all the conflict resolution logic be local should allow the database to
-have encrypted remotes similar to how [git-remote-gcrypt][git-remote-gcrypt]
-adds this functionality to Git. This would enable users to sync the application
-data across devices using an untrusted intermediary.
-
-[3-way-merge]: https://en.wikipedia.org/wiki/Merge_(version_control)
-[git-remote-gcrypt]: https://spwhitton.name/tech/code/git-remote-gcrypt/
-
-### Relational
-
-I want the power of relational queries on the client applications.
-
-Most of the arguments against traditional table-oriented relational databases
-are related to write performance, but those don't apply here. The bottlenecks
-for client applications usually aren't write throughput. Nobody is interested in
-differentiating between 1 MB/s or 10 MB/s when you're limited to 500 MB total.
-
-The relational model of the database could either be based on SQL and tables
-like in SQLite, or maybe [datalog][datalog] and [datoms][datoms] like in
-Datomic.
-
-[datalog]: https://docs.datomic.com/on-prem/query.html
-[datoms]: https://docs.datomic.com/cloud/whatis/data-model.html#datoms
-
-## From aspects to values
-
-Now let's try to translate the aspects above into values, as suggested by Bryan
-Cantrill.
-
-### Portability
-
-Being able to target so many different platforms is a bold goal, and the
-embedded nature of the database demands portability to be a core value.
-
-### Integrity
-
-When the local database becomes the source of truth of the application, it must
-provide consistency guarantees that enable applications to rely on it.
-
-### Expressiveness
-
-The database should empower applications to slice and dice the data in any way
-it wants to.
-
-## Next steps
-
-Since I can't find any database that fits these requirements, I've finally come
-to terms with doing it myself.
-
-It's probably going to take me a few years to do it, and making it portable
-between POSIX and IndexedDB will probably be the biggest challenge. I got myself
-a few books on databases to start.
-
-I wonder if I'll ever be able to get this done.
-
-## External links
-
-See discussions on [Reddit][reddit], [lobsters][lobsters], [HN][hn] and
-[a lengthy email exchange][lengthy-email].
-
-[reddit]: https://www.reddit.com/r/programming/comments/ijwz5b/the_database_i_wish_i_had/
-[lobsters]: https://lobste.rs/s/m9vkg4/database_i_wish_i_had
-[hn]: https://news.ycombinator.com/item?id=24337244
-[lengthy-email]: https://lists.sr.ht/~euandreh/public-inbox/%3C010101744a592b75-1dce9281-f0b8-4226-9d50-fd2c7901fa72-000000%40us-west-2.amazonses.com%3E
diff --git a/_articles/2020-10-19-feature-flags-differences-between-backend-frontend-and-mobile.md b/_articles/2020-10-19-feature-flags-differences-between-backend-frontend-and-mobile.md
deleted file mode 100644
index c62c2d1..0000000
--- a/_articles/2020-10-19-feature-flags-differences-between-backend-frontend-and-mobile.md
+++ /dev/null
@@ -1,305 +0,0 @@
----
-title: "Feature flags: differences between backend, frontend and mobile"
-date: 2020-10-19
-updated_at: 2020-11-03
-layout: post
-lang: en
-ref: feature-flags-differences-between-backend-frontend-and-mobile
-eu_categories: presentation
----
-
-*This article is derived from a [presentation][presentation] on the same
-subject.*
-
-When discussing feature flags, I find that their
-costs and benefits are often well exposed and addressed. Online articles like
-"[Feature Toggle (aka Feature Flags)][feature-flags-article]" do a great job of
-explaining them in detail, giving great general guidance of how to apply
-techniques to adopt it.
-
-However the weight of those costs and benefits apply differently on backend,
-frontend or mobile, and those differences aren't covered. In fact, many of them
-stop making sense, or the decision of adopting a feature flag or not may change
-depending on the environment.
-
-In this article I try to make the distinction between environments and how
-feature flags apply to them, with some final best practices I've acquired when
-using them in production.
-
-[presentation]: {% link _slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides %}
-[feature-flags-article]: https://martinfowler.com/articles/feature-toggles.html
-
-## Why feature flags
-
-Feature flags in general tend to be cited on the context of
-[continuous deployment][cd]:
-
-> A: With continuous deployment, you deploy to production automatically
-
-> B: But how do I handle deployment failures, partial features, *etc.*?
-
-> A: With techniques like canary, monitoring and alarms, feature flags, *etc.*
-
-Though adopting continuous deployment doesn't force you to use feature
-flags, it creates a demand for it. The inverse is also true: using feature flags
-on the code points you more obviously to continuous deployment. Take the
-following code sample for example, that we will reference later on the article:
-
-```javascript
-function processTransaction() {
- validate();
- persist();
- // TODO: add call to notifyListeners()
-}
-```
-
-While being developed, being tested for suitability or something similar,
-`notifyListeners()` may not be included in the code at once. So instead of
-keeping it on a separate, long-lived branch, a feature flag can decide when the
-new, partially implemented function will be called:
-
-```javascript
-function processTransaction() {
- validate();
- persist();
- if (featureIsEnabled("activate-notify-listeners")) {
- notifyListeners();
- }
-}
-```
-
-This allows your code to include `notifyListeners()`, and decide when to call it
-at runtime. For the price of extra things around the code, you get more
-dynamicity.
-
-So the fundamental question to ask yourself when considering adding a feature
-flag should be:
-
-> Am I willing to pay with code complexity to get dynamicity?
-
-It is true that you can make the management of feature flags as
-straightforward as possible, but having no feature flags is simpler than having
-any. What you get in return is the ability to parameterize the behaviour of the
-application at runtime, without doing any code changes.
-
-Sometimes this added complexity may tilt the balance towards not using a feature
-flag, and sometimes the flexibility of changing behaviour at runtime is
-absolutely worth the added complexity. This can vary a lot by code base, feature, but
-fundamentally by environment: it's much cheaper to deploy a new version of a
-service than to release a new version of an app.
-
-So the question of which environment is being targeted is key when reasoning
-about costs and benefits of feature flags.
-
-[cd]: https://www.atlassian.com/continuous-delivery/principles/continuous-integration-vs-delivery-vs-deployment
-
-## Control over the environment
-
-The key differentiator that makes the trade-offs apply differently is how much
-control you have over the environment.
-
-When running a **backend** service, you usually are paying for the servers
-themselves, and can tweak them as you wish. This means you have full control to
-do code changes as you wish. Not only that, you decide when to do it, and for
-how long the transition will last.
-
-On the **frontend** you have less control: even though you can choose to make a
-new version available any time you wish, you can't force[^force] clients to
-immediately switch to the new version. That means that a) clients could skip
-upgrades at any time and b) you always have to keep backward and forward
-compatibility in mind.
-
-Even though I'm mentioning frontend directly, it applies to other environment
-with similar characteristics: desktop applications, command-line programs,
-*etc*.
-
-On **mobile** you have even less control: app stores need to allow your app to
-be updated, which could bite you when least desired. Theoretically you could
-make your APK available on third party stores like [F-Droid][f-droid], or even
-make the APK itself available for direct download, which would give you the same
-characteristics of a frontend application, but that happens less often.
-
-On iOS you can't even do that. You have to get Apple's blessing on every single
-update. Even though we already know that is a [bad idea][apple] for over a
-decade now, there isn't a way around it. This is where you have the least
-control.
-
-In practice, the amount of control you have will change how much you value
-dynamicity: the less control you have, the more valuable it is. In other words,
-having a dynamic flag on the backend may or may not be worth it since you could
-always update the code immediately after, but on iOS it is basically always
-worth it.
-
-[f-droid]: https://f-droid.org/
-[^force]: Technically you could force a reload with JavaScript using
- `window.location.reload()`, but that not only is invasive and impolite, but
- also gives you the illusion that you have control over the client when you
- actually don't: clients with disabled JavaScript would be immune to such
- tactics.
-
-[apple]: http://www.paulgraham.com/apple.html
-
-## Rollout
-
-A rollout is used to *roll out* a new version of software.
-
-They are usually short-lived, being relevant as long as the new code is being
-deployed. The most common rule is percentages.
-
-On the **backend**, it is common to find it on the deployment infrastructure
-itself, like canary servers, blue/green deployments,
-[a kubernetes deployment rollout][k8s], *etc*. You could do those manually, by
-having a dynamic control on the code itself, but rollbacks are cheap enough that
-people usually do a normal deployment and just give some extra attention to the
-metrics dashboard.
-
-Any time you see a blue/green deployment, there is a rollout happening: most
-likely a load balancer is starting to direct traffic to the new server, until
-reaching 100% of the traffic. Effectively, that is a rollout.
-
-On the **frontend**, you can selectively pick which users will be able to
-download the new version of a page. You could use geographical region, IP,
-cookie or something similar to make this decision.
-
-CDN propagation delays and people not refreshing their web
-pages are also rollouts by themselves, since old and new versions of the
-software will coexist.
-
-On **mobile**, the Play Store allows you to perform
-fine-grained [staged rollouts][staged-rollouts], and the App Store allows you to
-perform limited [phased releases][phased-releases].
-
-Both for Android and iOS, the user plays the role of making the download.
-
-In summary: since you control the servers on the backend, you can do rollouts at
-will, and those are often found automated away in base infrastructure. On the
-frontend and on mobile, there are ways to make new versions available, but users
-may not download them immediately, and many different versions of the software
-end up coexisting.
-
-[k8s]: https://kubernetes.io/docs/concepts/workloads/controllers/deployment/#creating-a-deployment
-[staged-rollouts]: https://support.google.com/googleplay/android-developer/answer/6346149?hl=en
-[phased-releases]: https://help.apple.com/app-store-connect/#/dev3d65fcee1
-
-## Feature flag
-
-A feature flag is a *flag* that tells the application on runtime to turn on or
-off a given *feature*. That means that the actual production code will have more
-than one possible code paths to go through, and that a new version of a feature
-coexists with the old version. The feature flag tells which part of the code to
-go through.
-
-They are usually medium-lived, being relevant as long as the new code is being
-developed. The most common rules are percentages, allow/deny lists, A/B groups
-and client version.
-
-On the **backend**, those are useful for things that have a long development
-cycle, or that need to be done in steps. Consider loading the feature flag rules
-in memory when the application starts, so that you avoid querying a database
-or an external service for applying a feature flag rule and avoid flakiness on
-the result due to intermittent network failures.
-
-Since on the **frontend** you don't control when to update the client software,
-you're left with applying the feature flag rule on the server, and exposing the
-value through an API for maximum dynamicity. This could be in the frontend code
-itself, and fallback to a "just refresh the page"/"just update to the latest
-version" strategy for less dynamic scenarios.
-
-On **mobile** you can't even rely on a "just update to the latest version"
-strategy, since the code for the app could be updated to a new feature and be
-blocked on the store. Those cases aren't recurrent, but you should always assume
-the store will deny updates on critical moments so you don't find yourself with
-no cards to play. That means the only control you actually have is via
-the backend, by parameterizing the runtime of the application using the API. In
-practice, you should always have a feature flag to control any relevant piece of
-code. There is no such thing as "too small code change for a feature flag". What
-you should ask yourself is:
-
-> If the code I'm writing breaks and stays broken for around a month, do I care?
-
-If you're doing an experimental screen, or something that will have a very small
-impact you might answer "no" to the above question. For everything else, the
-answer will be "yes": bug fixes, layout changes, refactoring, new screen,
-filesystem/database changes, *etc*.
-
-## Experiment
-
-An experiment is a feature flag where you care about analytical value of the
-flag, and how it might impact user's behaviour. A feature flag with analytics.
-
-They are also usually medium-lived, being relevant as long as the new code is
-being developed. The most common rule is A/B test.
-
-On the **backend**, an experiment relies on an analytical environment that will
-pick the A/B test groups and distributions, which means those can't be held in
-memory easily. That also means that you'll need a fallback value in case
-fetching the group for a given customer fails.
-
-On the **frontend** and on **mobile** they are no different from feature flags.
-
-## Operational toggle
-
-An operational toggle is like a system-level manual circuit breaker, where you
-turn on/off a feature, fail over the load to a different server, *etc*. They are
-useful switches to have during an incident.
-
-They are usually long-lived, being relevant as long as the code is in
-production. The most common rule is percentages.
-
-They can be feature flags that are promoted to operational toggles on the
-**backend**, or may be purposefully put in place preventively or after a
-postmortem analysis.
-
-On the **frontend** and on **mobile** they are similar to feature flags, where
-the "feature" is being turned on and off, and the client interprets this value
-to show if the "feature" is available or unavailable.
-
-## Best practices
-
-### Prefer dynamic content
-
-Even though feature flags give you more dynamicity, they're still somewhat
-manual: you have to create one for a specific feature and change it by hand.
-
-If you find yourself manually updating a feature flag every other day, or
-tweaking the percentages frequently, consider making it fully dynamic. Try
-using a dataset that is generated automatically, or computing the content on the
-fly.
-
-Say you have a configuration screen with a list of options and sub-options, and
-you're trying to find how to better structure this list. Instead of using a
-feature flag for switching between 3 and 5 options, make it fully dynamic. This
-way you'll be able to perform other tests that you didn't plan, and get more
-flexibility out of it.
-
-### Use the client version to negotiate feature flags
-
-After effectively finishing a feature, the old code that coexisted with the new
-one will be deleted, and all traces of the transition will vanish from the code
-base. However if you just remove the feature flags from the API, all of the old
-versions of clients that relied on that value to show the new feature will
-downgrade to the old feature.
-
-This means that you should avoid deleting client-facing feature flags, and
-retire them instead: use the client version to decide when the feature is
-stable, and return `true` for every client with a version greater or equal to
-that. This way you can stop thinking about the feature flag, and you don't break
-or downgrade clients that didn't upgrade past the transition.
-
-### Beware of many nested feature flags
-
-Nested flags combine exponentially.
-
-Pick strategic entry points or transitions eligible for feature flags, and
-beware of their nesting.
-
-### Include feature flags in the development workflow
-
-Add feature flags to the list of things to think about during whiteboarding, and
-deleting/retiring a feature flag at the end of the development.
-
-### Always rely on a feature flag on the app
-
-Again, there is no such thing as "too small for a feature flag". Too many feature
-flags is a good problem to have, not the opposite. Automate the process of
-creating a feature flag to lower its cost.
diff --git a/_articles/2020-10-20-how-not-to-interview-engineers.md b/_articles/2020-10-20-how-not-to-interview-engineers.md
deleted file mode 100644
index 9cdfefb..0000000
--- a/_articles/2020-10-20-how-not-to-interview-engineers.md
+++ /dev/null
@@ -1,331 +0,0 @@
----
-title: How not to interview engineers
-date: 2020-10-20
-updated_at: 2020-10-24
-layout: post
-lang: en
-ref: how-not-to-interview-engineers
----
-This is a response to Slava's
-"[How to interview engineers][how-to-interview-engineers]" article. I initially
-thought it was a satire, [as have others][poes-law-comment], but he has
-[doubled down on it][slava-on-satire]:
-
-> (...) Some parts are slightly exaggerated for sure, but the essay isn't meant
-> as a joke.
-
-That being true, he completely misses the point on how to improve hiring, and
-proposes a worse alternative on many aspects. It doesn't qualify as provocative,
-it is just wrong.
-
-I was comfortable taking it as a satire, and I would just ignore the whole thing
-if it wasn't (except for the technical memo part), but friends of mine
-considered it to be somewhat reasonable. This is an adapted version of parts of
-the discussions we had, risking becoming a gigantic showcase of
-[Poe's law][poes-law-wiki].
-
-In this piece, I will argue against his view, and propose an alternative
-approach to improve hiring.
-
-It is common to find people saying how broken technical hiring is, as well put
-in words by a phrase on [this comment][hn-satire]:
-
-> Everyone loves to read and write about how developer interviewing is flawed,
-> but no one wants to go out on a limb and make suggestions about how to improve
-> it.
-
-I guess Slava was trying to not fall into this trap, and make a suggestion on how
-to improve instead, which all went terribly wrong.
-
-[how-to-interview-engineers]: https://defmacro.substack.com/p/how-to-interview-engineers
-[poes-law-comment]: https://defmacro.substack.com/p/how-to-interview-engineers/comments#comment-599996
-[slava-on-satire]: https://twitter.com/spakhm/status/1315754730740617216
-[poes-law-wiki]: https://en.wikipedia.org/wiki/Poe%27s_law
-[hn-satire]: https://news.ycombinator.com/item?id=24757511
-
-## What not to do
-
-### Time candidates
-
-Timing the candidate shows up on the "talent" and "judgment" sections, and they
-are both bad ideas for the same reason: programming is not a performance.
-
-What do e-sports, musicians, actors and athletes have in common: performance
-psychologists.
-
-For a pianist, their state of mind during concerts is crucial: they not only
-must be able to deal with stage anxiety, but to become really successful they
-will have to learn how to exploit it. The time window of the concert is what
-people practice thousands of hours for, and it is what defines one's career,
-since how well all the practice went is irrelevant to the nature of the
-profession. Being able to leverage stage anxiety is an actual goal of theirs.
-
-That is also applicable to athletes, where the execution during a competition
-makes them sink or swim, regardless of how all the training was.
-
-The same cannot be said about composers, though. They are more like book
-writers, where the value is not on very few moments with high adrenaline, but on
-the aggregate over hours, days, weeks, months and years. A composer may have a
-deadline to finish a song in five weeks, but it doesn't really matter if it is
-done on a single night, every morning between 6 and 9, at the very last week, or
-any other way. No rigid time structure applies, only whatever fits best to the
-composer.
-
-Programming is more like composing than doing a concert, which is another way of
-saying that programming is not a performance. People don't practice algorithms
-for months to keep them at their fingertips, so that finally in a single
-afternoon they can sit down and write everything at once in a rigid 4 hours
-window, and launch it immediately after.
-
-Instead software is built iteratively, by making small additions, then
-refactoring the implementation, fixing bugs, writing a lot at once, *etc*.
-all while they get a firmer grasp of the problem, stop to think about it, come
-up with new ideas, *etc*.
-
-Some specifically plan for including spaced pauses, and call it
-"[Hammock Driven Development][hammock-driven-development]", which is just
-artist's "creative idleness" for hackers.
-
-Unless you're hiring for a live coding group, a competitive programming team, or
-a professional live demoer, timing the candidate that way is more harmful than
-useful. This type of timing doesn't find good programmers, it finds performant
-programmers, which isn't the same thing, and you'll end up with people who can
-do great work on small problems but who might be unable to deal with big
-problems, and lose those who can very well handle huge problems, slowly. If you
-are lucky you'll get performant people who can also handle big problems on the
-long term, but maybe not.
-
-An incident is the closest to a "performance" that it gets, and yet it is still
-dramatically different. Surely it is a high stress scenario, but while people
-are trying to find a root cause and solve the problem, only the downtime itself
-is visible to the exterior. It is like being part of the support staff backstage
-during a play: even though execution matters, you're still not on the spot.
-During an incident you're doing debugging in anger rather than live coding.
-
-Although giving a candidate the task to write a "technical memo" has
-potential to get a measure of the written communication skills of someone, doing
-so in a hard time window also misses the point for the same reasons.
-
-[hammock-driven-development]: https://www.youtube.com/watch?v=f84n5oFoZBc
-
-### Pay attention to typing speed
-
-Typing speed is never the bottleneck of a programmer, no matter how great
-they are.
-
-As [Dijkstra said][dijkstra-typing]:
-
-> But programming, when stripped of all its circumstantial irrelevancies, boils
-> down to no more and no less than very effective thinking so as to avoid
-> unmastered complexity, to very vigorous separation of your many different
-> concerns.
-
-In other words, programming is not about typing, it is about thinking.
-
-Otherwise, the way to get those star programmers that can't type fast enough a
-huge productivity boost is to give them a touch typing course. If they are so
-productive with typing speed being a limitation, imagine what they could
-accomplish if they had razor sharp touch typing skills?
-
-Also, why stop there? A good touch typist can do 90 WPM (words per minute), and
-a great one can do 120 WPM, but with a stenography keyboard they get to 200
-WPM+. That is double the productivity! Why not try
-[speech-to-text][perl-out-loud]? Make them all use [J][j-lang] so they all need
-to type less! How come nobody thought of that?
-
-And if someone couldn't solve the programming puzzle in the given time window,
-but could come back in the following day with an implementation that is not only
-faster, but uses less memory, was simpler to understand and easier to read than
-anybody else? You'd be losing that person too.
-
-[dijkstra-typing]: https://www.cs.utexas.edu/users/EWD/transcriptions/EWD05xx/EWD512.html
-[j-lang]: https://www.jsoftware.com/#/
-[perl-out-loud]: https://www.youtube.com/watch?v=Mz3JeYfBTcY
-
-### IQ
-
-For "building an extraordinary team at a hard technology startup", intelligence
-is not the most important, [determination is][pg-determination].
-
-And talent isn't "IQ specialized for engineers". IQ itself isn't a measure of how
-intelligent someone is. Ever since Alfred Binet with Théodore Simon started to
-formalize what would become IQ tests years later, they already acknowledged
-limitations of the technique for measuring intelligence, which is
-[still true today][scihub-paper].
-
-So having a high IQ tells only how smart people are for a particular aspect of
-intelligence, which is not representative of programming. There are numerous
-aspects of programming that are not covered by IQ measurement: how to name variables
-and functions, how to create models which are compatible with schema evolution,
-how to make the system dynamic for runtime parameterization without making it
-fragile, how to measure and observe performance and availability, how to pick
-between acquiring and paying technical debt, *etc*.
-
-Not to say about everything else that a programmer does that is not purely
-programming. Saying high IQ correlates with great programming is a stretch, at
-best.
-
-[pg-determination]: http://www.paulgraham.com/determination.html
-[scihub-paper]: https://sci-hub.do/https://psycnet.apa.org/doiLanding?doi=10.1037%2F1076-8971.6.1.33
-
-### Ditch HR
-
-Slava tangentially picks on HR, and I will digress on that a bit:
-
-> A good rule of thumb is that if a question could be asked by an intern in HR,
-> it's a non-differential signaling question.
-
-Stretching it, this is a rather snobbish view of HR. Why is it that an intern in
-HR can't make signaling questions? Could the same be said of an intern in
-engineering?
-
-In other words: is the question not signaling because the one
-asking is from HR, or because the one asking is an intern? If the latter, then
-he's just arguing that interns have no place in interviewing, but if the former
-then he was picking on HR.
-
-Extrapolating that, it is common to find people who don't value HR's work, and
-only see them as inferiors doing unpleasant work, and who aren't capable enough
-(or *smart* enough) to learn programming.
-
-This is equivalent to people who work primarily on backend, and see others working on
-frontend struggling and say: "isn't it just building views and showing them on
-the browser? How could it possibly be that hard? I bet I could do it better,
-with 20% of code". As you already know, the answer to it is "well, why don't you
-go do it, then?".
-
-This sense of superiority ignores the fact that HR have actual professionals
-doing actual hard work, not unlike programmers. If HR is inferior and so easy,
-why not automate everything away and get rid of a whole department?
-
-I don't attribute this world view to Slava, this is only an extrapolation of a
-snippet of the article.
-
-### Draconian mistreating of candidates
-
-If I found out that people employed theatrics in my interview so that I could
-feel I've "earned the privilege to work at your company", I would quit.
-
-If your moral compass is so broken that you are comfortable mistreating me while
-I'm a candidate, I immediately assume you will also mistreat me as an employee,
-and that the company is not a good place to work, as
-[evil begets stupidity][evil-begets-stupidity]:
-
-> But the other reason programmers are fussy, I think, is that evil begets
-> stupidity. An organization that wins by exercising power starts to lose the
-> ability to win by doing better work. And it's not fun for a smart person to
-> work in a place where the best ideas aren't the ones that win. I think the
-> reason Google embraced "Don't be evil" so eagerly was not so much to impress
-> the outside world as to inoculate themselves against arrogance.
-
-Paul Graham goes beyond "don't be evil" with a better motto:
-"[be good][pg-be-good]".
-
-Abusing the asymmetric nature of an interview to increase the chance that the
-candidate will accept the offer is, well, abusive. I doubt a solid team can
-actually be built on such poor foundations, surrounded by such evil measures.
-
-And if you really want to give engineers "the measure of whoever they're going
-to be working with", there are plenty of reasonable ways of doing it that don't
-include performing fake interviews.
-
-[pg-be-good]: http://www.paulgraham.com/good.html
-[evil-begets-stupidity]: http://www.paulgraham.com/apple.html
-
-### Personality tests
-
-Personality tests around the world need to be a) translated, b) adapted and c)
-validated. Even though a given test may be applicable and useful in a country,
-this doesn't imply it will work for other countries.
-
-Not only do tests usually come with translation guidelines, but their
-applicability also needs to be validated again after the translation and
-adaptation is done, to see if the test still measures what it is supposed to.
-
-That is also true within the same language. If a test is shown to work in
-England, it may not work in New Zealand, in spite of both speaking English. The
-cultural context difference is influential to the point of invalidating a test
-entirely.
-
-Regardless of the validity of the proposed "big five" personality test,
-saying "just use attributes x, y and z of this test and you'll be fine" is a rough
-simplification, much like saying "just use Raft for distributed systems, after
-all it has been proven to work" shows he throws all of that background away.
-
-Even applying personality tests is itself not a trivial task, and
-psychologists do need special training to become able to effectively apply one.
-
-### More cargo culting
-
-He calls the ill-defined "industry standard" cargo-culting, but his
-proposal isn't sound enough to not become one.
-
-Even if the ideas were good, they aren't solid enough, or based on solid
-enough things to make them stand out by themselves. Why is it that talent,
-judgment and personality are required to determine the fitness of a good
-candidate? Why not 2, 5, or 20 things? Why those specific 3? Why is talent
-defined like that? Is it just because he found talent to be like that?
-
-Isn't that definitionally also
-[cargo-culting][cargo-culting][^cargo-culting-archive]? Isn't he just repeating
-whatever he found to work for him, without understanding why?
-
-What Feynman proposes is actually the opposite:
-
-> In summary, the idea is to try to give **all** of the information to help others
-> to judge the value of your contribution; not just the information that leads
-> to judgment in one particular direction or another.
-
-What Slava did was just another form of cargo culting, but this was one that he
-believed to work.
-
-[cargo-culting]: http://calteches.library.caltech.edu/51/2/CargoCult.htm
-[^cargo-culting-archive]: [Archived version](https://web.archive.org/web/20201003090303/http://calteches.library.caltech.edu/51/2/CargoCult.htm).
-
-## What to do
-
-I will not give you a list of things that "worked for me, thus they are
-correct". I won't either critique the current "industry standard", nor what I've
-learned from interviewing engineers.
-
-Instead, I'd like to invite you to learn from history, and from what other
-professionals have to teach us.
-
-Programming isn't an odd profession, where everything about it is different from
-anything else. It is just another episode in the "technology" series, which has
-seasons since before recorded history. It may be an episode where things move a
-bit faster, but it is fundamentally the same.
-
-So here is the key idea: what did people do *before* software engineering?
-
-What is hiring like for engineers in other areas? Don't civil, electrical and
-other types of engineering exist for much, much longer than software engineering
-does? What have those centuries of accumulated experience taught the world
-about technical hiring?
-
-What studies were performed on the different success rate of interviewing
-strategies? What have they done right and what have they done wrong?
-
-What is the purpose of HR? Why do they even exist? Do we need them, and if so,
-what for? What is the value they bring, since everybody insists on building an HR
-department in their companies? Is the existence of HR another form of cargo
-culting?
-
-What is industrial and organizational psychology? What is that field of study?
-What do they specialize in? What have they learned since the discipline
-appeared? What have they done right and wrong over history? What is the current
-academic consensus in that area? What is a hot debate topic in academia in that
-area? What is the current bleeding edge of research? What can they teach us
-about hiring? What can they teach us about technical hiring?
-
-## Conclusion
-
-If all I've said makes me a "no hire" in the proposed framework, I'm really
-glad.
-
-This says less about my programming skills, and more about the employer's world
-view, and I hope not to be fooled into applying for a company that adopts this
-one.
-
-Claiming to be selecting "extraordinary engineers" isn't an excuse to reinvent
-the wheel, poorly.
diff --git a/_articles/2020-11-07-diy-an-offline-bug-tracker-with-text-files-git-and-email.md b/_articles/2020-11-07-diy-an-offline-bug-tracker-with-text-files-git-and-email.md
deleted file mode 100644
index b1dd117..0000000
--- a/_articles/2020-11-07-diy-an-offline-bug-tracker-with-text-files-git-and-email.md
+++ /dev/null
@@ -1,108 +0,0 @@
----
-
-title: DIY an offline bug tracker with text files, Git and email
-
-date: 2020-11-07
-
-updated_at: 2021-08-14
-
-layout: post
-
-lang: en
-
-ref: diy-an-offline-bug-tracker-with-text-files-git-and-email
-
----
-
-When [push comes to shove][youtube-dl-takedown-notice], the operational aspects
-of governance of a software project matter a lot. And everybody likes to chime
-in with their alternative of how to avoid single points of failure in project
-governance, just like I'm doing right now.
-
-The most valuable assets of a project are:
-
-1. source code
-2. discussions
-3. documentation
-4. builds
-5. tasks and bugs
-
-For **source code**, Git and other DVCS solve that already: everybody gets a
-full copy of the entire source code.
-
-If your code forge is compromised, moving it to a new one takes a couple of
-minutes, if there isn't a secondary remote serving as mirror already. In this
-case, no action is required.
-
-If you're having your **discussions** by email,
-"[taking this archive somewhere else and carrying on is effortless][sourcehut-ml]".
-
-Besides, make sure to backup archives of past discussions so that the history is
-also preserved when this migration happens.
-
-The **documentation** should
-[live inside the repository itself][writethedocs-in-repo][^writethedocs-in-repo],
-so that not only it gets first class treatment, but also gets distributed to
-everybody too. Migrating the code to a new forge already migrates the
-documentation with it.
-
-[^writethedocs-in-repo]: Described as "the ultimate marriage of the two". Starts
- at time 31:50.
-
-As long as you keep the **builds** vendor neutral, the migration should only
-involve adapting how you call your `tests.sh` from the format of
-`provider-1.yml` uses to the format that `provider-2.yml` accepts.
-It isn't valuable to carry the build history with the project, as this data
-quickly decays in value as weeks and months go by, but for simple text logs
-[using Git notes] may be just enough, and they would be replicated with the rest
-of the repository.
-
-[using Git notes]: {% link _tils/2020-11-30-storing-ci-data-on-git-notes.md %}
-
-But for **tasks and bugs** many rely on a vendor-specific service, where you
-register and manage those issues via a web browser. Some provide an
-[interface for interacting via email][todos-srht-email] or an API for
-[bridging local bugs with vendor-specific services][git-bug-bridges]. But
-they're all layers around the service, which disguise the fact that it remains
-a central point of failure that, when compromised, would lead to data loss.
-When push comes to shove, you'd lose data.
-
-[youtube-dl-takedown-notice]: https://github.com/github/dmca/blob/master/2020/10/2020-10-23-RIAA.md
-[sourcehut-ml]: https://sourcehut.org/blog/2020-10-29-how-mailing-lists-prevent-censorship/
-[writethedocs-in-repo]: https://podcast.writethedocs.org/2017/01/25/episode-3-trends/
-[todos-srht-email]: https://man.sr.ht/todo.sr.ht/#email-access
-[git-bug-bridges]: https://github.com/MichaelMure/git-bug#bridges
-
-## Alternative: text files, Git and email
-
-Why not do the same as documentation, and move tasks and bugs into the
-repository itself?
-
-It requires no extra tool to be installed, and fits right in the already
-existing workflow for source code and documentation.
-
-I like to keep a [`TODOs.md`] file at the repository top-level, with
-two relevant sections: "tasks" and "bugs". Then when building the documentation
-I'll just [generate an HTML file from it], and [publish] it alongside the static
-website. All that is done on the main branch.
-
-Any issues discussions are done in the mailing list, and a reference to a
-discussion could be added to the ticket itself later on. External contributors
-can file tickets by sending a patch.
-
-The good thing about this solution is that it works for 99% of projects out
-there.
-
-For the other 1%, having Fossil's "[tickets][fossil-tickets]" could be an
-alternative, but you may not want to migrate your project to Fossil to get those
-niceties.
-
-Even though I keep a `TODOs.md` file on the main branch, you can have a `tasks`
- branch with a `task-n.md` file for each task, or any other way you like.
-
-These tools are familiar enough that you can adjust it to fit your workflow.
-
-[`TODOs.md`]: https://euandre.org/git/remembering/tree/TODOs.md?id=3f727802cb73ab7aa139ca52e729fd106ea916d0
-[generate an HTML file from it]: https://euandre.org/git/remembering/tree/aux/workflow/TODOs.sh?id=3f727802cb73ab7aa139ca52e729fd106ea916d0
-[publish]: https://euandreh.xyz/remembering/TODOs.html
-[fossil-tickets]: https://fossil-scm.org/home/doc/trunk/www/bugtheory.wiki
diff --git a/_articles/2020-11-08-the-next-paradigm-shift-in-programming-video-review.md b/_articles/2020-11-08-the-next-paradigm-shift-in-programming-video-review.md
deleted file mode 100644
index c98c131..0000000
--- a/_articles/2020-11-08-the-next-paradigm-shift-in-programming-video-review.md
+++ /dev/null
@@ -1,164 +0,0 @@
----
-
-title: The Next Paradigm Shift in Programming - video review
-
-date: 2020-11-08
-
-layout: post
-
-lang: en
-
-ref: the-next-paradigm-shift-in-programming-video-review
-
-eu_categories: video review
-
----
-
-This is a review with comments of
-"[The Next Paradigm Shift in Programming][video-link]", by Richard Feldman.
-
-This video was *strongly* suggested to me by a colleague. I wanted to discuss it
-with her, and when drafting my response I figured I could publish it publicly
-instead.
-
-Before anything else, let me just be clear: I really like the talk, and I think
-Richard is a great public speaker. I've watched several of his talks over the
-years, and I feel I've followed his career at a distance, with much respect.
-This isn't a piece criticizing him personally, and I agree with almost
-everything he said. These are just some comments but also nitpicks on a few
-topics I think he missed, or that I view differently.
-
-[video-link]: https://www.youtube.com/watch?v=6YbK8o9rZfI
-
-## Structured programming
-
-The historical overview at the beginning is very good. In fact, the very video I
-watched previously was about structured programming!
-
-Kevlin Henney on
-"[The Forgotten Art of Structured Programming][structured-programming]" does a
-deep-dive on the topic of structured programming, and how on his view it is
-still hidden in our code, when we do a `continue` or a `break` in some ways.
-Even though it is less common to see an explicit `goto` in code these days, many
-of the original arguments of Dijkstra against explicit `goto`s is applicable to
-other constructs, too.
-
-This is a very mature view, and I like how he goes beyond the
-"don't use `goto`s" heuristic and proposes a much more nuanced understanding
-of what "structured programming" means.
-
-In a few minutes, Richard is able to condense most of the significant bits of
-Kevlin's talk in a didactical way. Good job.
-
-[structured-programming]: https://www.youtube.com/watch?v=SFv8Wm2HdNM
-
-## OOP like a distributed system
-
-Richard extrapolates Alan Kay's original vision of OOP, and he concludes that
-it is more like a distributed system than how people think about OOP these days.
-But he then states that this is a rather bad idea, and we shouldn't pursue it,
-given that distributed systems are known to be hard.
-
-However, his extrapolation isn't really impossible, bad or absurd. In fact,
-it has been followed through by Erlang. Joe Armstrong used to say that
-"[Erlang might be the only OOP language][erlang-oop]", since it actually adopted
-this paradigm.
-
-But Erlang is a functional language. So this "OOP as a distributed system" view
-is more about designing systems in the large than programs in the small.
-
-There is a switch of levels in this comparison I'm making, as can be done with
-any language or paradigm: you can have a functional-like system that is built
-with an OOP language (like a compiler, that given the same input will produce
-the same output), or an OOP-like system that is built with a functional language
-(Rich Hickey calls it
-"[OOP in the large][langsys]"[^the-language-of-the-system]).
-
-So this jump from in-process paradigm to distributed paradigm is rather a big
-one, and I don't think he can argue that OOP has anything to say about
-software distribution across nodes. You can still have Erlang actors that run
-independently and send messages to each other without a network between them.
-Any OTP application deployed on a single node effectively works like that.
-
-I think he went a bit too far with this extrapolation. Even though I agree it is
-a logical and fair one, it isn't as evidently bad as he painted it. I would be fine
-working with a single-node OTP application and seeing someone call it "a *real*
-OOP program".
-
-[erlang-oop]: https://www.infoq.com/interviews/johnson-armstrong-oop/
-[langsys]: https://www.youtube.com/watch?v=ROor6_NGIWU
-[^the-language-of-the-system]: From 24:05 to 27:45.
-
-## First class immutability
-
-I agree with his view of languages moving towards the functional paradigm.
-But I think you can narrow down the "first-class immutability" feature he points
-out as present on modern functional programming languages to "first-class
-immutable data structures".
-
-I wouldn't categorize a language as "supporting functional programming style"
-without a library for functional data structures in it. By discipline you can
-avoid side-effects, write pure functions as much as possible, and pass
-functions as arguments around in almost every language these days, but if
-changing an element of a vector mutates things in-place, that is still not
-functional programming.
-
-To avoid that, you end-up needing to make clones of objects to pass to a
-function, using freezes or other workarounds. All those cases are when the
-underlying mix of OOP and functional programming fail.
-
-There are some languages with third-party libraries that provide functional data
-structures, like [immer][immer] for C++, or [ImmutableJS][immutablejs] for
-JavaScript.
-
-But functional programming is more easily achievable in languages that have them
-built-in, like Erlang, Elm and Clojure.
-
-[immer]: https://sinusoid.es/immer/
-[immutablejs]: https://immutable-js.github.io/immutable-js/
-
-## Managed side-effects
-
-His proposal of adopting managed side-effects as a first-class language concept
-is really intriguing.
-
-This is something you can achieve with a library, like [Redux][redux] for JavaScript or
-[re-frame][re-frame] for Clojure.
-
-I haven't worked with a language with managed side-effects at scale, and I don't
-feel this is a problem with Clojure or Erlang. But is this me finding a flaw in
-his argument or not acknowledging a benefit unknown to me? This is a provocative
-question I ask myself.
-
-Also all FP languages with managed side-effects I know are statically-typed, and
-all dynamically-typed FP languages I know don't have managed side-effects baked in.
-
-[redux]: https://redux.js.org/
-[re-frame]: https://github.com/Day8/re-frame
-
-## What about declarative programming?
-
-In "[Out of the Tar Pit][tar-pit]", B. Moseley and P. Marks go beyond his view
-of functional programming as the basis, and name a possible "functional
-relational programming" as an even better solution. They explicitly call out
-some flaws in most of the modern functional programming languages, and instead
-pick declarative programming as an even better starting paradigm.
-
-If the next paradigm shift is towards functional programming, will the following
-shift be towards declarative programming?
-
-[tar-pit]: http://curtclifton.net/papers/MoseleyMarks06a.pdf
-
-## Conclusion
-
-Beyond all Richard said, I also often hear people bring up functional programming
-when talking about utilizing all cores of a computer, and how FP can help with that.
-
-Rich Hickey makes a great case for single-process FP on his famous talk
-"[Simple Made Easy][simple-made-easy]".
-
-[simple-made-easy]: https://www.infoq.com/presentations/Simple-Made-Easy/
-
-<!-- I find this conclusion too short, and it doesn't revisit the main points -->
-<!-- presented on the body of the article. I won't rewrite it now, but it would be an -->
-<!-- improvement to extend it to do so. -->
diff --git a/_articles/2020-11-12-durable-persistent-trees-and-parser-combinators-building-a-database.md b/_articles/2020-11-12-durable-persistent-trees-and-parser-combinators-building-a-database.md
deleted file mode 100644
index 05e800e..0000000
--- a/_articles/2020-11-12-durable-persistent-trees-and-parser-combinators-building-a-database.md
+++ /dev/null
@@ -1,235 +0,0 @@
----
-
-title: Durable persistent trees and parser combinators - building a database
-
-date: 2020-11-12
-
-updated_at: 2021-02-09
-
-layout: post
-
-lang: en
-
-ref: durable-persistent-trees-and-parser-combinators-building-a-database
-
-eu_categories: mediator
-
----
-
-I've received with certain frequency messages from people wanting to know if
-I've made any progress on the database project
-[I've written about]({% link _articles/2020-08-31-the-database-i-wish-i-had.md %}).
-
-There are a few areas where I've made progress, and here's a public post on it.
-
-## Proof-of-concept: DAG log
-
-The main thing I wanted to validate with a concrete implementation was the
-concept of modeling a DAG on a sequence of datoms.
-
-The notion of a *datom* is a rip-off from Datomic, which models data with time
-aware *facts*, which come from RDF. RDF's fact is a triple of
-subject-predicate-object, and Datomic's datoms add a time component to it:
-subject-predicate-object-time, A.K.A. entity-attribute-value-transaction:
-
-```clojure
-[[person :likes "pizza" 0 true]
- [person :likes "bread" 1 true]
- [person :likes "pizza" 1 false]]
-```
-
-The above datoms say:
-- at time 0, `person` likes pizza;
-- at time 1, `person` stopped liking pizza, and started to like bread.
-
-Datomic ensures total consistency of this ever growing log by having a single
-writer, the transactor, that will enforce it when writing.
-
-In order to support disconnected clients, I needed a way to allow multiple
-writers, and I chose to do it by making the log not a list, but a
-directed acyclic graph (DAG):
-
-```clojure
-[[person :likes "pizza" 0 true]
- [0 :parent :db/root 0 true]
- [person :likes "bread" 1 true]
- [person :likes "pizza" 1 false]
- [1 :parent 0 1 true]]
-```
-
-The extra datoms above add more information to build the directionality to the
-log, and instead of a single consistent log, the DAG could have multiple leaves
-that coexist, much like how different Git branches can have different "latest"
-commits.
-
-In order to validate this idea, I started with a Clojure implementation. The
-goal was not to write the actual final code, but to make a proof-of-concept that
-would allow me to test and stretch the idea itself.
-
-This code [already exists][clj-poc], but is yet fairly incomplete:
-
-- the building of the index isn't done yet (with some
- [commented code][clj-poc-index] on the next step to be implemented)
-- the indexing is extremely inefficient, with [more][clj-poc-o2-0]
- [than][clj-poc-o2-1] [one][clj-poc-o2-2] occurrence of `O²` functions;
-- no query support yet.
-
-[clj-poc]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n1
-[clj-poc-index]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n295
-[clj-poc-o2-0]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n130
-[clj-poc-o2-1]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n146
-[clj-poc-o2-2]: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n253
-
-## Top-down *and* bottom-up
-
-However, as time passed and I started looking at what the final implementation
-would look like, I started to consider keeping the PoC around.
-
-The top-down approach (Clojure PoC) was in fact helping guide me with the
-bottom-up, and I now have "promoted" the Clojure PoC into a "reference
-implementation". It should now be a finished implementation that says what the
-expected behaviour is, and the actual code should match the behaviour.
-
-The good thing about a reference implementation is that it has no performance or
-resources boundary, so if it ends up being 1000x slower and using 500x more
-memory, it should be fine. The code can be also 10x or 100x simpler, too.
-
-## Top-down: durable persistent trees
-
-In promoting the PoC into a reference implementation, this top-down approach now
-needs to go beyond doing everything in memory, and the index data structure now
-needs to be disk-based.
-
-Roughly speaking, most storage engines out there are based either on B-Trees or
-LSM Trees, or some variations of those.
-
-But when building an immutable database, update-in-place B-Trees aren't an
-option, as they don't accommodate keeping historical views of the tree. LSM
-Trees may seem a better alternative, but the duplication in files and the
-compaction process are ways of deleting old data, which is indeed useful for a
-historical view.
-
-I think the thing I'm after is a mix of a Copy-on-Write B-Tree, which would keep
-historical versions with the write IO cost amortization of memtables of LSM
-Trees. I don't know of any B-Tree variant out there that resembles this, so I'll
-call it "Flushing Copy-on-Write B-Tree".
-
-I haven't written any code for this yet, so all I have is a high-level view of
-what it will look like:
-
-1. like Copy-on-Write B-Trees, changing a leaf involves creating a new leaf and
-   building a new path from root to the leaf. The upside is that writes are
-   lock free, and no coordination is needed between readers and writers, ever;
-
-1. the downside is that a single leaf update means at least `H` new nodes that
- will have to be flushed to disk, where `H` is the height of the tree. To avoid
- that, the writer creates these nodes exclusively on the in-memory memtable, to
- avoid flushing to disk on every leaf update;
-
-1. a background job will consolidate the memtable data every time it hits X MB,
- and persist it to disk, amortizing the cost of the Copy-on-Write B-Tree;
-
-1. readers then will have the extra job of getting the latest relevant
- disk-resident value and merge it with the memtable data.
-
-The key difference to existing Copy-on-Write B-Trees is that the new trees
-are only periodically written to disk, and the intermediate values are kept in
-memory. Since no node is ever updated, the page utilization is maximum as it
-doesn't need to keep space for future inserts and updates.
-
-And the key difference to existing LSM Trees is that no compaction is run:
-intermediate values are still relevant as the database grows. So this leaves out
-tombstones and value duplication done for write performance.
-
-One can delete intermediate index values to reclaim space, but no data is lost
-on the process, only old B-Tree values. And if the database ever comes back to
-that point (like when doing a historical query), the B-Tree will have to be
-rebuilt from a previous value. After all, the database *is* a set of datoms, and
-everything else is just derived data.
-
-Right now I'm still reading about other data structures that storage engines
-use, and I'll start implementing the "Flushing Copy-on-Write B-Tree" as I learn
-more[^learn-more-db] and mature it more.
-
-[^learn-more-db]: If you are interested in learning more about this too, the
- very best two resources on this subject are Andy Pavlo's
- "[Intro to Database Systems](https://www.youtube.com/playlist?list=PLSE8ODhjZXjbohkNBWQs_otTrBTrjyohi)"
- course and Alex Petrov's "[Database Internals](https://www.databass.dev/)" book.
-
-## Bottom-up: parser combinators and FFI
-
-I chose Rust as it has the best WebAssembly tooling support.
-
-My goal is not to build a Rust database, but a database that happens to be in
-Rust. In order to reach client platforms, the primary API is the FFI one.
-
-I'm not very happy with current tools for exposing Rust code via FFI to the
-external world: they either mix C with C++, which I don't want to do, or provide
-no access to the intermediate representation of the FFI, which would be useful
-for generating binding for any language that speaks FFI.
-
-I like better the path that the author of the [cbindgen][cbindgen-crate]
-crate [proposes][rust-ffi]: emitting a data representation of the Rust C API
-(the author calls it a `ffi.json` file), and then building transformers from the
-data representation to the target language. This way you could generate a C API
-*and* the node-ffi bindings for JavaScript automatically from the Rust code.
-
-So the first thing to be done before moving on is an FFI exporter that doesn't
-mix C and C++, and generates said `ffi.json`, and then build a few transformers
-that take this `ffi.json` and generate the language bindings, be it C, C++,
-JavaScript, TypeScript, Kotlin, Swift, Dart, *etc*[^ffi-langs].
-
-[^ffi-langs]: Those are, specifically, the languages I'm more interested on. My
- goal is supporting client applications, and those languages are the most
- relevant for doing so: C for GTK, C++ for Qt, JavaScript and TypeScript for
- Node.js and browser, Kotlin for Android and Swing, Swift for iOS, and Dart
- for Flutter.
-
-I think the best way to get there is by taking the existing code for cbindgen,
-which uses the [syn][syn-crate] crate to parse the Rust code[^rust-syn], and
-adapt it to emit the metadata.
-
-[^rust-syn]: The fact that syn is an external crate to the Rust compiler points
- to a big warning: procedural macros are not first class in Rust. They are
- just like Babel plugins in JavaScript land, with the extra shortcoming that
- there is no specification for the Rust syntax, unlike JavaScript.
-
- As flawed as this may be, it seems to be generally acceptable and adopted,
- which works against building a solid ecosystem for Rust.
-
- The alternative that rust-ffi implements relies on internals of the Rust
-    compiler, which isn't actually worse, just less common and less accepted.
-
-I've started a fork of cbindgen: ~~x-bindgen~~[^x-bindgen]. Right now it is
-just a copy of cbindgen verbatim, and I plan to remove all C and C++ emitting
-code from it, and add IR emitting code instead.
-
-[^x-bindgen]: *EDIT*: now archived, the experimentation was fun. I've started to move more towards C, so this effort became deprecated.
-
-When starting working on x-bindgen, I realized I didn't know what to look for in
-a header file, as I haven't written any C code in many years. So as I was
-writing [libedn][libedn-repo], I didn't know how to build a good C API to
-expose. So I tried porting the code to C, and right now I'm working on building
-a *good* C API for a JSON parser using parser combinators:
-~~ParsecC~~ [^parsecc].
-
-[^parsecc]: *EDIT*: now also archived.
-
-After "finishing" ParsecC I'll have a good notion of what a good C API is, and
-I'll have a better direction towards how to expose code from libedn to other
-languages, and work on x-bindgen then.
-
-What both libedn and ParsecC are missing right now are proper error reporting,
-and property-based testing for libedn.
-
-[cbindgen-crate]: https://github.com/eqrion/cbindgen
-[syn-crate]: https://github.com/dtolnay/syn
-[rust-ffi]: https://blog.eqrion.net/future-directions-for-cbindgen/
-[libedn-repo]: https://euandre.org/git/libedn/
-
-## Conclusion
-
-I've learned a lot already, and I feel the journey I'm on is worth going
-through.
-
-If any of those topics interest you, message me to discuss more or contribute!
-Patches welcome!
diff --git a/_articles/2021-01-26-ann-remembering-add-memory-to-dmenu-fzf-and-similar-tools.md b/_articles/2021-01-26-ann-remembering-add-memory-to-dmenu-fzf-and-similar-tools.md
deleted file mode 100644
index 0d02384..0000000
--- a/_articles/2021-01-26-ann-remembering-add-memory-to-dmenu-fzf-and-similar-tools.md
+++ /dev/null
@@ -1,190 +0,0 @@
----
-
-title: "ANN: remembering - Add memory to dmenu, fzf and similar tools"
-
-date: 2021-01-26
-
-layout: post
-
-lang: en
-
-ref: ann-remembering-add-memory-to-dmenu-fzf-and-similar-tools
-
----
-
-Today I pushed v0.1.0 of [remembering], a tool to enhance the interactive usability of menu-like tools, such as [dmenu] and [fzf].
-
-## Previous solution
-
-I previously used [yeganesh] to fill this gap, but as I started to rely less on Emacs, I added fzf as my go-to tool for doing fuzzy searching on the terminal.
-But I didn't like that fzf always showed the same order of things, when I would only need 3 or 4 commonly used files.
-
-For those who don't know: yeganesh is a wrapper around dmenu that will remember your most used programs and put them on the beginning of the list of executables.
-This is very convenient for interactive prolonged use, as with time the things you usually want are right at the very beginning.
-
-But now I had this thing, yeganesh, that solved this problem for dmenu, but didn't for fzf.
-
-I initially considered patching yeganesh to support it, but I found it more coupled to dmenu than I would desire.
-I'd rather have something that knows nothing about dmenu, fzf or anything, but enhances tools like those in a useful way.
-
-[remembering]: https://euandreh.xyz/remembering/
-[dmenu]: https://tools.suckless.org/dmenu/
-[fzf]: https://github.com/junegunn/fzf
-[yeganesh]: http://dmwit.com/yeganesh/
-
-## Implementation
-
-Other than being decoupled from dmenu, another improvement I thought could be made on top of yeganesh is the programming language choice.
-Instead of Haskell, I went with POSIX sh.
-Sticking to POSIX sh makes it require fewer build-time dependencies. There aren't any, actually. Packaging is made much easier due to that.
-
-The good thing is that the program itself is small enough ([119 lines] on v0.1.0) that POSIX sh does the job just fine, combined with other POSIX utilities such as [getopts], [sort] and [awk].
-
-[119 lines]: https://euandre.org/git/remembering/tree/remembering?id=v0.1.0
-[getopts]: http://www.opengroup.org/onlinepubs/9699919799/utilities/getopts.html
-[sort]: http://www.opengroup.org/onlinepubs/9699919799/utilities/sort.html
-[awk]: http://www.opengroup.org/onlinepubs/9699919799/utilities/awk.html
-
-The behaviour is: given a program that will read from STDIN and write a single entry to STDOUT, `remembering` wraps that program, and rearranges STDIN so that previous choices appear at the beginning.
-
-Where you would do:
-
-```shell
-$ seq 5 | fzf
-
- 5
- 4
- 3
- 2
-> 1
- 5/5
->
-```
-
-And every time get the same order of numbers, now you can write:
-
-```shell
-$ seq 5 | remembering -p seq-fzf -c fzf
-
- 5
- 4
- 3
- 2
-> 1
- 5/5
->
-```
-
-On the first run, everything is the same. If you picked 4 on the previous example, the following run would be different:
-
-```shell
-$ seq 5 | remembering -p seq-fzf -c fzf
-
- 5
- 3
- 2
- 1
-> 4
- 5/5
->
-```
-
-As time passes, the list would adjust based on the frequency of your choices.
-
-I aimed for reusability, so that I could wrap diverse commands with `remembering` and it would be able to work. To accomplish that, a "profile" (the `-p something` part) stores data about different runs separately.
-
-I took the idea of building something small with few dependencies to other places too:
-- the manpages are written in troff directly;
-- the tests are just more POSIX sh files;
-- and a POSIX Makefile to `check` and `install`.
-
-I was aware of the value of sticking to coding to standards, but I had past experience mostly with programming language standards, such as ECMAScript, Common Lisp, Scheme, or with IndexedDB or DOM APIs.
-It felt good to rediscover these nice POSIX tools, which makes me remember of a quote by [Henry Spencer][poor-unix]:
-
-> Those who do not understand Unix are condemned to reinvent it, poorly.
-
-[poor-unix]: https://en.wikipedia.org/wiki/Henry_Spencer#cite_note-3
-
-## Usage examples
-
-Here are some functions I wrote myself that you may find useful:
-
-### Run a command with fzf on `$PWD`
-
-```shellcheck
-f() {
-  profile="f-shell-function-$(pwd | sed -e 's_/_-_g')"
- file="$(git ls-files | \
- remembering -p "$profile" \
-    -c "fzf --select-1 --exit-0 --query \"$2\" --preview 'cat {}'")"
- if [ -n "$file" ]; then
- # shellcheck disable=2068
- history -s f $@
- history -s "$1" "$file"
- "$1" "$file"
-fi
-}
-```
-
-This way I can run `f vi` or `f vi config` at the root of a repository, and the list of files will always appear on the most used order.
-Adding `pwd` to the profile allows it to not mix data for different repositories.
-
-### Copy password to clipboard
-
-```shell
-choice="$(find "$HOME/.password-store" -type f | \
- grep -Ev '(.git|.gpg-id)' | \
- sed -e "s|$HOME/.password-store/||" -e 's/\.gpg$//' | \
- remembering -p password-store \
- -c 'dmenu -l 20 -i')"
-
-
-if [ -n "$choice" ]; then
- pass show "$choice" -c
-fi
-```
-
-Adding the above to a file and binding it to a keyboard shortcut, I can access the contents of my [password store][password-store], with the entries ordered by usage.
-
-[password-store]: https://www.passwordstore.org/
-
-### Replacing yeganesh
-
-Where I previously had:
-
-```shell
-exe=$(yeganesh -x) && exec $exe
-```
-
-Now I have:
-
-```shell
-exe=$(dmenu_path | remembering -p dmenu-exec -c dmenu) && exec $exe
-```
-
-This way, the executables appear on order of usage.
-
-If you don't have `dmenu_path`, you can get just the underlying `stest` tool that looks at the executables available in your `$PATH`. Here's a juicy one-liner to do it:
-
-```shell
-$ wget -O- https://dl.suckless.org/tools/dmenu-5.0.tar.gz | \
- tar Ozxf - dmenu-5.0/arg.h dmenu-5.0/stest.c | \
- sed 's|^#include "arg.h"$|// #include "arg.h"|' | \
- cc -xc - -o stest
-```
-
-With the `stest` utility you'll be able to list executables in your `$PATH` and pipe them to dmenu or something else yourself:
-```shell
-$ (IFS=:; ./stest -flx $PATH;) | sort -u | remembering -p another-dmenu-exec -c dmenu | sh
-```
-
-In fact, the code for `dmenu_path` is almost just like that.
-
-## Conclusion
-
-For my personal use, I've [packaged] `remembering` for GNU Guix and Nix. Packaging it to any other distribution should be trivial, or just downloading the tarball and running `[sudo] make install`.
-
-Patches welcome!
-
-[packaged]: https://euandre.org/git/package-repository/
-[nix-file]: https://euandre.org/git/dotfiles/tree/nixos/not-on-nixpkgs/remembering.nix?id=0831444f745cf908e940407c3e00a61f6152961f
diff --git a/_articles/2021-02-17-ann-fallible-fault-injection-library-for-stress-testing-failure-scenarios.md b/_articles/2021-02-17-ann-fallible-fault-injection-library-for-stress-testing-failure-scenarios.md
deleted file mode 100644
index 96c6f49..0000000
--- a/_articles/2021-02-17-ann-fallible-fault-injection-library-for-stress-testing-failure-scenarios.md
+++ /dev/null
@@ -1,246 +0,0 @@
----
-
-title: "ANN: fallible - Fault injection library for stress-testing failure scenarios"
-
-date: 2021-02-17
-
-updated_at: 2022-03-06
-
-layout: post
-
-lang: en
-
-ref: ann-fallible-fault-injection-library-for-stress-testing-failure-scenarios
-
----
-
-Yesterday I pushed v0.1.0 of [fallible], a miniscule library for fault-injection
-and stress-testing C programs.
-
-[fallible]: https://euandreh.xyz/fallible/
-
-## *EDIT*
-
-2021-06-12: As of [0.3.0] (and beyond), the macro interface improved and is a bit different from what is presented in this article. If you're interested, I encourage you to take a look at it.
-
-2022-03-06: I've [archived] the project for now. It still needs some maturing before being usable.
-
-[0.3.0]: https://euandreh.xyz/fallible/CHANGELOG.html
-[archived]: https://euandre.org/static/attachments/fallible.tar.gz
-
-## Existing solutions
-
-Writing robust code can be challenging, and tools like static analyzers, fuzzers and friends can help you get there with more certainty.
-As I would try to improve some of my C code and make it more robust, in order to handle system crashes, filled disks, out-of-memory and similar scenarios, I didn't find existing tooling to help me get there as I expected to find.
-I couldn't find existing tools to help me explicitly stress-test those failure scenarios.
-
-Take the "[Writing Robust Programs][gnu-std]" section of the GNU Coding Standards:
-
-[gnu-std]: https://www.gnu.org/prep/standards/standards.html#Semantics
-
-> Check every system call for an error return, unless you know you wish to ignore errors.
-> (...) Check every call to malloc or realloc to see if it returned NULL.
-
-From a robustness standpoint, this is a reasonable stance: if you want to have a robust program that knows how to fail when you're out of memory and `malloc` returns `NULL`, then you ought to check every call to `malloc`.
-
-Take a sample code snippet for clarity:
-
-```c
-void a_function() {
- char *s1 = malloc(A_NUMBER);
- strcpy(s1, "some string");
-
- char *s2 = malloc(A_NUMBER);
- strcpy(s2, "another string");
-}
-```
-
-At a first glance, this code is unsafe: if any of the calls to `malloc` returns `NULL`, `strcpy` will be given a `NULL` pointer.
-
-My first instinct was to change this code to something like this:
-
-```diff
-@@ -1,7 +1,15 @@
- void a_function() {
- char *s1 = malloc(A_NUMBER);
-+ if (!s1) {
-+ fprintf(stderr, "out of memory, exitting\n");
-+ exit(1);
-+ }
- strcpy(s1, "some string");
-
- char *s2 = malloc(A_NUMBER);
-+ if (!s2) {
-+ fprintf(stderr, "out of memory, exitting\n");
-+ exit(1);
-+ }
- strcpy(s2, "another string");
- }
-```
-
-As I later found out, there are at least 2 problems with this approach:
-
-1. **it doesn't compose**: this could arguably work if `a_function` was `main`.
-   But if `a_function` lives inside a library, an `exit(1);` is an inelegant way of handling failures, and will catch the top-level `main` consuming the library by surprise;
-2. **it gives up instead of handling failures**: the actual handling goes a bit beyond stopping.
- What about open file handles, in-memory caches, unflushed bytes, etc.?
-
-If you could force only the second call to `malloc` to fail, [Valgrind] would correctly complain that the program exited with unfreed memory.
-
-[Valgrind]: https://www.valgrind.org/
-
-So the last change to make the best version of the above code is:
-
-```diff
-@@ -1,15 +1,14 @@
--void a_function() {
-+bool a_function() {
- char *s1 = malloc(A_NUMBER);
- if (!s1) {
-- fprintf(stderr, "out of memory, exitting\n");
-- exit(1);
-+ return false;
- }
- strcpy(s1, "some string");
-
- char *s2 = malloc(A_NUMBER);
- if (!s2) {
-- fprintf(stderr, "out of memory, exitting\n");
-- exit(1);
-+ free(s1);
-+ return false;
- }
- strcpy(s2, "another string");
- }
-```
-
-Instead of returning `void`, `a_function` now returns `bool` to indicate whether an error occurred during its execution.
-If `a_function` returned a pointer to something, the return value could be `NULL`, or an `int` that represents an error code.
-
-The code is now a) safe and b) failing gracefully, returning the control to the caller to properly handle the error case.
-
-After seeing similar patterns on well designed APIs, I adopted this practice for my own code, but was still left with manually verifying the correctness and robustness of it.
-
-How could I add assertions around my code that would help me make sure the `free(s1);` exists, before getting an error report?
-How do other people and projects solve this?
-
-From what I could see, either people a) hope for the best, b) write safe code but don't stress-test it or c) write ad-hoc code to stress it.
-
-The most prominent case of c) is SQLite: it has a few wrappers around the familiar `malloc` to do fault injection, check for memory limits, add warnings, create shim layers for other environments, etc.
-All of that, however, is tightly coupled with SQLite itself, and couldn't be easily pulled out for use somewhere else.
-
-When searching for it online, an [interesting thread] caught my attention: fail the call to `malloc` for each time it is called, and when the same stacktrace appears again, allow it to proceed.
-
-[interesting thread]: https://stackoverflow.com/questions/1711170/unit-testing-for-failed-malloc
-
-## Implementation
-
-A working implementation of that already exists: [mallocfail].
-It uses `LD_PRELOAD` to replace `malloc` at run-time, computes the SHA of the stacktrace and fails once for each SHA.
-
-I initially envisioned and started implementing something very similar to mallocfail.
-However, I wanted it to go beyond out-of-memory scenarios, and using `LD_PRELOAD` for every possible corner that could fail wasn't a good idea in the long run.
-
-Also, mallocfail won't work together with tools such as Valgrind, which want to do their own override of `malloc` with `LD_PRELOAD`.
-
-I instead went with less automatic things: starting with a `fallible_should_fail(char *filename, int lineno)` function that fails once for each `filename`+`lineno` combination, I created macro wrappers around common functions such as `malloc`:
-
-```c
-void *fallible_malloc(size_t size, const char *const filename, int lineno) {
-#ifdef FALLIBLE
- if (fallible_should_fail(filename, lineno)) {
- return NULL;
- }
-#else
- (void)filename;
- (void)lineno;
-#endif
- return malloc(size);
-}
-
-#define MALLOC(size) fallible_malloc(size, __FILE__, __LINE__)
-```
-
-With this definition, I could replace the calls to `malloc` with `MALLOC` (or any other name that you want to `#define`):
-
-```diff
---- 3.c 2021-02-17 00:15:38.019706074 -0300
-+++ 4.c 2021-02-17 00:44:32.306885590 -0300
-@@ -1,11 +1,11 @@
- bool a_function() {
-- char *s1 = malloc(A_NUMBER);
-+ char *s1 = MALLOC(A_NUMBER);
- if (!s1) {
- return false;
- }
- strcpy(s1, "some string");
-
-- char *s2 = malloc(A_NUMBER);
-+ char *s2 = MALLOC(A_NUMBER);
- if (!s2) {
- free(s1);
- return false;
-```
-
-With this change, if the program gets compiled with the `-DFALLIBLE` flag the fault-injection mechanism will run, and `MALLOC` will fail once for each `filename`+`lineno` combination.
-When the flag is missing, `MALLOC` is a very thin wrapper around `malloc`, which compilers could remove entirely, and the `-lfallible` flags can be omitted.
-
-This applies not only to `malloc` or other `stdlib.h` functions.
-If `a_function` is important or relevant, I could add a wrapper around it too, that checks `fallible_should_fail` to exercise whether its callers are also doing the proper clean-up.
-
-The actual code is just this single function, [`fallible_should_fail`], which ended up taking only ~40 lines.
-In fact, there are more lines of either Makefile (111), README.md (82) or troff (306) on this first version.
-
-The price for such fine-grained control is that this approach requires more manual work.
-
-[mallocfail]: https://github.com/ralight/mallocfail
-[`fallible_should_fail`]: https://euandre.org/git/fallible/tree/src/fallible.c?id=v0.1.0#n16
-
-## Usage examples
-
-### `MALLOC` from the `README.md`
-
-```c
-// leaky.c
-#include <string.h>
-#include <fallible_alloc.h>
-
-int main() {
- char *aaa = MALLOC(100);
- if (!aaa) {
- return 1;
- }
- strcpy(aaa, "a safe use of strcpy");
-
- char *bbb = MALLOC(100);
- if (!bbb) {
- // free(aaa);
- return 1;
- }
- strcpy(bbb, "not unsafe, but aaa is leaking");
-
- free(bbb);
- free(aaa);
- return 0;
-}
-```
-
-Compile with `-DFALLIBLE` and run [`fallible-check.1`][fallible-check]:
-```shell
-$ c99 -DFALLIBLE -o leaky leaky.c -lfallible
-$ fallible-check ./leaky
-Valgrind failed when we did not expect it to:
-(...suppressed output...)
-# exit status is 1
-```
-
-[fallible-check]: https://euandreh.xyz/fallible/fallible-check.1.html
-
-## Conclusion
-
-For my personal use, I'll [package] them for GNU Guix and Nix.
-Packaging it to any other distribution should be trivial, or just downloading the tarball and running `[sudo] make install`.
-
-Patches welcome!
-
-[package]: https://euandre.org/git/package-repository/
diff --git a/_articles/2021-04-29-a-relational-model-of-data-for-large-shared-data-banks-article-review.md b/_articles/2021-04-29-a-relational-model-of-data-for-large-shared-data-banks-article-review.md
deleted file mode 100644
index e15b478..0000000
--- a/_articles/2021-04-29-a-relational-model-of-data-for-large-shared-data-banks-article-review.md
+++ /dev/null
@@ -1,130 +0,0 @@
----
-
-title: A Relational Model of Data for Large Shared Data Banks - article-review
-
-date: 2021-04-29
-
-layout: post
-
-lang: en
-
-ref: a-relational-model-of-data-for-large-shared-data-banks-article-review
-
----
-
-This is a review of the article "[A Relational Model of Data for Large Shared Data Banks][codd-article]", by E. F. Codd.
-
-[codd-article]: https://www.seas.upenn.edu/~zives/03f/cis550/codd.pdf
-
-## Data Independence
-
-Codd brings the idea of *data independence* as a better approach to use on databases.
-This is in contrast with the existing approaches, namely hierarchical (tree-based) and network-based.
-
-His main argument is that queries in applications shouldn't depend on and be coupled with how the data is represented internally by the database system.
-This key idea is very powerful, and something that we strive for in many other places: decoupling the interface from the implementation.
-
-If the database system has this separation, it can keep the querying interface stable, while having the freedom to change its internal representation at will, for better performance, less storage, etc.
-
-This is true for most modern database systems.
-They can change from B-Trees with leaves containing pointers to data, to B-Trees with leaves containing the raw data, to hash tables.
-All that without changing the query interface, only its performance.
-
-Codd mentions that, from an information representation standpoint, any index is a duplication, but useful for performance.
-
-This data independence also impacts ordering (a *relation* doesn't rely on the insertion order).
-
-## Duplicates
-
-His definition of relational data is a bit different from most modern database systems, namely **no duplicate rows**.
-
-I couldn't find a reason behind this restriction, though.
-For practical purposes, I find it useful to have it.
-
-## Relational Data
-
-In the article, Codd doesn't try to define a language, and today's most popular one is SQL.
-
-However, there is no restriction that says that "SQL database" and "relational database" are synonyms.
-One could have a relational database without using SQL at all, and it would still be a relational one.
-
-The main one that I have in mind, and the reason that led me to reading this paper in the first place, is Datomic.
-
-It uses an [edn]-based representation for datalog queries[^edn-queries], and a particular schema used to represent data.
-
-Even though it looks very weird when coming from SQL, I'd argue that it ticks all the boxes (except for "no duplicates") that defines a relational database, since building relations and applying operations on them is possible.
-
-Compare and contrast a contrived example of possible representations of SQL and datalog of the same data:
-
-```sql
--- create schema
-CREATE TABLE people (
- id UUID PRIMARY KEY,
- name TEXT NOT NULL,
- manager_id UUID,
- FOREIGN KEY (manager_id) REFERENCES people (id)
-);
-
--- insert data
-INSERT INTO people (id, name, manager_id) VALUES
- ("d3f29960-ccf0-44e4-be66-1a1544677441", "Foo", "076356f4-1a0e-451c-b9c6-a6f56feec941"),
-  ("076356f4-1a0e-451c-b9c6-a6f56feec941", "Bar", NULL);
-
--- query data, make a relation
-
-SELECT employees.name AS 'employee-name',
- managers.name AS 'manager-name'
-FROM people employees
-INNER JOIN people managers ON employees.manager_id = managers.id;
-```
-
-{% raw %}
-```
-;; create schema
-#{ {:db/ident :person/id
- :db/valueType :db.type/uuid
- :db/cardinality :db.cardinality/one
- :db/unique :db.unique/value}
- {:db/ident :person/name
- :db/valueType :db.type/string
- :db/cardinality :db.cardinality/one}
- {:db/ident :person/manager
- :db/valueType :db.type/ref
- :db/cardinality :db.cardinality/one}}
-
-;; insert data
-#{ {:person/id #uuid "d3f29960-ccf0-44e4-be66-1a1544677441"
- :person/name "Foo"
- :person/manager [:person/id #uuid "076356f4-1a0e-451c-b9c6-a6f56feec941"]}
- {:person/id #uuid "076356f4-1a0e-451c-b9c6-a6f56feec941"
- :person/name "Bar"}}
-
-;; query data, make a relation
-{:find [?employee-name ?manager-name]
- :where [[?person :person/name ?employee-name]
- [?person :person/manager ?manager]
- [?manager :person/name ?manager-name]]}
-```
-{% endraw %}
-
-(forgive any errors on the above SQL and datalog code, I didn't run them to check. Patches welcome!)
-
-This employee example comes from the paper, and both SQL and datalog representations match the paper definition of "relational".
-
-Both "Foo" and "Bar" are employees, and the data is normalized.
-SQL represents data as tables, and Datomic as datoms, but relations could be derived from both, which we could view as:
-
-```
-employee_name | manager_name
-----------------------------
-"Foo" | "Bar"
-```
-
-[^edn-queries]: You can think of it as JSON, but with a Clojure taste.
-[edn]: https://github.com/edn-format/edn
-
-## Conclusion
-
-The article also talks about operators, consistency and normalization, which are now so widespread and well-known that it feels a bit weird seeing someone advocating for it.
-
-I also establish that `relational != SQL`, and other databases such as Datomic are also relational, following Codd's original definition.
diff --git a/_pastebins/2018-07-13-nix-string-padding.md b/_pastebins/2018-07-13-nix-string-padding.md
deleted file mode 100644
index 359bda5..0000000
--- a/_pastebins/2018-07-13-nix-string-padding.md
+++ /dev/null
@@ -1,19 +0,0 @@
----
-
-title: Nix string padding
-
-date: 2018-07-13
-
-layout: post
-
-lang: en
-
-eu_categories: nix
-
-ref: nix-string-padding
-
----
-
-```nix
-padString = (n: if n < 10 then "0" + toString n else toString n)
-```
diff --git a/_pastebins/2020-02-14-guix-shebang.md b/_pastebins/2020-02-14-guix-shebang.md
deleted file mode 100644
index 67d504d..0000000
--- a/_pastebins/2020-02-14-guix-shebang.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-
-title: Guix shebang
-
-date: 2020-02-14
-
-layout: post
-
-lang: en
-
-eu_categories: guix
-
-ref: guix-shebang
-
----
-
-```shell
-#!/usr/bin/env -S guix environment --ad-hoc bash -- bash
-set -Eeuo pipefail
-cd "$(dirname "${BASH_SOURCE[0]}")"
-
-pwd
-```
diff --git a/_screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.md b/_screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.md
deleted file mode 100644
index 92a79c2..0000000
--- a/_screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.md
+++ /dev/null
@@ -1,56 +0,0 @@
----
-
-title: AutoQEMU - automate installation and SSH setup of ISO OS images
-
-date: 2021-02-07
-
-updated_at: 2022-03-06
-
-layout: post
-
-lang: en
-
-ref: autoqemu-automate-installation-and-ssh-setup-of-iso-os-images
-
-video: true
-
----
-
-After reading begriffs "[Tips for stable and portable software]", the
-"Begriffs Buildfarm?" section caught my attention, as this is something I would
-be interested in.
-
-After emailing the author, a [public thread] began on the subject.
-
-As we discussed how it could be done, I decided to experiment with the idea of
-automating the setup of virtual environments with QEMU.
-
-This screencast is a simple demo of automating the installation of
-Alpine Linux 3.12.3 standard x86_64 with AutoQEMU[^AutoQEMU], which is nothing
-more than POSIX sh, [expect] scripts and Makefiles glued together.
-
-[^AutoQEMU]: The solution was a little too brittle to scale, and some
-distributions proved to be particularly problematic. I've [archived] my
-progress if you're interested in what I've done, and maybe wish to continue.
-
-As of this writing, I just worked on it for 2~3 days, so everything is still
-pretty ad-hoc.
-
-The commands from the screencast were[^script-command]:
-
-[^script-command]: Only now, writing again what I ran on the screencast I thought that I should have tried something like [script](https://www.man7.org/linux/man-pages/man1/script.1.html). Maybe next time (thanks [klaatu](https://gnuworldorder.info/) for the tip!).
-
-```shell
-pushd `mktemp -d`
-git clone https://euandre.org/git/autoqemu .
-make
-make install PREFIX=$HOME/.local
-autoqemu ssh alpine
-```
-
-It assumes that `$HOME/.local/bin` is in `$PATH`.
-
-[Tips for stable and portable software]: https://begriffs.com/posts/2020-08-31-portable-stable-software.html
-[public thread]: https://talk.begriffs.com/pipermail/friends/2021-February/001263.html
-[archived]: https://euandre.org/static/attachments/autoqemu.tar.gz
-[expect]: https://core.tcl-lang.org/expect/index
diff --git a/_slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides b/_slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides
deleted file mode 100644
index 22770e6..0000000
--- a/_slides/2020-10-19-rollout-feature-flag-experiment-operational-toggle.slides
+++ /dev/null
@@ -1,343 +0,0 @@
----
-
-title: Rollout, feature flag, experiment, operational toggle
-
-date: 2020-10-19
-
-layout: slides
-
-lang: en
-
-ref: rollout-feature-flag-experiment-operational-toggle
-
-article: _articles/2020-10-19-feature-flags-differences-between-backend-frontend-and-mobile.md
-
----
-
-# Rollout, feature flag, experiment, operational toggle
-Different use cases for **backend**, **frontend** and **mobile**
-
----
-
-"Feature flags" tend to come up when talking about **continuous deployment**
-
-???
-
-I'm using "quotes" because I'm mixing up different meanings of "rollout"
-
----
-
-# CI
-continuous integration
-
-# CD
-continuous delivery
-
-# CD
-**continuous deployment**
-
-???
-
-Background: build vocabulary, why are feature flags related to CD
-
-CI solves: manual integration of long-lived branches
-
-CD solves: automation of deployment process
-
-CD solves: releases as frequent as possible
-
-That's where the "GoCD" name comes from
-
----
-
-# Types:
-1. rollout
-2. feature flag
-3. experiment
-4. operational toggle
-
----
-
-# rollout
-## For *rolling out* a new version of software
-
-**Short-lived** using **percentages**
-
-- a [new deployment of k8s][k8s]
-- new [APK released to the Play Store][apk]
-
-[k8s]: https://kubernetes.io/docs/concepts/workloads/controllers/deployment/#creating-a-deployment
-[apk]: https://support.google.com/googleplay/android-developer/answer/6346149?hl=en
-
-???
-
-Relevant as long as the new code is deployed
-
----
-
-# feature flag
-## For turning a feature *on* or *off*
-
-**Medium-lived** using **allow list**, **A/B test**, **percentage**,
-**app version**, *etc*.
-
-- `:new-chargeback-flow`
-- `:new-debit-card-activation-screen`
-
-???
-
-Relevant as long as the new code is being developed
-
----
-
-# experiment
-## For analyzing behaviour
-
-**Medium-lived** using **allow list** and **A/B test**
-
-- `:debit-withdrawal-test`
-
----
-
-# operational toggle
-## For disabling features in `#crash`-like situations
-
-**Long-lived** using **percentage**
-
-- `:bank-barcode-payment`
-- `:savings-bank-barcode-query-provider`
-
-???
-
-Lives for as long as the code is in production.
-
-It feels like a system-level circuit breaker.
-
----
-
-We now know about the types
-
-## But they have different relevance for **backend**, **frontend** and **mobile**
-
----
-
-# backend
-
-1. **rollout**: k8s blue/green, canary and ~`common-rollout`~ `common-xp`
-2. **feature flag**: ~`common-rollout`~ `common-xp` and datasets
-3. **experiment**: `common-xp`
-4. **operational toggle**: ~`common-rollout`~ `common-xp`
-
-???
-
-This is a bit why common-rollout isn't called *common-feature-flag*: it was
-initially designed with backend usage of mostly *rollouts* in mind, and just a
-bit *feature flags*.
-
-Avoid using configuration for doing operational toggles: it is less dynamic, so
-it defeats the purpose.
-
----
-
-# frontend
-
-1. **rollout**: CDN and page refreshes
-2. **feature flag**: percentages and maybe IPs (no `:customer/id` on the website)
-3. **experiment**: via dynamic backend control
-4. **operational toggle**: via dynamic backend control
-
----
-
-# mobile
-
-1. **rollout**: app stores
-2. **feature flag**: via dynamic backend control
-3. **experiment**: via dynamic backend control
-4. **operational toggle**: via dynamic backend control
-
----
-
-Key differentiator is
-## How much **control** we have over the **environment**
-
----
-
-## **backend**
-
-# Full control
-🎉
-
-???
-
-Can edit, update and even delete rollouts as desired.
-
-Mix and match at will!
-
----
-
-## **frontend**
-
-# Partial control
-
-When choose when to make a new version available
-
-???
-
-We can control when a new version is available, partially when someone will
-upgrade it.
-
-But it is easy to fallback to "reload the page and try again".
-
----
-
-## **mobile**
-
-# Very limited control
-
-- app stores can restrict updates (worse for iOS)
-- customers still have to download new versions
-
----
-
-# Costs
-
-- more complex code
-- compatibility with old app versions
-- nesting is exponential
-
----
-
-# Benefits
-
-- dynamicity
-
----
-
-## Weighting costs × benefits
-
-The less control we have, the more we value dynamicity
-
----
-
-## Weighting costs × benefits
-
-- backend: sometimes worth the cost
-- frontend: almost always worth cost
-- mobile: **always** worth cost
-
----
-
-# Best practices
-
----
-
-## Dynamic content > feature flag
-
-Always true for **mobile**, almost always for **frontend**
-
----
-
-## Use `:include-list` for named groups
-
-Always true for **backend**, **frontend** and **mobile**
-
-{% raw %}
-```clojure [2-3]
-{:rules
- #{{:type :include-list
- :content {:filename "debit-team-members.txt"}}}}
-```
-{% endraw %}
-
----
-
-## Always use `:app-version`
-
-only for **mobile**
-
-{% raw %}
-```clojure [2]
-{:rules
- #{{:type :app-version
- :content {:min-version #{{:platform :android
- :code 1000000}
- {:platform :ios
- :code 2000000}}}}}}
-```
-{% endraw %}
-
----
-
-## Extend ~`common-rollout`~ `common-xp` if required
-
-That's how `:include-list`, `:app-version`, *etc.* were born
-
----
-
-## Beware of many nested feature flags
-
-True for **backend**, **frontend** and **mobile**
-
-???
-
-Exponential growth of combinations
-
----
-
-## Don't delete app-facing feature flags
-
-True for **mobile**
-
-???
-
-This could break old app versions, only do this intentionally
-
-We don't have (yet) a strategy for dealing with LTS of the app, and we just say:
-"we'll support every app version out there".
-
----
-
-## Include a feature flag on the whiteboarding phase
-
----
-
-## Include deleting/retiring the feature flag at the end
-
----
-
-## Avoid renaming a feature flag
-
-Use `:app-version` with `:min-version` instead
-
----
-
-# And most importantly...
-
----
-
-# ***Always*** rely on a feature flag on the app
-
-Never do a hot fix, avoid expedited releases at all costs
-
-???
-
-The app is where we have less control, so the feature flag is how we get some of
-that control back.
-
-This doesn't mean you'll need 1 feature flag per PR
-
-There's not such thing as:
-"This is such a small thing, it doesn't need a feature flag"
-
-You should ask yourself:
-"It this crashes the app, am I OK with waiting for the next release train?"
-
----
-
-## Thank you!
-
-References:
-
-1. "[Feature Toggles (aka Feature Flags)](https://martinfowler.com/articles/feature-toggles.html)", by Pete Hodgson
-1. "[Continuous integration vs. continuous delivery vs. continuous deployment](https://www.atlassian.com/continuous-delivery/principles/continuous-integration-vs-delivery-vs-deployment)", by Sten Pittet
-1. [Accelerate](https://itrevolution.com/book/accelerate/), by N. Forsgren, J. Humble and G. Kim
diff --git a/_slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides b/_slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides
deleted file mode 100644
index 33fc239..0000000
--- a/_slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides
+++ /dev/null
@@ -1,266 +0,0 @@
----
-
-title: 'On "local-first": beyond the CRDT silver bullet'
-
-date: 2020-11-14
-
-layout: slides
-
-lang: en
-
-ref: on-local-first-beyond-the-crdt-silver-bullet
-
-article: _articles/2020-11-14-local-first-software-you-own-your-data-in-spite-of-the-cloud-article-review.md
-
----
-
-# On local-first
-
-Beyond the CRDT silver bullet
-
----
-
-# Part 1
-
-Exposition
-
----
-
-## "cloud apps" vs "old-fashioned apps"
-
----
-
-## Target
-
-- documents
-- files
-- personal data repositories
-
-Not: banking services, e-commerce, social networking, ride-sharing, *etc*.
-
----
-
-## 7 Ideals for local-first software
-
----
-
-### 1 - No Spinners: Your Work at Your Fingertips
-
----
-
-### 2 - Your Work Is Not Trapped on One Device
-
----
-
-### 3 - The Network Is Optional
-
----
-
-### 4 - Seamless Collaboration with Your Colleagues
-
----
-
-### 5 - The Long Now
-
----
-
-### 6 - Security and Privacy by Default
-
----
-
-### 7 - You Retain Ultimate Ownership and Control
-
----
-
-## Towards a Better Future
-
-CRDTs (Conflict-free Replicated Data Types) as a Foundational Technology
-
----
-
-### Use case
-
-```
-# in node A and node B
-s = "Hello, World"
-
-# in node A
-s = "Hello, Alice"
-
-# in node B
-s = "Hello, Bob"
-```
-
-How to reconcile those?
-- `Hello, ABloibce`
-- `Hello, AliceBob`
-- `Hello, BobAlice`
-- `Hello, Alice`
-- `Hello, Bob`
-
----
-
-Existing CRDTs differ:
-- performance
-- storage
-- compression
-- metadata overhead
-
----
-
-Hint towards the "automerge" CRDT
-
----
-
-*show comparison table, page 9*
-
----
-
-# Part 2
-
-Critique
-
----
-
-### Software license
-
-> In our opinion, maintaining control and ownership of data does not mean that
-> the software must necessarily be open source.
-
----
-
-#### Example 1 - intentional restriction
-
-```bash
-#!/bin/sh
-
-TODAY=$(date +%s)
-LICENSE_EXPIRATION=$(date -d 2020-10-27 +%s)
-
-if [ $TODAY -ge $LICENSE_EXPIRATION ]; then
- echo 'License expired!'
- exit 1
-fi
-
-echo $((2 + 2))
-```
-
-```bash
-# today
-$ ./useful-adder.sh
-4
-# tomorrow
-$ ./useful-adder.sh
-License expired!
-```
-
----
-
-#### Example 2 - unintentional restriction
-
-```bash
-# today
-$ useful-program
-# ...useful output...
-
-# tomorrow, with more data
-$ useful-program
-ERROR: Panic! Stack overflow!
-```
----
-
-### local-first **requires** free software
-
-Otherwise "The Long Now" (ideal nº5) is lost
-
----
-
-### Denial of existing solutions
-
-> In principle it is possible to collaborate without a repository service,
-> e.g. by sending patch files by email, but the majority of Git users rely
-> on GitHub.
-
-Solution: either GitHub+CRDTs or `git` **`send-email`**
-
----
-
-### Plain text formats
-
-> Git is highly optimized for code and similar line-based text file
-
-It even pulls software to the plain text direction, e.g.:
-- delivery-templates
-- `common-core.protocols.config`
-
-Why not exploit that more?
-
----
-
-### Ditching of web applications
-
-> The architecture of web apps remains fundamentally server-centric
-
-Disagree. Contrast [PouchDB][pouchdb] with Android [Instant Apps][instant-apps]
-
-[pouchdb]: https://pouchdb.com/
-[instant-apps]: https://developer.android.com/topic/google-play-instant
-
-???
-
-Talk on dynamic content
-
----
-
-### Costs are underrated
-
-- storage
-- backups
-- maintenance
-
-Example: blog vs vlog
-
----
-
-### Real-time collaboration a bit overrated
-
-It is only possible on the presence of reliable, medium-quality network
-connection
-
-> X also works when inside an elevator, subway or plane!
-
-<!-- 🤦‍ -->
-
----
-
-### On CRDTs and developer experience
-
-> For an app developer, how does the use of a CRDT-based data layer compare to
-> existing storage layers like a SQL database, a filesystem, or CoreData? Is a
-> distributed system harder to write software for?
-
-Yes.
-
-See "[A Note on Distributed Computing][note-dist-comp]"
-
-[note-dist-comp]: https://web.archive.org/web/20130116163535/http://labs.oracle.com/techrep/1994/smli_tr-94-29.pdf
-
----
-
-## Conclusion
-
-Why this is a "paper I love": it took offline-first and ran with it.
-
-But a pinch of CRDT won't make the world local-first.
-
-The tricky part is the end of the sentence: "**in spite of the Cloud**".
-
----
-
-## Thank you!
-
-References:
-
-1. "[Local-First Software: You Own Your Data, in spite of the Cloud](https://martin.kleppmann.com/papers/local-first.pdf)", by M. Kleppmann, A. Wiggins, P. Van Hardenberg and M. F. McGranaghan
-1. [The Morning Paper](https://blog.acolyer.org/2019/11/20/local-first-software/) article
-1. "[A Note on Distributed Computing](https://web.archive.org/web/20130116163535/http://labs.oracle.com/techrep/1994/smli_tr-94-29.pdf)", by J. Waldo, G. Wyant, A. Wollrath and S Kendall
diff --git a/_tils/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md b/_tils/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md
deleted file mode 100644
index 2e7fc32..0000000
--- a/_tils/2020-08-12-nome-de-arquivo-com-timestamp-simplificado.md
+++ /dev/null
@@ -1,45 +0,0 @@
----
-
-title: Nome de arquivo com timestamp simplificado
-
-date: 2020-08-12
-
-updated_at: 2020-11-04
-
-layout: post
-
-lang: pt
-
-ref: simple-filename-timestamp
-
-eu_categories: shell
-
----
-
-Quando vou escrever um post no Jekyll ou criar um arquivo de log com a data no
-nome, eu normalmente engasgo para achar um jeito direto de fazer isso. Há uma
-solução simples: `date -I`.
-
-```shell
-./meu-programa.sh > meu-programa.$(date -I).log
-cp template-de-post.md _posts/$(date -I)-slug-do-post.md
-```
-
-Usar essa ferramenta padrão do GNU/Linux permite que você simplesmente escreva
-`touch $(date -I).md` para criar um arquivo `2020-08-12.md`.
-
-Eu sempre tinha que parar para reler o `man date` ou buscar na internet de novo
-e de novo como fazer isso, e depois de sempre chegar no mesmo resultado ficou
-claro para mim que `date -I` quanto `date -Is` (`s` de segundos) são as
-respostas que eu estou procurando 95% do tempo:
-
-```shell
-# dentro do meu-programa.sh
-echo "Programa começou em $(date -Is)"
-# saída é:
-# Programa começou em 2020-08-12T09:15:16-03:00
-```
-
-Ambos os formatos de data são hierárquicos, com intervalos de tempo maior à
-esquerda. Isso significa que você pode facilmente ordená-los (e até usar TAB
-para completar) sem esforço ou ferramenta extra.
diff --git a/_tils/2020-10-11-search-changes-to-a-filename-pattern-in-git-history.md b/_tils/2020-10-11-search-changes-to-a-filename-pattern-in-git-history.md
deleted file mode 100644
index 251abe9..0000000
--- a/_tils/2020-10-11-search-changes-to-a-filename-pattern-in-git-history.md
+++ /dev/null
@@ -1,41 +0,0 @@
----
-
-title: Search changes to a filename pattern in Git history
-
-date: 2020-10-11
-
-layout: post
-
-lang: en
-
-ref: search-changes-to-a-filename-pattern-in-git-history
-
-eu_categories: git
-
----
-
-This is [yet][git-til-1] [another][git-til-2] ["search in Git"][git-til-3] TIL
-entry. You could say that Git has a unintuitive CLI, or that is it very
-powerful.
-
-I wanted to search for an old file that I new that was in the
-history of the repository, but was deleted some time ago. So I didn't really
-remember the name, only bits of it.
-
-I immediately went to the list of TILs I had written on searching in Git, but
-it wasn't readily obvious how to do it, so here it goes:
-
-```shell
-git log -- *pattern*
-```
-
-You could add globs before the pattern to match things on any directory, and add
-our `-p` friend to promptly see the diffs:
-
-```shell
-git log -p -- **/*pattern*
-```
-
-[git-til-1]: {% link _tils/2020-08-14-browse-a-git-repository-at-a-specific-commit.md %}
-[git-til-2]: {% link _tils/2020-08-16-search-in-git.md %}
-[git-til-3]: {% link _tils/2020-08-28-grep-online-repositories.md %}
diff --git a/_tils/2020-11-08-find-broken-symlinks-with-find.md b/_tils/2020-11-08-find-broken-symlinks-with-find.md
deleted file mode 100644
index bc97fc6..0000000
--- a/_tils/2020-11-08-find-broken-symlinks-with-find.md
+++ /dev/null
@@ -1,36 +0,0 @@
----
-
-title: Find broken symlinks with "find"
-
-date: 2020-11-08
-
-layout: post
-
-lang: en
-
-ref: find-broken-symlinks-with-find
-
-eu_categories: shell
-
----
-
-The `find` command knows how to show broken symlinks:
-
-```shell
-find . -xtype l
-```
-
-This was useful to me when combined with [Git Annex][git-annex]. Its
-[`wanted`][git-annex-wanted] option allows you to have a "sparse" checkout of
-the content, and save space by not having to copy every annexed file locally:
-
-```shell
-git annex wanted . 'exclude=Music/* and exclude=Videos/*'
-```
-
-You can `find` any broken symlinks outside those directories by querying with
-Git Annex itself, but `find . -xtype l` works on other places too, where broken
-symlinks might be a problem.
-
-[git-annex]: https://git-annex.branchable.com/
-[git-annex-wanted]: https://git-annex.branchable.com/git-annex-wanted/
diff --git a/_tils/2020-11-12-useful-bash-variables.md b/_tils/2020-11-12-useful-bash-variables.md
deleted file mode 100644
index 33a072e..0000000
--- a/_tils/2020-11-12-useful-bash-variables.md
+++ /dev/null
@@ -1,72 +0,0 @@
----
-
-title: Useful Bash variables
-
-date: 2020-11-12 1
-
-layout: post
-
-lang: en
-
-ref: useful-bash-variables
-
-eu_categories: shell
-
----
-
-[GNU Bash][gnu-bash] has a few two letter variables that may be useful when
-typing on the terminal.
-
-[gnu-bash]: https://www.gnu.org/software/bash/
-
-## `!!`: the text of the last command
-
-The [`!!` variable][previous-command] refers to the previous command, and I find
-useful when following chains for symlinks:
-
-[previous-command]: https://www.gnu.org/software/bash/manual/bash.html#Event-Designators
-
-```shell
-$ which git
-/run/current-system/sw/bin/git
-$ readlink $(!!)
-readlink $(which git)
-/nix/store/5bgr1xpm4m0r72h9049jbbhagxdyrnyb-git-2.28.0/bin/git
-```
-
-It is also useful when you forget to prefix `sudo` to a command that requires
-it:
-
-```shell
-$ requires-sudo.sh
-requires-sudo.sh: Permission denied
-$ sudo !!
-sudo ./requires-sudo.sh
-# all good
-```
-
-Bash prints the command expansion before executing it, so it is better for you
-to follow along what it is doing.
-
-## `$_`: most recent parameter
-
-The [`$_` variable][recent-parameter] will give you the most recent parameter
-you provided to a previous argument, which can save you typing sometimes:
-
-```shell
-# instead of...
-$ mkdir -p a/b/c/d/
-$ cd a/b/c/d/
-
-# ...you can:
-$ mkdir -p a/b/c/d/
-$ cd $_
-```
-
-[recent-parameter]: https://www.gnu.org/software/bash/manual/bash.html#Special-Parameters
-
-## Conclusion
-
-I wouldn't use those in a script, as it would make the script terser to read, I
-find those useful shortcut that are handy when writing at the interactive
-terminal.
diff --git a/_tils/2020-11-14-gpodder-as-a-media-subscription-manager.md b/_tils/2020-11-14-gpodder-as-a-media-subscription-manager.md
deleted file mode 100644
index a74b225..0000000
--- a/_tils/2020-11-14-gpodder-as-a-media-subscription-manager.md
+++ /dev/null
@@ -1,33 +0,0 @@
----
-
-title: gPodder as a media subscription manager
-
-date: 2020-11-14
-
-layout: post
-
-lang: en
-
-ref: gpodder-as-a-media-subscription-manager
-
----
-
-As we [re-discover][rss] the value of Atom/RSS feeds, most useful feed clients I
-know of don't support media, specifically audio and video.
-
-[gPodder][gpodder] does.
-
-It is mostly know as a desktop podcatcher. But the thing about podcasts is that
-the feed is provided through an RSS/Atom feed. So you can just use gPodder as
-your media feed client, where you have control of what you look at.
-
-I audio and video providers I know of offer an RSS/Atom view of their content,
-so you can, say, treat any YouTube channel like a feed on its own.
-
-gPodder will then managed your feeds, watched/unwatched, queue downloads, etc.
-
-Being obvious now, it was a big finding for me. If it got you interested, I
-recommend you giving gPodder a try.
-
-[rss]: https://www.charlieharrington.com/unexpected-useless-and-urgent
-[gpodder]: https://gpodder.github.io/
diff --git a/_tils/2021-01-17-posix-sh-and-shebangs.md b/_tils/2021-01-17-posix-sh-and-shebangs.md
deleted file mode 100644
index 938d1bd..0000000
--- a/_tils/2021-01-17-posix-sh-and-shebangs.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-
-title: POSIX sh and shebangs
-
-date: 2021-01-17
-
-layout: post
-
-lang: en
-
-ref: posix-sh-and-shebangs
-
----
-
-As I [keep moving][posix-awk-0] [towards POSIX][posix-awk-1], I'm on the process of migrating all my Bash scripts to POSIX sh.
-
-As I dropped `[[`, arrays and other Bashisms, I was left staring at the first line of every script, wondering what to do: what is the POSIX sh equivalent of `#!/usr/bin/env bash`?
-I already knew that POSIX says nothing about shebangs, and that the portable way to call a POSIX sh script is `sh script.sh`, but I didn't know what to do with that first line.
-
-What I had previously was:
-```shell
-#!/usr/bin/env bash
-set -Eeuo pipefail
-cd "$(dirname "${BASH_SOURCE[0]}")"
-```
-
-Obviously, the `$BASH_SOURCE` would be gone, and I would have to adapt some of my scripts to not rely on the script location.
-The `-E` and `-o pipefail` options were also gone, and would be replaced by nothing.
-
-I converted all of them to:
-```shell
-#!/bin/sh -eu
-```
-
-I moved the `-eu` options to the shebang line itself, striving for conciseness.
-But as I changed callers from `./script.sh` to `sh script.sh`, things started to fail.
-Some tests that should fail reported errors, but didn't return 1.
-
-My first reaction was to revert back to `./script.sh`, but the POSIX bug I caught is a strong strain, and when I went back to it, I figured that the callers were missing some flags.
-Specifically, `sh -eu script.sh`.
-
-Then it clicked: when running with `sh script.sh`, the shebang line with the sh options is ignored, as it is a comment!
-
-Which means that the shebang most friendly with POSIX is:
-
-```shell
-#!/bin/sh
-set -eu
-```
-
-1. when running via `./script.sh`, if the system has an executable at `/bin/sh`, it will be used to run the script;
-1. when running via `sh script.sh`, the sh options aren't ignored as previously.
-
-TIL.
-
-[posix-awk-0]: {% link _tils/2020-12-15-awk-snippet-shellcheck-all-scripts-in-a-repository.md %}
-[posix-awk-1]: {% link _tils/2021-01-12-awk-snippet-send-email-to-multiple-recipients-with-curl.md %}
diff --git a/deps.mk b/deps.mk
new file mode 100644
index 0000000..43a93e9
--- /dev/null
+++ b/deps.mk
@@ -0,0 +1,1182 @@
+pages.adoc = \
+ src/content/en/about.adoc \
+ src/content/en/index.adoc \
+ src/content/pt/sobre.adoc \
+
+articles.adoc = \
+ src/content/en/blog/2018/07/17/guix-nixos.adoc \
+ src/content/en/blog/2018/08/01/npm-ci-reproducibility.adoc \
+ src/content/en/blog/2018/12/21/ytdl-subs.adoc \
+ src/content/en/blog/2019/06/02/nixos-stateless-workstation.adoc \
+ src/content/en/blog/2020/08/10/guix-srht.adoc \
+ src/content/en/blog/2020/08/31/database-i-wish-i-had.adoc \
+ src/content/en/blog/2020/10/05/cargo2nix.adoc \
+ src/content/en/blog/2020/10/05/swift2nix.adoc \
+ src/content/en/blog/2020/10/19/feature-flags.adoc \
+ src/content/en/blog/2020/10/20/wrong-interviewing.adoc \
+ src/content/en/blog/2020/11/07/diy-bugs.adoc \
+ src/content/en/blog/2020/11/08/paradigm-shift-review.adoc \
+ src/content/en/blog/2020/11/12/database-parsers-trees.adoc \
+ src/content/en/blog/2020/11/14/local-first-review.adoc \
+ src/content/en/blog/2021/01/26/remembering-ann.adoc \
+ src/content/en/blog/2021/02/17/fallible.adoc \
+ src/content/en/blog/2021/04/29/relational-review.adoc \
+ src/content/en/pastebin/2016/04/05/rpn.adoc \
+ src/content/en/pastebin/2018/07/11/nix-pinning.adoc \
+ src/content/en/pastebin/2018/07/13/guix-nixos-systemd.adoc \
+ src/content/en/pastebin/2018/07/13/guixbuilder-nixos.adoc \
+ src/content/en/pastebin/2018/07/13/guixbuilder.adoc \
+ src/content/en/pastebin/2018/07/13/nix-strpad.adoc \
+ src/content/en/pastebin/2018/07/25/nix-exps.adoc \
+ src/content/en/pastebin/2018/07/25/nix-showdrv.adoc \
+ src/content/en/pastebin/2019/06/08/inconsistent-hash.adoc \
+ src/content/en/pastebin/2019/12/29/raku-tuple-type.adoc \
+ src/content/en/pastebin/2020/01/04/guix-import-failure.adoc \
+ src/content/en/pastebin/2020/02/14/guix-shebang.adoc \
+ src/content/en/pastebin/2020/11/27/guix-build-local.adoc \
+ src/content/en/pastebin/2020/12/15/guix-pack-fail.adoc \
+ src/content/en/pastebin/2021/04/03/naive-slugify-js.adoc \
+ src/content/en/pastebin/2021/06/08/reading-session-pt1.adoc \
+ src/content/en/pastebin/2021/06/22/curl-wget.adoc \
+ src/content/en/pastebin/2021/08/11/h1-spacing.adoc \
+ src/content/en/pastebin/2021/09/02/sicp-3-19.adoc \
+ src/content/en/pastebin/2021/09/03/sicp-persistent-queue.adoc \
+ src/content/en/pastebin/2022/07/14/git-cleanup.adoc \
+ src/content/en/pastebin/2023/07/22/funcallable-amop.adoc \
+ src/content/en/podcast/2020/12/19/test-entry.adoc \
+ src/content/en/screencast/2021/02/07/autoqemu.adoc \
+ src/content/en/til/2020/08/12/filename-timestamp.adoc \
+ src/content/en/til/2020/08/13/code-jekyll.adoc \
+ src/content/en/til/2020/08/14/browse-git.adoc \
+ src/content/en/til/2020/08/16/git-search.adoc \
+ src/content/en/til/2020/08/28/grep-online.adoc \
+ src/content/en/til/2020/09/04/cli-email-fun-profit.adoc \
+ src/content/en/til/2020/09/05/oldschool-pr.adoc \
+ src/content/en/til/2020/10/11/search-git-history.adoc \
+ src/content/en/til/2020/11/08/find-broken-symlink.adoc \
+ src/content/en/til/2020/11/12/diy-nix-bash-ci.adoc \
+ src/content/en/til/2020/11/12/git-bisect-automation.adoc \
+ src/content/en/til/2020/11/12/useful-bashvars.adoc \
+ src/content/en/til/2020/11/14/gpodder-media.adoc \
+ src/content/en/til/2020/11/30/git-notes-ci.adoc \
+ src/content/en/til/2020/12/15/shellcheck-repo.adoc \
+ src/content/en/til/2020/12/29/svg.adoc \
+ src/content/en/til/2021/01/12/curl-awk-emails.adoc \
+ src/content/en/til/2021/01/17/posix-shebang.adoc \
+ src/content/en/til/2021/04/24/cl-generic-precedence.adoc \
+ src/content/en/til/2021/04/24/clojure-autocurry.adoc \
+ src/content/en/til/2021/04/24/scm-nif.adoc \
+ src/content/en/til/2021/07/23/git-tls-gpg.adoc \
+ src/content/en/til/2021/08/11/js-bigint-reviver.adoc \
+ src/content/pt/hea/2020/08/12/arquivo-datado.adoc \
+
+slides.adoc = \
+ src/content/en/slide/2020/10/19/feature-flags.adoc \
+ src/content/en/slide/2020/11/14/local-first-hype.adoc \
+
+categories.adoc = \
+ src/content/en/blog/categories.adoc \
+ src/content/en/pastebin/categories.adoc \
+ src/content/en/podcast/categories.adoc \
+ src/content/en/screencast/categories.adoc \
+ src/content/en/til/categories.adoc \
+ src/content/pt/hea/categorias.adoc \
+
+indexes.adoc = \
+ src/content/en/blog/index.adoc \
+ src/content/en/pastebin/index.adoc \
+ src/content/en/podcast/index.adoc \
+ src/content/en/screencast/index.adoc \
+ src/content/en/til/index.adoc \
+ src/content/pt/hea/index.adoc \
+
+feeds.xml = \
+ src/content/en/blog/feed.xml \
+ src/content/en/pastebin/feed.xml \
+ src/content/en/podcast/feed.xml \
+ src/content/en/screencast/feed.xml \
+ src/content/en/til/feed.xml \
+ src/content/pt/hea/feed.xml \
+
+images.svg = \
+ src/content/img/atom.svg \
+ src/content/img/envelope/dark.svg \
+ src/content/img/envelope/light.svg \
+ src/content/img/favicon.svg \
+ src/content/img/link/dark.svg \
+ src/content/img/link/light.svg \
+ src/content/img/lock/dark.svg \
+ src/content/img/lock/light.svg \
+ src/content/img/logo/dark.svg \
+ src/content/img/logo/light.svg \
+
+sources.media = \
+ src/content/en/podcast/2020/12/19/test-entry.flac \
+ src/content/en/podcast/2020/12/19/test-entry.ogg \
+ src/content/en/screencast/2021/02/07/autoqemu.webm \
+
+sources.tarballs = \
+ src/content/en/blog/2020/10/05/cargo2nix-demo.tar.gz \
+ src/content/en/blog/2020/10/05/cargo2nix.tar.gz \
+ src/content/en/blog/2020/10/05/swift2nix-demo.tar.gz \
+ src/content/en/blog/2020/10/05/swift2nix.tar.gz \
+ src/content/en/blog/2021/02/17/fallible.tar.gz \
+ src/content/en/screencast/2021/02/07/autoqemu.tar.gz \
+
+sources.extras = \
+ src/content/en/blog/2020/10/05/cargo2nix-demo.tar.gz \
+ src/content/en/blog/2020/10/05/cargo2nix.tar.gz \
+ src/content/en/blog/2020/10/05/swift2nix-demo.tar.gz \
+ src/content/en/blog/2020/10/05/swift2nix.tar.gz \
+ src/content/en/blog/2021/02/17/fallible.tar.gz \
+ src/content/en/podcast/2020/12/19/test-entry.flac \
+ src/content/en/podcast/2020/12/19/test-entry.ogg \
+ src/content/en/screencast/2021/02/07/autoqemu.tar.gz \
+ src/content/en/screencast/2021/02/07/autoqemu.webm \
+
+sources.po = \
+ po/de.po \
+ po/en.po \
+ po/eo.po \
+ po/es.po \
+ po/euandre.org.pot \
+ po/fr.po \
+ po/pt.po \
+
+src/content/en/about.html.gz: src/content/en/about.html
+src/content/en/index.html.gz: src/content/en/index.html
+src/content/pt/sobre.html.gz: src/content/pt/sobre.html
+src/content/en/blog/2018/07/17/guix-nixos.html.gz: src/content/en/blog/2018/07/17/guix-nixos.html
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.html.gz: src/content/en/blog/2018/08/01/npm-ci-reproducibility.html
+src/content/en/blog/2018/12/21/ytdl-subs.html.gz: src/content/en/blog/2018/12/21/ytdl-subs.html
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.html.gz: src/content/en/blog/2019/06/02/nixos-stateless-workstation.html
+src/content/en/blog/2020/08/10/guix-srht.html.gz: src/content/en/blog/2020/08/10/guix-srht.html
+src/content/en/blog/2020/08/31/database-i-wish-i-had.html.gz: src/content/en/blog/2020/08/31/database-i-wish-i-had.html
+src/content/en/blog/2020/10/05/cargo2nix.html.gz: src/content/en/blog/2020/10/05/cargo2nix.html
+src/content/en/blog/2020/10/05/swift2nix.html.gz: src/content/en/blog/2020/10/05/swift2nix.html
+src/content/en/blog/2020/10/19/feature-flags.html.gz: src/content/en/blog/2020/10/19/feature-flags.html
+src/content/en/blog/2020/10/20/wrong-interviewing.html.gz: src/content/en/blog/2020/10/20/wrong-interviewing.html
+src/content/en/blog/2020/11/07/diy-bugs.html.gz: src/content/en/blog/2020/11/07/diy-bugs.html
+src/content/en/blog/2020/11/08/paradigm-shift-review.html.gz: src/content/en/blog/2020/11/08/paradigm-shift-review.html
+src/content/en/blog/2020/11/12/database-parsers-trees.html.gz: src/content/en/blog/2020/11/12/database-parsers-trees.html
+src/content/en/blog/2020/11/14/local-first-review.html.gz: src/content/en/blog/2020/11/14/local-first-review.html
+src/content/en/blog/2021/01/26/remembering-ann.html.gz: src/content/en/blog/2021/01/26/remembering-ann.html
+src/content/en/blog/2021/02/17/fallible.html.gz: src/content/en/blog/2021/02/17/fallible.html
+src/content/en/blog/2021/04/29/relational-review.html.gz: src/content/en/blog/2021/04/29/relational-review.html
+src/content/en/pastebin/2016/04/05/rpn.html.gz: src/content/en/pastebin/2016/04/05/rpn.html
+src/content/en/pastebin/2018/07/11/nix-pinning.html.gz: src/content/en/pastebin/2018/07/11/nix-pinning.html
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.html.gz: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.html
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.html.gz: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.html
+src/content/en/pastebin/2018/07/13/guixbuilder.html.gz: src/content/en/pastebin/2018/07/13/guixbuilder.html
+src/content/en/pastebin/2018/07/13/nix-strpad.html.gz: src/content/en/pastebin/2018/07/13/nix-strpad.html
+src/content/en/pastebin/2018/07/25/nix-exps.html.gz: src/content/en/pastebin/2018/07/25/nix-exps.html
+src/content/en/pastebin/2018/07/25/nix-showdrv.html.gz: src/content/en/pastebin/2018/07/25/nix-showdrv.html
+src/content/en/pastebin/2019/06/08/inconsistent-hash.html.gz: src/content/en/pastebin/2019/06/08/inconsistent-hash.html
+src/content/en/pastebin/2019/12/29/raku-tuple-type.html.gz: src/content/en/pastebin/2019/12/29/raku-tuple-type.html
+src/content/en/pastebin/2020/01/04/guix-import-failure.html.gz: src/content/en/pastebin/2020/01/04/guix-import-failure.html
+src/content/en/pastebin/2020/02/14/guix-shebang.html.gz: src/content/en/pastebin/2020/02/14/guix-shebang.html
+src/content/en/pastebin/2020/11/27/guix-build-local.html.gz: src/content/en/pastebin/2020/11/27/guix-build-local.html
+src/content/en/pastebin/2020/12/15/guix-pack-fail.html.gz: src/content/en/pastebin/2020/12/15/guix-pack-fail.html
+src/content/en/pastebin/2021/04/03/naive-slugify-js.html.gz: src/content/en/pastebin/2021/04/03/naive-slugify-js.html
+src/content/en/pastebin/2021/06/08/reading-session-pt1.html.gz: src/content/en/pastebin/2021/06/08/reading-session-pt1.html
+src/content/en/pastebin/2021/06/22/curl-wget.html.gz: src/content/en/pastebin/2021/06/22/curl-wget.html
+src/content/en/pastebin/2021/08/11/h1-spacing.html.gz: src/content/en/pastebin/2021/08/11/h1-spacing.html
+src/content/en/pastebin/2021/09/02/sicp-3-19.html.gz: src/content/en/pastebin/2021/09/02/sicp-3-19.html
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.html.gz: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.html
+src/content/en/pastebin/2022/07/14/git-cleanup.html.gz: src/content/en/pastebin/2022/07/14/git-cleanup.html
+src/content/en/pastebin/2023/07/22/funcallable-amop.html.gz: src/content/en/pastebin/2023/07/22/funcallable-amop.html
+src/content/en/podcast/2020/12/19/test-entry.html.gz: src/content/en/podcast/2020/12/19/test-entry.html
+src/content/en/screencast/2021/02/07/autoqemu.html.gz: src/content/en/screencast/2021/02/07/autoqemu.html
+src/content/en/til/2020/08/12/filename-timestamp.html.gz: src/content/en/til/2020/08/12/filename-timestamp.html
+src/content/en/til/2020/08/13/code-jekyll.html.gz: src/content/en/til/2020/08/13/code-jekyll.html
+src/content/en/til/2020/08/14/browse-git.html.gz: src/content/en/til/2020/08/14/browse-git.html
+src/content/en/til/2020/08/16/git-search.html.gz: src/content/en/til/2020/08/16/git-search.html
+src/content/en/til/2020/08/28/grep-online.html.gz: src/content/en/til/2020/08/28/grep-online.html
+src/content/en/til/2020/09/04/cli-email-fun-profit.html.gz: src/content/en/til/2020/09/04/cli-email-fun-profit.html
+src/content/en/til/2020/09/05/oldschool-pr.html.gz: src/content/en/til/2020/09/05/oldschool-pr.html
+src/content/en/til/2020/10/11/search-git-history.html.gz: src/content/en/til/2020/10/11/search-git-history.html
+src/content/en/til/2020/11/08/find-broken-symlink.html.gz: src/content/en/til/2020/11/08/find-broken-symlink.html
+src/content/en/til/2020/11/12/diy-nix-bash-ci.html.gz: src/content/en/til/2020/11/12/diy-nix-bash-ci.html
+src/content/en/til/2020/11/12/git-bisect-automation.html.gz: src/content/en/til/2020/11/12/git-bisect-automation.html
+src/content/en/til/2020/11/12/useful-bashvars.html.gz: src/content/en/til/2020/11/12/useful-bashvars.html
+src/content/en/til/2020/11/14/gpodder-media.html.gz: src/content/en/til/2020/11/14/gpodder-media.html
+src/content/en/til/2020/11/30/git-notes-ci.html.gz: src/content/en/til/2020/11/30/git-notes-ci.html
+src/content/en/til/2020/12/15/shellcheck-repo.html.gz: src/content/en/til/2020/12/15/shellcheck-repo.html
+src/content/en/til/2020/12/29/svg.html.gz: src/content/en/til/2020/12/29/svg.html
+src/content/en/til/2021/01/12/curl-awk-emails.html.gz: src/content/en/til/2021/01/12/curl-awk-emails.html
+src/content/en/til/2021/01/17/posix-shebang.html.gz: src/content/en/til/2021/01/17/posix-shebang.html
+src/content/en/til/2021/04/24/cl-generic-precedence.html.gz: src/content/en/til/2021/04/24/cl-generic-precedence.html
+src/content/en/til/2021/04/24/clojure-autocurry.html.gz: src/content/en/til/2021/04/24/clojure-autocurry.html
+src/content/en/til/2021/04/24/scm-nif.html.gz: src/content/en/til/2021/04/24/scm-nif.html
+src/content/en/til/2021/07/23/git-tls-gpg.html.gz: src/content/en/til/2021/07/23/git-tls-gpg.html
+src/content/en/til/2021/08/11/js-bigint-reviver.html.gz: src/content/en/til/2021/08/11/js-bigint-reviver.html
+src/content/pt/hea/2020/08/12/arquivo-datado.html.gz: src/content/pt/hea/2020/08/12/arquivo-datado.html
+src/content/en/blog/index.html.gz: src/content/en/blog/index.html
+src/content/en/pastebin/index.html.gz: src/content/en/pastebin/index.html
+src/content/en/podcast/index.html.gz: src/content/en/podcast/index.html
+src/content/en/screencast/index.html.gz: src/content/en/screencast/index.html
+src/content/en/til/index.html.gz: src/content/en/til/index.html
+src/content/pt/hea/index.html.gz: src/content/pt/hea/index.html
+src/content/en/blog/categories.html.gz: src/content/en/blog/categories.html
+src/content/en/pastebin/categories.html.gz: src/content/en/pastebin/categories.html
+src/content/en/podcast/categories.html.gz: src/content/en/podcast/categories.html
+src/content/en/screencast/categories.html.gz: src/content/en/screencast/categories.html
+src/content/en/til/categories.html.gz: src/content/en/til/categories.html
+src/content/pt/hea/categorias.html.gz: src/content/pt/hea/categorias.html
+src/content/en/about.snippets.gz: src/content/en/about.snippets
+src/content/en/index.snippets.gz: src/content/en/index.snippets
+src/content/pt/sobre.snippets.gz: src/content/pt/sobre.snippets
+src/content/en/blog/2018/07/17/guix-nixos.snippets.gz: src/content/en/blog/2018/07/17/guix-nixos.snippets
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.snippets.gz: src/content/en/blog/2018/08/01/npm-ci-reproducibility.snippets
+src/content/en/blog/2018/12/21/ytdl-subs.snippets.gz: src/content/en/blog/2018/12/21/ytdl-subs.snippets
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.snippets.gz: src/content/en/blog/2019/06/02/nixos-stateless-workstation.snippets
+src/content/en/blog/2020/08/10/guix-srht.snippets.gz: src/content/en/blog/2020/08/10/guix-srht.snippets
+src/content/en/blog/2020/08/31/database-i-wish-i-had.snippets.gz: src/content/en/blog/2020/08/31/database-i-wish-i-had.snippets
+src/content/en/blog/2020/10/05/cargo2nix.snippets.gz: src/content/en/blog/2020/10/05/cargo2nix.snippets
+src/content/en/blog/2020/10/05/swift2nix.snippets.gz: src/content/en/blog/2020/10/05/swift2nix.snippets
+src/content/en/blog/2020/10/19/feature-flags.snippets.gz: src/content/en/blog/2020/10/19/feature-flags.snippets
+src/content/en/blog/2020/10/20/wrong-interviewing.snippets.gz: src/content/en/blog/2020/10/20/wrong-interviewing.snippets
+src/content/en/blog/2020/11/07/diy-bugs.snippets.gz: src/content/en/blog/2020/11/07/diy-bugs.snippets
+src/content/en/blog/2020/11/08/paradigm-shift-review.snippets.gz: src/content/en/blog/2020/11/08/paradigm-shift-review.snippets
+src/content/en/blog/2020/11/12/database-parsers-trees.snippets.gz: src/content/en/blog/2020/11/12/database-parsers-trees.snippets
+src/content/en/blog/2020/11/14/local-first-review.snippets.gz: src/content/en/blog/2020/11/14/local-first-review.snippets
+src/content/en/blog/2021/01/26/remembering-ann.snippets.gz: src/content/en/blog/2021/01/26/remembering-ann.snippets
+src/content/en/blog/2021/02/17/fallible.snippets.gz: src/content/en/blog/2021/02/17/fallible.snippets
+src/content/en/blog/2021/04/29/relational-review.snippets.gz: src/content/en/blog/2021/04/29/relational-review.snippets
+src/content/en/pastebin/2016/04/05/rpn.snippets.gz: src/content/en/pastebin/2016/04/05/rpn.snippets
+src/content/en/pastebin/2018/07/11/nix-pinning.snippets.gz: src/content/en/pastebin/2018/07/11/nix-pinning.snippets
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.snippets.gz: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.snippets
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.snippets.gz: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.snippets
+src/content/en/pastebin/2018/07/13/guixbuilder.snippets.gz: src/content/en/pastebin/2018/07/13/guixbuilder.snippets
+src/content/en/pastebin/2018/07/13/nix-strpad.snippets.gz: src/content/en/pastebin/2018/07/13/nix-strpad.snippets
+src/content/en/pastebin/2018/07/25/nix-exps.snippets.gz: src/content/en/pastebin/2018/07/25/nix-exps.snippets
+src/content/en/pastebin/2018/07/25/nix-showdrv.snippets.gz: src/content/en/pastebin/2018/07/25/nix-showdrv.snippets
+src/content/en/pastebin/2019/06/08/inconsistent-hash.snippets.gz: src/content/en/pastebin/2019/06/08/inconsistent-hash.snippets
+src/content/en/pastebin/2019/12/29/raku-tuple-type.snippets.gz: src/content/en/pastebin/2019/12/29/raku-tuple-type.snippets
+src/content/en/pastebin/2020/01/04/guix-import-failure.snippets.gz: src/content/en/pastebin/2020/01/04/guix-import-failure.snippets
+src/content/en/pastebin/2020/02/14/guix-shebang.snippets.gz: src/content/en/pastebin/2020/02/14/guix-shebang.snippets
+src/content/en/pastebin/2020/11/27/guix-build-local.snippets.gz: src/content/en/pastebin/2020/11/27/guix-build-local.snippets
+src/content/en/pastebin/2020/12/15/guix-pack-fail.snippets.gz: src/content/en/pastebin/2020/12/15/guix-pack-fail.snippets
+src/content/en/pastebin/2021/04/03/naive-slugify-js.snippets.gz: src/content/en/pastebin/2021/04/03/naive-slugify-js.snippets
+src/content/en/pastebin/2021/06/08/reading-session-pt1.snippets.gz: src/content/en/pastebin/2021/06/08/reading-session-pt1.snippets
+src/content/en/pastebin/2021/06/22/curl-wget.snippets.gz: src/content/en/pastebin/2021/06/22/curl-wget.snippets
+src/content/en/pastebin/2021/08/11/h1-spacing.snippets.gz: src/content/en/pastebin/2021/08/11/h1-spacing.snippets
+src/content/en/pastebin/2021/09/02/sicp-3-19.snippets.gz: src/content/en/pastebin/2021/09/02/sicp-3-19.snippets
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.snippets.gz: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.snippets
+src/content/en/pastebin/2022/07/14/git-cleanup.snippets.gz: src/content/en/pastebin/2022/07/14/git-cleanup.snippets
+src/content/en/pastebin/2023/07/22/funcallable-amop.snippets.gz: src/content/en/pastebin/2023/07/22/funcallable-amop.snippets
+src/content/en/podcast/2020/12/19/test-entry.snippets.gz: src/content/en/podcast/2020/12/19/test-entry.snippets
+src/content/en/screencast/2021/02/07/autoqemu.snippets.gz: src/content/en/screencast/2021/02/07/autoqemu.snippets
+src/content/en/til/2020/08/12/filename-timestamp.snippets.gz: src/content/en/til/2020/08/12/filename-timestamp.snippets
+src/content/en/til/2020/08/13/code-jekyll.snippets.gz: src/content/en/til/2020/08/13/code-jekyll.snippets
+src/content/en/til/2020/08/14/browse-git.snippets.gz: src/content/en/til/2020/08/14/browse-git.snippets
+src/content/en/til/2020/08/16/git-search.snippets.gz: src/content/en/til/2020/08/16/git-search.snippets
+src/content/en/til/2020/08/28/grep-online.snippets.gz: src/content/en/til/2020/08/28/grep-online.snippets
+src/content/en/til/2020/09/04/cli-email-fun-profit.snippets.gz: src/content/en/til/2020/09/04/cli-email-fun-profit.snippets
+src/content/en/til/2020/09/05/oldschool-pr.snippets.gz: src/content/en/til/2020/09/05/oldschool-pr.snippets
+src/content/en/til/2020/10/11/search-git-history.snippets.gz: src/content/en/til/2020/10/11/search-git-history.snippets
+src/content/en/til/2020/11/08/find-broken-symlink.snippets.gz: src/content/en/til/2020/11/08/find-broken-symlink.snippets
+src/content/en/til/2020/11/12/diy-nix-bash-ci.snippets.gz: src/content/en/til/2020/11/12/diy-nix-bash-ci.snippets
+src/content/en/til/2020/11/12/git-bisect-automation.snippets.gz: src/content/en/til/2020/11/12/git-bisect-automation.snippets
+src/content/en/til/2020/11/12/useful-bashvars.snippets.gz: src/content/en/til/2020/11/12/useful-bashvars.snippets
+src/content/en/til/2020/11/14/gpodder-media.snippets.gz: src/content/en/til/2020/11/14/gpodder-media.snippets
+src/content/en/til/2020/11/30/git-notes-ci.snippets.gz: src/content/en/til/2020/11/30/git-notes-ci.snippets
+src/content/en/til/2020/12/15/shellcheck-repo.snippets.gz: src/content/en/til/2020/12/15/shellcheck-repo.snippets
+src/content/en/til/2020/12/29/svg.snippets.gz: src/content/en/til/2020/12/29/svg.snippets
+src/content/en/til/2021/01/12/curl-awk-emails.snippets.gz: src/content/en/til/2021/01/12/curl-awk-emails.snippets
+src/content/en/til/2021/01/17/posix-shebang.snippets.gz: src/content/en/til/2021/01/17/posix-shebang.snippets
+src/content/en/til/2021/04/24/cl-generic-precedence.snippets.gz: src/content/en/til/2021/04/24/cl-generic-precedence.snippets
+src/content/en/til/2021/04/24/clojure-autocurry.snippets.gz: src/content/en/til/2021/04/24/clojure-autocurry.snippets
+src/content/en/til/2021/04/24/scm-nif.snippets.gz: src/content/en/til/2021/04/24/scm-nif.snippets
+src/content/en/til/2021/07/23/git-tls-gpg.snippets.gz: src/content/en/til/2021/07/23/git-tls-gpg.snippets
+src/content/en/til/2021/08/11/js-bigint-reviver.snippets.gz: src/content/en/til/2021/08/11/js-bigint-reviver.snippets
+src/content/pt/hea/2020/08/12/arquivo-datado.snippets.gz: src/content/pt/hea/2020/08/12/arquivo-datado.snippets
+src/content/en/blog/index.snippets.gz: src/content/en/blog/index.snippets
+src/content/en/pastebin/index.snippets.gz: src/content/en/pastebin/index.snippets
+src/content/en/podcast/index.snippets.gz: src/content/en/podcast/index.snippets
+src/content/en/screencast/index.snippets.gz: src/content/en/screencast/index.snippets
+src/content/en/til/index.snippets.gz: src/content/en/til/index.snippets
+src/content/pt/hea/index.snippets.gz: src/content/pt/hea/index.snippets
+src/content/en/blog/categories.snippets.gz: src/content/en/blog/categories.snippets
+src/content/en/pastebin/categories.snippets.gz: src/content/en/pastebin/categories.snippets
+src/content/en/podcast/categories.snippets.gz: src/content/en/podcast/categories.snippets
+src/content/en/screencast/categories.snippets.gz: src/content/en/screencast/categories.snippets
+src/content/en/til/categories.snippets.gz: src/content/en/til/categories.snippets
+src/content/pt/hea/categorias.snippets.gz: src/content/pt/hea/categorias.snippets
+src/content/en/slide/2020/10/19/feature-flags.pdf.gz: src/content/en/slide/2020/10/19/feature-flags.pdf
+src/content/en/slide/2020/11/14/local-first-hype.pdf.gz: src/content/en/slide/2020/11/14/local-first-hype.pdf
+src/content/en/blog/feed.xml.gz: src/content/en/blog/feed.xml
+src/content/en/pastebin/feed.xml.gz: src/content/en/pastebin/feed.xml
+src/content/en/podcast/feed.xml.gz: src/content/en/podcast/feed.xml
+src/content/en/screencast/feed.xml.gz: src/content/en/screencast/feed.xml
+src/content/en/til/feed.xml.gz: src/content/en/til/feed.xml
+src/content/pt/hea/feed.xml.gz: src/content/pt/hea/feed.xml
+src/content/en/podcast/2020/12/19/test-entry.flac.torrent.gz: src/content/en/podcast/2020/12/19/test-entry.flac.torrent
+src/content/en/podcast/2020/12/19/test-entry.ogg.torrent.gz: src/content/en/podcast/2020/12/19/test-entry.ogg.torrent
+src/content/en/screencast/2021/02/07/autoqemu.webm.torrent.gz: src/content/en/screencast/2021/02/07/autoqemu.webm.torrent
+
+src/content/en/about.htmlbody src/content/en/about.snippets src/content/en/about.conf: src/content/en/about.adoc
+src/content/en/index.htmlbody src/content/en/index.snippets src/content/en/index.conf: src/content/en/index.adoc
+src/content/pt/sobre.htmlbody src/content/pt/sobre.snippets src/content/pt/sobre.conf: src/content/pt/sobre.adoc
+src/content/en/blog/2018/07/17/guix-nixos.htmlbody src/content/en/blog/2018/07/17/guix-nixos.snippets src/content/en/blog/2018/07/17/guix-nixos.conf: src/content/en/blog/2018/07/17/guix-nixos.adoc
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.htmlbody src/content/en/blog/2018/08/01/npm-ci-reproducibility.snippets src/content/en/blog/2018/08/01/npm-ci-reproducibility.conf: src/content/en/blog/2018/08/01/npm-ci-reproducibility.adoc
+src/content/en/blog/2018/12/21/ytdl-subs.htmlbody src/content/en/blog/2018/12/21/ytdl-subs.snippets src/content/en/blog/2018/12/21/ytdl-subs.conf: src/content/en/blog/2018/12/21/ytdl-subs.adoc
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.htmlbody src/content/en/blog/2019/06/02/nixos-stateless-workstation.snippets src/content/en/blog/2019/06/02/nixos-stateless-workstation.conf: src/content/en/blog/2019/06/02/nixos-stateless-workstation.adoc
+src/content/en/blog/2020/08/10/guix-srht.htmlbody src/content/en/blog/2020/08/10/guix-srht.snippets src/content/en/blog/2020/08/10/guix-srht.conf: src/content/en/blog/2020/08/10/guix-srht.adoc
+src/content/en/blog/2020/08/31/database-i-wish-i-had.htmlbody src/content/en/blog/2020/08/31/database-i-wish-i-had.snippets src/content/en/blog/2020/08/31/database-i-wish-i-had.conf: src/content/en/blog/2020/08/31/database-i-wish-i-had.adoc
+src/content/en/blog/2020/10/05/cargo2nix.htmlbody src/content/en/blog/2020/10/05/cargo2nix.snippets src/content/en/blog/2020/10/05/cargo2nix.conf: src/content/en/blog/2020/10/05/cargo2nix.adoc
+src/content/en/blog/2020/10/05/swift2nix.htmlbody src/content/en/blog/2020/10/05/swift2nix.snippets src/content/en/blog/2020/10/05/swift2nix.conf: src/content/en/blog/2020/10/05/swift2nix.adoc
+src/content/en/blog/2020/10/19/feature-flags.htmlbody src/content/en/blog/2020/10/19/feature-flags.snippets src/content/en/blog/2020/10/19/feature-flags.conf: src/content/en/blog/2020/10/19/feature-flags.adoc
+src/content/en/blog/2020/10/20/wrong-interviewing.htmlbody src/content/en/blog/2020/10/20/wrong-interviewing.snippets src/content/en/blog/2020/10/20/wrong-interviewing.conf: src/content/en/blog/2020/10/20/wrong-interviewing.adoc
+src/content/en/blog/2020/11/07/diy-bugs.htmlbody src/content/en/blog/2020/11/07/diy-bugs.snippets src/content/en/blog/2020/11/07/diy-bugs.conf: src/content/en/blog/2020/11/07/diy-bugs.adoc
+src/content/en/blog/2020/11/08/paradigm-shift-review.htmlbody src/content/en/blog/2020/11/08/paradigm-shift-review.snippets src/content/en/blog/2020/11/08/paradigm-shift-review.conf: src/content/en/blog/2020/11/08/paradigm-shift-review.adoc
+src/content/en/blog/2020/11/12/database-parsers-trees.htmlbody src/content/en/blog/2020/11/12/database-parsers-trees.snippets src/content/en/blog/2020/11/12/database-parsers-trees.conf: src/content/en/blog/2020/11/12/database-parsers-trees.adoc
+src/content/en/blog/2020/11/14/local-first-review.htmlbody src/content/en/blog/2020/11/14/local-first-review.snippets src/content/en/blog/2020/11/14/local-first-review.conf: src/content/en/blog/2020/11/14/local-first-review.adoc
+src/content/en/blog/2021/01/26/remembering-ann.htmlbody src/content/en/blog/2021/01/26/remembering-ann.snippets src/content/en/blog/2021/01/26/remembering-ann.conf: src/content/en/blog/2021/01/26/remembering-ann.adoc
+src/content/en/blog/2021/02/17/fallible.htmlbody src/content/en/blog/2021/02/17/fallible.snippets src/content/en/blog/2021/02/17/fallible.conf: src/content/en/blog/2021/02/17/fallible.adoc
+src/content/en/blog/2021/04/29/relational-review.htmlbody src/content/en/blog/2021/04/29/relational-review.snippets src/content/en/blog/2021/04/29/relational-review.conf: src/content/en/blog/2021/04/29/relational-review.adoc
+src/content/en/pastebin/2016/04/05/rpn.htmlbody src/content/en/pastebin/2016/04/05/rpn.snippets src/content/en/pastebin/2016/04/05/rpn.conf: src/content/en/pastebin/2016/04/05/rpn.adoc
+src/content/en/pastebin/2018/07/11/nix-pinning.htmlbody src/content/en/pastebin/2018/07/11/nix-pinning.snippets src/content/en/pastebin/2018/07/11/nix-pinning.conf: src/content/en/pastebin/2018/07/11/nix-pinning.adoc
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.htmlbody src/content/en/pastebin/2018/07/13/guix-nixos-systemd.snippets src/content/en/pastebin/2018/07/13/guix-nixos-systemd.conf: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.adoc
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.htmlbody src/content/en/pastebin/2018/07/13/guixbuilder-nixos.snippets src/content/en/pastebin/2018/07/13/guixbuilder-nixos.conf: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.adoc
+src/content/en/pastebin/2018/07/13/guixbuilder.htmlbody src/content/en/pastebin/2018/07/13/guixbuilder.snippets src/content/en/pastebin/2018/07/13/guixbuilder.conf: src/content/en/pastebin/2018/07/13/guixbuilder.adoc
+src/content/en/pastebin/2018/07/13/nix-strpad.htmlbody src/content/en/pastebin/2018/07/13/nix-strpad.snippets src/content/en/pastebin/2018/07/13/nix-strpad.conf: src/content/en/pastebin/2018/07/13/nix-strpad.adoc
+src/content/en/pastebin/2018/07/25/nix-exps.htmlbody src/content/en/pastebin/2018/07/25/nix-exps.snippets src/content/en/pastebin/2018/07/25/nix-exps.conf: src/content/en/pastebin/2018/07/25/nix-exps.adoc
+src/content/en/pastebin/2018/07/25/nix-showdrv.htmlbody src/content/en/pastebin/2018/07/25/nix-showdrv.snippets src/content/en/pastebin/2018/07/25/nix-showdrv.conf: src/content/en/pastebin/2018/07/25/nix-showdrv.adoc
+src/content/en/pastebin/2019/06/08/inconsistent-hash.htmlbody src/content/en/pastebin/2019/06/08/inconsistent-hash.snippets src/content/en/pastebin/2019/06/08/inconsistent-hash.conf: src/content/en/pastebin/2019/06/08/inconsistent-hash.adoc
+src/content/en/pastebin/2019/12/29/raku-tuple-type.htmlbody src/content/en/pastebin/2019/12/29/raku-tuple-type.snippets src/content/en/pastebin/2019/12/29/raku-tuple-type.conf: src/content/en/pastebin/2019/12/29/raku-tuple-type.adoc
+src/content/en/pastebin/2020/01/04/guix-import-failure.htmlbody src/content/en/pastebin/2020/01/04/guix-import-failure.snippets src/content/en/pastebin/2020/01/04/guix-import-failure.conf: src/content/en/pastebin/2020/01/04/guix-import-failure.adoc
+src/content/en/pastebin/2020/02/14/guix-shebang.htmlbody src/content/en/pastebin/2020/02/14/guix-shebang.snippets src/content/en/pastebin/2020/02/14/guix-shebang.conf: src/content/en/pastebin/2020/02/14/guix-shebang.adoc
+src/content/en/pastebin/2020/11/27/guix-build-local.htmlbody src/content/en/pastebin/2020/11/27/guix-build-local.snippets src/content/en/pastebin/2020/11/27/guix-build-local.conf: src/content/en/pastebin/2020/11/27/guix-build-local.adoc
+src/content/en/pastebin/2020/12/15/guix-pack-fail.htmlbody src/content/en/pastebin/2020/12/15/guix-pack-fail.snippets src/content/en/pastebin/2020/12/15/guix-pack-fail.conf: src/content/en/pastebin/2020/12/15/guix-pack-fail.adoc
+src/content/en/pastebin/2021/04/03/naive-slugify-js.htmlbody src/content/en/pastebin/2021/04/03/naive-slugify-js.snippets src/content/en/pastebin/2021/04/03/naive-slugify-js.conf: src/content/en/pastebin/2021/04/03/naive-slugify-js.adoc
+src/content/en/pastebin/2021/06/08/reading-session-pt1.htmlbody src/content/en/pastebin/2021/06/08/reading-session-pt1.snippets src/content/en/pastebin/2021/06/08/reading-session-pt1.conf: src/content/en/pastebin/2021/06/08/reading-session-pt1.adoc
+src/content/en/pastebin/2021/06/22/curl-wget.htmlbody src/content/en/pastebin/2021/06/22/curl-wget.snippets src/content/en/pastebin/2021/06/22/curl-wget.conf: src/content/en/pastebin/2021/06/22/curl-wget.adoc
+src/content/en/pastebin/2021/08/11/h1-spacing.htmlbody src/content/en/pastebin/2021/08/11/h1-spacing.snippets src/content/en/pastebin/2021/08/11/h1-spacing.conf: src/content/en/pastebin/2021/08/11/h1-spacing.adoc
+src/content/en/pastebin/2021/09/02/sicp-3-19.htmlbody src/content/en/pastebin/2021/09/02/sicp-3-19.snippets src/content/en/pastebin/2021/09/02/sicp-3-19.conf: src/content/en/pastebin/2021/09/02/sicp-3-19.adoc
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.htmlbody src/content/en/pastebin/2021/09/03/sicp-persistent-queue.snippets src/content/en/pastebin/2021/09/03/sicp-persistent-queue.conf: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.adoc
+src/content/en/pastebin/2022/07/14/git-cleanup.htmlbody src/content/en/pastebin/2022/07/14/git-cleanup.snippets src/content/en/pastebin/2022/07/14/git-cleanup.conf: src/content/en/pastebin/2022/07/14/git-cleanup.adoc
+src/content/en/pastebin/2023/07/22/funcallable-amop.htmlbody src/content/en/pastebin/2023/07/22/funcallable-amop.snippets src/content/en/pastebin/2023/07/22/funcallable-amop.conf: src/content/en/pastebin/2023/07/22/funcallable-amop.adoc
+src/content/en/podcast/2020/12/19/test-entry.htmlbody src/content/en/podcast/2020/12/19/test-entry.snippets src/content/en/podcast/2020/12/19/test-entry.conf: src/content/en/podcast/2020/12/19/test-entry.adoc
+src/content/en/screencast/2021/02/07/autoqemu.htmlbody src/content/en/screencast/2021/02/07/autoqemu.snippets src/content/en/screencast/2021/02/07/autoqemu.conf: src/content/en/screencast/2021/02/07/autoqemu.adoc
+src/content/en/til/2020/08/12/filename-timestamp.htmlbody src/content/en/til/2020/08/12/filename-timestamp.snippets src/content/en/til/2020/08/12/filename-timestamp.conf: src/content/en/til/2020/08/12/filename-timestamp.adoc
+src/content/en/til/2020/08/13/code-jekyll.htmlbody src/content/en/til/2020/08/13/code-jekyll.snippets src/content/en/til/2020/08/13/code-jekyll.conf: src/content/en/til/2020/08/13/code-jekyll.adoc
+src/content/en/til/2020/08/14/browse-git.htmlbody src/content/en/til/2020/08/14/browse-git.snippets src/content/en/til/2020/08/14/browse-git.conf: src/content/en/til/2020/08/14/browse-git.adoc
+src/content/en/til/2020/08/16/git-search.htmlbody src/content/en/til/2020/08/16/git-search.snippets src/content/en/til/2020/08/16/git-search.conf: src/content/en/til/2020/08/16/git-search.adoc
+src/content/en/til/2020/08/28/grep-online.htmlbody src/content/en/til/2020/08/28/grep-online.snippets src/content/en/til/2020/08/28/grep-online.conf: src/content/en/til/2020/08/28/grep-online.adoc
+src/content/en/til/2020/09/04/cli-email-fun-profit.htmlbody src/content/en/til/2020/09/04/cli-email-fun-profit.snippets src/content/en/til/2020/09/04/cli-email-fun-profit.conf: src/content/en/til/2020/09/04/cli-email-fun-profit.adoc
+src/content/en/til/2020/09/05/oldschool-pr.htmlbody src/content/en/til/2020/09/05/oldschool-pr.snippets src/content/en/til/2020/09/05/oldschool-pr.conf: src/content/en/til/2020/09/05/oldschool-pr.adoc
+src/content/en/til/2020/10/11/search-git-history.htmlbody src/content/en/til/2020/10/11/search-git-history.snippets src/content/en/til/2020/10/11/search-git-history.conf: src/content/en/til/2020/10/11/search-git-history.adoc
+src/content/en/til/2020/11/08/find-broken-symlink.htmlbody src/content/en/til/2020/11/08/find-broken-symlink.snippets src/content/en/til/2020/11/08/find-broken-symlink.conf: src/content/en/til/2020/11/08/find-broken-symlink.adoc
+src/content/en/til/2020/11/12/diy-nix-bash-ci.htmlbody src/content/en/til/2020/11/12/diy-nix-bash-ci.snippets src/content/en/til/2020/11/12/diy-nix-bash-ci.conf: src/content/en/til/2020/11/12/diy-nix-bash-ci.adoc
+src/content/en/til/2020/11/12/git-bisect-automation.htmlbody src/content/en/til/2020/11/12/git-bisect-automation.snippets src/content/en/til/2020/11/12/git-bisect-automation.conf: src/content/en/til/2020/11/12/git-bisect-automation.adoc
+src/content/en/til/2020/11/12/useful-bashvars.htmlbody src/content/en/til/2020/11/12/useful-bashvars.snippets src/content/en/til/2020/11/12/useful-bashvars.conf: src/content/en/til/2020/11/12/useful-bashvars.adoc
+src/content/en/til/2020/11/14/gpodder-media.htmlbody src/content/en/til/2020/11/14/gpodder-media.snippets src/content/en/til/2020/11/14/gpodder-media.conf: src/content/en/til/2020/11/14/gpodder-media.adoc
+src/content/en/til/2020/11/30/git-notes-ci.htmlbody src/content/en/til/2020/11/30/git-notes-ci.snippets src/content/en/til/2020/11/30/git-notes-ci.conf: src/content/en/til/2020/11/30/git-notes-ci.adoc
+src/content/en/til/2020/12/15/shellcheck-repo.htmlbody src/content/en/til/2020/12/15/shellcheck-repo.snippets src/content/en/til/2020/12/15/shellcheck-repo.conf: src/content/en/til/2020/12/15/shellcheck-repo.adoc
+src/content/en/til/2020/12/29/svg.htmlbody src/content/en/til/2020/12/29/svg.snippets src/content/en/til/2020/12/29/svg.conf: src/content/en/til/2020/12/29/svg.adoc
+src/content/en/til/2021/01/12/curl-awk-emails.htmlbody src/content/en/til/2021/01/12/curl-awk-emails.snippets src/content/en/til/2021/01/12/curl-awk-emails.conf: src/content/en/til/2021/01/12/curl-awk-emails.adoc
+src/content/en/til/2021/01/17/posix-shebang.htmlbody src/content/en/til/2021/01/17/posix-shebang.snippets src/content/en/til/2021/01/17/posix-shebang.conf: src/content/en/til/2021/01/17/posix-shebang.adoc
+src/content/en/til/2021/04/24/cl-generic-precedence.htmlbody src/content/en/til/2021/04/24/cl-generic-precedence.snippets src/content/en/til/2021/04/24/cl-generic-precedence.conf: src/content/en/til/2021/04/24/cl-generic-precedence.adoc
+src/content/en/til/2021/04/24/clojure-autocurry.htmlbody src/content/en/til/2021/04/24/clojure-autocurry.snippets src/content/en/til/2021/04/24/clojure-autocurry.conf: src/content/en/til/2021/04/24/clojure-autocurry.adoc
+src/content/en/til/2021/04/24/scm-nif.htmlbody src/content/en/til/2021/04/24/scm-nif.snippets src/content/en/til/2021/04/24/scm-nif.conf: src/content/en/til/2021/04/24/scm-nif.adoc
+src/content/en/til/2021/07/23/git-tls-gpg.htmlbody src/content/en/til/2021/07/23/git-tls-gpg.snippets src/content/en/til/2021/07/23/git-tls-gpg.conf: src/content/en/til/2021/07/23/git-tls-gpg.adoc
+src/content/en/til/2021/08/11/js-bigint-reviver.htmlbody src/content/en/til/2021/08/11/js-bigint-reviver.snippets src/content/en/til/2021/08/11/js-bigint-reviver.conf: src/content/en/til/2021/08/11/js-bigint-reviver.adoc
+src/content/pt/hea/2020/08/12/arquivo-datado.htmlbody src/content/pt/hea/2020/08/12/arquivo-datado.snippets src/content/pt/hea/2020/08/12/arquivo-datado.conf: src/content/pt/hea/2020/08/12/arquivo-datado.adoc
+src/content/en/blog/index.htmlbody src/content/en/blog/index.snippets src/content/en/blog/index.conf: src/content/en/blog/index.adoc
+src/content/en/pastebin/index.htmlbody src/content/en/pastebin/index.snippets src/content/en/pastebin/index.conf: src/content/en/pastebin/index.adoc
+src/content/en/podcast/index.htmlbody src/content/en/podcast/index.snippets src/content/en/podcast/index.conf: src/content/en/podcast/index.adoc
+src/content/en/screencast/index.htmlbody src/content/en/screencast/index.snippets src/content/en/screencast/index.conf: src/content/en/screencast/index.adoc
+src/content/en/til/index.htmlbody src/content/en/til/index.snippets src/content/en/til/index.conf: src/content/en/til/index.adoc
+src/content/pt/hea/index.htmlbody src/content/pt/hea/index.snippets src/content/pt/hea/index.conf: src/content/pt/hea/index.adoc
+src/content/en/blog/categories.htmlbody src/content/en/blog/categories.snippets src/content/en/blog/categories.conf: src/content/en/blog/categories.adoc
+src/content/en/pastebin/categories.htmlbody src/content/en/pastebin/categories.snippets src/content/en/pastebin/categories.conf: src/content/en/pastebin/categories.adoc
+src/content/en/podcast/categories.htmlbody src/content/en/podcast/categories.snippets src/content/en/podcast/categories.conf: src/content/en/podcast/categories.adoc
+src/content/en/screencast/categories.htmlbody src/content/en/screencast/categories.snippets src/content/en/screencast/categories.conf: src/content/en/screencast/categories.adoc
+src/content/en/til/categories.htmlbody src/content/en/til/categories.snippets src/content/en/til/categories.conf: src/content/en/til/categories.adoc
+src/content/pt/hea/categorias.htmlbody src/content/pt/hea/categorias.snippets src/content/pt/hea/categorias.conf: src/content/pt/hea/categorias.adoc
+src/content/en/about.html: src/content/en/about.conf src/content/en/about.htmlbody
+src/content/en/index.html: src/content/en/index.conf src/content/en/index.htmlbody
+src/content/pt/sobre.html: src/content/pt/sobre.conf src/content/pt/sobre.htmlbody
+src/content/en/blog/2018/07/17/guix-nixos.html: src/content/en/blog/2018/07/17/guix-nixos.conf src/content/en/blog/2018/07/17/guix-nixos.htmlbody
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.html: src/content/en/blog/2018/08/01/npm-ci-reproducibility.conf src/content/en/blog/2018/08/01/npm-ci-reproducibility.htmlbody
+src/content/en/blog/2018/12/21/ytdl-subs.html: src/content/en/blog/2018/12/21/ytdl-subs.conf src/content/en/blog/2018/12/21/ytdl-subs.htmlbody
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.html: src/content/en/blog/2019/06/02/nixos-stateless-workstation.conf src/content/en/blog/2019/06/02/nixos-stateless-workstation.htmlbody
+src/content/en/blog/2020/08/10/guix-srht.html: src/content/en/blog/2020/08/10/guix-srht.conf src/content/en/blog/2020/08/10/guix-srht.htmlbody
+src/content/en/blog/2020/08/31/database-i-wish-i-had.html: src/content/en/blog/2020/08/31/database-i-wish-i-had.conf src/content/en/blog/2020/08/31/database-i-wish-i-had.htmlbody
+src/content/en/blog/2020/10/05/cargo2nix.html: src/content/en/blog/2020/10/05/cargo2nix.conf src/content/en/blog/2020/10/05/cargo2nix.htmlbody
+src/content/en/blog/2020/10/05/swift2nix.html: src/content/en/blog/2020/10/05/swift2nix.conf src/content/en/blog/2020/10/05/swift2nix.htmlbody
+src/content/en/blog/2020/10/19/feature-flags.html: src/content/en/blog/2020/10/19/feature-flags.conf src/content/en/blog/2020/10/19/feature-flags.htmlbody
+src/content/en/blog/2020/10/20/wrong-interviewing.html: src/content/en/blog/2020/10/20/wrong-interviewing.conf src/content/en/blog/2020/10/20/wrong-interviewing.htmlbody
+src/content/en/blog/2020/11/07/diy-bugs.html: src/content/en/blog/2020/11/07/diy-bugs.conf src/content/en/blog/2020/11/07/diy-bugs.htmlbody
+src/content/en/blog/2020/11/08/paradigm-shift-review.html: src/content/en/blog/2020/11/08/paradigm-shift-review.conf src/content/en/blog/2020/11/08/paradigm-shift-review.htmlbody
+src/content/en/blog/2020/11/12/database-parsers-trees.html: src/content/en/blog/2020/11/12/database-parsers-trees.conf src/content/en/blog/2020/11/12/database-parsers-trees.htmlbody
+src/content/en/blog/2020/11/14/local-first-review.html: src/content/en/blog/2020/11/14/local-first-review.conf src/content/en/blog/2020/11/14/local-first-review.htmlbody
+src/content/en/blog/2021/01/26/remembering-ann.html: src/content/en/blog/2021/01/26/remembering-ann.conf src/content/en/blog/2021/01/26/remembering-ann.htmlbody
+src/content/en/blog/2021/02/17/fallible.html: src/content/en/blog/2021/02/17/fallible.conf src/content/en/blog/2021/02/17/fallible.htmlbody
+src/content/en/blog/2021/04/29/relational-review.html: src/content/en/blog/2021/04/29/relational-review.conf src/content/en/blog/2021/04/29/relational-review.htmlbody
+src/content/en/pastebin/2016/04/05/rpn.html: src/content/en/pastebin/2016/04/05/rpn.conf src/content/en/pastebin/2016/04/05/rpn.htmlbody
+src/content/en/pastebin/2018/07/11/nix-pinning.html: src/content/en/pastebin/2018/07/11/nix-pinning.conf src/content/en/pastebin/2018/07/11/nix-pinning.htmlbody
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.html: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.conf src/content/en/pastebin/2018/07/13/guix-nixos-systemd.htmlbody
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.html: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.conf src/content/en/pastebin/2018/07/13/guixbuilder-nixos.htmlbody
+src/content/en/pastebin/2018/07/13/guixbuilder.html: src/content/en/pastebin/2018/07/13/guixbuilder.conf src/content/en/pastebin/2018/07/13/guixbuilder.htmlbody
+src/content/en/pastebin/2018/07/13/nix-strpad.html: src/content/en/pastebin/2018/07/13/nix-strpad.conf src/content/en/pastebin/2018/07/13/nix-strpad.htmlbody
+src/content/en/pastebin/2018/07/25/nix-exps.html: src/content/en/pastebin/2018/07/25/nix-exps.conf src/content/en/pastebin/2018/07/25/nix-exps.htmlbody
+src/content/en/pastebin/2018/07/25/nix-showdrv.html: src/content/en/pastebin/2018/07/25/nix-showdrv.conf src/content/en/pastebin/2018/07/25/nix-showdrv.htmlbody
+src/content/en/pastebin/2019/06/08/inconsistent-hash.html: src/content/en/pastebin/2019/06/08/inconsistent-hash.conf src/content/en/pastebin/2019/06/08/inconsistent-hash.htmlbody
+src/content/en/pastebin/2019/12/29/raku-tuple-type.html: src/content/en/pastebin/2019/12/29/raku-tuple-type.conf src/content/en/pastebin/2019/12/29/raku-tuple-type.htmlbody
+src/content/en/pastebin/2020/01/04/guix-import-failure.html: src/content/en/pastebin/2020/01/04/guix-import-failure.conf src/content/en/pastebin/2020/01/04/guix-import-failure.htmlbody
+src/content/en/pastebin/2020/02/14/guix-shebang.html: src/content/en/pastebin/2020/02/14/guix-shebang.conf src/content/en/pastebin/2020/02/14/guix-shebang.htmlbody
+src/content/en/pastebin/2020/11/27/guix-build-local.html: src/content/en/pastebin/2020/11/27/guix-build-local.conf src/content/en/pastebin/2020/11/27/guix-build-local.htmlbody
+src/content/en/pastebin/2020/12/15/guix-pack-fail.html: src/content/en/pastebin/2020/12/15/guix-pack-fail.conf src/content/en/pastebin/2020/12/15/guix-pack-fail.htmlbody
+src/content/en/pastebin/2021/04/03/naive-slugify-js.html: src/content/en/pastebin/2021/04/03/naive-slugify-js.conf src/content/en/pastebin/2021/04/03/naive-slugify-js.htmlbody
+src/content/en/pastebin/2021/06/08/reading-session-pt1.html: src/content/en/pastebin/2021/06/08/reading-session-pt1.conf src/content/en/pastebin/2021/06/08/reading-session-pt1.htmlbody
+src/content/en/pastebin/2021/06/22/curl-wget.html: src/content/en/pastebin/2021/06/22/curl-wget.conf src/content/en/pastebin/2021/06/22/curl-wget.htmlbody
+src/content/en/pastebin/2021/08/11/h1-spacing.html: src/content/en/pastebin/2021/08/11/h1-spacing.conf src/content/en/pastebin/2021/08/11/h1-spacing.htmlbody
+src/content/en/pastebin/2021/09/02/sicp-3-19.html: src/content/en/pastebin/2021/09/02/sicp-3-19.conf src/content/en/pastebin/2021/09/02/sicp-3-19.htmlbody
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.html: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.conf src/content/en/pastebin/2021/09/03/sicp-persistent-queue.htmlbody
+src/content/en/pastebin/2022/07/14/git-cleanup.html: src/content/en/pastebin/2022/07/14/git-cleanup.conf src/content/en/pastebin/2022/07/14/git-cleanup.htmlbody
+src/content/en/pastebin/2023/07/22/funcallable-amop.html: src/content/en/pastebin/2023/07/22/funcallable-amop.conf src/content/en/pastebin/2023/07/22/funcallable-amop.htmlbody
+src/content/en/podcast/2020/12/19/test-entry.html: src/content/en/podcast/2020/12/19/test-entry.conf src/content/en/podcast/2020/12/19/test-entry.htmlbody
+src/content/en/screencast/2021/02/07/autoqemu.html: src/content/en/screencast/2021/02/07/autoqemu.conf src/content/en/screencast/2021/02/07/autoqemu.htmlbody
+src/content/en/til/2020/08/12/filename-timestamp.html: src/content/en/til/2020/08/12/filename-timestamp.conf src/content/en/til/2020/08/12/filename-timestamp.htmlbody
+src/content/en/til/2020/08/13/code-jekyll.html: src/content/en/til/2020/08/13/code-jekyll.conf src/content/en/til/2020/08/13/code-jekyll.htmlbody
+src/content/en/til/2020/08/14/browse-git.html: src/content/en/til/2020/08/14/browse-git.conf src/content/en/til/2020/08/14/browse-git.htmlbody
+src/content/en/til/2020/08/16/git-search.html: src/content/en/til/2020/08/16/git-search.conf src/content/en/til/2020/08/16/git-search.htmlbody
+src/content/en/til/2020/08/28/grep-online.html: src/content/en/til/2020/08/28/grep-online.conf src/content/en/til/2020/08/28/grep-online.htmlbody
+src/content/en/til/2020/09/04/cli-email-fun-profit.html: src/content/en/til/2020/09/04/cli-email-fun-profit.conf src/content/en/til/2020/09/04/cli-email-fun-profit.htmlbody
+src/content/en/til/2020/09/05/oldschool-pr.html: src/content/en/til/2020/09/05/oldschool-pr.conf src/content/en/til/2020/09/05/oldschool-pr.htmlbody
+src/content/en/til/2020/10/11/search-git-history.html: src/content/en/til/2020/10/11/search-git-history.conf src/content/en/til/2020/10/11/search-git-history.htmlbody
+src/content/en/til/2020/11/08/find-broken-symlink.html: src/content/en/til/2020/11/08/find-broken-symlink.conf src/content/en/til/2020/11/08/find-broken-symlink.htmlbody
+src/content/en/til/2020/11/12/diy-nix-bash-ci.html: src/content/en/til/2020/11/12/diy-nix-bash-ci.conf src/content/en/til/2020/11/12/diy-nix-bash-ci.htmlbody
+src/content/en/til/2020/11/12/git-bisect-automation.html: src/content/en/til/2020/11/12/git-bisect-automation.conf src/content/en/til/2020/11/12/git-bisect-automation.htmlbody
+src/content/en/til/2020/11/12/useful-bashvars.html: src/content/en/til/2020/11/12/useful-bashvars.conf src/content/en/til/2020/11/12/useful-bashvars.htmlbody
+src/content/en/til/2020/11/14/gpodder-media.html: src/content/en/til/2020/11/14/gpodder-media.conf src/content/en/til/2020/11/14/gpodder-media.htmlbody
+src/content/en/til/2020/11/30/git-notes-ci.html: src/content/en/til/2020/11/30/git-notes-ci.conf src/content/en/til/2020/11/30/git-notes-ci.htmlbody
+src/content/en/til/2020/12/15/shellcheck-repo.html: src/content/en/til/2020/12/15/shellcheck-repo.conf src/content/en/til/2020/12/15/shellcheck-repo.htmlbody
+src/content/en/til/2020/12/29/svg.html: src/content/en/til/2020/12/29/svg.conf src/content/en/til/2020/12/29/svg.htmlbody
+src/content/en/til/2021/01/12/curl-awk-emails.html: src/content/en/til/2021/01/12/curl-awk-emails.conf src/content/en/til/2021/01/12/curl-awk-emails.htmlbody
+src/content/en/til/2021/01/17/posix-shebang.html: src/content/en/til/2021/01/17/posix-shebang.conf src/content/en/til/2021/01/17/posix-shebang.htmlbody
+src/content/en/til/2021/04/24/cl-generic-precedence.html: src/content/en/til/2021/04/24/cl-generic-precedence.conf src/content/en/til/2021/04/24/cl-generic-precedence.htmlbody
+src/content/en/til/2021/04/24/clojure-autocurry.html: src/content/en/til/2021/04/24/clojure-autocurry.conf src/content/en/til/2021/04/24/clojure-autocurry.htmlbody
+src/content/en/til/2021/04/24/scm-nif.html: src/content/en/til/2021/04/24/scm-nif.conf src/content/en/til/2021/04/24/scm-nif.htmlbody
+src/content/en/til/2021/07/23/git-tls-gpg.html: src/content/en/til/2021/07/23/git-tls-gpg.conf src/content/en/til/2021/07/23/git-tls-gpg.htmlbody
+src/content/en/til/2021/08/11/js-bigint-reviver.html: src/content/en/til/2021/08/11/js-bigint-reviver.conf src/content/en/til/2021/08/11/js-bigint-reviver.htmlbody
+src/content/pt/hea/2020/08/12/arquivo-datado.html: src/content/pt/hea/2020/08/12/arquivo-datado.conf src/content/pt/hea/2020/08/12/arquivo-datado.htmlbody
+src/content/en/blog/index.html: src/content/en/blog/index.conf src/content/en/blog/index.htmlbody
+src/content/en/pastebin/index.html: src/content/en/pastebin/index.conf src/content/en/pastebin/index.htmlbody
+src/content/en/podcast/index.html: src/content/en/podcast/index.conf src/content/en/podcast/index.htmlbody
+src/content/en/screencast/index.html: src/content/en/screencast/index.conf src/content/en/screencast/index.htmlbody
+src/content/en/til/index.html: src/content/en/til/index.conf src/content/en/til/index.htmlbody
+src/content/pt/hea/index.html: src/content/pt/hea/index.conf src/content/pt/hea/index.htmlbody
+src/content/en/blog/categories.html: src/content/en/blog/categories.conf src/content/en/blog/categories.htmlbody
+src/content/en/pastebin/categories.html: src/content/en/pastebin/categories.conf src/content/en/pastebin/categories.htmlbody
+src/content/en/podcast/categories.html: src/content/en/podcast/categories.conf src/content/en/podcast/categories.htmlbody
+src/content/en/screencast/categories.html: src/content/en/screencast/categories.conf src/content/en/screencast/categories.htmlbody
+src/content/en/til/categories.html: src/content/en/til/categories.conf src/content/en/til/categories.htmlbody
+src/content/pt/hea/categorias.html: src/content/pt/hea/categorias.conf src/content/pt/hea/categorias.htmlbody
+
+src/content/en/about.updatedat-check: src/content/en/about.conf
+src/content/en/index.updatedat-check: src/content/en/index.conf
+src/content/pt/sobre.updatedat-check: src/content/pt/sobre.conf
+src/content/en/blog/2018/07/17/guix-nixos.updatedat-check: src/content/en/blog/2018/07/17/guix-nixos.conf
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.updatedat-check: src/content/en/blog/2018/08/01/npm-ci-reproducibility.conf
+src/content/en/blog/2018/12/21/ytdl-subs.updatedat-check: src/content/en/blog/2018/12/21/ytdl-subs.conf
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.updatedat-check: src/content/en/blog/2019/06/02/nixos-stateless-workstation.conf
+src/content/en/blog/2020/08/10/guix-srht.updatedat-check: src/content/en/blog/2020/08/10/guix-srht.conf
+src/content/en/blog/2020/08/31/database-i-wish-i-had.updatedat-check: src/content/en/blog/2020/08/31/database-i-wish-i-had.conf
+src/content/en/blog/2020/10/05/cargo2nix.updatedat-check: src/content/en/blog/2020/10/05/cargo2nix.conf
+src/content/en/blog/2020/10/05/swift2nix.updatedat-check: src/content/en/blog/2020/10/05/swift2nix.conf
+src/content/en/blog/2020/10/19/feature-flags.updatedat-check: src/content/en/blog/2020/10/19/feature-flags.conf
+src/content/en/blog/2020/10/20/wrong-interviewing.updatedat-check: src/content/en/blog/2020/10/20/wrong-interviewing.conf
+src/content/en/blog/2020/11/07/diy-bugs.updatedat-check: src/content/en/blog/2020/11/07/diy-bugs.conf
+src/content/en/blog/2020/11/08/paradigm-shift-review.updatedat-check: src/content/en/blog/2020/11/08/paradigm-shift-review.conf
+src/content/en/blog/2020/11/12/database-parsers-trees.updatedat-check: src/content/en/blog/2020/11/12/database-parsers-trees.conf
+src/content/en/blog/2020/11/14/local-first-review.updatedat-check: src/content/en/blog/2020/11/14/local-first-review.conf
+src/content/en/blog/2021/01/26/remembering-ann.updatedat-check: src/content/en/blog/2021/01/26/remembering-ann.conf
+src/content/en/blog/2021/02/17/fallible.updatedat-check: src/content/en/blog/2021/02/17/fallible.conf
+src/content/en/blog/2021/04/29/relational-review.updatedat-check: src/content/en/blog/2021/04/29/relational-review.conf
+src/content/en/pastebin/2016/04/05/rpn.updatedat-check: src/content/en/pastebin/2016/04/05/rpn.conf
+src/content/en/pastebin/2018/07/11/nix-pinning.updatedat-check: src/content/en/pastebin/2018/07/11/nix-pinning.conf
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.updatedat-check: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.conf
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.updatedat-check: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.conf
+src/content/en/pastebin/2018/07/13/guixbuilder.updatedat-check: src/content/en/pastebin/2018/07/13/guixbuilder.conf
+src/content/en/pastebin/2018/07/13/nix-strpad.updatedat-check: src/content/en/pastebin/2018/07/13/nix-strpad.conf
+src/content/en/pastebin/2018/07/25/nix-exps.updatedat-check: src/content/en/pastebin/2018/07/25/nix-exps.conf
+src/content/en/pastebin/2018/07/25/nix-showdrv.updatedat-check: src/content/en/pastebin/2018/07/25/nix-showdrv.conf
+src/content/en/pastebin/2019/06/08/inconsistent-hash.updatedat-check: src/content/en/pastebin/2019/06/08/inconsistent-hash.conf
+src/content/en/pastebin/2019/12/29/raku-tuple-type.updatedat-check: src/content/en/pastebin/2019/12/29/raku-tuple-type.conf
+src/content/en/pastebin/2020/01/04/guix-import-failure.updatedat-check: src/content/en/pastebin/2020/01/04/guix-import-failure.conf
+src/content/en/pastebin/2020/02/14/guix-shebang.updatedat-check: src/content/en/pastebin/2020/02/14/guix-shebang.conf
+src/content/en/pastebin/2020/11/27/guix-build-local.updatedat-check: src/content/en/pastebin/2020/11/27/guix-build-local.conf
+src/content/en/pastebin/2020/12/15/guix-pack-fail.updatedat-check: src/content/en/pastebin/2020/12/15/guix-pack-fail.conf
+src/content/en/pastebin/2021/04/03/naive-slugify-js.updatedat-check: src/content/en/pastebin/2021/04/03/naive-slugify-js.conf
+src/content/en/pastebin/2021/06/08/reading-session-pt1.updatedat-check: src/content/en/pastebin/2021/06/08/reading-session-pt1.conf
+src/content/en/pastebin/2021/06/22/curl-wget.updatedat-check: src/content/en/pastebin/2021/06/22/curl-wget.conf
+src/content/en/pastebin/2021/08/11/h1-spacing.updatedat-check: src/content/en/pastebin/2021/08/11/h1-spacing.conf
+src/content/en/pastebin/2021/09/02/sicp-3-19.updatedat-check: src/content/en/pastebin/2021/09/02/sicp-3-19.conf
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.updatedat-check: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.conf
+src/content/en/pastebin/2022/07/14/git-cleanup.updatedat-check: src/content/en/pastebin/2022/07/14/git-cleanup.conf
+src/content/en/pastebin/2023/07/22/funcallable-amop.updatedat-check: src/content/en/pastebin/2023/07/22/funcallable-amop.conf
+src/content/en/podcast/2020/12/19/test-entry.updatedat-check: src/content/en/podcast/2020/12/19/test-entry.conf
+src/content/en/screencast/2021/02/07/autoqemu.updatedat-check: src/content/en/screencast/2021/02/07/autoqemu.conf
+src/content/en/til/2020/08/12/filename-timestamp.updatedat-check: src/content/en/til/2020/08/12/filename-timestamp.conf
+src/content/en/til/2020/08/13/code-jekyll.updatedat-check: src/content/en/til/2020/08/13/code-jekyll.conf
+src/content/en/til/2020/08/14/browse-git.updatedat-check: src/content/en/til/2020/08/14/browse-git.conf
+src/content/en/til/2020/08/16/git-search.updatedat-check: src/content/en/til/2020/08/16/git-search.conf
+src/content/en/til/2020/08/28/grep-online.updatedat-check: src/content/en/til/2020/08/28/grep-online.conf
+src/content/en/til/2020/09/04/cli-email-fun-profit.updatedat-check: src/content/en/til/2020/09/04/cli-email-fun-profit.conf
+src/content/en/til/2020/09/05/oldschool-pr.updatedat-check: src/content/en/til/2020/09/05/oldschool-pr.conf
+src/content/en/til/2020/10/11/search-git-history.updatedat-check: src/content/en/til/2020/10/11/search-git-history.conf
+src/content/en/til/2020/11/08/find-broken-symlink.updatedat-check: src/content/en/til/2020/11/08/find-broken-symlink.conf
+src/content/en/til/2020/11/12/diy-nix-bash-ci.updatedat-check: src/content/en/til/2020/11/12/diy-nix-bash-ci.conf
+src/content/en/til/2020/11/12/git-bisect-automation.updatedat-check: src/content/en/til/2020/11/12/git-bisect-automation.conf
+src/content/en/til/2020/11/12/useful-bashvars.updatedat-check: src/content/en/til/2020/11/12/useful-bashvars.conf
+src/content/en/til/2020/11/14/gpodder-media.updatedat-check: src/content/en/til/2020/11/14/gpodder-media.conf
+src/content/en/til/2020/11/30/git-notes-ci.updatedat-check: src/content/en/til/2020/11/30/git-notes-ci.conf
+src/content/en/til/2020/12/15/shellcheck-repo.updatedat-check: src/content/en/til/2020/12/15/shellcheck-repo.conf
+src/content/en/til/2020/12/29/svg.updatedat-check: src/content/en/til/2020/12/29/svg.conf
+src/content/en/til/2021/01/12/curl-awk-emails.updatedat-check: src/content/en/til/2021/01/12/curl-awk-emails.conf
+src/content/en/til/2021/01/17/posix-shebang.updatedat-check: src/content/en/til/2021/01/17/posix-shebang.conf
+src/content/en/til/2021/04/24/cl-generic-precedence.updatedat-check: src/content/en/til/2021/04/24/cl-generic-precedence.conf
+src/content/en/til/2021/04/24/clojure-autocurry.updatedat-check: src/content/en/til/2021/04/24/clojure-autocurry.conf
+src/content/en/til/2021/04/24/scm-nif.updatedat-check: src/content/en/til/2021/04/24/scm-nif.conf
+src/content/en/til/2021/07/23/git-tls-gpg.updatedat-check: src/content/en/til/2021/07/23/git-tls-gpg.conf
+src/content/en/til/2021/08/11/js-bigint-reviver.updatedat-check: src/content/en/til/2021/08/11/js-bigint-reviver.conf
+src/content/pt/hea/2020/08/12/arquivo-datado.updatedat-check: src/content/pt/hea/2020/08/12/arquivo-datado.conf
+src/content/en/blog/index.updatedat-check: src/content/en/blog/index.conf
+src/content/en/pastebin/index.updatedat-check: src/content/en/pastebin/index.conf
+src/content/en/podcast/index.updatedat-check: src/content/en/podcast/index.conf
+src/content/en/screencast/index.updatedat-check: src/content/en/screencast/index.conf
+src/content/en/til/index.updatedat-check: src/content/en/til/index.conf
+src/content/pt/hea/index.updatedat-check: src/content/pt/hea/index.conf
+src/content/en/blog/categories.updatedat-check: src/content/en/blog/categories.conf
+src/content/en/pastebin/categories.updatedat-check: src/content/en/pastebin/categories.conf
+src/content/en/podcast/categories.updatedat-check: src/content/en/podcast/categories.conf
+src/content/en/screencast/categories.updatedat-check: src/content/en/screencast/categories.conf
+src/content/en/til/categories.updatedat-check: src/content/en/til/categories.conf
+src/content/pt/hea/categorias.updatedat-check: src/content/pt/hea/categorias.conf
+src/content/en/about.links-internal-check: src/content/en/about.links
+src/content/en/index.links-internal-check: src/content/en/index.links
+src/content/pt/sobre.links-internal-check: src/content/pt/sobre.links
+src/content/en/blog/2018/07/17/guix-nixos.links-internal-check: src/content/en/blog/2018/07/17/guix-nixos.links
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.links-internal-check: src/content/en/blog/2018/08/01/npm-ci-reproducibility.links
+src/content/en/blog/2018/12/21/ytdl-subs.links-internal-check: src/content/en/blog/2018/12/21/ytdl-subs.links
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.links-internal-check: src/content/en/blog/2019/06/02/nixos-stateless-workstation.links
+src/content/en/blog/2020/08/10/guix-srht.links-internal-check: src/content/en/blog/2020/08/10/guix-srht.links
+src/content/en/blog/2020/08/31/database-i-wish-i-had.links-internal-check: src/content/en/blog/2020/08/31/database-i-wish-i-had.links
+src/content/en/blog/2020/10/05/cargo2nix.links-internal-check: src/content/en/blog/2020/10/05/cargo2nix.links
+src/content/en/blog/2020/10/05/swift2nix.links-internal-check: src/content/en/blog/2020/10/05/swift2nix.links
+src/content/en/blog/2020/10/19/feature-flags.links-internal-check: src/content/en/blog/2020/10/19/feature-flags.links
+src/content/en/blog/2020/10/20/wrong-interviewing.links-internal-check: src/content/en/blog/2020/10/20/wrong-interviewing.links
+src/content/en/blog/2020/11/07/diy-bugs.links-internal-check: src/content/en/blog/2020/11/07/diy-bugs.links
+src/content/en/blog/2020/11/08/paradigm-shift-review.links-internal-check: src/content/en/blog/2020/11/08/paradigm-shift-review.links
+src/content/en/blog/2020/11/12/database-parsers-trees.links-internal-check: src/content/en/blog/2020/11/12/database-parsers-trees.links
+src/content/en/blog/2020/11/14/local-first-review.links-internal-check: src/content/en/blog/2020/11/14/local-first-review.links
+src/content/en/blog/2021/01/26/remembering-ann.links-internal-check: src/content/en/blog/2021/01/26/remembering-ann.links
+src/content/en/blog/2021/02/17/fallible.links-internal-check: src/content/en/blog/2021/02/17/fallible.links
+src/content/en/blog/2021/04/29/relational-review.links-internal-check: src/content/en/blog/2021/04/29/relational-review.links
+src/content/en/pastebin/2016/04/05/rpn.links-internal-check: src/content/en/pastebin/2016/04/05/rpn.links
+src/content/en/pastebin/2018/07/11/nix-pinning.links-internal-check: src/content/en/pastebin/2018/07/11/nix-pinning.links
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.links-internal-check: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.links
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.links-internal-check: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.links
+src/content/en/pastebin/2018/07/13/guixbuilder.links-internal-check: src/content/en/pastebin/2018/07/13/guixbuilder.links
+src/content/en/pastebin/2018/07/13/nix-strpad.links-internal-check: src/content/en/pastebin/2018/07/13/nix-strpad.links
+src/content/en/pastebin/2018/07/25/nix-exps.links-internal-check: src/content/en/pastebin/2018/07/25/nix-exps.links
+src/content/en/pastebin/2018/07/25/nix-showdrv.links-internal-check: src/content/en/pastebin/2018/07/25/nix-showdrv.links
+src/content/en/pastebin/2019/06/08/inconsistent-hash.links-internal-check: src/content/en/pastebin/2019/06/08/inconsistent-hash.links
+src/content/en/pastebin/2019/12/29/raku-tuple-type.links-internal-check: src/content/en/pastebin/2019/12/29/raku-tuple-type.links
+src/content/en/pastebin/2020/01/04/guix-import-failure.links-internal-check: src/content/en/pastebin/2020/01/04/guix-import-failure.links
+src/content/en/pastebin/2020/02/14/guix-shebang.links-internal-check: src/content/en/pastebin/2020/02/14/guix-shebang.links
+src/content/en/pastebin/2020/11/27/guix-build-local.links-internal-check: src/content/en/pastebin/2020/11/27/guix-build-local.links
+src/content/en/pastebin/2020/12/15/guix-pack-fail.links-internal-check: src/content/en/pastebin/2020/12/15/guix-pack-fail.links
+src/content/en/pastebin/2021/04/03/naive-slugify-js.links-internal-check: src/content/en/pastebin/2021/04/03/naive-slugify-js.links
+src/content/en/pastebin/2021/06/08/reading-session-pt1.links-internal-check: src/content/en/pastebin/2021/06/08/reading-session-pt1.links
+src/content/en/pastebin/2021/06/22/curl-wget.links-internal-check: src/content/en/pastebin/2021/06/22/curl-wget.links
+src/content/en/pastebin/2021/08/11/h1-spacing.links-internal-check: src/content/en/pastebin/2021/08/11/h1-spacing.links
+src/content/en/pastebin/2021/09/02/sicp-3-19.links-internal-check: src/content/en/pastebin/2021/09/02/sicp-3-19.links
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.links-internal-check: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.links
+src/content/en/pastebin/2022/07/14/git-cleanup.links-internal-check: src/content/en/pastebin/2022/07/14/git-cleanup.links
+src/content/en/pastebin/2023/07/22/funcallable-amop.links-internal-check: src/content/en/pastebin/2023/07/22/funcallable-amop.links
+src/content/en/podcast/2020/12/19/test-entry.links-internal-check: src/content/en/podcast/2020/12/19/test-entry.links
+src/content/en/screencast/2021/02/07/autoqemu.links-internal-check: src/content/en/screencast/2021/02/07/autoqemu.links
+src/content/en/til/2020/08/12/filename-timestamp.links-internal-check: src/content/en/til/2020/08/12/filename-timestamp.links
+src/content/en/til/2020/08/13/code-jekyll.links-internal-check: src/content/en/til/2020/08/13/code-jekyll.links
+src/content/en/til/2020/08/14/browse-git.links-internal-check: src/content/en/til/2020/08/14/browse-git.links
+src/content/en/til/2020/08/16/git-search.links-internal-check: src/content/en/til/2020/08/16/git-search.links
+src/content/en/til/2020/08/28/grep-online.links-internal-check: src/content/en/til/2020/08/28/grep-online.links
+src/content/en/til/2020/09/04/cli-email-fun-profit.links-internal-check: src/content/en/til/2020/09/04/cli-email-fun-profit.links
+src/content/en/til/2020/09/05/oldschool-pr.links-internal-check: src/content/en/til/2020/09/05/oldschool-pr.links
+src/content/en/til/2020/10/11/search-git-history.links-internal-check: src/content/en/til/2020/10/11/search-git-history.links
+src/content/en/til/2020/11/08/find-broken-symlink.links-internal-check: src/content/en/til/2020/11/08/find-broken-symlink.links
+src/content/en/til/2020/11/12/diy-nix-bash-ci.links-internal-check: src/content/en/til/2020/11/12/diy-nix-bash-ci.links
+src/content/en/til/2020/11/12/git-bisect-automation.links-internal-check: src/content/en/til/2020/11/12/git-bisect-automation.links
+src/content/en/til/2020/11/12/useful-bashvars.links-internal-check: src/content/en/til/2020/11/12/useful-bashvars.links
+src/content/en/til/2020/11/14/gpodder-media.links-internal-check: src/content/en/til/2020/11/14/gpodder-media.links
+src/content/en/til/2020/11/30/git-notes-ci.links-internal-check: src/content/en/til/2020/11/30/git-notes-ci.links
+src/content/en/til/2020/12/15/shellcheck-repo.links-internal-check: src/content/en/til/2020/12/15/shellcheck-repo.links
+src/content/en/til/2020/12/29/svg.links-internal-check: src/content/en/til/2020/12/29/svg.links
+src/content/en/til/2021/01/12/curl-awk-emails.links-internal-check: src/content/en/til/2021/01/12/curl-awk-emails.links
+src/content/en/til/2021/01/17/posix-shebang.links-internal-check: src/content/en/til/2021/01/17/posix-shebang.links
+src/content/en/til/2021/04/24/cl-generic-precedence.links-internal-check: src/content/en/til/2021/04/24/cl-generic-precedence.links
+src/content/en/til/2021/04/24/clojure-autocurry.links-internal-check: src/content/en/til/2021/04/24/clojure-autocurry.links
+src/content/en/til/2021/04/24/scm-nif.links-internal-check: src/content/en/til/2021/04/24/scm-nif.links
+src/content/en/til/2021/07/23/git-tls-gpg.links-internal-check: src/content/en/til/2021/07/23/git-tls-gpg.links
+src/content/en/til/2021/08/11/js-bigint-reviver.links-internal-check: src/content/en/til/2021/08/11/js-bigint-reviver.links
+src/content/pt/hea/2020/08/12/arquivo-datado.links-internal-check: src/content/pt/hea/2020/08/12/arquivo-datado.links
+src/content/en/blog/index.links-internal-check: src/content/en/blog/index.links
+src/content/en/pastebin/index.links-internal-check: src/content/en/pastebin/index.links
+src/content/en/podcast/index.links-internal-check: src/content/en/podcast/index.links
+src/content/en/screencast/index.links-internal-check: src/content/en/screencast/index.links
+src/content/en/til/index.links-internal-check: src/content/en/til/index.links
+src/content/pt/hea/index.links-internal-check: src/content/pt/hea/index.links
+src/content/en/blog/categories.links-internal-check: src/content/en/blog/categories.links
+src/content/en/pastebin/categories.links-internal-check: src/content/en/pastebin/categories.links
+src/content/en/podcast/categories.links-internal-check: src/content/en/podcast/categories.links
+src/content/en/screencast/categories.links-internal-check: src/content/en/screencast/categories.links
+src/content/en/til/categories.links-internal-check: src/content/en/til/categories.links
+src/content/pt/hea/categorias.links-internal-check: src/content/pt/hea/categorias.links
+src/content/en/about.caslinks: src/content/en/about.links
+src/content/en/index.caslinks: src/content/en/index.links
+src/content/pt/sobre.caslinks: src/content/pt/sobre.links
+src/content/en/blog/2018/07/17/guix-nixos.caslinks: src/content/en/blog/2018/07/17/guix-nixos.links
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.caslinks: src/content/en/blog/2018/08/01/npm-ci-reproducibility.links
+src/content/en/blog/2018/12/21/ytdl-subs.caslinks: src/content/en/blog/2018/12/21/ytdl-subs.links
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.caslinks: src/content/en/blog/2019/06/02/nixos-stateless-workstation.links
+src/content/en/blog/2020/08/10/guix-srht.caslinks: src/content/en/blog/2020/08/10/guix-srht.links
+src/content/en/blog/2020/08/31/database-i-wish-i-had.caslinks: src/content/en/blog/2020/08/31/database-i-wish-i-had.links
+src/content/en/blog/2020/10/05/cargo2nix.caslinks: src/content/en/blog/2020/10/05/cargo2nix.links
+src/content/en/blog/2020/10/05/swift2nix.caslinks: src/content/en/blog/2020/10/05/swift2nix.links
+src/content/en/blog/2020/10/19/feature-flags.caslinks: src/content/en/blog/2020/10/19/feature-flags.links
+src/content/en/blog/2020/10/20/wrong-interviewing.caslinks: src/content/en/blog/2020/10/20/wrong-interviewing.links
+src/content/en/blog/2020/11/07/diy-bugs.caslinks: src/content/en/blog/2020/11/07/diy-bugs.links
+src/content/en/blog/2020/11/08/paradigm-shift-review.caslinks: src/content/en/blog/2020/11/08/paradigm-shift-review.links
+src/content/en/blog/2020/11/12/database-parsers-trees.caslinks: src/content/en/blog/2020/11/12/database-parsers-trees.links
+src/content/en/blog/2020/11/14/local-first-review.caslinks: src/content/en/blog/2020/11/14/local-first-review.links
+src/content/en/blog/2021/01/26/remembering-ann.caslinks: src/content/en/blog/2021/01/26/remembering-ann.links
+src/content/en/blog/2021/02/17/fallible.caslinks: src/content/en/blog/2021/02/17/fallible.links
+src/content/en/blog/2021/04/29/relational-review.caslinks: src/content/en/blog/2021/04/29/relational-review.links
+src/content/en/pastebin/2016/04/05/rpn.caslinks: src/content/en/pastebin/2016/04/05/rpn.links
+src/content/en/pastebin/2018/07/11/nix-pinning.caslinks: src/content/en/pastebin/2018/07/11/nix-pinning.links
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.caslinks: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.links
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.caslinks: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.links
+src/content/en/pastebin/2018/07/13/guixbuilder.caslinks: src/content/en/pastebin/2018/07/13/guixbuilder.links
+src/content/en/pastebin/2018/07/13/nix-strpad.caslinks: src/content/en/pastebin/2018/07/13/nix-strpad.links
+src/content/en/pastebin/2018/07/25/nix-exps.caslinks: src/content/en/pastebin/2018/07/25/nix-exps.links
+src/content/en/pastebin/2018/07/25/nix-showdrv.caslinks: src/content/en/pastebin/2018/07/25/nix-showdrv.links
+src/content/en/pastebin/2019/06/08/inconsistent-hash.caslinks: src/content/en/pastebin/2019/06/08/inconsistent-hash.links
+src/content/en/pastebin/2019/12/29/raku-tuple-type.caslinks: src/content/en/pastebin/2019/12/29/raku-tuple-type.links
+src/content/en/pastebin/2020/01/04/guix-import-failure.caslinks: src/content/en/pastebin/2020/01/04/guix-import-failure.links
+src/content/en/pastebin/2020/02/14/guix-shebang.caslinks: src/content/en/pastebin/2020/02/14/guix-shebang.links
+src/content/en/pastebin/2020/11/27/guix-build-local.caslinks: src/content/en/pastebin/2020/11/27/guix-build-local.links
+src/content/en/pastebin/2020/12/15/guix-pack-fail.caslinks: src/content/en/pastebin/2020/12/15/guix-pack-fail.links
+src/content/en/pastebin/2021/04/03/naive-slugify-js.caslinks: src/content/en/pastebin/2021/04/03/naive-slugify-js.links
+src/content/en/pastebin/2021/06/08/reading-session-pt1.caslinks: src/content/en/pastebin/2021/06/08/reading-session-pt1.links
+src/content/en/pastebin/2021/06/22/curl-wget.caslinks: src/content/en/pastebin/2021/06/22/curl-wget.links
+src/content/en/pastebin/2021/08/11/h1-spacing.caslinks: src/content/en/pastebin/2021/08/11/h1-spacing.links
+src/content/en/pastebin/2021/09/02/sicp-3-19.caslinks: src/content/en/pastebin/2021/09/02/sicp-3-19.links
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.caslinks: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.links
+src/content/en/pastebin/2022/07/14/git-cleanup.caslinks: src/content/en/pastebin/2022/07/14/git-cleanup.links
+src/content/en/pastebin/2023/07/22/funcallable-amop.caslinks: src/content/en/pastebin/2023/07/22/funcallable-amop.links
+src/content/en/podcast/2020/12/19/test-entry.caslinks: src/content/en/podcast/2020/12/19/test-entry.links
+src/content/en/screencast/2021/02/07/autoqemu.caslinks: src/content/en/screencast/2021/02/07/autoqemu.links
+src/content/en/til/2020/08/12/filename-timestamp.caslinks: src/content/en/til/2020/08/12/filename-timestamp.links
+src/content/en/til/2020/08/13/code-jekyll.caslinks: src/content/en/til/2020/08/13/code-jekyll.links
+src/content/en/til/2020/08/14/browse-git.caslinks: src/content/en/til/2020/08/14/browse-git.links
+src/content/en/til/2020/08/16/git-search.caslinks: src/content/en/til/2020/08/16/git-search.links
+src/content/en/til/2020/08/28/grep-online.caslinks: src/content/en/til/2020/08/28/grep-online.links
+src/content/en/til/2020/09/04/cli-email-fun-profit.caslinks: src/content/en/til/2020/09/04/cli-email-fun-profit.links
+src/content/en/til/2020/09/05/oldschool-pr.caslinks: src/content/en/til/2020/09/05/oldschool-pr.links
+src/content/en/til/2020/10/11/search-git-history.caslinks: src/content/en/til/2020/10/11/search-git-history.links
+src/content/en/til/2020/11/08/find-broken-symlink.caslinks: src/content/en/til/2020/11/08/find-broken-symlink.links
+src/content/en/til/2020/11/12/diy-nix-bash-ci.caslinks: src/content/en/til/2020/11/12/diy-nix-bash-ci.links
+src/content/en/til/2020/11/12/git-bisect-automation.caslinks: src/content/en/til/2020/11/12/git-bisect-automation.links
+src/content/en/til/2020/11/12/useful-bashvars.caslinks: src/content/en/til/2020/11/12/useful-bashvars.links
+src/content/en/til/2020/11/14/gpodder-media.caslinks: src/content/en/til/2020/11/14/gpodder-media.links
+src/content/en/til/2020/11/30/git-notes-ci.caslinks: src/content/en/til/2020/11/30/git-notes-ci.links
+src/content/en/til/2020/12/15/shellcheck-repo.caslinks: src/content/en/til/2020/12/15/shellcheck-repo.links
+src/content/en/til/2020/12/29/svg.caslinks: src/content/en/til/2020/12/29/svg.links
+src/content/en/til/2021/01/12/curl-awk-emails.caslinks: src/content/en/til/2021/01/12/curl-awk-emails.links
+src/content/en/til/2021/01/17/posix-shebang.caslinks: src/content/en/til/2021/01/17/posix-shebang.links
+src/content/en/til/2021/04/24/cl-generic-precedence.caslinks: src/content/en/til/2021/04/24/cl-generic-precedence.links
+src/content/en/til/2021/04/24/clojure-autocurry.caslinks: src/content/en/til/2021/04/24/clojure-autocurry.links
+src/content/en/til/2021/04/24/scm-nif.caslinks: src/content/en/til/2021/04/24/scm-nif.links
+src/content/en/til/2021/07/23/git-tls-gpg.caslinks: src/content/en/til/2021/07/23/git-tls-gpg.links
+src/content/en/til/2021/08/11/js-bigint-reviver.caslinks: src/content/en/til/2021/08/11/js-bigint-reviver.links
+src/content/pt/hea/2020/08/12/arquivo-datado.caslinks: src/content/pt/hea/2020/08/12/arquivo-datado.links
+src/content/en/blog/index.caslinks: src/content/en/blog/index.links
+src/content/en/pastebin/index.caslinks: src/content/en/pastebin/index.links
+src/content/en/podcast/index.caslinks: src/content/en/podcast/index.links
+src/content/en/screencast/index.caslinks: src/content/en/screencast/index.links
+src/content/en/til/index.caslinks: src/content/en/til/index.links
+src/content/pt/hea/index.caslinks: src/content/pt/hea/index.links
+src/content/en/blog/categories.caslinks: src/content/en/blog/categories.links
+src/content/en/pastebin/categories.caslinks: src/content/en/pastebin/categories.links
+src/content/en/podcast/categories.caslinks: src/content/en/podcast/categories.links
+src/content/en/screencast/categories.caslinks: src/content/en/screencast/categories.links
+src/content/en/til/categories.caslinks: src/content/en/til/categories.links
+src/content/pt/hea/categorias.caslinks: src/content/pt/hea/categorias.links
+
+src/content/en/blog/2018/07/17/guix-nixos.feedentry: src/content/en/blog/2018/07/17/guix-nixos.conf src/content/en/blog/2018/07/17/guix-nixos.htmlbody
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.feedentry: src/content/en/blog/2018/08/01/npm-ci-reproducibility.conf src/content/en/blog/2018/08/01/npm-ci-reproducibility.htmlbody
+src/content/en/blog/2018/12/21/ytdl-subs.feedentry: src/content/en/blog/2018/12/21/ytdl-subs.conf src/content/en/blog/2018/12/21/ytdl-subs.htmlbody
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.feedentry: src/content/en/blog/2019/06/02/nixos-stateless-workstation.conf src/content/en/blog/2019/06/02/nixos-stateless-workstation.htmlbody
+src/content/en/blog/2020/08/10/guix-srht.feedentry: src/content/en/blog/2020/08/10/guix-srht.conf src/content/en/blog/2020/08/10/guix-srht.htmlbody
+src/content/en/blog/2020/08/31/database-i-wish-i-had.feedentry: src/content/en/blog/2020/08/31/database-i-wish-i-had.conf src/content/en/blog/2020/08/31/database-i-wish-i-had.htmlbody
+src/content/en/blog/2020/10/05/cargo2nix.feedentry: src/content/en/blog/2020/10/05/cargo2nix.conf src/content/en/blog/2020/10/05/cargo2nix.htmlbody
+src/content/en/blog/2020/10/05/swift2nix.feedentry: src/content/en/blog/2020/10/05/swift2nix.conf src/content/en/blog/2020/10/05/swift2nix.htmlbody
+src/content/en/blog/2020/10/19/feature-flags.feedentry: src/content/en/blog/2020/10/19/feature-flags.conf src/content/en/blog/2020/10/19/feature-flags.htmlbody
+src/content/en/blog/2020/10/20/wrong-interviewing.feedentry: src/content/en/blog/2020/10/20/wrong-interviewing.conf src/content/en/blog/2020/10/20/wrong-interviewing.htmlbody
+src/content/en/blog/2020/11/07/diy-bugs.feedentry: src/content/en/blog/2020/11/07/diy-bugs.conf src/content/en/blog/2020/11/07/diy-bugs.htmlbody
+src/content/en/blog/2020/11/08/paradigm-shift-review.feedentry: src/content/en/blog/2020/11/08/paradigm-shift-review.conf src/content/en/blog/2020/11/08/paradigm-shift-review.htmlbody
+src/content/en/blog/2020/11/12/database-parsers-trees.feedentry: src/content/en/blog/2020/11/12/database-parsers-trees.conf src/content/en/blog/2020/11/12/database-parsers-trees.htmlbody
+src/content/en/blog/2020/11/14/local-first-review.feedentry: src/content/en/blog/2020/11/14/local-first-review.conf src/content/en/blog/2020/11/14/local-first-review.htmlbody
+src/content/en/blog/2021/01/26/remembering-ann.feedentry: src/content/en/blog/2021/01/26/remembering-ann.conf src/content/en/blog/2021/01/26/remembering-ann.htmlbody
+src/content/en/blog/2021/02/17/fallible.feedentry: src/content/en/blog/2021/02/17/fallible.conf src/content/en/blog/2021/02/17/fallible.htmlbody
+src/content/en/blog/2021/04/29/relational-review.feedentry: src/content/en/blog/2021/04/29/relational-review.conf src/content/en/blog/2021/04/29/relational-review.htmlbody
+src/content/en/pastebin/2016/04/05/rpn.feedentry: src/content/en/pastebin/2016/04/05/rpn.conf src/content/en/pastebin/2016/04/05/rpn.htmlbody
+src/content/en/pastebin/2018/07/11/nix-pinning.feedentry: src/content/en/pastebin/2018/07/11/nix-pinning.conf src/content/en/pastebin/2018/07/11/nix-pinning.htmlbody
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.feedentry: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.conf src/content/en/pastebin/2018/07/13/guix-nixos-systemd.htmlbody
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.feedentry: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.conf src/content/en/pastebin/2018/07/13/guixbuilder-nixos.htmlbody
+src/content/en/pastebin/2018/07/13/guixbuilder.feedentry: src/content/en/pastebin/2018/07/13/guixbuilder.conf src/content/en/pastebin/2018/07/13/guixbuilder.htmlbody
+src/content/en/pastebin/2018/07/13/nix-strpad.feedentry: src/content/en/pastebin/2018/07/13/nix-strpad.conf src/content/en/pastebin/2018/07/13/nix-strpad.htmlbody
+src/content/en/pastebin/2018/07/25/nix-exps.feedentry: src/content/en/pastebin/2018/07/25/nix-exps.conf src/content/en/pastebin/2018/07/25/nix-exps.htmlbody
+src/content/en/pastebin/2018/07/25/nix-showdrv.feedentry: src/content/en/pastebin/2018/07/25/nix-showdrv.conf src/content/en/pastebin/2018/07/25/nix-showdrv.htmlbody
+src/content/en/pastebin/2019/06/08/inconsistent-hash.feedentry: src/content/en/pastebin/2019/06/08/inconsistent-hash.conf src/content/en/pastebin/2019/06/08/inconsistent-hash.htmlbody
+src/content/en/pastebin/2019/12/29/raku-tuple-type.feedentry: src/content/en/pastebin/2019/12/29/raku-tuple-type.conf src/content/en/pastebin/2019/12/29/raku-tuple-type.htmlbody
+src/content/en/pastebin/2020/01/04/guix-import-failure.feedentry: src/content/en/pastebin/2020/01/04/guix-import-failure.conf src/content/en/pastebin/2020/01/04/guix-import-failure.htmlbody
+src/content/en/pastebin/2020/02/14/guix-shebang.feedentry: src/content/en/pastebin/2020/02/14/guix-shebang.conf src/content/en/pastebin/2020/02/14/guix-shebang.htmlbody
+src/content/en/pastebin/2020/11/27/guix-build-local.feedentry: src/content/en/pastebin/2020/11/27/guix-build-local.conf src/content/en/pastebin/2020/11/27/guix-build-local.htmlbody
+src/content/en/pastebin/2020/12/15/guix-pack-fail.feedentry: src/content/en/pastebin/2020/12/15/guix-pack-fail.conf src/content/en/pastebin/2020/12/15/guix-pack-fail.htmlbody
+src/content/en/pastebin/2021/04/03/naive-slugify-js.feedentry: src/content/en/pastebin/2021/04/03/naive-slugify-js.conf src/content/en/pastebin/2021/04/03/naive-slugify-js.htmlbody
+src/content/en/pastebin/2021/06/08/reading-session-pt1.feedentry: src/content/en/pastebin/2021/06/08/reading-session-pt1.conf src/content/en/pastebin/2021/06/08/reading-session-pt1.htmlbody
+src/content/en/pastebin/2021/06/22/curl-wget.feedentry: src/content/en/pastebin/2021/06/22/curl-wget.conf src/content/en/pastebin/2021/06/22/curl-wget.htmlbody
+src/content/en/pastebin/2021/08/11/h1-spacing.feedentry: src/content/en/pastebin/2021/08/11/h1-spacing.conf src/content/en/pastebin/2021/08/11/h1-spacing.htmlbody
+src/content/en/pastebin/2021/09/02/sicp-3-19.feedentry: src/content/en/pastebin/2021/09/02/sicp-3-19.conf src/content/en/pastebin/2021/09/02/sicp-3-19.htmlbody
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.feedentry: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.conf src/content/en/pastebin/2021/09/03/sicp-persistent-queue.htmlbody
+src/content/en/pastebin/2022/07/14/git-cleanup.feedentry: src/content/en/pastebin/2022/07/14/git-cleanup.conf src/content/en/pastebin/2022/07/14/git-cleanup.htmlbody
+src/content/en/pastebin/2023/07/22/funcallable-amop.feedentry: src/content/en/pastebin/2023/07/22/funcallable-amop.conf src/content/en/pastebin/2023/07/22/funcallable-amop.htmlbody
+src/content/en/podcast/2020/12/19/test-entry.feedentry: src/content/en/podcast/2020/12/19/test-entry.conf src/content/en/podcast/2020/12/19/test-entry.htmlbody
+src/content/en/screencast/2021/02/07/autoqemu.feedentry: src/content/en/screencast/2021/02/07/autoqemu.conf src/content/en/screencast/2021/02/07/autoqemu.htmlbody
+src/content/en/til/2020/08/12/filename-timestamp.feedentry: src/content/en/til/2020/08/12/filename-timestamp.conf src/content/en/til/2020/08/12/filename-timestamp.htmlbody
+src/content/en/til/2020/08/13/code-jekyll.feedentry: src/content/en/til/2020/08/13/code-jekyll.conf src/content/en/til/2020/08/13/code-jekyll.htmlbody
+src/content/en/til/2020/08/14/browse-git.feedentry: src/content/en/til/2020/08/14/browse-git.conf src/content/en/til/2020/08/14/browse-git.htmlbody
+src/content/en/til/2020/08/16/git-search.feedentry: src/content/en/til/2020/08/16/git-search.conf src/content/en/til/2020/08/16/git-search.htmlbody
+src/content/en/til/2020/08/28/grep-online.feedentry: src/content/en/til/2020/08/28/grep-online.conf src/content/en/til/2020/08/28/grep-online.htmlbody
+src/content/en/til/2020/09/04/cli-email-fun-profit.feedentry: src/content/en/til/2020/09/04/cli-email-fun-profit.conf src/content/en/til/2020/09/04/cli-email-fun-profit.htmlbody
+src/content/en/til/2020/09/05/oldschool-pr.feedentry: src/content/en/til/2020/09/05/oldschool-pr.conf src/content/en/til/2020/09/05/oldschool-pr.htmlbody
+src/content/en/til/2020/10/11/search-git-history.feedentry: src/content/en/til/2020/10/11/search-git-history.conf src/content/en/til/2020/10/11/search-git-history.htmlbody
+src/content/en/til/2020/11/08/find-broken-symlink.feedentry: src/content/en/til/2020/11/08/find-broken-symlink.conf src/content/en/til/2020/11/08/find-broken-symlink.htmlbody
+src/content/en/til/2020/11/12/diy-nix-bash-ci.feedentry: src/content/en/til/2020/11/12/diy-nix-bash-ci.conf src/content/en/til/2020/11/12/diy-nix-bash-ci.htmlbody
+src/content/en/til/2020/11/12/git-bisect-automation.feedentry: src/content/en/til/2020/11/12/git-bisect-automation.conf src/content/en/til/2020/11/12/git-bisect-automation.htmlbody
+src/content/en/til/2020/11/12/useful-bashvars.feedentry: src/content/en/til/2020/11/12/useful-bashvars.conf src/content/en/til/2020/11/12/useful-bashvars.htmlbody
+src/content/en/til/2020/11/14/gpodder-media.feedentry: src/content/en/til/2020/11/14/gpodder-media.conf src/content/en/til/2020/11/14/gpodder-media.htmlbody
+src/content/en/til/2020/11/30/git-notes-ci.feedentry: src/content/en/til/2020/11/30/git-notes-ci.conf src/content/en/til/2020/11/30/git-notes-ci.htmlbody
+src/content/en/til/2020/12/15/shellcheck-repo.feedentry: src/content/en/til/2020/12/15/shellcheck-repo.conf src/content/en/til/2020/12/15/shellcheck-repo.htmlbody
+src/content/en/til/2020/12/29/svg.feedentry: src/content/en/til/2020/12/29/svg.conf src/content/en/til/2020/12/29/svg.htmlbody
+src/content/en/til/2021/01/12/curl-awk-emails.feedentry: src/content/en/til/2021/01/12/curl-awk-emails.conf src/content/en/til/2021/01/12/curl-awk-emails.htmlbody
+src/content/en/til/2021/01/17/posix-shebang.feedentry: src/content/en/til/2021/01/17/posix-shebang.conf src/content/en/til/2021/01/17/posix-shebang.htmlbody
+src/content/en/til/2021/04/24/cl-generic-precedence.feedentry: src/content/en/til/2021/04/24/cl-generic-precedence.conf src/content/en/til/2021/04/24/cl-generic-precedence.htmlbody
+src/content/en/til/2021/04/24/clojure-autocurry.feedentry: src/content/en/til/2021/04/24/clojure-autocurry.conf src/content/en/til/2021/04/24/clojure-autocurry.htmlbody
+src/content/en/til/2021/04/24/scm-nif.feedentry: src/content/en/til/2021/04/24/scm-nif.conf src/content/en/til/2021/04/24/scm-nif.htmlbody
+src/content/en/til/2021/07/23/git-tls-gpg.feedentry: src/content/en/til/2021/07/23/git-tls-gpg.conf src/content/en/til/2021/07/23/git-tls-gpg.htmlbody
+src/content/en/til/2021/08/11/js-bigint-reviver.feedentry: src/content/en/til/2021/08/11/js-bigint-reviver.conf src/content/en/til/2021/08/11/js-bigint-reviver.htmlbody
+src/content/pt/hea/2020/08/12/arquivo-datado.feedentry: src/content/pt/hea/2020/08/12/arquivo-datado.conf src/content/pt/hea/2020/08/12/arquivo-datado.htmlbody
+src/content/en/blog/2018/07/17/guix-nixos.sortdata: src/content/en/blog/2018/07/17/guix-nixos.conf
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.sortdata: src/content/en/blog/2018/08/01/npm-ci-reproducibility.conf
+src/content/en/blog/2018/12/21/ytdl-subs.sortdata: src/content/en/blog/2018/12/21/ytdl-subs.conf
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.sortdata: src/content/en/blog/2019/06/02/nixos-stateless-workstation.conf
+src/content/en/blog/2020/08/10/guix-srht.sortdata: src/content/en/blog/2020/08/10/guix-srht.conf
+src/content/en/blog/2020/08/31/database-i-wish-i-had.sortdata: src/content/en/blog/2020/08/31/database-i-wish-i-had.conf
+src/content/en/blog/2020/10/05/cargo2nix.sortdata: src/content/en/blog/2020/10/05/cargo2nix.conf
+src/content/en/blog/2020/10/05/swift2nix.sortdata: src/content/en/blog/2020/10/05/swift2nix.conf
+src/content/en/blog/2020/10/19/feature-flags.sortdata: src/content/en/blog/2020/10/19/feature-flags.conf
+src/content/en/blog/2020/10/20/wrong-interviewing.sortdata: src/content/en/blog/2020/10/20/wrong-interviewing.conf
+src/content/en/blog/2020/11/07/diy-bugs.sortdata: src/content/en/blog/2020/11/07/diy-bugs.conf
+src/content/en/blog/2020/11/08/paradigm-shift-review.sortdata: src/content/en/blog/2020/11/08/paradigm-shift-review.conf
+src/content/en/blog/2020/11/12/database-parsers-trees.sortdata: src/content/en/blog/2020/11/12/database-parsers-trees.conf
+src/content/en/blog/2020/11/14/local-first-review.sortdata: src/content/en/blog/2020/11/14/local-first-review.conf
+src/content/en/blog/2021/01/26/remembering-ann.sortdata: src/content/en/blog/2021/01/26/remembering-ann.conf
+src/content/en/blog/2021/02/17/fallible.sortdata: src/content/en/blog/2021/02/17/fallible.conf
+src/content/en/blog/2021/04/29/relational-review.sortdata: src/content/en/blog/2021/04/29/relational-review.conf
+src/content/en/pastebin/2016/04/05/rpn.sortdata: src/content/en/pastebin/2016/04/05/rpn.conf
+src/content/en/pastebin/2018/07/11/nix-pinning.sortdata: src/content/en/pastebin/2018/07/11/nix-pinning.conf
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.sortdata: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.conf
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.sortdata: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.conf
+src/content/en/pastebin/2018/07/13/guixbuilder.sortdata: src/content/en/pastebin/2018/07/13/guixbuilder.conf
+src/content/en/pastebin/2018/07/13/nix-strpad.sortdata: src/content/en/pastebin/2018/07/13/nix-strpad.conf
+src/content/en/pastebin/2018/07/25/nix-exps.sortdata: src/content/en/pastebin/2018/07/25/nix-exps.conf
+src/content/en/pastebin/2018/07/25/nix-showdrv.sortdata: src/content/en/pastebin/2018/07/25/nix-showdrv.conf
+src/content/en/pastebin/2019/06/08/inconsistent-hash.sortdata: src/content/en/pastebin/2019/06/08/inconsistent-hash.conf
+src/content/en/pastebin/2019/12/29/raku-tuple-type.sortdata: src/content/en/pastebin/2019/12/29/raku-tuple-type.conf
+src/content/en/pastebin/2020/01/04/guix-import-failure.sortdata: src/content/en/pastebin/2020/01/04/guix-import-failure.conf
+src/content/en/pastebin/2020/02/14/guix-shebang.sortdata: src/content/en/pastebin/2020/02/14/guix-shebang.conf
+src/content/en/pastebin/2020/11/27/guix-build-local.sortdata: src/content/en/pastebin/2020/11/27/guix-build-local.conf
+src/content/en/pastebin/2020/12/15/guix-pack-fail.sortdata: src/content/en/pastebin/2020/12/15/guix-pack-fail.conf
+src/content/en/pastebin/2021/04/03/naive-slugify-js.sortdata: src/content/en/pastebin/2021/04/03/naive-slugify-js.conf
+src/content/en/pastebin/2021/06/08/reading-session-pt1.sortdata: src/content/en/pastebin/2021/06/08/reading-session-pt1.conf
+src/content/en/pastebin/2021/06/22/curl-wget.sortdata: src/content/en/pastebin/2021/06/22/curl-wget.conf
+src/content/en/pastebin/2021/08/11/h1-spacing.sortdata: src/content/en/pastebin/2021/08/11/h1-spacing.conf
+src/content/en/pastebin/2021/09/02/sicp-3-19.sortdata: src/content/en/pastebin/2021/09/02/sicp-3-19.conf
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.sortdata: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.conf
+src/content/en/pastebin/2022/07/14/git-cleanup.sortdata: src/content/en/pastebin/2022/07/14/git-cleanup.conf
+src/content/en/pastebin/2023/07/22/funcallable-amop.sortdata: src/content/en/pastebin/2023/07/22/funcallable-amop.conf
+src/content/en/podcast/2020/12/19/test-entry.sortdata: src/content/en/podcast/2020/12/19/test-entry.conf
+src/content/en/screencast/2021/02/07/autoqemu.sortdata: src/content/en/screencast/2021/02/07/autoqemu.conf
+src/content/en/til/2020/08/12/filename-timestamp.sortdata: src/content/en/til/2020/08/12/filename-timestamp.conf
+src/content/en/til/2020/08/13/code-jekyll.sortdata: src/content/en/til/2020/08/13/code-jekyll.conf
+src/content/en/til/2020/08/14/browse-git.sortdata: src/content/en/til/2020/08/14/browse-git.conf
+src/content/en/til/2020/08/16/git-search.sortdata: src/content/en/til/2020/08/16/git-search.conf
+src/content/en/til/2020/08/28/grep-online.sortdata: src/content/en/til/2020/08/28/grep-online.conf
+src/content/en/til/2020/09/04/cli-email-fun-profit.sortdata: src/content/en/til/2020/09/04/cli-email-fun-profit.conf
+src/content/en/til/2020/09/05/oldschool-pr.sortdata: src/content/en/til/2020/09/05/oldschool-pr.conf
+src/content/en/til/2020/10/11/search-git-history.sortdata: src/content/en/til/2020/10/11/search-git-history.conf
+src/content/en/til/2020/11/08/find-broken-symlink.sortdata: src/content/en/til/2020/11/08/find-broken-symlink.conf
+src/content/en/til/2020/11/12/diy-nix-bash-ci.sortdata: src/content/en/til/2020/11/12/diy-nix-bash-ci.conf
+src/content/en/til/2020/11/12/git-bisect-automation.sortdata: src/content/en/til/2020/11/12/git-bisect-automation.conf
+src/content/en/til/2020/11/12/useful-bashvars.sortdata: src/content/en/til/2020/11/12/useful-bashvars.conf
+src/content/en/til/2020/11/14/gpodder-media.sortdata: src/content/en/til/2020/11/14/gpodder-media.conf
+src/content/en/til/2020/11/30/git-notes-ci.sortdata: src/content/en/til/2020/11/30/git-notes-ci.conf
+src/content/en/til/2020/12/15/shellcheck-repo.sortdata: src/content/en/til/2020/12/15/shellcheck-repo.conf
+src/content/en/til/2020/12/29/svg.sortdata: src/content/en/til/2020/12/29/svg.conf
+src/content/en/til/2021/01/12/curl-awk-emails.sortdata: src/content/en/til/2021/01/12/curl-awk-emails.conf
+src/content/en/til/2021/01/17/posix-shebang.sortdata: src/content/en/til/2021/01/17/posix-shebang.conf
+src/content/en/til/2021/04/24/cl-generic-precedence.sortdata: src/content/en/til/2021/04/24/cl-generic-precedence.conf
+src/content/en/til/2021/04/24/clojure-autocurry.sortdata: src/content/en/til/2021/04/24/clojure-autocurry.conf
+src/content/en/til/2021/04/24/scm-nif.sortdata: src/content/en/til/2021/04/24/scm-nif.conf
+src/content/en/til/2021/07/23/git-tls-gpg.sortdata: src/content/en/til/2021/07/23/git-tls-gpg.conf
+src/content/en/til/2021/08/11/js-bigint-reviver.sortdata: src/content/en/til/2021/08/11/js-bigint-reviver.conf
+src/content/pt/hea/2020/08/12/arquivo-datado.sortdata: src/content/pt/hea/2020/08/12/arquivo-datado.conf
+src/content/en/blog/2018/07/17/guix-nixos.categorydata: src/content/en/blog/2018/07/17/guix-nixos.conf
+src/content/en/blog/2018/08/01/npm-ci-reproducibility.categorydata: src/content/en/blog/2018/08/01/npm-ci-reproducibility.conf
+src/content/en/blog/2018/12/21/ytdl-subs.categorydata: src/content/en/blog/2018/12/21/ytdl-subs.conf
+src/content/en/blog/2019/06/02/nixos-stateless-workstation.categorydata: src/content/en/blog/2019/06/02/nixos-stateless-workstation.conf
+src/content/en/blog/2020/08/10/guix-srht.categorydata: src/content/en/blog/2020/08/10/guix-srht.conf
+src/content/en/blog/2020/08/31/database-i-wish-i-had.categorydata: src/content/en/blog/2020/08/31/database-i-wish-i-had.conf
+src/content/en/blog/2020/10/05/cargo2nix.categorydata: src/content/en/blog/2020/10/05/cargo2nix.conf
+src/content/en/blog/2020/10/05/swift2nix.categorydata: src/content/en/blog/2020/10/05/swift2nix.conf
+src/content/en/blog/2020/10/19/feature-flags.categorydata: src/content/en/blog/2020/10/19/feature-flags.conf
+src/content/en/blog/2020/10/20/wrong-interviewing.categorydata: src/content/en/blog/2020/10/20/wrong-interviewing.conf
+src/content/en/blog/2020/11/07/diy-bugs.categorydata: src/content/en/blog/2020/11/07/diy-bugs.conf
+src/content/en/blog/2020/11/08/paradigm-shift-review.categorydata: src/content/en/blog/2020/11/08/paradigm-shift-review.conf
+src/content/en/blog/2020/11/12/database-parsers-trees.categorydata: src/content/en/blog/2020/11/12/database-parsers-trees.conf
+src/content/en/blog/2020/11/14/local-first-review.categorydata: src/content/en/blog/2020/11/14/local-first-review.conf
+src/content/en/blog/2021/01/26/remembering-ann.categorydata: src/content/en/blog/2021/01/26/remembering-ann.conf
+src/content/en/blog/2021/02/17/fallible.categorydata: src/content/en/blog/2021/02/17/fallible.conf
+src/content/en/blog/2021/04/29/relational-review.categorydata: src/content/en/blog/2021/04/29/relational-review.conf
+src/content/en/pastebin/2016/04/05/rpn.categorydata: src/content/en/pastebin/2016/04/05/rpn.conf
+src/content/en/pastebin/2018/07/11/nix-pinning.categorydata: src/content/en/pastebin/2018/07/11/nix-pinning.conf
+src/content/en/pastebin/2018/07/13/guix-nixos-systemd.categorydata: src/content/en/pastebin/2018/07/13/guix-nixos-systemd.conf
+src/content/en/pastebin/2018/07/13/guixbuilder-nixos.categorydata: src/content/en/pastebin/2018/07/13/guixbuilder-nixos.conf
+src/content/en/pastebin/2018/07/13/guixbuilder.categorydata: src/content/en/pastebin/2018/07/13/guixbuilder.conf
+src/content/en/pastebin/2018/07/13/nix-strpad.categorydata: src/content/en/pastebin/2018/07/13/nix-strpad.conf
+src/content/en/pastebin/2018/07/25/nix-exps.categorydata: src/content/en/pastebin/2018/07/25/nix-exps.conf
+src/content/en/pastebin/2018/07/25/nix-showdrv.categorydata: src/content/en/pastebin/2018/07/25/nix-showdrv.conf
+src/content/en/pastebin/2019/06/08/inconsistent-hash.categorydata: src/content/en/pastebin/2019/06/08/inconsistent-hash.conf
+src/content/en/pastebin/2019/12/29/raku-tuple-type.categorydata: src/content/en/pastebin/2019/12/29/raku-tuple-type.conf
+src/content/en/pastebin/2020/01/04/guix-import-failure.categorydata: src/content/en/pastebin/2020/01/04/guix-import-failure.conf
+src/content/en/pastebin/2020/02/14/guix-shebang.categorydata: src/content/en/pastebin/2020/02/14/guix-shebang.conf
+src/content/en/pastebin/2020/11/27/guix-build-local.categorydata: src/content/en/pastebin/2020/11/27/guix-build-local.conf
+src/content/en/pastebin/2020/12/15/guix-pack-fail.categorydata: src/content/en/pastebin/2020/12/15/guix-pack-fail.conf
+src/content/en/pastebin/2021/04/03/naive-slugify-js.categorydata: src/content/en/pastebin/2021/04/03/naive-slugify-js.conf
+src/content/en/pastebin/2021/06/08/reading-session-pt1.categorydata: src/content/en/pastebin/2021/06/08/reading-session-pt1.conf
+src/content/en/pastebin/2021/06/22/curl-wget.categorydata: src/content/en/pastebin/2021/06/22/curl-wget.conf
+src/content/en/pastebin/2021/08/11/h1-spacing.categorydata: src/content/en/pastebin/2021/08/11/h1-spacing.conf
+src/content/en/pastebin/2021/09/02/sicp-3-19.categorydata: src/content/en/pastebin/2021/09/02/sicp-3-19.conf
+src/content/en/pastebin/2021/09/03/sicp-persistent-queue.categorydata: src/content/en/pastebin/2021/09/03/sicp-persistent-queue.conf
+src/content/en/pastebin/2022/07/14/git-cleanup.categorydata: src/content/en/pastebin/2022/07/14/git-cleanup.conf
+src/content/en/pastebin/2023/07/22/funcallable-amop.categorydata: src/content/en/pastebin/2023/07/22/funcallable-amop.conf
+src/content/en/podcast/2020/12/19/test-entry.categorydata: src/content/en/podcast/2020/12/19/test-entry.conf
+src/content/en/screencast/2021/02/07/autoqemu.categorydata: src/content/en/screencast/2021/02/07/autoqemu.conf
+src/content/en/til/2020/08/12/filename-timestamp.categorydata: src/content/en/til/2020/08/12/filename-timestamp.conf
+src/content/en/til/2020/08/13/code-jekyll.categorydata: src/content/en/til/2020/08/13/code-jekyll.conf
+src/content/en/til/2020/08/14/browse-git.categorydata: src/content/en/til/2020/08/14/browse-git.conf
+src/content/en/til/2020/08/16/git-search.categorydata: src/content/en/til/2020/08/16/git-search.conf
+src/content/en/til/2020/08/28/grep-online.categorydata: src/content/en/til/2020/08/28/grep-online.conf
+src/content/en/til/2020/09/04/cli-email-fun-profit.categorydata: src/content/en/til/2020/09/04/cli-email-fun-profit.conf
+src/content/en/til/2020/09/05/oldschool-pr.categorydata: src/content/en/til/2020/09/05/oldschool-pr.conf
+src/content/en/til/2020/10/11/search-git-history.categorydata: src/content/en/til/2020/10/11/search-git-history.conf
+src/content/en/til/2020/11/08/find-broken-symlink.categorydata: src/content/en/til/2020/11/08/find-broken-symlink.conf
+src/content/en/til/2020/11/12/diy-nix-bash-ci.categorydata: src/content/en/til/2020/11/12/diy-nix-bash-ci.conf
+src/content/en/til/2020/11/12/git-bisect-automation.categorydata: src/content/en/til/2020/11/12/git-bisect-automation.conf
+src/content/en/til/2020/11/12/useful-bashvars.categorydata: src/content/en/til/2020/11/12/useful-bashvars.conf
+src/content/en/til/2020/11/14/gpodder-media.categorydata: src/content/en/til/2020/11/14/gpodder-media.conf
+src/content/en/til/2020/11/30/git-notes-ci.categorydata: src/content/en/til/2020/11/30/git-notes-ci.conf
+src/content/en/til/2020/12/15/shellcheck-repo.categorydata: src/content/en/til/2020/12/15/shellcheck-repo.conf
+src/content/en/til/2020/12/29/svg.categorydata: src/content/en/til/2020/12/29/svg.conf
+src/content/en/til/2021/01/12/curl-awk-emails.categorydata: src/content/en/til/2021/01/12/curl-awk-emails.conf
+src/content/en/til/2021/01/17/posix-shebang.categorydata: src/content/en/til/2021/01/17/posix-shebang.conf
+src/content/en/til/2021/04/24/cl-generic-precedence.categorydata: src/content/en/til/2021/04/24/cl-generic-precedence.conf
+src/content/en/til/2021/04/24/clojure-autocurry.categorydata: src/content/en/til/2021/04/24/clojure-autocurry.conf
+src/content/en/til/2021/04/24/scm-nif.categorydata: src/content/en/til/2021/04/24/scm-nif.conf
+src/content/en/til/2021/07/23/git-tls-gpg.categorydata: src/content/en/til/2021/07/23/git-tls-gpg.conf
+src/content/en/til/2021/08/11/js-bigint-reviver.categorydata: src/content/en/til/2021/08/11/js-bigint-reviver.conf
+src/content/pt/hea/2020/08/12/arquivo-datado.categorydata: src/content/pt/hea/2020/08/12/arquivo-datado.conf
+
+src/content/en/blog/index.htmlheader src/content/en/blog/index.htmlfooter: src/content/en/blog/index.conf
+src/content/en/pastebin/index.htmlheader src/content/en/pastebin/index.htmlfooter: src/content/en/pastebin/index.conf
+src/content/en/podcast/index.htmlheader src/content/en/podcast/index.htmlfooter: src/content/en/podcast/index.conf
+src/content/en/screencast/index.htmlheader src/content/en/screencast/index.htmlfooter: src/content/en/screencast/index.conf
+src/content/en/til/index.htmlheader src/content/en/til/index.htmlfooter: src/content/en/til/index.conf
+src/content/pt/hea/index.htmlheader src/content/pt/hea/index.htmlfooter: src/content/pt/hea/index.conf
+src/content/en/blog/categories.htmlheader src/content/en/blog/categories.htmlfooter: src/content/en/blog/categories.conf
+src/content/en/pastebin/categories.htmlheader src/content/en/pastebin/categories.htmlfooter: src/content/en/pastebin/categories.conf
+src/content/en/podcast/categories.htmlheader src/content/en/podcast/categories.htmlfooter: src/content/en/podcast/categories.conf
+src/content/en/screencast/categories.htmlheader src/content/en/screencast/categories.htmlfooter: src/content/en/screencast/categories.conf
+src/content/en/til/categories.htmlheader src/content/en/til/categories.htmlfooter: src/content/en/til/categories.conf
+src/content/pt/hea/categorias.htmlheader src/content/pt/hea/categorias.htmlfooter: src/content/pt/hea/categorias.conf
+src/content/en/blog/index.htmllisting: src/content/en/blog/index.conf
+src/content/en/pastebin/index.htmllisting: src/content/en/pastebin/index.conf
+src/content/en/podcast/index.htmllisting: src/content/en/podcast/index.conf
+src/content/en/screencast/index.htmllisting: src/content/en/screencast/index.conf
+src/content/en/til/index.htmllisting: src/content/en/til/index.conf
+src/content/pt/hea/index.htmllisting: src/content/pt/hea/index.conf
+src/content/en/blog/categories.htmllisting: src/content/en/blog/categories.conf
+src/content/en/pastebin/categories.htmllisting: src/content/en/pastebin/categories.conf
+src/content/en/podcast/categories.htmllisting: src/content/en/podcast/categories.conf
+src/content/en/screencast/categories.htmllisting: src/content/en/screencast/categories.conf
+src/content/en/til/categories.htmllisting: src/content/en/til/categories.conf
+src/content/pt/hea/categorias.htmllisting: src/content/pt/hea/categorias.conf
+src/content/en/blog/index.html: src/content/en/blog/index.htmlheader src/content/en/blog/index.htmlfooter
+src/content/en/pastebin/index.html: src/content/en/pastebin/index.htmlheader src/content/en/pastebin/index.htmlfooter
+src/content/en/podcast/index.html: src/content/en/podcast/index.htmlheader src/content/en/podcast/index.htmlfooter
+src/content/en/screencast/index.html: src/content/en/screencast/index.htmlheader src/content/en/screencast/index.htmlfooter
+src/content/en/til/index.html: src/content/en/til/index.htmlheader src/content/en/til/index.htmlfooter
+src/content/pt/hea/index.html: src/content/pt/hea/index.htmlheader src/content/pt/hea/index.htmlfooter
+src/content/en/blog/categories.html: src/content/en/blog/categories.htmlheader src/content/en/blog/categories.htmlfooter
+src/content/en/pastebin/categories.html: src/content/en/pastebin/categories.htmlheader src/content/en/pastebin/categories.htmlfooter
+src/content/en/podcast/categories.html: src/content/en/podcast/categories.htmlheader src/content/en/podcast/categories.htmlfooter
+src/content/en/screencast/categories.html: src/content/en/screencast/categories.htmlheader src/content/en/screencast/categories.htmlfooter
+src/content/en/til/categories.html: src/content/en/til/categories.htmlheader src/content/en/til/categories.htmlfooter
+src/content/pt/hea/categorias.html: src/content/pt/hea/categorias.htmlheader src/content/pt/hea/categorias.htmlfooter
+src/content/en/blog/index.html: src/content/en/blog/index.htmllisting src/content/en/blog/index.htmlbody
+src/content/en/pastebin/index.html: src/content/en/pastebin/index.htmllisting src/content/en/pastebin/index.htmlbody
+src/content/en/podcast/index.html: src/content/en/podcast/index.htmllisting src/content/en/podcast/index.htmlbody
+src/content/en/screencast/index.html: src/content/en/screencast/index.htmllisting src/content/en/screencast/index.htmlbody
+src/content/en/til/index.html: src/content/en/til/index.htmllisting src/content/en/til/index.htmlbody
+src/content/pt/hea/index.html: src/content/pt/hea/index.htmllisting src/content/pt/hea/index.htmlbody
+src/content/en/blog/categories.html: src/content/en/blog/categories.htmllisting src/content/en/blog/categories.htmlbody
+src/content/en/pastebin/categories.html: src/content/en/pastebin/categories.htmllisting src/content/en/pastebin/categories.htmlbody
+src/content/en/podcast/categories.html: src/content/en/podcast/categories.htmllisting src/content/en/podcast/categories.htmlbody
+src/content/en/screencast/categories.html: src/content/en/screencast/categories.htmllisting src/content/en/screencast/categories.htmlbody
+src/content/en/til/categories.html: src/content/en/til/categories.htmllisting src/content/en/til/categories.htmlbody
+src/content/pt/hea/categorias.html: src/content/pt/hea/categorias.htmllisting src/content/pt/hea/categorias.htmlbody
+
+src/content/en/podcast/2020/12/19/test-entry.flac.torrent: src/content/en/podcast/2020/12/19/test-entry.flac
+src/content/en/podcast/2020/12/19/test-entry.ogg.torrent: src/content/en/podcast/2020/12/19/test-entry.ogg
+src/content/en/screencast/2021/02/07/autoqemu.webm.torrent: src/content/en/screencast/2021/02/07/autoqemu.webm
+
+src/content/en/slide/2020/10/19/feature-flags.ps: src/content/en/slide/2020/10/19/feature-flags.adoc
+src/content/en/slide/2020/11/14/local-first-hype.ps: src/content/en/slide/2020/11/14/local-first-hype.adoc
+src/content/en/slide/2020/10/19/feature-flags.pdf: src/content/en/slide/2020/10/19/feature-flags.ps
+src/content/en/slide/2020/11/14/local-first-hype.pdf: src/content/en/slide/2020/11/14/local-first-hype.ps
+
+
+
+articles.en.blog.adoc = \
+ src/content/en/blog/2018/07/17/guix-nixos.adoc \
+ src/content/en/blog/2018/08/01/npm-ci-reproducibility.adoc \
+ src/content/en/blog/2018/12/21/ytdl-subs.adoc \
+ src/content/en/blog/2019/06/02/nixos-stateless-workstation.adoc \
+ src/content/en/blog/2020/08/10/guix-srht.adoc \
+ src/content/en/blog/2020/08/31/database-i-wish-i-had.adoc \
+ src/content/en/blog/2020/10/05/cargo2nix.adoc \
+ src/content/en/blog/2020/10/05/swift2nix.adoc \
+ src/content/en/blog/2020/10/19/feature-flags.adoc \
+ src/content/en/blog/2020/10/20/wrong-interviewing.adoc \
+ src/content/en/blog/2020/11/07/diy-bugs.adoc \
+ src/content/en/blog/2020/11/08/paradigm-shift-review.adoc \
+ src/content/en/blog/2020/11/12/database-parsers-trees.adoc \
+ src/content/en/blog/2020/11/14/local-first-review.adoc \
+ src/content/en/blog/2021/01/26/remembering-ann.adoc \
+ src/content/en/blog/2021/02/17/fallible.adoc \
+ src/content/en/blog/2021/04/29/relational-review.adoc \
+
+articles.en.blog.sortdata = $(articles.en.blog.adoc:.adoc=.sortdata)
+articles.en.blog.indexentry = $(articles.en.blog.adoc:.adoc=.indexentry)
+articles.en.blog.feedentry = $(articles.en.blog.adoc:.adoc=.feedentry)
+articles.en.blog.categorydata = $(articles.en.blog.adoc:.adoc=.categorydata)
+src/content/en/blog/sortdata.txt: deps.mk
+ printf '%s\n' $(articles.en.blog.sortdata) > $@
+
+src/content/en/blog/feed.xml: src/content/en/blog/sortdata.txt
+src/content/en/blog/feed.xml: $(articles.en.blog.sortdata)
+src/content/en/blog/index.htmllisting: src/content/en/blog/sortdata.txt
+src/content/en/blog/index.htmllisting: $(articles.en.blog.sortdata)
+src/content/en/blog/categories.htmllisting: src/content/en/blog/sortdata.txt
+src/content/en/blog/categories.htmllisting: $(articles.en.blog.sortdata)
+src/content/en/blog/categories.txt: src/content/en/blog/sortdata.txt
+src/content/en/blog/categories.txt: $(articles.en.blog.sortdata)
+src/content/en/blog/index.htmllisting src/content/en/blog/categories.htmllisting: $(articles.en.blog.indexentry)
+src/content/en/blog/categories.txt: $(articles.en.blog.categorydata)
+src/content/en/blog/categories.txt src/content/en/blog/feed.xml: $(articles.en.blog.feedentry)
+src/content/en/blog/categories.htmllisting src/content/en/blog/categories.xml: src/content/en/blog/categories.txt
+src/content/en/blog/categories.xml.gz: src/content/en/blog/categories.xml
+
+
+articles.en.pastebin.adoc = \
+ src/content/en/pastebin/2016/04/05/rpn.adoc \
+ src/content/en/pastebin/2018/07/11/nix-pinning.adoc \
+ src/content/en/pastebin/2018/07/13/guix-nixos-systemd.adoc \
+ src/content/en/pastebin/2018/07/13/guixbuilder-nixos.adoc \
+ src/content/en/pastebin/2018/07/13/guixbuilder.adoc \
+ src/content/en/pastebin/2018/07/13/nix-strpad.adoc \
+ src/content/en/pastebin/2018/07/25/nix-exps.adoc \
+ src/content/en/pastebin/2018/07/25/nix-showdrv.adoc \
+ src/content/en/pastebin/2019/06/08/inconsistent-hash.adoc \
+ src/content/en/pastebin/2019/12/29/raku-tuple-type.adoc \
+ src/content/en/pastebin/2020/01/04/guix-import-failure.adoc \
+ src/content/en/pastebin/2020/02/14/guix-shebang.adoc \
+ src/content/en/pastebin/2020/11/27/guix-build-local.adoc \
+ src/content/en/pastebin/2020/12/15/guix-pack-fail.adoc \
+ src/content/en/pastebin/2021/04/03/naive-slugify-js.adoc \
+ src/content/en/pastebin/2021/06/08/reading-session-pt1.adoc \
+ src/content/en/pastebin/2021/06/22/curl-wget.adoc \
+ src/content/en/pastebin/2021/08/11/h1-spacing.adoc \
+ src/content/en/pastebin/2021/09/02/sicp-3-19.adoc \
+ src/content/en/pastebin/2021/09/03/sicp-persistent-queue.adoc \
+ src/content/en/pastebin/2022/07/14/git-cleanup.adoc \
+ src/content/en/pastebin/2023/07/22/funcallable-amop.adoc \
+
+articles.en.pastebin.sortdata = $(articles.en.pastebin.adoc:.adoc=.sortdata)
+articles.en.pastebin.indexentry = $(articles.en.pastebin.adoc:.adoc=.indexentry)
+articles.en.pastebin.feedentry = $(articles.en.pastebin.adoc:.adoc=.feedentry)
+articles.en.pastebin.categorydata = $(articles.en.pastebin.adoc:.adoc=.categorydata)
+src/content/en/pastebin/sortdata.txt: deps.mk
+ printf '%s\n' $(articles.en.pastebin.sortdata) > $@
+
+src/content/en/pastebin/feed.xml: src/content/en/pastebin/sortdata.txt
+src/content/en/pastebin/feed.xml: $(articles.en.pastebin.sortdata)
+src/content/en/pastebin/index.htmllisting: src/content/en/pastebin/sortdata.txt
+src/content/en/pastebin/index.htmllisting: $(articles.en.pastebin.sortdata)
+src/content/en/pastebin/categories.htmllisting: src/content/en/pastebin/sortdata.txt
+src/content/en/pastebin/categories.htmllisting: $(articles.en.pastebin.sortdata)
+src/content/en/pastebin/categories.txt: src/content/en/pastebin/sortdata.txt
+src/content/en/pastebin/categories.txt: $(articles.en.pastebin.sortdata)
+src/content/en/pastebin/index.htmllisting src/content/en/pastebin/categories.htmllisting: $(articles.en.pastebin.indexentry)
+src/content/en/pastebin/categories.txt: $(articles.en.pastebin.categorydata)
+src/content/en/pastebin/categories.txt src/content/en/pastebin/feed.xml: $(articles.en.pastebin.feedentry)
+src/content/en/pastebin/categories.htmllisting src/content/en/pastebin/categories.xml: src/content/en/pastebin/categories.txt
+src/content/en/pastebin/categories.xml.gz: src/content/en/pastebin/categories.xml
+
+
+articles.en.podcast.adoc = \
+ src/content/en/podcast/2020/12/19/test-entry.adoc \
+
+articles.en.podcast.sortdata = $(articles.en.podcast.adoc:.adoc=.sortdata)
+articles.en.podcast.indexentry = $(articles.en.podcast.adoc:.adoc=.indexentry)
+articles.en.podcast.feedentry = $(articles.en.podcast.adoc:.adoc=.feedentry)
+articles.en.podcast.categorydata = $(articles.en.podcast.adoc:.adoc=.categorydata)
+src/content/en/podcast/sortdata.txt: deps.mk
+ printf '%s\n' $(articles.en.podcast.sortdata) > $@
+
+src/content/en/podcast/feed.xml: src/content/en/podcast/sortdata.txt
+src/content/en/podcast/feed.xml: $(articles.en.podcast.sortdata)
+src/content/en/podcast/index.htmllisting: src/content/en/podcast/sortdata.txt
+src/content/en/podcast/index.htmllisting: $(articles.en.podcast.sortdata)
+src/content/en/podcast/categories.htmllisting: src/content/en/podcast/sortdata.txt
+src/content/en/podcast/categories.htmllisting: $(articles.en.podcast.sortdata)
+src/content/en/podcast/categories.txt: src/content/en/podcast/sortdata.txt
+src/content/en/podcast/categories.txt: $(articles.en.podcast.sortdata)
+src/content/en/podcast/index.htmllisting src/content/en/podcast/categories.htmllisting: $(articles.en.podcast.indexentry)
+src/content/en/podcast/categories.txt: $(articles.en.podcast.categorydata)
+src/content/en/podcast/categories.txt src/content/en/podcast/feed.xml: $(articles.en.podcast.feedentry)
+src/content/en/podcast/categories.htmllisting src/content/en/podcast/categories.xml: src/content/en/podcast/categories.txt
+src/content/en/podcast/categories.xml.gz: src/content/en/podcast/categories.xml
+
+
+articles.en.screencast.adoc = \
+ src/content/en/screencast/2021/02/07/autoqemu.adoc \
+
+articles.en.screencast.sortdata = $(articles.en.screencast.adoc:.adoc=.sortdata)
+articles.en.screencast.indexentry = $(articles.en.screencast.adoc:.adoc=.indexentry)
+articles.en.screencast.feedentry = $(articles.en.screencast.adoc:.adoc=.feedentry)
+articles.en.screencast.categorydata = $(articles.en.screencast.adoc:.adoc=.categorydata)
+src/content/en/screencast/sortdata.txt: deps.mk
+ printf '%s\n' $(articles.en.screencast.sortdata) > $@
+
+src/content/en/screencast/feed.xml: src/content/en/screencast/sortdata.txt
+src/content/en/screencast/feed.xml: $(articles.en.screencast.sortdata)
+src/content/en/screencast/index.htmllisting: src/content/en/screencast/sortdata.txt
+src/content/en/screencast/index.htmllisting: $(articles.en.screencast.sortdata)
+src/content/en/screencast/categories.htmllisting: src/content/en/screencast/sortdata.txt
+src/content/en/screencast/categories.htmllisting: $(articles.en.screencast.sortdata)
+src/content/en/screencast/categories.txt: src/content/en/screencast/sortdata.txt
+src/content/en/screencast/categories.txt: $(articles.en.screencast.sortdata)
+src/content/en/screencast/index.htmllisting src/content/en/screencast/categories.htmllisting: $(articles.en.screencast.indexentry)
+src/content/en/screencast/categories.txt: $(articles.en.screencast.categorydata)
+src/content/en/screencast/categories.txt src/content/en/screencast/feed.xml: $(articles.en.screencast.feedentry)
+src/content/en/screencast/categories.htmllisting src/content/en/screencast/categories.xml: src/content/en/screencast/categories.txt
+src/content/en/screencast/categories.xml.gz: src/content/en/screencast/categories.xml
+
+
+articles.en.til.adoc = \
+ src/content/en/til/2020/08/12/filename-timestamp.adoc \
+ src/content/en/til/2020/08/13/code-jekyll.adoc \
+ src/content/en/til/2020/08/14/browse-git.adoc \
+ src/content/en/til/2020/08/16/git-search.adoc \
+ src/content/en/til/2020/08/28/grep-online.adoc \
+ src/content/en/til/2020/09/04/cli-email-fun-profit.adoc \
+ src/content/en/til/2020/09/05/oldschool-pr.adoc \
+ src/content/en/til/2020/10/11/search-git-history.adoc \
+ src/content/en/til/2020/11/08/find-broken-symlink.adoc \
+ src/content/en/til/2020/11/12/diy-nix-bash-ci.adoc \
+ src/content/en/til/2020/11/12/git-bisect-automation.adoc \
+ src/content/en/til/2020/11/12/useful-bashvars.adoc \
+ src/content/en/til/2020/11/14/gpodder-media.adoc \
+ src/content/en/til/2020/11/30/git-notes-ci.adoc \
+ src/content/en/til/2020/12/15/shellcheck-repo.adoc \
+ src/content/en/til/2020/12/29/svg.adoc \
+ src/content/en/til/2021/01/12/curl-awk-emails.adoc \
+ src/content/en/til/2021/01/17/posix-shebang.adoc \
+ src/content/en/til/2021/04/24/cl-generic-precedence.adoc \
+ src/content/en/til/2021/04/24/clojure-autocurry.adoc \
+ src/content/en/til/2021/04/24/scm-nif.adoc \
+ src/content/en/til/2021/07/23/git-tls-gpg.adoc \
+ src/content/en/til/2021/08/11/js-bigint-reviver.adoc \
+
+articles.en.til.sortdata = $(articles.en.til.adoc:.adoc=.sortdata)
+articles.en.til.indexentry = $(articles.en.til.adoc:.adoc=.indexentry)
+articles.en.til.feedentry = $(articles.en.til.adoc:.adoc=.feedentry)
+articles.en.til.categorydata = $(articles.en.til.adoc:.adoc=.categorydata)
+src/content/en/til/sortdata.txt: deps.mk
+ printf '%s\n' $(articles.en.til.sortdata) > $@
+
+src/content/en/til/feed.xml: src/content/en/til/sortdata.txt
+src/content/en/til/feed.xml: $(articles.en.til.sortdata)
+src/content/en/til/index.htmllisting: src/content/en/til/sortdata.txt
+src/content/en/til/index.htmllisting: $(articles.en.til.sortdata)
+src/content/en/til/categories.htmllisting: src/content/en/til/sortdata.txt
+src/content/en/til/categories.htmllisting: $(articles.en.til.sortdata)
+src/content/en/til/categories.txt: src/content/en/til/sortdata.txt
+src/content/en/til/categories.txt: $(articles.en.til.sortdata)
+src/content/en/til/index.htmllisting src/content/en/til/categories.htmllisting: $(articles.en.til.indexentry)
+src/content/en/til/categories.txt: $(articles.en.til.categorydata)
+src/content/en/til/categories.txt src/content/en/til/feed.xml: $(articles.en.til.feedentry)
+src/content/en/til/categories.htmllisting src/content/en/til/categories.xml: src/content/en/til/categories.txt
+src/content/en/til/categories.xml.gz: src/content/en/til/categories.xml
+
+
+articles.pt.hea.adoc = \
+ src/content/pt/hea/2020/08/12/arquivo-datado.adoc \
+
+articles.pt.hea.sortdata = $(articles.pt.hea.adoc:.adoc=.sortdata)
+articles.pt.hea.indexentry = $(articles.pt.hea.adoc:.adoc=.indexentry)
+articles.pt.hea.feedentry = $(articles.pt.hea.adoc:.adoc=.feedentry)
+articles.pt.hea.categorydata = $(articles.pt.hea.adoc:.adoc=.categorydata)
+src/content/pt/hea/sortdata.txt: deps.mk
+ printf '%s\n' $(articles.pt.hea.sortdata) > $@
+
+src/content/pt/hea/feed.xml: src/content/pt/hea/sortdata.txt
+src/content/pt/hea/feed.xml: $(articles.pt.hea.sortdata)
+src/content/pt/hea/index.htmllisting: src/content/pt/hea/sortdata.txt
+src/content/pt/hea/index.htmllisting: $(articles.pt.hea.sortdata)
+src/content/pt/hea/categorias.htmllisting: src/content/pt/hea/sortdata.txt
+src/content/pt/hea/categorias.htmllisting: $(articles.pt.hea.sortdata)
+src/content/pt/hea/categorias.txt: src/content/pt/hea/sortdata.txt
+src/content/pt/hea/categorias.txt: $(articles.pt.hea.sortdata)
+src/content/pt/hea/index.htmllisting src/content/pt/hea/categorias.htmllisting: $(articles.pt.hea.indexentry)
+src/content/pt/hea/categorias.txt: $(articles.pt.hea.categorydata)
+src/content/pt/hea/categorias.txt src/content/pt/hea/feed.xml: $(articles.pt.hea.feedentry)
+src/content/pt/hea/categorias.htmllisting src/content/pt/hea/categorias.xml: src/content/pt/hea/categorias.txt
+src/content/pt/hea/categorias.xml.gz: src/content/pt/hea/categorias.xml
diff --git a/description b/description
deleted file mode 100644
index d9838c6..0000000
--- a/description
+++ /dev/null
@@ -1 +0,0 @@
-Blog and personal website (euandre.org).
diff --git a/long-description b/long-description
deleted file mode 100644
index d9838c6..0000000
--- a/long-description
+++ /dev/null
@@ -1 +0,0 @@
-Blog and personal website (euandre.org).
diff --git a/meta.capim b/meta.capim
new file mode 100644
index 0000000..a64fa37
--- /dev/null
+++ b/meta.capim
@@ -0,0 +1,11 @@
+{
+ :description "Blog and personal website (euandre.org)."
+ :baseurl "https://euandre.org"
+ :email "eu@euandre.org"
+ :dependencies {
+ :build #{
+ eslaides
+ mkwb
+ }
+ }
+}
diff --git a/mkdeps.sh b/mkdeps.sh
new file mode 100755
index 0000000..9df3e5a
--- /dev/null
+++ b/mkdeps.sh
@@ -0,0 +1,166 @@
+#!/bin/sh
+set -eu
+
+export LANG=POSIX.UTF-8
+
+
+normalize() {
+ xargs realpath --relative-to=. | sort
+}
+
+pages() {
+ find src/pages/*/*.adoc -type f | normalize
+}
+
+articles() {
+ find src/collections/*/*/*/ -type f -name '*.adoc' | normalize
+}
+
+slides() {
+ find src/slides/*/*/ -type f -name '*.adoc' | normalize
+}
+
+indexes() {
+ find src/collections/*/*/index.adoc | normalize
+}
+
+categories() {
+ grep -l '^:type: categories$' src/collections/*/*/*.adoc | normalize
+}
+
+media() {
+ find src/content/ \( -type f -and \! -type l \) -and \( \
+ -name '*.flac' -or \
+ -name '*.ogg' -or \
+ -name '*.webm' \
+ \)
+}
+
+tarballs() {
+ find src/content/ \( -type f -and \! -type l \) -and \( \
+ -name '*.tar.gz' \
+ \)
+}
+
+extras() {
+ media
+ tarballs
+}
+
+listings() {
+ indexes
+ categories
+}
+
+files() {
+ pages
+ articles
+ listings
+}
+
+
+pages | varlist 'pages.adoc'
+articles | varlist 'articles.adoc'
+slides | varlist 'slides.adoc'
+categories | varlist 'categories.adoc'
+indexes | varlist 'indexes.adoc'
+indexes | sed 's|/index\.adoc$|/feed.xml|' | varlist 'feeds.xml'
+find src/content/img/ -name '*.svg' | varlist 'images.svg'
+media | varlist 'sources.media'
+tarballs | varlist 'sources.tarballs'
+extras | varlist 'sources.extras'
+find po/*.po po/*.pot | varlist 'sources.po'
+
+
+{
+ files | sed 's/^\(.*\)\.adoc$/\1.html/'
+ files | sed 's/^\(.*\)\.adoc$/\1.snippets/'
+ slides | sed 's/^\(.*\)\.adoc$/\1.pdf/'
+ indexes | sed 's|^\(.*\)/index\.adoc$|\1/feed.xml|'
+ media | sed 's/^\(.*\)$/\1.torrent/'
+} | sed 's/^\(.*\)$/\1.gz:\t\1/'
+printf '\n'
+
+files | sed 's/^\(.*\)\.adoc$/\1.htmlbody\t\1.snippets\t\1.conf:\t\1.adoc/'
+files | sed 's/^\(.*\)\.adoc$/\1.html:\t\1.conf\t\1.htmlbody/'
+printf '\n'
+
+files | sed 's/^\(.*\)\.adoc$/\1.updatedat-check:\t\1.conf/'
+files | sed 's/^\(.*\)\.adoc$/\1.links-internal-check:\t\1.links/'
+files | sed 's/^\(.*\)\.adoc$/\1.caslinks:\t\1.links/'
+printf '\n'
+
+articles | sed 's/^\(.*\)\.adoc$/\1.feedentry:\t\1.conf\t\1.htmlbody/'
+articles | sed 's/^\(.*\)\.adoc$/\1.sortdata:\t\1.conf/'
+articles | sed 's/^\(.*\)\.adoc$/\1.categorydata:\t\1.conf/'
+printf '\n'
+
+listings | sed 's/^\(.*\)\.adoc$/\1.htmlheader\t\1.htmlfooter:\t\1.conf/'
+listings | sed 's/^\(.*\)\.adoc$/\1.htmllisting:\t\1.conf/'
+listings | sed 's/^\(.*\)\.adoc$/\1.html:\t\1.htmlheader\t\1.htmlfooter/'
+listings | sed 's/^\(.*\)\.adoc$/\1.html:\t\1.htmllisting\t\1.htmlbody/'
+printf '\n'
+
+media | sed 's/^\(.*\)$/\1.torrent:\t\1/'
+printf '\n'
+
+slides | sed 's/^\(.*\)\.adoc$/\1.ps:\t\1.adoc/'
+slides | sed 's/^\(.*\)\.adoc$/\1.pdf:\t\1.ps/'
+printf '\n'
+
+
+
+collectionentries() {
+ langlink="$1"
+ colllink="$2"
+ lang="$(basename "$langlink")"
+ c="$(printf '%s' "$colllink" | normalize)"
+ plural="$(cat src/names/categories/"$lang".txt)"
+
+ printf '\n\n'
+ name="$(basename "$c")"
+ art=articles."$lang"
+ find "$c"/*/ -type f -name '*.adoc' | varlist "$art.$name.adoc"
+ echo "$art.$name.sortdata = \$($art.$name.adoc:.adoc=.sortdata)"
+ echo "$art.$name.indexentry = \$($art.$name.adoc:.adoc=.indexentry)"
+ echo "$art.$name.feedentry = \$($art.$name.adoc:.adoc=.feedentry)"
+ echo "$art.$name.categorydata = \$($art.$name.adoc:.adoc=.categorydata)"
+
+ printf '%s/sortdata.txt:\tdeps.mk\n' "$c"
+ printf '\tprintf %s $(%s.%s.sortdata) > $@\n\n' "'%s\n'" "$art" "$name"
+
+ listings="
+ feed.xml
+ index.htmllisting
+ $plural.htmllisting
+ $plural.txt
+ "
+ for lst in $listings; do
+ printf '%s/%s:\t%s/sortdata.txt\n' "$c" "$lst" "$c"
+ printf '%s/%s:\t$(%s.%s.sortdata)\n' "$c" "$lst" "$art" "$name"
+ done
+
+ printf '%s/index.htmllisting\t' "$c"
+ printf '%s/%s.htmllisting:\t' "$c" "$plural"
+ printf '$(%s.%s.indexentry)\n' "$art" "$name"
+
+ printf '%s/%s.txt:\t' "$c" "$plural"
+ printf '$(%s.%s.categorydata)\n' "$art" "$name"
+
+ printf '%s/%s.txt\t' "$c" "$plural"
+ printf '%s/feed.xml:\t' "$c"
+ printf '$(%s.%s.feedentry)\n' "$art" "$name"
+
+ printf '%s/%s.htmllisting\t' "$c" "$plural"
+ printf '%s/%s.xml:\t' "$c" "$plural"
+ printf '%s/%s.txt\n' "$c" "$plural"
+
+ printf '%s/%s.xml.gz:\t%s/%s.xml\n' "$c" "$plural" "$c" "$plural"
+}
+
+
+for langlink in src/collections/*; do
+ for colllink in "$langlink"/*; do
+ collectionentries "$langlink" "$colllink"
+ done
+done
diff --git a/music/choro-da-saudade.ly b/music/choro-da-saudade.ly
deleted file mode 100644
index 2f6e686..0000000
--- a/music/choro-da-saudade.ly
+++ /dev/null
@@ -1,233 +0,0 @@
-\version "2.20.0"
-\pointAndClickOff
-#(ly:set-option 'embed-source-code #t)
-
-\include "common.ly.include"
-\include "guitar.ly.include"
-
-\header {
- title = "Choro da Saudade"
- composer = "Agustín Barrios"
- copyright = \copyright
- tagline = \tagline
-}
-
-globals = {
- \clef "treble_8"
- \key g \minor
- \time 2/4
- \set fingeringOrientations = #'(left)
- \tempo Largo 4 = 50
-}
-
-upper = \relative c {
- \globals
-
- \repeat volta 2 {
- <g-0>16 <d'-0> <bes'-2 ees-4> d, <bes'-2 d-3> d, d, d' |
- <a-1> <d-0> <c'-4 ees-3> d, <c'-4 d-2> d, d, d' |
- }
-
- <g,-0>8. <d'-0>16 <bes'-2>\tenuto( <a-1>) a <g-0> |
- \repeat volta 2 {
- \mark \markup { \musicglyph #"scripts.segno" }
- \repeat unfold 2 {
- <ees'-2>4 <cis-1>16( <d-2>) <fis-1> <a-4> |
- \pestanaSpan "III" <bes-4>8.\startTextSpan <g-1>16 <d-1> <bes-1> <g-3>( <d-0>)\stopTextSpan |
- \pestanaSpan "V" <c-1>\startTextSpan <d'-2> <fis-3>( <g-4>) <a-1> <bes-2> <c-4> a\stopTextSpan |
- }
- \alternative {
- {
- <d-4>4\fermata <c-4>16( <bes-2>) <a-1> <g\2-4> |
- <bes-2>4 <a-1>16( <bes-2>) <c-4> a |
- \pestanaSpan "I" <g-4>4\startTextSpan <f-1>16 <ees-4> <d-3> <c-1>\stopTextSpan |
- \pestanaSpan "III" <f-4>(\startTextSpan <d-1>) <bes-1> <g-1> <f-1>( <d-0>) <bes-1> <g-9>\stopTextSpan |
- <fis-2>8.\fermata <d'-0>16 <bes'-2>\tenuto( <a-1>) a <g-0> |
- }
-
- {
- <d''-4>4 \pestanaSpan "VII" <b-1>16\startTextSpan <g-2> <f-4> <d-1>\stopTextSpan |
- \pestanaSpan "V" <c-1>\startTextSpan <g-1> <ees-4> <c-1> <a-3>( <bes-4>) <c-1> <ees-4>\stopTextSpan |
- \pestanaSpan "III" <d-0>\startTextSpan <bes'-1> <d g-1>8 \appoggiatura { <d-1>16 <f-4> } <ees-2>16 <d-1> <c-3> <bes-1>\stopTextSpan |
- <a,-1>( <cis-4>) <e-1> <g-0> <d-0> <fis-3> <a-1> <d-2> \mark \markup { \musicglyph #"scripts.coda" } |
- }
- }
- }
- \alternative {
- { <g,-0> <d-0> g d bes'\tenuto( a) a g | }
- { <g-0> <bes-2 d-3>8 <bes d>16 <bes d> <bes ees-4>8 <bes d>16 | }
- }
-
- % part B
- r <c-4 d-2>8 <c d>16 r <bes-1 d-2>8 <bes d> 16 |
- r <a-1 d-2>8 <a d>16 <a d> <a cis-1>8 <a d>16 |
- r <b-0 g'-4>8 <b g'>16 r <c-1 g'-4>8 <c g'>16 |
-
- \pestanaSpan "III" r\startTextSpan <d-1 g-1>8 <d g>16 <d g> <f-4 g-1>8 <e-3 g-1>16 |
- r <ees-2 g-1>8 <ees g>16 r <ees g>8 <ees g>16\stopTextSpan |
- \pestanaSpan "II" r\startTextSpan <c-4 ees-3 fis-1>8 <c ees fis>16 <c ees fis> <c ees g-2>8\stopTextSpan \pestanaSpan "III" <bes-1 d-1 bes'-4>16\startTextSpan |
- r <c-3 d-1 a'-4>8 <c d a'>16 r <c-3 dis-2 a'-4>8 <c dis a'>16\stopTextSpan |
-
- r <g-2 cis-3 bes'-4>8 <g cis bes'>16 r <g cis bes'>8 <g cis bes'>16 |
- r <d'-2 g-3 bes-1>8 <d g bes>16 <d g bes> <d g c-4>8 <d g bes>16 |
- r <cis-3 g'-4 a-2>8 <cis g' a>16 r <cis g' a>8 <cis g' a>16 |
- \pestanaSpan "V" r\startTextSpan <c-1 fis-3 a-1>8 <c fis a>16 <c fis a> <c fis bes-2>8 <c fis a>16\stopTextSpan |
-
- r <b-3 f'-4 g-2>8 <b f' g>16 r <b f' g>8 <b f' g>16 |
- \pestanaSpan "III" r\startTextSpan <bes-1 e-3 g-1>8 <bes e g>16 <bes e g> <bes e a-4>8 <bes e g>16\stopTextSpan |
- \pestanaSpan "I" r\startTextSpan <a-2 c-1 f-1>8 <a c f>16\stopTextSpan r <f'-1 bes-2 f'-4>8 <f bes f'>16 |
- r <d-1 gis-4>8 <d gis>16 r <cis-2 g'-3 e'-4>8 <cis g' e'>16 |
-
- \pestanaSpan "V" r\startTextSpan <c-1 fis-3 a-1>8 <c fis a>16\stopTextSpan r <b-0 f'-3 d'-4>8 <b f' d'>16 |
- \pestanaSpan "V" r\startTextSpan <g-1 ees'-3 c'-4>8 <g ees' c'>16 <g ees' c'> <g ees' b'-2>8 <g ees' c'>16\stopTextSpan |
- \pestanaSpan "III" r\startTextSpan <bes-1 d-1 bes'-4>8 <bes d bes'>16 <bes d bes'> <bes d a'-3>8 <bes d g-1>16\stopTextSpan |
- r <g-2 cis-3 bes'-4>8 <g cis bes'>16 \pestanaSpan "III" r\startTextSpan <c-3 d-1 a'-4>8 <c d a'>16 |
-
- <bes-1 d-1 g-1>8.\stopTextSpan <d,-0>16 <bes'-2>\tenuto( <a-1>) a <g-0> |
-
- \bar "||"
- <g-0>8. <d'-1\3>16\mordent <b-3\4> d <g-2> <b-1>
- \key g \major
- \repeat volta 2 {
- <g,,-0>16 <d'''-4> <b,-0 e-2> g, <gis-1\6> <d'''-4> <b,-0 f'-3> gis, |
- <a-1\6> <d''-4> <c,-3 fis-1> \corda "5" <d,-1>\startTextSpan <e-3>\stopTextSpan <g-0> <b-4\4> <d-1> |
- <g,,-0> <g''-1> <bes,-2 d-1> g, <g'-4\4> <fis'-1> <bes,-2 cis-1> g |
-
- <fis-3> <e'-0> <a,-1 d-2> e' <fis-2\3> <a-1> <c-4> <d-1> |
- <ais,,-1> <e'''-4> <cis,-2 g'-3> ais, \pestanaSpan "VII" <b-3>\startTextSpan <d''-4> <d,-1 fis-1> b,\stopTextSpan |
- <cis-1> <cis''-4> <ais,-2 e'-3> cis, <d-1> <b''-1> <b,-3 d-1> d, |
- <e-1> <d''-2> <b,-0 cis-3> e,, <fis-3> <fis''-1> <e,-1 ais-2> fis' |
-
- \rall <b,,-3>\startTextSpan <fis'-4> <d'-2> <cis-1> <c-4>\fermata <d-2>\fermata <fis-1>\fermata <a-4>\fermata\glissando |
- \once \override NoteColumn.glissando-skip = ##t <g,,-0>16\stopTextSpan <d'''-4> <b,-0 e-2> g, <gis-1\6> <d'''-4> <b,-0 f'-3> gis, |
- <a-1\6> <d''-4> <c,-3 fis-1> <c'-2>\mordent <a-4\2> <fis-1> <d-1\3> <c-4\4> |
- <b-1> <f'-2 e'-4>8 <f e'>16 <g,,-0> <f''-1 b-2 g'-4>8 <f b e-3>16 |
-
- <a,-3>16 <c-1 e-1 c'-4>8. r4 |
- \pestanaSpan "VII" <a,-1\6>16\startTextSpan <c''-3> <e,-4 g-2> a,,\stopTextSpan \pestanaSpan "VI" <cis-1\5>\startTextSpan <ais''-1> <cis,-1 g'-4> cis,\stopTextSpan |
- <d-0> <b''-1>8 <d,-1 g-2>16 r4 |
- <e,,-1>16 <g'-4 b-3 cis-1>8 <g b cis>16 <d,-0> <c''-1 fis-3 b-4>8 <c fis a-1>16 |
- }
- \alternative {
- { <b-2 d-1 g-1>8. <d-1\3>16\mordent <b-3\4> <d-1> <g-2> <b-1> | }
- { <b,-2 d-1 g-1>\tenuto <bes-2 d-3>8 <bes d>16 <bes d> <bes ees-4>8 <bes d>16 | }
- }
-
- \bar "||"
-
- <g-0>4 <b'-2 d-3 g-4>
-
- \bar "|."
-}
-
-lower = \relative c' {
- \globals
- \set stringNumberOrientations = #'(down)
-
- \repeat volta 2 {
- g,4. d8 |
- a'4. d,8 |
- }
-
- g8. r16 r4 |
- \repeat volta 2 {
- r16 <a-1> <g'-3> <c-4> d,,4 |
- <g-0>8 <bes'-1 d-1> r4 |
- <c,-1>2 |
-
- r16 <bes-2\6> <d'-1> <g-3> r4 |
- r16 <d,,-0> <d'-0> <c'-1> r4 |
- r16 <ees,,-1> <ees'-1> <a-2> r4 |
- d,,2 |
-
- fis8. r16 r4 |
- r16 <a-1> <g'-3> <c-4> d,,4 |
- <g-0>8 <bes'-1 d-1> r4 |
- <c,-1>2 |
-
- r16 <b-2\6> <f''-3> <g-1> r4 |
- r2 |
- d,4 d, |
- r d |
- }
- \alternative {
- { <g-0> r | }
- { <g-0>4. d8 | }
- }
-
- % part B
- <a'-1>4 <g-0> |
- <fis-3>4. <d-0>8 |
- <g-0>4 <a-2>\glissando |
- <b-2>4. <g-0>8 |
-
- <c-3>4 <bes-1> |
- <a-1>4. <g-0>8 |
- <fis-2>4\glissando <f-1>\glissando |
- <e-1>\glissando <ees-1> |
-
- <d-0>4. d8 |
- \slashedGrace cis'\glissando <e-1\5>4\glissando <ees-1> |
- <d-0>4. d8 |
- <d-1\5>4 <des-1> |
-
- <c-2>4. <bes-1>8 |
- <ees-1>4 <d-0> |
- <e-3\5> <a,-1\6> |
- <d,-0> <g-0> |
-
- <c-1>4. c8 |
- <d-0>4. d8 |
- <e,-1>4 <fis-2> |
- <g-0>8. r16 r4 |
-
- \bar "||"
-
- \repeat volta 2 {
- <g-0>4 b' |
- g,8. g16 gis8. gis16 |
- a8. d16 e4 |
- g,8. g16 g'8. g16 |
-
- fis8 a fis'4 |
- ais,,8. ais16 b8. b16 |
- cis8. cis16 d8. d16 |
- e8. <e,-1>32( <eis-2>) fis8 e' |
-
- b4 <d, fis'-3> |
- g8. g16 gis8. gis16 |
- a4 r |
- b' g, |
-
- \corda "4" a'8.\startTextSpan <gis-2>16 <b-3> <a-1>8\stopTextSpan \once \set stringNumberOrientations = #'(up) <e-3\5>16 |
- a,8. a16 cis8. cis16 |
- d4 \corda "5" <cis-1>16\startTextSpan <e-4> <d-2>\mordent <b-3>\stopTextSpan |
- e,4 d |
- }
- \alternative {
- { <g-0> b' | }
- { <g,-0>4. <d-0>8 | }
- }
-
- \bar "||"
-
- <g-0>4 <g-0 d'-0>
-
- \bar "|."
-}
-
-staff = \new Staff <<
- \upper
- \\
- \lower
->>
-
-\score {
- \staff
- \layout { }
-}
-
-\score {
- \unfoldRepeats \staff
- \midi { }
-}
diff --git a/music/common.ly.include b/music/common.ly.include
deleted file mode 100644
index b6ba93a..0000000
--- a/music/common.ly.include
+++ /dev/null
@@ -1,15 +0,0 @@
-copyright = \markup {
- \with-url #"https://creativecommons.org/licenses/by-sa/4.0/" \line {
- CC BY-SA 4.0
- }
-}
-tagline = \markup {
- \with-url #"https://euandre.org/music.html" \line {
- euandre.org/music.html
- }
-}
-
-rall = {
- \override TextSpanner.bound-details.left.text = "rall."
- \override TextSpanner.bound-details.right.text = "a tempo"
-}
diff --git a/music/dengoso.ly b/music/dengoso.ly
deleted file mode 100644
index b5a3fae..0000000
--- a/music/dengoso.ly
+++ /dev/null
@@ -1,310 +0,0 @@
-\version "2.20.0"
-\pointAndClickOff
-#(ly:set-option 'embed-source-code #t)
-
-\include "common.ly.include"
-\include "guitar.ly.include"
-
-\header {
- title = "Dengoso"
- composer = "João Pernambuco"
- copyright = \copyright
- tagline = \tagline
-}
-
-globals = {
- \clef "treble_8"
- \key d \major
- \time 2/4
- \set fingeringOrientations = #'(left)
- \tempo Andante 4 = 100
-}
-
-upper = \relative c {
- \globals
-
- r16 <d-0>8 <e-1>16 <fis-3> <e-1>8 <d-0>16 |
- \corda "4" <a'-3>8.\startTextSpan a16 <g-1>8. g16 |
- <fis-3>8. fis16 <f-2>8. <e-1>16\stopTextSpan |
- r2 |
-
- r16 d8 e16 fis e8 d16 |
- \corda "4" a'8.\startTextSpan a16 g8. g16 |
- fis8. fis16 f8. e16\stopTextSpan |
- r2 |
-
- <d'-2>8 \pestanaSpan "II" <fis,-3 a-1>\startTextSpan fis' d16\stopTextSpan <e-0>~ |
- e2 |
- d8 \pestanaSpan "II" <fis, a>\startTextSpan fis' d16\stopTextSpan e~ |
- e2 |
-
- \repeat volta 2 {
- \pestanaSpan "V" a16\startTextSpan <a,-3 d-4>8 <a d>16 r16 <g'-4>8 <fis-3>16 |
- a16 <g,-1 cis-2>8 <g cis>16 r16 <cis-2> <fis-4> <cis-2>\stopTextSpan |
- <a-1 e'-0>16 <a d>8 \pestanaSingle "II" <a cis>16 <a b>8 a |
- <b-0>16 <cis,-3 g'-4>8 <cis g'>16 \pestanaSpan "II" cis'8\startTextSpan <g a>\stopTextSpan |
-
- <d'-2>16 <fis,-3 a-1>8 <fis a>16 <fis'-4>8 <a,-3 c-2> |
- <e'-0>16 <a,-2 cis-3>8 <a cis>16 r16 <g-0> <cis-3> <e-0> |
- <d gis d'>16 <d gis d'>8 <d gis d'>16 <d gis cis>8 <d gis b> |
- <e'-4>16 <cis,-2 g'-3>8 <cis g'>16 <cis g' b\2>8 <cis-2 g'-3 a-1> |
-
- \pestanaSpan "V" a'16\startTextSpan <a, d>8 <a d>16 r16 <g'>8 <fis>16 |
- a16 <g, cis>8 <g cis>16 r16 <cis> <fis> <cis>\stopTextSpan |
- <a e'>16 <a d>8 \pestanaSingle "II" <a cis>16 <a b>8 a |
- <b>16 <cis, g'>8 <cis g'>16 \pestanaSpan "II" cis'8\startTextSpan <g a>\stopTextSpan |
-
- d'16 <fis, a>8 <fis a>16 dis'8 <fis, a> |
- <e'-0>8 <b-3 d-2>16 <ais-2 cis-1> <g b e> <d'-2>( <b-0>) <g-0> |
- <fis-3> <d'-2> <a-1> <fis-3> <e-1> <cis'-2> <g-0> <e-1>
- }
- \alternative {
- { d, \pestanaSpan "II" a''\startTextSpan d fis g fis g gis\stopTextSpan | }
- { r4 \pestanaSingle "VII" <d fis d'> | }
- }
-
- \repeat volta 2 {
- \key d \minor
-
- <a-3>16 a <d-4 f-2> a a a <d f> a |
- <g-3> g <bes-1 e-4> g g g <bes e> g |
- <f-3> f <a-2 d-4> f f f <a d> f |
- r <g-0 cis-2> <g cis> e' r <bes-3>( <a-2>) <g-0> |
-
- <fis-4> <c-3> <dis-1>( <fis-4>) <a-2> <fis-4> c' a |
- <d-4> <bes-3>( <g-0>) <d-0> <g-0> <d-0> <bes'-3>( <g-0>) |
- <gis-1> <d-0> <b'-9> <gis-1> <d'-4>( <b-0>) <f'-4\2>( <d-1>) |
- <bes'-2>( <a-1>) <g-4> <f-2> <e-1> <d-3> <cis-2> <bes-4\4> |
-
- <a-3> a <d-4 f-2> a a a <d f> a |
- <g-3> g <bes-1 e-4> g g g <bes e> g |
- \pestanaSpan "III" f,\startTextSpan f <bes' d> f, f f <bes' d> f,\stopTextSpan |
- \pestanaSpan "II" r\startTextSpan <a' cis> <a cis>( e')~ e4\stopTextSpan
-
- \pestanaSpan "III" <bes-1>16(\startTextSpan <c-4>) <d-1> <e-4> <f-4>\stopTextSpan <g-1> <a-3> <bes-4> |
- <bes-4>( <a-3>) <gis-2> <a-3> <d-4>( <bes-1>) <g-3> <e-0> |
- <f-1> <d-4> <a-2> <f-3> <e-1> <cis'-2> <g-0> <e-1>
- }
- \alternative {
- { r <g-0> <gis-1> <a-2> r <bes-3> <a-2> <g-0> | }
- { r4 \pestanaSingle "X" <f' a d> | }
- }
-
- \key d \major
- \pestanaSpan "V" a16\startTextSpan <a, d>8 <a d>16 r16 g'8 fis16 |
- a16 <g, cis>8 <g cis>16 r16 cis fis cis\stopTextSpan |
- <a e'>16 <a d>8 \pestanaSingle "II" <a cis>16 <a b>8 a |
- b16 <cis, g'>8 <cis g'>16 \pestanaSpan "II" cis'8\startTextSpan <g a>\stopTextSpan |
-
- d'16 <fis, a>8 <fis a>16 fis'8 <a, c> |
- e'16 <a, cis>8 <a cis>16 r16 g cis e |
- <d gis d'>16 <d gis d'>8 <d gis d'>16 <d gis cis>8 <d gis b> |
- e'16 <cis, g'>8 <cis g'>16 <cis g' b\2>8 <cis g' a> |
-
- \pestanaSpan "V" a'16\startTextSpan <a, d>8 <a d>16 r16 g'8 fis16 |
- a16 <g, cis>8 <g cis>16 r16 cis fis cis\stopTextSpan |
- <a e'>16 <a d>8 \pestanaSingle "II" <a cis>16 <a b>8 a |
- b16 <cis, g'>8 <cis g'>16 \pestanaSpan "II" cis'8\startTextSpan <g a>\stopTextSpan |
-
- d'16 <fis, a>8 <fis a>16 dis'8 <fis, a> |
- e'8 <b d>16 <ais cis> <g b e> d'( b) g |
- fis d' a fis e cis' g e
- r4 \pestanaSingle "VII" <d' fis d'> |
- \bar "||"
-
- \key d \minor
- <f-2>16 <a,-3 d-4>8 <a d>16 f'8 <a, d> |
- <e'-4>16 <g,-3 bes-1>8 <g bes>16 <d'-4>8 <f,-3 a-2> |
- <cis'-4>16 <e,-3 g-0>8 <e g>16 <f'-1>8 <g, cis> |
- <e'-0>16 <g,-0 cis-4>8 <g cis>16 <d'-4>8 <f,-3 a-2> |
-
- <f'-2>16 <a,-3 d-4>8 <a d>16 f'8 <a, d> |
- \pestanaSpan "V" <e'-1>16\startTextSpan <a,-3 c-1>8 <a c>16 a,8 <a' c> |
- <a-3>16 <c-1> <a'-4> c,\stopTextSpan <aes-3> <b-1> <b'-4> b, |
- \pestanaSpan "V" <a-3>\startTextSpan <c-1> <a'-1> c, a c a' c,\stopTextSpan |
-
- <f-2> <a,-3 d-4>8 <a d>16 f'8 <a, d> |
- f'16 <a, d>8 <a d>16 <e'-1>8 <a,-3 cis-2> |
- \pestanaSpan "III" <g'-1>16\startTextSpan <bes,-1 e-3>8 <bes e>16 g'8 <bes, e> |
- g'16 <bes, e>8 <bes e>16\stopTextSpan \pestanaSpan "V" <a'-1>8\startTextSpan <c,-1 fis-3>\stopTextSpan |
-
- \pestanaSpan "V" <bes'-2>16\startTextSpan <d,-3 g-4>8 <d g>16 <d g> <a'-1>8 <g-4>16 |
- <f-2> <a,-3 d-4>8 <a d>16 <a d> <e'-0>8 <d-4>16 |
- <c-2>^"[#]"
- <a-3>8 a16 a <f'-2>8 <e-1>16 |
- <d-4> <a-3>8 a16 a a8 a16\stopTextSpan |
-
- \key d \major
- \pestanaSpan "V" a'16\startTextSpan <a, d>8 <a d>16 r16 g'8 fis16 |
- a16 <g, cis>8 <g cis>16 r16 cis fis cis\stopTextSpan |
- <a e'>16 <a d>8 \pestanaSingle "II" <a cis>16 <a b>8 a |
- b16 <cis, g'>8 <cis g'>16 \pestanaSpan "II" cis'8\startTextSpan <g a>\stopTextSpan |
-
- d'16 <fis, a>8 <fis a>16 fis'8 <a, c> |
- e'16 <a, cis>8 <a cis>16 r16 g cis e |
- <d gis d'>16 <d gis d'>8 <d gis d'>16 <d gis cis>8 <d gis b> |
- e'16 <cis, g'>8 <cis g'>16 <cis g' b\2>8 <cis g' a> |
-
- \pestanaSpan "V" a'16\startTextSpan <a, d>8 <a d>16 r16 g'8 fis16 |
- a16 <g, cis>8 <g cis>16 r16 cis fis cis\stopTextSpan |
- <a e'>16 <a d>8 \pestanaSingle "II" <a cis>16 <a b>8 a |
- b16 <cis, g'>8 <cis g'>16 \pestanaSpan "II" cis'8\startTextSpan <g a>\stopTextSpan |
-
- d'16 <fis, a>8 <fis a>16 dis'8 <fis, a> |
- e'8 <b d>16 <ais cis> <g b e> d'( b) g |
- fis d' a fis e cis' g e
- r4 \pestanaSingle "VII" <d' fis d'> |
- \bar "|."
-}
-
-lower = \relative c {
- \globals
- \set stringNumberOrientations = #'(down)
-
- r2_\markup { \circle 6 = D } |
- r8 a r a |
- r8 a r a |
- r16 <a-0>8 <b-1>16 <cis-3> <b-1>8 <a-0>16 |
-
- d16 r8. r4
- r8 a r a |
- r8 a r a |
- r16 a8 b16 cis b8 a16 |
-
- d,4. a''16 a,~ |
- a a8 b16 cis b8 a16
- d,4. a''16 a,~ |
- a a8 b16 cis b8 a16 |
-
- \repeat volta 2 {
- d,4 dis' |
- <e-3> g, |
- <fis'-3>16 fis8 fis16 fis8 fis |
- e,4 cis' |
-
- d, dis |
- <e-1> <ais-1> |
- e' e |
- a, r4 |
-
- d, <dis'-2> |
- e g, |
- fis'16 fis8 fis16 fis8 fis |
- e,4 cis' |
-
- d, fis |
- <e-4>8 r8 r4 |
- fis'8. fis16 e8. e16 |
- }
- \alternative {
- { d,2 | }
- { d8 a' d4 | }
- }
-
- \repeat volta 2 {
- \key d \minor
-
- a'16\4 a8 a16 a a8 a16 |
- g\4 g8 g16 g g8 g16 |
- f f8 f16 f f8 f16 |
- e4 a, |
-
- r2 |
- d8. r16 r4 |
- r2 |
- a2 |
-
- a'16 a8 a16 a a8 a16 |
- g g8 g16 g g8 g16 |
- f, f8 f16 f f8 f16 |
- e4 r16 <e-1>( <f-2>) <fis-3> |
-
- g4 r4 |
- d g' |
- d8. f16 e8. e16 |
- }
- \alternative {
- { d4 r | }
- { d8 a d4 | }
- }
-
- \key d \major
- d,4 dis' |
- <e> g, |
- <fis'>16 fis8 fis16 fis8 fis |
- e,4 cis' |
-
- d, dis |
- <e> <ais> |
- e' e |
- a, r4 |
-
- d, <dis'> |
- e g, |
- fis'16 fis8 fis16 fis8 fis |
- e,4 cis' |
-
- d, fis |
- <e>8 r8 r4 |
- fis'8. fis16 e8. e16 |
- d,8 a' d4 |
-
- \key d \minor
- d,4 d |
- a' a |
- a a |
- a d, |
-
- d d |
- a' a |
- a' aes |
- a a |
-
- d,, d |
- d a' |
- a a |
- a d |
-
- <g-1> r |
- d, r |
- a'2 |
- d, |
-
- \key d \major
- d4 dis' |
- e g, |
- fis'16 fis8 fis16 fis8 fis |
- e,4 cis' |
-
- d, dis |
- e ais |
- e' e |
- a, r4 |
-
- d, dis' |
- e g, |
- fis'16 fis8 fis16 fis8 fis |
- e,4 cis' |
-
- d, fis |
- e8 r8 r4 |
- fis'8. fis16 e8. e16 |
- d,8 a' d4 |
- \bar "|."
-}
-
-staff = \new Staff <<
- \upper
- \\
- \lower
->>
-
-\score {
- \staff
- \layout { }
-}
-
-\score {
- \unfoldRepeats \staff
- \midi { }
-}
diff --git a/music/guitar.ly.include b/music/guitar.ly.include
deleted file mode 100644
index 592ad5c..0000000
--- a/music/guitar.ly.include
+++ /dev/null
@@ -1,33 +0,0 @@
-
-pestanaPrivate = \markup {
- \fontsize #-2
- \combine
- \roman C
- \translate #'(0.6 . -0.4) \draw-line #'(0 . 2.0)
-}
-
-pestanaSingle =
- #(define-music-function (fret-number music) (string? ly:music?)
- #{
- $music ^\markup { \pestanaPrivate \fontsize #-2 $fret-number }
- #})
-
-pestanaSpan =
- #(define-music-function (fret-number) (string?)
- #{
- \once \override TextSpanner.font-shape = #'upright
- \once \override TextSpanner.style = #'solid
- \once \override TextSpanner.bound-details.left.stencil-align-dir-y = #CENTER
- \once \override TextSpanner.bound-details.right.text = \markup { \draw-line #'(0 . -.5) }
- \once \override TextSpanner.bound-details.left.text = \markup { \pestanaPrivate \fontsize #-2 $fret-number }
- #})
-
-corda =
- #(define-music-function (string-number) (string?)
- #{
- \once \override TextSpanner.style = #'solid
- \once \override TextSpanner.font-size = #-5
- \once \override TextSpanner.bound-details.left.stencil-align-dir-y = #CENTER
- \once \override TextSpanner.bound-details.right.text = \markup { \draw-line #'( 0 . -.5) }
- \once \override TextSpanner.bound-details.left.text = \markup { \circle \number $string-number }
- #})
diff --git a/music/marcha-dos-marinheiros.ly b/music/marcha-dos-marinheiros.ly
deleted file mode 100644
index 48cc1c6..0000000
--- a/music/marcha-dos-marinheiros.ly
+++ /dev/null
@@ -1,78 +0,0 @@
-\version "2.20.0"
-\pointAndClickOff
-#(ly:set-option 'embed-source-code #t)
-
-\include "common.ly.include"
-\include "guitar.ly.include"
-
-\header {
- title = "Marcha dos Marinheiros"
- composer = "Canhoto"
- copyright = \copyright
- tagline = \tagline
-}
-
-globals = {
- \clef "treble_8"
- \key a \minor
- \time 2/4
- \set fingeringOrientations = #'(left)
- \tempo Adagio 4 = 70
-}
-
-upper = \relative c {
- \globals
-}
-
-lower = \relative c {
- \globals
- \set stringNumberOrientations = #'(down)
-
- d4~ d16 <b-2> <c-3> <d-0> |
- <e-2>4~ e16 <a,-0> <b-2> <c-3> |
- d4~ d16 <gis,-4> <a-0> <b-2> |
- <c-4>4 a8 e |
-
- d'4~ d16 <b-2> <c-3> <d-0> |
- <e-2>4~ e16 e <d-0> <c-4> |
- <e-2>8 <d-0> <c-4> <b-2> |
- a4~ a16 e' ees d |
-
- \repeat volta 2 {
- \mark \markup { \musicglyph #"scripts.segno" }
- <c-4>4 <b-2>8 <e,-0> |
- <a-0>4~ a16 <a'-2> <aes-1> <g-0> |
- <f-3>4 <e-1>8 <a,-0> ||
- <d-0>4~ d16 d e f |
-
- d'8 c b a |
- }
- \alternative {
- {
- gis4~ gis16 d e f |
- e8 d c b |
- a4~ a16 e' ees d |
- }
- {
- c'8 b a f |
- e d c b |
- a2 |
- }
- }
-}
-
-staff = \new Staff <<
- \upper
- \\
- \lower
->>
-
-\score {
- \staff
- \layout { }
-}
-
-\score {
- \unfoldRepeats \staff
- \midi { }
-}
diff --git a/po/de.po b/po/de.po
new file mode 100644
index 0000000..3524d73
--- /dev/null
+++ b/po/de.po
@@ -0,0 +1,161 @@
+# German translations for PACKAGE package
+# Copyright (C) 2025 Free Software Foundation, Inc.
+# This file is distributed under the same license as the PACKAGE package.
+# Automatically generated, 2025.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2025-05-01 19:02-0300\n"
+"PO-Revision-Date: 2025-04-29 19:46-0300\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"Language: de\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#. type: Plain text
+#: src/headers/ref.txt:2
+msgid "en/blog/\tBlog"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:4
+msgid "en/til/\tTIL"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:6
+msgid "en/podcast/\tPodcasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:8
+msgid "en/screencast/\tScreencasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:10
+msgid "en/pastebin/\tPastebins"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:12
+msgid "en/slide/\tSlides"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:13
+msgid "en/about.html\tAbout"
+msgstr ""
+
+#. type: Plain text
+#: src/names/categories/ref.txt:1
+msgid "categories"
+msgstr ""
+
+#. type: Plain text
+#: src/names/category/ref.txt:1
+msgid "category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/about.adoc:1
+#, no-wrap
+msgid "About"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:9
+msgid ""
+"Hi, I'm EuAndreh. I write software and, occasionally, music. You can find "
+"my contact information in the footer of this page, or mail my {mailto}"
+"[public inbox] ({archive}[archive])."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:11
+msgid ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/index.adoc:1
+#, no-wrap
+msgid "Today I Learned"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:6
+msgid "**T**oday **I** **L**earned: small entries of useful knowledge."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:7
+msgid "Shameless rip-off of {anna-e-so}[Anna e só]."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/categories.adoc:1
+#, no-wrap
+msgid "Articles by category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:1
+#, no-wrap
+msgid "Simple filename timestamp"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:7
+msgid ""
+"When writing Jekyll posts or creating log files with dates on them, I "
+"usually struggle with finding a direct way of accomplishing that. There's a "
+"simple solution: `date -I`."
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:12
+#, no-wrap
+msgid ""
+"./my-program.sh > my-program.$(date -I).log\n"
+"cp post-template.md _posts/$(date -I)-post-slug.md\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:16
+msgid ""
+"Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to "
+"readily create a `2020-08-12.md` file."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:20
+msgid ""
+"I always had to read `man date` or search the web over and over, and after "
+"doing this repeatedly it became clear that both `date -I` and `date -Is` "
+"(`s` here stands for seconds) are the thing that I'm looking for 95% of the "
+"time:"
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:27
+#, no-wrap
+msgid ""
+"# inside my-program.sh\n"
+"echo \"Program started at $(date -Is)\"\n"
+"# output is:\n"
+"# Program started at 2020-08-12T09:04:58-03:00\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:31
+msgid ""
+"Both date formats are hierarchical, having the bigger time intervals to the "
+"left. This means that you can easily sort them (and even tab-complete "
+"them) with no extra effort or tool required."
+msgstr ""
diff --git a/po/en.po b/po/en.po
new file mode 100644
index 0000000..cc244ff
--- /dev/null
+++ b/po/en.po
@@ -0,0 +1,171 @@
+# English translations for PACKAGE package
+# Copyright (C) 2025 Free Software Foundation, Inc.
+# This file is distributed under the same license as the PACKAGE package.
+# Automatically generated, 2025.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2025-05-01 19:02-0300\n"
+"PO-Revision-Date: 2025-04-29 19:46-0300\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"Language: en\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#. type: Plain text
+#: src/headers/ref.txt:2
+msgid "en/blog/\tBlog"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:4
+msgid "en/til/\tTIL"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:6
+msgid "en/podcast/\tPodcasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:8
+msgid "en/screencast/\tScreencasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:10
+msgid "en/pastebin/\tPastebins"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:12
+msgid "en/slide/\tSlides"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:13
+msgid "en/about.html\tAbout"
+msgstr ""
+
+#. type: Plain text
+#: src/names/categories/ref.txt:1
+msgid "categories"
+msgstr ""
+
+#. type: Plain text
+#: src/names/category/ref.txt:1
+msgid "category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/about.adoc:1
+#, no-wrap
+msgid "About"
+msgstr "About"
+
+#. type: Plain text
+#: src/content/en/about.adoc:9
+#, fuzzy
+#| msgid ""
+#| "Hi, I'm EuAndreh. I write software and occasionally music. You can find "
+#| "my contact information in the footer of this page, or mail my {mailto}"
+#| "[public inbox] ({archive}[archive])."
+msgid ""
+"Hi, I'm EuAndreh. I write software and, occasionally, music. You can find "
+"my contact information in the footer of this page, or mail my {mailto}"
+"[public inbox] ({archive}[archive])."
+msgstr ""
+"Hi, I'm EuAndreh. I write software and occasionally music. You can find my "
+"contact information in the footer of this page, or mail my {mailto}[public "
+"inbox] ({archive}[archive])."
+
+#. type: Plain text
+#: src/content/en/about.adoc:11
+msgid ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+msgstr ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+
+#. type: Title =
+#: src/content/en/til/index.adoc:1
+#, no-wrap
+msgid "Today I Learned"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:6
+msgid "**T**oday **I** **L**earned: small entries of useful knowledge."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:7
+msgid "Shameless rip-off of {anna-e-so}[Anna e só]."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/categories.adoc:1
+#, no-wrap
+msgid "Articles by category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:1
+#, no-wrap
+msgid "Simple filename timestamp"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:7
+msgid ""
+"When writing Jekyll posts or creating log files with dates on them, I "
+"usually struggle with finding a direct way of accomplishing that. There's a "
+"simple solution: `date -I`."
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:12
+#, no-wrap
+msgid ""
+"./my-program.sh > my-program.$(date -I).log\n"
+"cp post-template.md _posts/$(date -I)-post-slug.md\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:16
+msgid ""
+"Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to "
+"readily create a `2020-08-12.md` file."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:20
+msgid ""
+"I always had to read `man date` or search the web over and over, and after "
+"doing this repeatedly it became clear that both `date -I` and `date -Is` "
+"(`s` here stands for seconds) are the thing that I'm looking for 95% of the "
+"time:"
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:27
+#, no-wrap
+msgid ""
+"# inside my-program.sh\n"
+"echo \"Program started at $(date -Is)\"\n"
+"# output is:\n"
+"# Program started at 2020-08-12T09:04:58-03:00\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:31
+msgid ""
+"Both date formats are hierarchical, having the bigger time intervals to the "
+"left. This means that you can easily sort them (and even tab-complete "
+"them) with no extra effort or tool required."
+msgstr ""
diff --git a/po/eo.po b/po/eo.po
new file mode 100644
index 0000000..d0cda59
--- /dev/null
+++ b/po/eo.po
@@ -0,0 +1,161 @@
+# Esperanto translations for PACKAGE package
+# Copyright (C) 2025 Free Software Foundation, Inc.
+# This file is distributed under the same license as the PACKAGE package.
+# Automatically generated, 2025.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2025-05-01 19:02-0300\n"
+"PO-Revision-Date: 2025-04-29 19:46-0300\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"Language: eo\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#. type: Plain text
+#: src/headers/ref.txt:2
+msgid "en/blog/\tBlog"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:4
+msgid "en/til/\tTIL"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:6
+msgid "en/podcast/\tPodcasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:8
+msgid "en/screencast/\tScreencasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:10
+msgid "en/pastebin/\tPastebins"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:12
+msgid "en/slide/\tSlides"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:13
+msgid "en/about.html\tAbout"
+msgstr ""
+
+#. type: Plain text
+#: src/names/categories/ref.txt:1
+msgid "categories"
+msgstr ""
+
+#. type: Plain text
+#: src/names/category/ref.txt:1
+msgid "category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/about.adoc:1
+#, no-wrap
+msgid "About"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:9
+msgid ""
+"Hi, I'm EuAndreh. I write software and, occasionally, music. You can find "
+"my contact information in the footer of this page, or mail my {mailto}"
+"[public inbox] ({archive}[archive])."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:11
+msgid ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/index.adoc:1
+#, no-wrap
+msgid "Today I Learned"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:6
+msgid "**T**oday **I** **L**earned: small entries of useful knowledge."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:7
+msgid "Shameless rip-off of {anna-e-so}[Anna e só]."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/categories.adoc:1
+#, no-wrap
+msgid "Articles by category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:1
+#, no-wrap
+msgid "Simple filename timestamp"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:7
+msgid ""
+"When writing Jekyll posts or creating log files with dates on them, I "
+"usually struggle with finding a direct way of accomplishing that. There's a "
+"simple solution: `date -I`."
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:12
+#, no-wrap
+msgid ""
+"./my-program.sh > my-program.$(date -I).log\n"
+"cp post-template.md _posts/$(date -I)-post-slug.md\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:16
+msgid ""
+"Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to "
+"readily create a `2020-08-12.md` file."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:20
+msgid ""
+"I always had to read `man date` or search the web over and over, and after "
+"doing this repeatedly it became clear that both `date -I` and `date -Is` "
+"(`s` here stands for seconds) are the thing that I'm looking for 95% of the "
+"time:"
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:27
+#, no-wrap
+msgid ""
+"# inside my-program.sh\n"
+"echo \"Program started at $(date -Is)\"\n"
+"# output is:\n"
+"# Program started at 2020-08-12T09:04:58-03:00\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:31
+msgid ""
+"Both date formats are hierarchical, having the bigger time intervals to the "
+"left. This means that you can easily sort them (and even tab-complete "
+"them) with no extra effort or tool required."
+msgstr ""
diff --git a/po/es.po b/po/es.po
new file mode 100644
index 0000000..43e2cbf
--- /dev/null
+++ b/po/es.po
@@ -0,0 +1,161 @@
+# Spanish translations for PACKAGE package
+# Copyright (C) 2025 Free Software Foundation, Inc.
+# This file is distributed under the same license as the PACKAGE package.
+# Automatically generated, 2025.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2025-05-01 19:02-0300\n"
+"PO-Revision-Date: 2025-04-29 19:46-0300\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"Language: es\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
+#. type: Plain text
+#: src/headers/ref.txt:2
+msgid "en/blog/\tBlog"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:4
+msgid "en/til/\tTIL"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:6
+msgid "en/podcast/\tPodcasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:8
+msgid "en/screencast/\tScreencasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:10
+msgid "en/pastebin/\tPastebins"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:12
+msgid "en/slide/\tSlides"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:13
+msgid "en/about.html\tAbout"
+msgstr ""
+
+#. type: Plain text
+#: src/names/categories/ref.txt:1
+msgid "categories"
+msgstr ""
+
+#. type: Plain text
+#: src/names/category/ref.txt:1
+msgid "category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/about.adoc:1
+#, no-wrap
+msgid "About"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:9
+msgid ""
+"Hi, I'm EuAndreh. I write software and, occasionally, music. You can find "
+"my contact information in the footer of this page, or mail my {mailto}"
+"[public inbox] ({archive}[archive])."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:11
+msgid ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/index.adoc:1
+#, no-wrap
+msgid "Today I Learned"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:6
+msgid "**T**oday **I** **L**earned: small entries of useful knowledge."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:7
+msgid "Shameless rip-off of {anna-e-so}[Anna e só]."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/categories.adoc:1
+#, no-wrap
+msgid "Articles by category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:1
+#, no-wrap
+msgid "Simple filename timestamp"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:7
+msgid ""
+"When writing Jekyll posts or creating log files with dates on them, I "
+"usually struggle with finding a direct way of accomplishing that. There's a "
+"simple solution: `date -I`."
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:12
+#, no-wrap
+msgid ""
+"./my-program.sh > my-program.$(date -I).log\n"
+"cp post-template.md _posts/$(date -I)-post-slug.md\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:16
+msgid ""
+"Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to "
+"readily create a `2020-08-12.md` file."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:20
+msgid ""
+"I always had to read `man date` or search the web over and over, and after "
+"doing this repeatedly it became clear that both `date -I` and `date -Is` "
+"(`s` here stands for seconds) are the thing that I'm looking for 95% of the "
+"time:"
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:27
+#, no-wrap
+msgid ""
+"# inside my-program.sh\n"
+"echo \"Program started at $(date -Is)\"\n"
+"# output is:\n"
+"# Program started at 2020-08-12T09:04:58-03:00\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:31
+msgid ""
+"Both date formats are hierarchical, having the bigger time intervals to the "
+"left. This means that you can easily sort them (and even tab-complete "
+"them) with no extra effort or tool required."
+msgstr ""
diff --git a/po/euandre.org.pot b/po/euandre.org.pot
new file mode 100644
index 0000000..783e8ab
--- /dev/null
+++ b/po/euandre.org.pot
@@ -0,0 +1,161 @@
+# SOME DESCRIPTIVE TITLE
+# Copyright (C) YEAR Free Software Foundation, Inc.
+# This file is distributed under the same license as the PACKAGE package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2025-05-01 19:02-0300\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"Language: \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#. type: Plain text
+#: src/headers/ref.txt:2
+msgid "en/blog/\tBlog"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:4
+msgid "en/til/\tTIL"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:6
+msgid "en/podcast/\tPodcasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:8
+msgid "en/screencast/\tScreencasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:10
+msgid "en/pastebin/\tPastebins"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:12
+msgid "en/slide/\tSlides"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:13
+msgid "en/about.html\tAbout"
+msgstr ""
+
+#. type: Plain text
+#: src/names/categories/ref.txt:1
+msgid "categories"
+msgstr ""
+
+#. type: Plain text
+#: src/names/category/ref.txt:1
+msgid "category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/about.adoc:1
+#, no-wrap
+msgid "About"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:9
+msgid ""
+"Hi, I'm EuAndreh. I write software and, occasionally, music. You can find "
+"my contact information in the footer of this page, or mail my "
+"{mailto}[public inbox] ({archive}[archive])."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:11
+msgid ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/index.adoc:1
+#, no-wrap
+msgid "Today I Learned"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:6
+msgid "**T**oday **I** **L**earned: small entries of useful knowledge."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:7
+msgid "Shameless rip-off of {anna-e-so}[Anna e só]."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/categories.adoc:1
+#, no-wrap
+msgid "Articles by category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:1
+#, no-wrap
+msgid "Simple filename timestamp"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:7
+msgid ""
+"When writing Jekyll posts or creating log files with dates on them, I "
+"usually struggle with finding a direct way of accomplishing that. There's a "
+"simple solution: `date -I`."
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:12
+#, no-wrap
+msgid ""
+"./my-program.sh > my-program.$(date -I).log\n"
+"cp post-template.md _posts/$(date -I)-post-slug.md\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:16
+msgid ""
+"Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to "
+"readily create a `2020-08-12.md` file."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:20
+msgid ""
+"I always had to read `man date` or search the web over and over, and after "
+"doing this repeatedly it became clear that both `date -I` and `date -Is` "
+"(`s` here stands for seconds) are the thing that I'm looking for 95% of the "
+"time:"
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:27
+#, no-wrap
+msgid ""
+"# inside my-program.sh\n"
+"echo \"Program started at $(date -Is)\"\n"
+"# output is:\n"
+"# Program started at 2020-08-12T09:04:58-03:00\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:31
+msgid ""
+"Both date formats are hierarchical, having the bigger time intervals to the "
+"left. This means that you can easily sort them (and even tab-complete them) "
+"with no extra effort or tool required."
+msgstr ""
diff --git a/po/fr.po b/po/fr.po
new file mode 100644
index 0000000..7093686
--- /dev/null
+++ b/po/fr.po
@@ -0,0 +1,161 @@
+# French translations for PACKAGE package
+# Copyright (C) 2025 Free Software Foundation, Inc.
+# This file is distributed under the same license as the PACKAGE package.
+# Automatically generated, 2025.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2025-05-01 19:02-0300\n"
+"PO-Revision-Date: 2025-04-29 19:46-0300\n"
+"Last-Translator: Automatically generated\n"
+"Language-Team: none\n"
+"Language: fr\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+#. type: Plain text
+#: src/headers/ref.txt:2
+msgid "en/blog/\tBlog"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:4
+msgid "en/til/\tTIL"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:6
+msgid "en/podcast/\tPodcasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:8
+msgid "en/screencast/\tScreencasts"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:10
+msgid "en/pastebin/\tPastebins"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:12
+msgid "en/slide/\tSlides"
+msgstr ""
+
+#. type: Plain text
+#: src/headers/ref.txt:13
+msgid "en/about.html\tAbout"
+msgstr ""
+
+#. type: Plain text
+#: src/names/categories/ref.txt:1
+msgid "categories"
+msgstr ""
+
+#. type: Plain text
+#: src/names/category/ref.txt:1
+msgid "category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/about.adoc:1
+#, no-wrap
+msgid "About"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:9
+msgid ""
+"Hi, I'm EuAndreh. I write software and, occasionally, music. You can find "
+"my contact information in the footer of this page, or mail my {mailto}"
+"[public inbox] ({archive}[archive])."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/about.adoc:11
+msgid ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/index.adoc:1
+#, no-wrap
+msgid "Today I Learned"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:6
+msgid "**T**oday **I** **L**earned: small entries of useful knowledge."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:7
+msgid "Shameless rip-off of {anna-e-so}[Anna e só]."
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/categories.adoc:1
+#, no-wrap
+msgid "Articles by category"
+msgstr ""
+
+#. type: Title =
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:1
+#, no-wrap
+msgid "Simple filename timestamp"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:7
+msgid ""
+"When writing Jekyll posts or creating log files with dates on them, I "
+"usually struggle with finding a direct way of accomplishing that. There's a "
+"simple solution: `date -I`."
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:12
+#, no-wrap
+msgid ""
+"./my-program.sh > my-program.$(date -I).log\n"
+"cp post-template.md _posts/$(date -I)-post-slug.md\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:16
+msgid ""
+"Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to "
+"readily create a `2020-08-12.md` file."
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:20
+msgid ""
+"I always had to read `man date` or search the web over and over, and after "
+"doing this repeatedly it became clear that both `date -I` and `date -Is` "
+"(`s` here stands for seconds) are the thing that I'm looking for 95% of the "
+"time:"
+msgstr ""
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:27
+#, no-wrap
+msgid ""
+"# inside my-program.sh\n"
+"echo \"Program started at $(date -Is)\"\n"
+"# output is:\n"
+"# Program started at 2020-08-12T09:04:58-03:00\n"
+msgstr ""
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:31
+msgid ""
+"Both date formats are hierarchical, having the bigger time intervals to the "
+"left. This means that you can easily sort them (and even tab-complete "
+"them) with no extra effort or tool required."
+msgstr ""
diff --git a/po/note.txt b/po/note.txt
new file mode 100644
index 0000000..45279a4
--- /dev/null
+++ b/po/note.txt
@@ -0,0 +1,5 @@
+PO4A-HEADER: mode=eof
+
+
+
+// Generated from po4a(1).
diff --git a/po/po4a.cfg b/po/po4a.cfg
new file mode 100644
index 0000000..77f48a6
--- /dev/null
+++ b/po/po4a.cfg
@@ -0,0 +1,12 @@
+[options] --keep 0 --master-charset UTF-8 --localized-charset UTF-8 --addendum-charset UTF-8
+
+[po_directory] po
+
+[type: text] src/headers/ref.txt $lang:src/headers/$lang.txt
+[type: text] src/names/categories/ref.txt $lang:src/names/categories/$lang.txt
+[type: text] src/names/category/ref.txt $lang:src/names/category/$lang.txt
+
+[type: asciidoc] src/content/en/about.adoc pt:src/content/pt/sobre.adoc add_$lang:po/note.txt
+[type: asciidoc] src/content/en/til/index.adoc pt:src/content/pt/hea/index.adoc add_$lang:po/note.txt
+[type: asciidoc] src/content/en/til/categories.adoc pt:src/content/pt/hea/categorias.adoc add_$lang:po/note.txt
+[type: asciidoc] src/content/en/til/2020/08/12/filename-timestamp.adoc pt:src/content/pt/hea/2020/08/12/arquivo-datado.adoc add_$lang:po/note.txt
diff --git a/po/pt.po b/po/pt.po
new file mode 100644
index 0000000..4794358
--- /dev/null
+++ b/po/pt.po
@@ -0,0 +1,190 @@
+# Portuguese translations for PACKAGE package
+# Copyright (C) 2025 Free Software Foundation, Inc.
+# This file is distributed under the same license as the PACKAGE package.
+# Automatically generated, 2025.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: \n"
+"POT-Creation-Date: 2025-05-01 19:02-0300\n"
+"PO-Revision-Date: 2025-05-01 19:08-0300\n"
+"Last-Translator: EuAndreh <eu@euandre.org>\n"
+"Language-Team: none\n"
+"Language: pt\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n != 1);\n"
+"X-Generator: Poedit 3.5\n"
+
+#. type: Plain text
+#: src/headers/ref.txt:2
+msgid "en/blog/\tBlog"
+msgstr "#"
+
+#. type: Plain text
+#: src/headers/ref.txt:4
+msgid "en/til/\tTIL"
+msgstr "pt/hea/\tHEA"
+
+#. type: Plain text
+#: src/headers/ref.txt:6
+msgid "en/podcast/\tPodcasts"
+msgstr "#"
+
+#. type: Plain text
+#: src/headers/ref.txt:8
+msgid "en/screencast/\tScreencasts"
+msgstr "#"
+
+#. type: Plain text
+#: src/headers/ref.txt:10
+msgid "en/pastebin/\tPastebins"
+msgstr "#"
+
+#. type: Plain text
+#: src/headers/ref.txt:12
+msgid "en/slide/\tSlides"
+msgstr "#"
+
+#. type: Plain text
+#: src/headers/ref.txt:13
+msgid "en/about.html\tAbout"
+msgstr "pt/sobre.html\tSobre"
+
+#. type: Plain text
+#: src/names/categories/ref.txt:1
+msgid "categories"
+msgstr "categorias"
+
+#. type: Plain text
+#: src/names/category/ref.txt:1
+msgid "category"
+msgstr "categoria"
+
+#. type: Title =
+#: src/content/en/about.adoc:1
+#, no-wrap
+msgid "About"
+msgstr "Sobre"
+
+#. type: Plain text
+#: src/content/en/about.adoc:9
+msgid ""
+"Hi, I'm EuAndreh. I write software and, occasionally, music. You can find "
+"my contact information in the footer of this page, or mail my {mailto}"
+"[public inbox] ({archive}[archive])."
+msgstr ""
+"Oi, eu sou EuAndreh. Eu escrevo software e, ocasionalmente, música. Você "
+"encontra meus dados para entrar em contato no rodapé desta página, ou pode "
+"mandar também uma mensagem para minha {mailto}[caixa de entrada pública] "
+"({archive}[arquivo])."
+
+#. type: Plain text
+#: src/content/en/about.adoc:11
+msgid ""
+"This is my personal website where I write articles, publish software and "
+"more related work."
+msgstr ""
+"Esse é o meu site pessoal onde eu escrevo artigos, publico software e outros "
+"trabalhos relacionados."
+
+#. type: Title =
+#: src/content/en/til/index.adoc:1
+#, no-wrap
+msgid "Today I Learned"
+msgstr "Hoje Eu Aprendi"
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:6
+msgid "**T**oday **I** **L**earned: small entries of useful knowledge."
+msgstr ""
+"**H**oje **E**u **A**prendi (do inglês, _**T**oday **I** **L**earned_): "
+"pequenas postagens de conhecimentos úteis."
+
+#. type: Plain text
+#: src/content/en/til/index.adoc:7
+msgid "Shameless rip-off of {anna-e-so}[Anna e só]."
+msgstr "Cópia descarada da {anna-e-so}[Anna e só]."
+
+#. type: Title =
+#: src/content/en/til/categories.adoc:1
+#, no-wrap
+msgid "Articles by category"
+msgstr "Artigos por categoria"
+
+#. type: Title =
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:1
+#, no-wrap
+msgid "Simple filename timestamp"
+msgstr ""
+"Nome de arquivo com dia e hora de forma simplificada\n"
+":updatedat: 2025-04-30"
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:7
+msgid ""
+"When writing Jekyll posts or creating log files with dates on them, I "
+"usually struggle with finding a direct way of accomplishing that. There's a "
+"simple solution: `date -I`."
+msgstr ""
+"Quando vou escrever um artigo no Jekyll ou criar um arquivo de log com a "
+"data no nome, eu normalmente engasgo para achar um jeito direto de fazer "
+"isso. Há uma solução simples: `date -I`."
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:12
+#, no-wrap
+msgid ""
+"./my-program.sh > my-program.$(date -I).log\n"
+"cp post-template.md _posts/$(date -I)-post-slug.md\n"
+msgstr ""
+"./meu-programa.sh > meu-programa.$(date -I).log\n"
+"cp template-de-artigo.md _posts/$(date -I)-slug-do-artigo.md\n"
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:16
+msgid ""
+"Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to "
+"readily create a `2020-08-12.md` file."
+msgstr ""
+"Usar essa ferramenta padrão do GNU/Linux permite que você simplesmente "
+"escreva `touch $(date -I).md` para criar um arquivo `2020-08-12.md`."
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:20
+msgid ""
+"I always had to read `man date` or search the web over and over, and after "
+"doing this repeatedly it became clear that both `date -I` and `date -Is` "
+"(`s` here stands for seconds) are the thing that I'm looking for 95% of the "
+"time:"
+msgstr ""
+"Eu sempre tinha que parar para reler o `man date` ou buscar na internet de "
+"novo e de novo como fazer isso, e depois de sempre chegar no mesmo resultado "
+"ficou claro para mim que tanto `date -I` quanto `date -Is` (`s` de segundos) "
+"são as respostas que eu estou procurando 95% do tempo:"
+
+#. type: delimited block -
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:27
+#, no-wrap
+msgid ""
+"# inside my-program.sh\n"
+"echo \"Program started at $(date -Is)\"\n"
+"# output is:\n"
+"# Program started at 2020-08-12T09:04:58-03:00\n"
+msgstr ""
+"# dentro do meu-programa.sh\n"
+"echo \"Programa começou em $(date -Is)\"\n"
+"# saída é:\n"
+"# Programa começou em 2020-08-12T09:04:58-03:00\n"
+
+#. type: Plain text
+#: src/content/en/til/2020/08/12/filename-timestamp.adoc:31
+msgid ""
+"Both date formats are hierarchical, having the bigger time intervals to the "
+"left. This means that you can easily sort them (and even tab-complete "
+"them) with no extra effort or tool required."
+msgstr ""
+"Ambos os formatos de data são hierárquicos, com intervalos de tempo maior à "
+"esquerda. Isso significa que você pode facilmente ordená-los (e até usar "
+"TAB para completar) sem esforço ou ferramenta extra."
diff --git a/src/bin/pb b/src/bin/pb
new file mode 100755
index 0000000..c934124
--- /dev/null
+++ b/src/bin/pb
@@ -0,0 +1,60 @@
+#!/bin/sh
+set -eu
+
+usage() {
+ cat <<-'EOF'
+	Usage: pb [-t TITLE] FILENAME.adoc [CONTENT]
+ EOF
+}
+
+
+TITLE=FIXME
+while getopts 't:' flag; do
+ case "$flag" in
+ (t)
+ TITLE="$OPTARG"
+ ;;
+ (*)
+ usage >&2
+ exit 2
+ ;;
+ esac
+done
+shift $((OPTIND - 1))
+
+
+FILENAME="${1:-}"
+SOURCE="${2:-}"
+eval "$(assert-arg -- "$FILENAME" 'FILENAME')"
+FILENAME="${FILENAME%.adoc}.adoc"
+
+cd "$(dirname "$(readlink -f -- "$0")")"/../../
+
+DIR=src/content/en/pastebins/"$(timestamp | sed 's|-|/|g')"
+OUT="$DIR"/"$FILENAME"
+
+if [ -e "$OUT" ]; then
+ printf '"%s" already exists.\n' "$OUT" >&2
+ exit 2
+fi
+
+if [ -n "$SOURCE" ]; then
+ CONTENT="$(cat "$SOURCE")"
+else
+ if [ -t 0 ]; then
+ CONTENT='FIXME'
+ else
+ printf 'Reading from STDIN...\n' >&2
+ CONTENT="$(cat)"
+ exec 0</dev/tty
+ fi
+fi
+
+mkdir -p "$DIR"
+printf '= %s\n\n[source,sh]\n----\n%s\n----\n' "$TITLE" "$CONTENT" > "$OUT"
+${VISUAL:-${EDITOR:-vi}} "$OUT"
+
+sh mkdeps.sh > deps.mk
+git reset .
+git add "$OUT" deps.mk
+git commit -m "pb(1): Auto-add \"$OUT\""
diff --git a/src/collections/en/blog b/src/collections/en/blog
new file mode 120000
index 0000000..973ecf5
--- /dev/null
+++ b/src/collections/en/blog
@@ -0,0 +1 @@
+../../content/en/blog \ No newline at end of file
diff --git a/src/collections/en/pastebin b/src/collections/en/pastebin
new file mode 120000
index 0000000..5168ff3
--- /dev/null
+++ b/src/collections/en/pastebin
@@ -0,0 +1 @@
+../../content/en/pastebin \ No newline at end of file
diff --git a/src/collections/en/podcast b/src/collections/en/podcast
new file mode 120000
index 0000000..2334546
--- /dev/null
+++ b/src/collections/en/podcast
@@ -0,0 +1 @@
+../../content/en/podcast \ No newline at end of file
diff --git a/src/collections/en/screencast b/src/collections/en/screencast
new file mode 120000
index 0000000..e04adf4
--- /dev/null
+++ b/src/collections/en/screencast
@@ -0,0 +1 @@
+../../content/en/screencast \ No newline at end of file
diff --git a/src/collections/en/til b/src/collections/en/til
new file mode 120000
index 0000000..6519d09
--- /dev/null
+++ b/src/collections/en/til
@@ -0,0 +1 @@
+../../content/en/til \ No newline at end of file
diff --git a/src/collections/pt/hea b/src/collections/pt/hea
new file mode 120000
index 0000000..ce49d8f
--- /dev/null
+++ b/src/collections/pt/hea
@@ -0,0 +1 @@
+../../content/pt/hea \ No newline at end of file
diff --git a/src/content/.well-known/security.txt b/src/content/.well-known/security.txt
deleted file mode 100644
index f588b54..0000000
--- a/src/content/.well-known/security.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Contact: mailto:eu@euandre.org
-Expires: 2022-07-12T03:00:00.000Z
-Encryption: https://euandre.org/public.asc
-Preferred-Languages: en, pt, fr, eo, es
diff --git a/src/content/en/about.adoc b/src/content/en/about.adoc
new file mode 100644
index 0000000..14d5e22
--- /dev/null
+++ b/src/content/en/about.adoc
@@ -0,0 +1,11 @@
+= About
+
+:mailto: mailto:~euandreh/public-inbox@lists.sr.ht
+:archive: https://lists.sr.ht/~euandreh/public-inbox
+
+Hi, I'm EuAndreh. I write software and, occasionally, music. You can find my
+contact information in the footer of this page, or mail my {mailto}[public
+inbox] ({archive}[archive]).
+
+This is my personal website where I write articles, publish software and more
+related work.
diff --git a/_articles/2018-07-17-running-guix-on-nixos.md b/src/content/en/blog/2018/07/17/guix-nixos.adoc
index 6005f9f..42290f6 100644
--- a/_articles/2018-07-17-running-guix-on-nixos.md
+++ b/src/content/en/blog/2018/07/17/guix-nixos.adoc
@@ -1,30 +1,26 @@
----
-title: Running Guix on NixOS
-date: 2018-07-17
-layout: post
-lang: en
-ref: running-guix-on-nixos
----
-I wanted to run
-Guix on a NixOS machine. Even though the Guix manual explains how to do it
-[step by step][0], I needed a few extra ones to make it work properly.
+= Running Guix on NixOS
-[0]: https://www.gnu.org/software/guix/manual/en/html_node/Binary-Installation.html#Binary-Installation
+:install-step: https://www.gnu.org/software/guix/manual/en/html_node/Binary-Installation.html#Binary-Installation
-I couldn't just install GuixSD because my wireless network card
-doesn't have any free drivers (yet).
+I wanted to run Guix on a NixOS machine. Even though the Guix manual explains
+how to do it {install-step}[step by step], I needed a few extra ones to make it
+work properly.
-## Creating `guixbuilder` users
+I couldn't just install GuixSD because my wireless network card doesn't have any
+free drivers (yet).
-Guix requires you to create non-root users that will be used to perform
-the builds in the isolated environments.
+== Creating `guixbuilder` users
-The [manual][1] already provides you with a ready to run (as root) command for
-creating the build users:
+:manual: https://www.gnu.org/software/guix/manual/en/html_node/Build-Environment-Setup.html#Build-Environment-Setup
-[1]: https://www.gnu.org/software/guix/manual/en/html_node/Build-Environment-Setup.html#Build-Environment-Setup
+Guix requires you to create non-root users that will be used to perform the
+builds in the isolated environments.
-```bash
+The {manual}[manual] already provides you with a ready to run (as root) command
+for creating the build users:
+
+[source,sh]
+----
groupadd --system guixbuild
for i in `seq -w 1 10`;
do
@@ -33,15 +29,16 @@ do
-c "Guix build user $i" --system \
guixbuilder$i;
done
-```
+----
-However, In my personal NixOS I have disabled [`users.mutableUsers`][2], which
-means that even if I run the above command it means that they'll be removed once
-I rebuild my OS:
+:mutable-users: https://nixos.org/nixos/manual/index.html#sec-user-management
-[2]: https://nixos.org/nixos/manual/index.html#sec-user-management
+However, in my personal NixOS I have disabled
+{mutable-users}[`users.mutableUsers`], which means that even if I run the above
+command it means that they'll be removed once I rebuild my OS:
-```shell
+[source,sh]
+----
$ sudo nixos-rebuild switch
(...)
removing user ‘guixbuilder7’
@@ -55,12 +52,13 @@ removing user ‘guixbuilder2’
removing user ‘guixbuilder8’
removing user ‘guixbuilder5’
(...)
-```
+----
-Instead of enabling `users.mutableUsers` I could add the Guix users by
-adding them to my system configuration:
+Instead of enabling `users.mutableUsers` I could add the Guix users by adding
+them to my system configuration:
-```nix
+[source,nix]
+----
{ config, pkgs, ...}:
{
@@ -101,26 +99,27 @@ adding them to my system configuration:
};
};
}
-```
+----
+
+Here I used `fold` and the `//` operator to merge all of the configuration sets
+into a single `extraUsers` value.
-Here I used `fold` and the `//` operator to merge all of the
-configuration sets into a single `extraUsers` value.
+== Creating the `systemd` service
-## Creating the `systemd` service
+:service-file: https://git.savannah.gnu.org/cgit/guix.git/tree/etc/guix-daemon.service.in?id=00c86a888488b16ce30634d3a3a9d871ed6734a2
One other thing missing was the `systemd` service.
-First I couldn't just copy the `.service` file to `/etc` since in NixOS
-that folder isn't writable. But also I wanted the service to be better
-integrated with the OS.
+First I couldn't just copy the `.service` file to `/etc` since in NixOS that
+folder isn't writable. But also I wanted the service to be better integrated
+with the OS.
That was a little easier than creating the users, all I had to do was translate
-the provided [`guix-daemon.service.in`][3] configuration to an equivalent Nix
-expression
-
-[3]: https://git.savannah.gnu.org/cgit/guix.git/tree/etc/guix-daemon.service.in?id=00c86a888488b16ce30634d3a3a9d871ed6734a2
+the provided {service-file}[`guix-daemon.service.in`] configuration to an
+equivalent Nix expression:
-```ini
+[source,ini]
+----
# This is a "service unit file" for the systemd init system to launch
# 'guix-daemon'. Drop it in /etc/systemd/system or similar to have
# 'guix-daemon' automatically started.
@@ -142,11 +141,12 @@ TasksMax=8192
[Install]
WantedBy=multi-user.target
-```
+----
This sample `systemd` configuration file became:
-```nix
+[source,nix]
+----
guix-daemon = {
enable = true;
description = "Build daemon for GNU Guix";
@@ -160,12 +160,13 @@ guix-daemon = {
};
wantedBy = [ "multi-user.target" ];
};
-```
+----
-There you go! After running `sudo nixos-rebuild switch` I could get Guix
-up and running:
+There you go! After running `sudo nixos-rebuild switch` I could get Guix up and
+running:
-```bash
+[source,sh]
+----
$ guix package -i hello
The following package will be installed:
hello 2.10 /gnu/store/bihfrh609gkxb9dp7n96wlpigiv3krfy-hello-2.10
@@ -182,15 +183,15 @@ Creating manual page database...
2 packages in profile
$ hello
Hello, world!
-```
+----
+
+:nixos-modules: https://nixos.org/nixos/manual/index.html#sec-writing-modules
+:req: https://www.gnu.org/software/guix/manual/en/html_node/Requirements.html#Requirements
Some improvements to this approach are:
-1. looking into [NixOS modules][4] and trying to bundle everything together
- into a single logical unit;
-2. [build Guix from source][5] and share the Nix store and daemon with Guix.
+. looking into {nixos-modules}[NixOS modules] and trying to bundle everything
+ together into a single logical unit;
+. {req}[build Guix from source] and share the Nix store and daemon with Guix.
Happy Guix/Nix hacking!
-
-[4]: https://nixos.org/nixos/manual/index.html#sec-writing-modules
-[5]: https://www.gnu.org/software/guix/manual/en/html_node/Requirements.html#Requirements
diff --git a/src/content/en/blog/2018/08/01/npm-ci-reproducibility.adoc b/src/content/en/blog/2018/08/01/npm-ci-reproducibility.adoc
new file mode 100644
index 0000000..76bd8e6
--- /dev/null
+++ b/src/content/en/blog/2018/08/01/npm-ci-reproducibility.adoc
@@ -0,0 +1,147 @@
+= Verifying "npm ci" reproducibility
+:updatedat: 2019-05-22
+
+:empty:
+:npm-5: https://blog.npmjs.org/post/161081169345/v500
+:package-locks-old: https://docs.npmjs.com/files/package-locks
+:package-lock: https://docs.npmjs.com/files/package-lock.json
+:add-npm-ci: https://blog.npmjs.org/post/171556855892/introducing-npm-ci-for-faster-more-reliable
+:cli-docs: https://docs.npmjs.com/cli/install#description
+:tricky-issue: https://github.com/npm/npm/issues/17979#issuecomment-332701215
+
+When {npm-5}[npm@5] came bringing {package-locks-old}[package-locks] with it, I
+was confused about the benefits it provided, since running `npm install` more
+than once could resolve all the dependencies again and yield yet another fresh
+`package-lock.json` file. The message saying "you should add this file to
+version control" left me hesitant on what to
+do{empty}footnote:package-lock-message[
+ {cli-docs}[documentation] claims `npm install` is driven by the existing
+ `package-lock.json`, but that's actually {tricky-issue}[a little bit tricky].
+].
+
+However the {add-npm-ci}[addition of `npm ci`] filled this gap: it's a stricter
+variation of `npm install` which guarantees that "{package-lock}[subsequent
+installs are able to generate identical trees]". But are they really identical?
+I could see that I didn't have the same problems of different installation
+outputs, but I didn't know for *sure* if it was really identical.
+
+== Computing the hash of a directory's content
+
+:merkle-tree: https://en.wikipedia.org/wiki/Merkle_tree
+
+I quickly searched for a way to check for the hash signature of an entire
+directory tree, but I couldn't find one. I've made a poor man's
+{merkle-tree}[Merkle tree] implementation using `sha256sum` and a few piped
+commands at the terminal:
+
+[source,sh]
+----
+merkle-tree () {
+ dirname="${1-.}"
+ pushd "$dirname"
+ find . -type f |
+ sort |
+ xargs -I{} sha256sum "{}" |
+ sha256sum |
+ awk '{print $1}'
+ popd
+}
+----
+
+Going through it line by line:
+
+* #1 we define a Bash function called `merkle-tree`;
+* #2 it accepts a single argument: the directory to compute the merkle tree
+  from. If nothing is given, it runs on the current directory (`.`);
+* #3 we go to the directory, so we don't get different prefixes in `find`'s
+ output (like `../a/b`);
+* #4 we get all files from the directory tree. Since we're using `sha256sum` to
+ compute the hash of the file contents, we need to filter out folders from it;
+* #5 we need to sort the output, since different file systems and `find`
+ implementations may return files in different orders;
+* #6 we use `xargs` to compute the hash of each file individually through
+ `sha256sum`. Since a file may contain spaces we need to escape it with
+ quotes;
+* #7 we compute the hash of the combined hashes. Since `sha256sum` output is
+ formatted like `<hash> <filename>`, it produces a different final hash if a
+  file ever changes name without changing its content;
+* #8 we get the final hash output, excluding the `<filename>` (which is `-` in
+ this case, aka `stdin`).
+
+=== Positive points:
+
+. ignore timestamp: running more than once on different installations yields the
+ same hash;
+. the name of the file is included in the final hash computation.
+
+=== Limitations:
+
+. it ignores empty folders from the hash computation;
+. the implementation's only goal is to represent using a digest whether the
+ content of a given directory is the same or not. Leaf presence checking is
+ obviously missing from it.
+
+=== Testing locally with sample data
+
+[source,sh]
+----
+mkdir /tmp/merkle-tree-test/
+cd /tmp/merkle-tree-test/
+mkdir -p a/b/ a/c/ d/
+echo "one" > a/b/one.txt
+echo "two" > a/c/two.txt
+echo "three" > d/three.txt
+merkle-tree . # output is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+merkle-tree . # output still is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+echo "four" > d/four.txt
+merkle-tree . # output is now b5464b958969ed81815641ace96b33f7fd52c20db71a7fccc45a36b3a2ae4d4c
+rm d/four.txt
+merkle-tree . # output back to be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+echo "hidden-five" > a/b/one.txt
+merkle-tree . # output changed 471fae0d074947e4955e9ac53e95b56e4bc08d263d89d82003fb58a0ffba66f5
+----
+
+It seems to work for this simple test case.
+
+You can try copying and pasting it to verify the hash signatures.
+
+== Using `merkle-tree` to check the output of `npm ci`
+
+_I've done all of the following using Node.js v8.11.3 and npm@6.1.0_.
+
+In this test case I'll take the main repo of
+https://lernajs.io/[Lerna]footnote:lerna-package-lock[
+ Finding a big known repo that actually committed the `package-lock.json` file
+ was harder than I expected.
+]:
+
+[source,sh]
+----
+cd /tmp/
+git clone https://github.com/lerna/lerna.git
+cd lerna/
+git checkout 57ff865c0839df75dbe1974971d7310f235e1109
+npm ci
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+rm -rf node_modules/
+npm ci
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+npm ci # test if it also works with an existing node_modules/ folder
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+----
+Good job `npm ci` :)
+
+#6 and #9 take some time to run (21 seconds in my machine), but this specific
+use case isn't performance sensitive. The slowest step is computing the hash of
+each individual file.
+
+== Conclusion
+
+`npm ci` really "generates identical trees".
+
+I'm not aware of any other existing solution for verifying the hash signature of
+a directory. If you know any, shoot me an email, as I'd like to know it.
+
+== *Edit*
+
+2019-05-22: Fix spelling.
diff --git a/src/content/en/blog/2018/12/21/ytdl-subs.adoc b/src/content/en/blog/2018/12/21/ytdl-subs.adoc
new file mode 100644
index 0000000..10afbf6
--- /dev/null
+++ b/src/content/en/blog/2018/12/21/ytdl-subs.adoc
@@ -0,0 +1,279 @@
+= Using "youtube-dl" to manage YouTube subscriptions
+
+:ytsm-ann: https://old.reddit.com/r/DataHoarder/comments/9sg8q5/i_built_a_selfhosted_youtube_subscription_manager/
+:ytsm-code: https://github.com/chibicitiberiu/ytsm
+:ytdl: https://youtube-dl.org/
+
+I've recently read the {ytsm-ann}[announcement] of a very nice
+{ytsm-code}[self-hosted YouTube subscription manager]. I haven't used YouTube's
+built-in subscriptions for a while now, and haven't missed it at all. When I
+saw the announcement, I considered writing about the solution I've built on top
+of {ytdl}[youtube-dl].
+
+== Background: the problem with YouTube
+
+:net-giants: https://staltz.com/what-happens-when-you-block-internet-giants.html
+
+In many ways, I agree with {net-giants}[André Staltz's view on data ownership
+and privacy]:
+
+____
+I started with the basic premise that "I want to be in control of my data".
+Sometimes that meant choosing when to interact with an internet giant and how
+much I feel like revealing to them. Most of times it meant not interacting with
+them at all. I don't want to let them be in full control of how much they can
+know about me. I don't want to be in autopilot mode. (...) Which leads us to
+YouTube. While I was able to find alternatives to Gmail (Fastmail), Calendar
+(Fastmail), Translate (Yandex Translate), _etc._ YouTube remains as the most
+indispensable Google-owned web service. It is really really hard to avoid
+consuming YouTube content. It was probably the smartest startup acquisition
+ever. My privacy-oriented alternative is to watch YouTube videos through Tor,
+which is technically feasible but not polite to use the Tor bandwidth for these
+purposes. I'm still scratching my head with this issue.
+____
+
+Even though I don't use most alternative services he mentions, I do watch videos
+from YouTube. But I also feel uncomfortable logging in to YouTube with a Google
+account, watching videos, creating playlists and similar things.
+
+Using the mobile app is worse: you can't even block ads in there. You're in
+less control on what you share with YouTube and Google.
+
+== youtube-dl
+
+:other-sites: https://rg3.github.io/youtube-dl/supportedsites.html
+
+youtube-dl is a command-line tool for downloading videos, from YouTube and
+{other-sites}[many other sites]:
+
+[source,sh]
+----
+$ youtube-dl https://www.youtube.com/watch?v=rnMYZnY3uLA
+[youtube] rnMYZnY3uLA: Downloading webpage
+[youtube] rnMYZnY3uLA: Downloading video info webpage
+[download] Destination: A Origem da Vida _ Nerdologia-rnMYZnY3uLA.mp4
+[download] 100% of 32.11MiB in 00:12
+----
+
+It can be used to download individual videos as shown above, but it also has
+some interesting flags that we can use:
+
+* `--output`: use a custom template to create the name of the downloaded file;
+* `--download-archive`: use a text file for recording and remembering which
+ videos were already downloaded;
+* `--prefer-free-formats`: prefer free video formats, like `webm`, `ogv` and
+ Matroska `mkv`;
+* `--playlist-end`: how many videos to download from a "playlist" (a channel, a
+ user or an actual playlist);
+* `--write-description`: write the video description to a `.description` file,
+ useful for accessing links and extra content.
+
+Putting it all together:
+
+[source,sh]
+----
+$ youtube-dl "https://www.youtube.com/channel/UClu474HMt895mVxZdlIHXEA" \
+ --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
+ --prefer-free-formats \
+ --playlist-end 20 \
+ --write-description \
+ --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
+----
+
+This will download the latest 20 videos from the selected channel, and write
+down the video IDs in the `youtube-dl-seen.conf` file. Running it again
+immediately afterwards won't have any effect.
+
+If the channel posts one more video, running the same command again will
+download only the last video, since the other 19 were already downloaded.
+
+With this basic setup you have a minimal subscription system at work, and you
+can create some functions to help you manage that:
+
+[source,sh]
+----
+#!/bin/sh
+
+export DEFAULT_PLAYLIST_END=15
+
+download() {
+ youtube-dl "$1" \
+ --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
+ --prefer-free-formats \
+ --playlist-end "$2" \
+ --write-description \
+ --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
+}
+export -f download
+
+
+download_user() {
+ download "https://www.youtube.com/user/$1" "${2-$DEFAULT_PLAYLIST_END}"
+}
+export -f download_user
+
+
+download_channel() {
+ download "https://www.youtube.com/channel/$1" "${2-$DEFAULT_PLAYLIST_END}"
+}
+export -f download_channel
+
+
+download_playlist() {
+ download "https://www.youtube.com/playlist?list=$1" "${2-$DEFAULT_PLAYLIST_END}"
+}
+export -f download_playlist
+----
+
+With these functions, you now can have a subscription fetching script to
+download the latest videos from your favorite channels:
+
+[source,sh]
+----
+#!/bin/sh
+
+download_user ClojureTV 15
+download_channel 'UCmEClzCBDx-vrt0GuSKBd9g' 100
+download_playlist 'PLqG7fA3EaMRPzL5jzd83tWcjCUH9ZUsbX' 15
+----
+
+Now, whenever you want to watch the latest videos, just run the above script
+and you'll get all of them in your local machine.
+
+== Tradeoffs
+
+=== I've made it for myself, with my use case in mind
+
+
+[qanda]
+Offline::
+My internet speed is somewhat
+reasonable{empty}footnote:internet-speed[
+ Considering how expensive it is and the many ways it could be better, but also
+ how much it has improved over the last years, I say it's reasonable.
+], but it is really unstable. Either at work or at home, it's not uncommon to
+lose internet access for 2 minutes 3~5 times every day, and stay completely
+offline for a couple of hours once every week.
++
+Working through the hassle of keeping a playlist on disk has paid off many,
+many times. Sometimes I don't even notice when the connection drops for some
+minutes, because I'm watching a video and working on some document, all on my
+local computer.
++
+There's also no quality adjustment for YouTube's web player, I always pick the
+higher quality and it doesn't change during the video. For some types of
+content, like a podcast with some tiny visual resources, this doesn't change
+much. For other types of content, like a keynote presentation with text written
+on the slides, watching on 144p isn't really an option.
++
+If the internet connection drops during the video download, youtube-dl will
+resume from where it stopped.
++
+This is an offline first benefit that I really like, and works well for me.
+
+
+Sync the "seen" file::
+I already have a running instance of Nextcloud, so just dumping the
+`youtube-dl-seen.conf` file inside Nextcloud was a no-brainer.
++
+You could try putting it in a dedicated git repository, and wrap the script with
+an autocommit after every run. If you ever had a merge conflict, you'd simply
+accept all changes and then run the following to tidy up the file:
++
+[source,sh]
+----
+$ sort -u youtube-dl-seen.conf -o youtube-dl-seen.conf
+----
+
+
+Doesn't work on mobile::
+My primary device that I use every day is my laptop, not my phone. It works well
+for me this way.
++
+Also, it's harder to add ad-blockers to mobile phones, and most mobile software
+still depends on Google's and Apple's blessing.
++
+If you wish, you can sync the videos to the SD card periodically, but that's a
+bit of extra manual work.
+
+
+=== The Good
+
+
+[qanda]
+Better privacy::
+We don't even have to configure the ad-blocker to keep ads and trackers away!
++
+YouTube still has your IP address, so using a VPN is always a good idea.
+However, a timing analysis would be able to identify you (considering the
+current implementation).
+
+
+No need to self-host::
+There's no host that needs maintenance. Everything runs locally.
++
+As long as you keep youtube-dl itself up to date and sync your "seen" file,
+there's little extra work to do.
+
+
+Track your subscriptions with git::
+After creating a `subscriptions.sh` executable that downloads all the videos,
+you can add it to git and use it to track metadata about your subscriptions.
+
+
+=== The Bad
+
+
+[qanda]
+Maximum playlist size is your disk size::
+This is a good thing for getting a realistic view on your actual "watch later"
+list. However I've run out of disk space many times, and now I need to be more
+aware of how much is left.
+
+
+=== The Ugly
+
+We can only avoid all the bad parts of YouTube with youtube-dl as long as
+YouTube keeps the videos public and programmatically accessible. If YouTube
+ever blocks that, we'd lose the ability to consume content this way, but also
+lose confidence in considering YouTube a healthy repository of videos on the
+internet.
+
+
+== Going beyond
+
+Since you're running everything locally, here are some possibilities to be
+explored:
+
+
+=== A playlist that is too long for being downloaded all at once
+
+You can wrap the `download_playlist` function (let's call the wrapper
+`inc_download`) and instead of passing it a fixed number to the `--playlist-end`
+parameter, you can store the `$n` in a folder (something like
+`$HOME/.yt-db/$PLAYLIST_ID`) and increment it by `$step` every time you run
+`inc_download`.
+
+This way you can incrementally download videos from a huge playlist without
+filling your disk with gigabytes of content all at once.
+
+
+=== Multiple computer scenario
+
+The `download_playlist` function could be aware of the specific machine that it
+is running on and apply specific policies depending on the machine: always
+download everything; only download videos that aren't present anywhere else;
+_etc._
+
+
+== Conclusion
+
+youtube-dl is a great tool to keep at hand. It covers a really large range of
+video websites and works robustly.
+
+Feel free to copy and modify this code, and send me suggestions of improvements
+or related content.
+
+== _Edit_
+
+2019-05-22: Fix spelling.
diff --git a/src/content/en/blog/2019/06/02/nixos-stateless-workstation.adoc b/src/content/en/blog/2019/06/02/nixos-stateless-workstation.adoc
new file mode 100644
index 0000000..f89a106
--- /dev/null
+++ b/src/content/en/blog/2019/06/02/nixos-stateless-workstation.adoc
@@ -0,0 +1,146 @@
+= Using NixOS as a stateless workstation
+
+:empty:
+:nixos: https://nixos.org/
+
+Last
+week{empty}footnote:last-week[
+ "Last week" as of the start of this writing, so around the end of May 2019.
+] I changed back to an
+old{empty}footnote:old-computer[
+ I was using a 32GB RAM, i7 and 250GB SSD Samsung laptop. The switch was back
+ to a 8GB RAM, i5 and 500GB HDD Dell laptop. The biggest difference I noticed
+ was on faster memory, both RAM availability and the disk speed, but I had
+ 250GB less local storage space.
+] Samsung laptop, and installed {nixos}[NixOS] on it.
+
+After using NixOS on another laptop for around two years, I wanted to verify how
+reproducible my desktop environment was, and how far NixOS can actually go in
+recreating my whole OS from my configuration files and personal data. I
+gravitated towards NixOS after trying (and failing) to create an `install.sh`
+script that would imperatively install and configure my whole OS using apt-get.
+When I found a GNU/Linux distribution that was built on top of the idea of
+declaratively specifying the whole OS I was automatically
+convinced{empty}footnote:convincend-by-declarative-aspect[
+ The declarative configuration aspect is something that I now completely take
+ for granted, and wouldn't consider using something which isn't declarative. A
+ good metric to show this is me realising that I can't pinpoint the moment when
+ I decided to switch to NixOS. It's like I had a distant past when this wasn't
+ true.
+].
+
+I was impressed. Even though I've been experiencing the benefits of Nix
+isolation daily, I always felt skeptical that something would be missing,
+because the devil is always on the details. But the result was much better than
+expected!
+
+There were only 2 missing configurations:
+
+. tap-to-click on the touchpad wasn't enabled by default;
+. the default theme from the gnome-terminal is "Black on white" instead of
+ "White on black".
+
+That's all.
+
+I haven't checked if I can configure those in NixOS GNOME module, but I guess
+both are scriptable and could be set in a fictional `setup.sh` run.
+
+This makes me really happy, actually. More happy than I anticipated.
+
+Having such a powerful declarative OS makes me feel like my data is the really
+important stuff (as it should be), and I can interact with it on any
+workstation. All I need is an internet connection and a few hours to download
+everything. It feels like my physical workstation and the installed OS are
+serving me and my data, instead of me feeling as hostage to the specific OS
+configuration at the moment. Having a few backup copies of everything important
+extends such peacefulness.
+
+After this positive experience with recreating my OS from simple Nix
+expressions, I started to wonder how far I could go with this, and started
+considering other areas of improvement:
+
+== First run on a fresh NixOS installation
+
+Right now the initial setup relies on non-declarative manual tasks, like
+decrypting some credentials, or manually downloading *this* git repository with
+specific configurations before *that* one.
+
+I wonder what some areas of improvement are on this topic, and if investing in
+it is worth it (both time-wise and happiness-wise).
+
+== Emacs
+
+:spacemacs: https://spacemacs.org/
+:emacs: https://www.gnu.org/software/emacs/
+:layers: https://spacemacs.org/doc/LAYERS.html
+:there: https://nixos.org/nixos/manual/index.html#module-services-emacs-adding-packages
+:packages: https://www.gnu.org/software/guix/manual/en/html_node/Application-Setup.html#Emacs-Packages
+
+Right now I'm using the {spacemacs}[Spacemacs], which is a community package
+curation and configuration on top of {emacs}[Emacs].
+
+Spacemacs does support the notion of {layers}[layers], which you can
+declaratively specify and let Spacemacs do the rest.
+
+However this solution isn't nearly as robust as Nix: being purely functional,
+Nix does describe everything required to build a derivation, and knows how to do
+so. Spacemacs is closer to more traditional package managers: even though the
+layers list is declarative, the installation is still very much imperative.
+I've had trouble with Spacemacs not behaving the same on different computers,
+both with identical configurations, only brought to convergence back again after
+a `git clean -fdx` inside `~/.emacs.d/`.
+
+The ideal solution would be managing Emacs packages with Nix itself. After a
+quick search I did found that {there}[there is support for Emacs packages in
+Nix]. So far I was only aware of {packages}[Guix support for Emacs packages].
+
+This isn't a trivial change because Spacemacs does include extra curation and
+configuration on top of Emacs packages. I'm not sure the best way to improve
+this right now.
+
+== myrepos
+
+:myrepos: https://myrepos.branchable.com/
+
+I'm using {myrepos}[myrepos] to manage all my git repositories, and the general
+rule I apply is to add any repository specific configuration in myrepos'
+`checkout` phase:
+
+[source,sh]
+----
+# sample ~/.mrconfig file snippet
+[dev/guix/guix]
+checkout =
+ git clone https://git.savannah.gnu.org/git/guix.git guix
+ cd guix/
+ git config sendemail.to guix-patches@gnu.org
+----
+
+This way when I clone this repo again the email sending is already
+pre-configured.
+
+This works well enough, but the solution is too imperative, and my `checkout`
+phases tend to become brittle over time if not enough care is taken.
+
+== GNU Stow
+
+:not-at-all: https://euandre.org/git/dotfiles/tree/bash/symlinks.sh?id=316939aa215181b1d22b69e94241eef757add98d
+:stow: https://www.gnu.org/software/stow/
+
+For my home profile and personal configuration I already have a few dozens of
+symlinks that I manage manually. This has worked so far, but the solution is
+sometimes fragile and {not-at-all}[not declarative at all]. I wonder if
+something like {stow}[GNU Stow] can help me simplify this.
+
+== Conclusion
+
+:nix: https://nixos.org/nix/
+
+I'm really satisfied with NixOS, and I intend to keep using it. If what I've
+said interests you, maybe try tinkering with the {nix}[Nix package manager] (not
+the whole NixOS) on your current distribution (it can live alongside any other
+package manager).
+
+If you have experience with declarative Emacs package management, GNU Stow or
+any similar tool, _etc._, mail me some tips. If you don't have any experience
+at all, I'd still love to hear from you.
diff --git a/_articles/2020-08-10-guix-inside-sourcehut-builds-sr-ht-ci.md b/src/content/en/blog/2020/08/10/guix-srht.adoc
index 4d7e8d5..a89e86e 100644
--- a/_articles/2020-08-10-guix-inside-sourcehut-builds-sr-ht-ci.md
+++ b/src/content/en/blog/2020/08/10/guix-srht.adoc
@@ -1,30 +1,25 @@
----
-title: Guix inside sourcehut builds.sr.ht CI
-date: 2020-08-10
-updated_at: 2020-08-19
-layout: post
-lang: en
-ref: guix-inside-sourcehut-builds-sr-ht-ci
----
-After the release of the [NixOS images in builds.sr.ht][0] and much
-usage of it, I also started looking at [Guix][1] and
-wondered if I could get it on the awesome builds.sr.ht service.
-
-[0]: https://man.sr.ht/builds.sr.ht/compatibility.md#nixos
-[1]: https://guix.gnu.org/
-
-The Guix manual section on the [binary installation][2] is very thorough, and
-even a [shell installer script][3] is provided, but it is built towards someone
-installing Guix on their personal computer, and relies heavily on interactive
-input.
-
-[2]: https://guix.gnu.org/manual/en/guix.html#Binary-Installation
-[3]: https://git.savannah.gnu.org/cgit/guix.git/plain/etc/guix-install.sh
+= Guix inside sourcehut builds.sr.ht CI
+:updatedat: 2020-08-19
+
+:nixos: https://man.sr.ht/builds.sr.ht/compatibility.md#nixos
+:guix: https://guix.gnu.org/
+:binary-inst: https://guix.gnu.org/manual/en/guix.html#Binary-Installation
+:shell-inst: https://git.savannah.gnu.org/cgit/guix.git/plain/etc/guix-install.sh
+
+After the release of the {nixos}[NixOS images in builds.sr.ht] and much usage of
+it, I also started looking at {guix}[Guix] and wondered if I could get it on the
+awesome builds.sr.ht service.
+
+The Guix manual section on the {binary-inst}[binary installation] is very
+thorough, and even a {shell-inst}[shell installer script] is provided, but it is
+built towards someone installing Guix on their personal computer, and relies
+heavily on interactive input.
I developed the following set of scripts that I have been using for some time to
-run Guix tasks inside builds.sr.ht jobs. First, `install-guix.sh`:
+run Guix tasks inside builds.sr.ht jobs. First, `install-guix.sh`:
-```shell
+[source,sh]
+----
#!/usr/bin/env bash
set -x
set -Eeuo pipefail
@@ -62,15 +57,18 @@ ln -s /var/guix/profiles/per-user/root/current-guix/bin/guix .
ln -s /var/guix/profiles/per-user/root/current-guix/bin/guix-daemon .
guix archive --authorize < ~root/.config/guix/current/share/guix/ci.guix.gnu.org.pub
-```
+----
-Almost all of it is taken directly from the [binary installation][2] section
-from the manual, with the interactive bits stripped out: after downloading and
-extracting the Guix tarball, we create some symlinks, add guixbuild users and
-authorize the `ci.guix.gnu.org.pub` signing key.
+Almost all of it is taken directly from the {binary-inst}[binary installation]
+section from the manual, with the interactive bits stripped out: after
+downloading and extracting the Guix tarball, we create some symlinks, add
+guixbuild users and authorize the `ci.guix.gnu.org.pub` signing key.
-After installing Guix, we perform a `guix pull` to update Guix inside `start-guix.sh`:
-```shell
+After installing Guix, we perform a `guix pull` to update Guix inside
+`start-guix.sh`:
+
+[source,sh]
+----
#!/usr/bin/env bash
set -x
set -Eeuo pipefail
@@ -79,12 +77,13 @@ sudo guix-daemon --build-users-group=guixbuild &
guix pull
guix package -u
guix --version
-```
+----
Then we can put it all together in a sample `.build.yml` configuration file I'm
using myself:
-```yaml
+[source,yaml]
+----
image: debian/stable
packages:
- wget
@@ -103,26 +102,27 @@ tasks:
- docs: |
cd ./songbooks/
guix environment -m build-aux/guix.scm -- make publish-dist
-```
+----
We have to add the `guix-daemon` to `~/.buildenv` so it can be started on every
-following task run. Also, since we used `wget` inside `install-guix.sh`, we had
+following task run. Also, since we used `wget` inside `install-guix.sh`, we had
to add it to the images package list.
After the `install-guix` task, you can use Guix to build and test your project,
or run any `guix environment --ad-hoc my-package -- my script` :)
-## Improvements
+== Improvements
+
+:repository: https://git.sr.ht/~sircmpwn/builds.sr.ht
When I originally created this code I had a reason why to have both a `sudo`
call for `sudo ./scripts/install-guix.sh` and `sudo` usages inside
-`install-guix.sh` itself. I couldn't figure out why (it feels like my past self
-was a bit smarter 😬), but it feels ugly now. If it is truly required I could
-add an explanation for it, or remove this entirely in favor of a more elegant solution.
+`install-guix.sh` itself. I couldn't figure out why (it feels like my past self
+was a bit smarter 😬), but it feels ugly now. If it is truly required I could
+add an explanation for it, or remove this entirely in favor of a more elegant
+solution.
I could also contribute the Guix image upstream to builds.sr.ht, but there
-wasn't any build or smoke tests in the original [repository][4], so I wasn't
-inclined to make something that just "works on my machine" or add a maintainence
-burden to the author. I didn't look at it again recently, though.
-
-[4]: https://git.sr.ht/~sircmpwn/builds.sr.ht
+wasn't any build or smoke tests in the original {repository}[repository], so I
+wasn't inclined to make something that just ``works on my machine'' or add a
+maintenance burden to the author. I didn't look at it again recently, though.
diff --git a/src/content/en/blog/2020/08/31/database-i-wish-i-had.adoc b/src/content/en/blog/2020/08/31/database-i-wish-i-had.adoc
new file mode 100644
index 0000000..7f010b9
--- /dev/null
+++ b/src/content/en/blog/2020/08/31/database-i-wish-i-had.adoc
@@ -0,0 +1,299 @@
+= The database I wish I had
+:categories: mediator
+:updatedat: 2020-09-03
+
+:empty:
+:values-talk: https://vimeo.com/230142234
+:haskell-startup: https://www.youtube.com/watch?v=ZR3Jirqk6W8
+
+I watched the talk "{values-talk}[Platform as a Reflection of Values: Joyent,
+Node.js and beyond]" by Bryan Cantrill, and I think he was able to put into
+words something I already felt for some time: if there's no piece of software
+out there that reflects your values, it's time for you to build that
+software{empty}footnote:talk-time[
+ At the very end, at time 29:49. When talking about the draft of this article
+ with a friend, he noted that Bryan O'Sullivan (a different Bryan) says a
+ similar thing on his talk "{haskell-startup}[Running a startup on Haskell]",
+ at time 4:15.
+].
+
+I kind of agree with what he said, because this is already happening to me. I
+long for a database with a certain set of values, and for a few years I was just
+waiting for someone to finally write it. After watching his talk, Bryan is
+saying to me: "time to stop waiting, and start writing it yourself".
+
+So let me try to give an overview of such a database, and go over its values.
+
+== Overview
+
+I want a database that allows me to create decentralized client-side
+applications that can sync data.
+
+The best one-line description I can give right now is:
+
+____
+It's sort of like PouchDB, Git, Datomic, SQLite and Mentat.
+____
+
+A more descriptive version could be:
+
+____
+An embedded, immutable, syncable relational database.
+____
+
+Let's go over what I mean by each of those aspects one by one.
+
+=== Embedded
+
+:sqlite: https://sqlite.org/index.html
+:sqlite-whentouse: https://sqlite.org/whentouse.html
+:pouchdb: https://pouchdb.com/
+:couchdb: https://couchdb.apache.org/
+:mentat: https://github.com/mozilla/mentat
+:pouchdb-adapters: https://pouchdb.com/adapters.html
+:datomic-storage-services: https://docs.datomic.com/on-prem/storage.html
+:sqlite-amalgamation: https://www.sqlite.org/amalgamation.html
+:pointed-out: https://news.ycombinator.com/item?id=24338881
+
+I think the server-side database landscape is diverse and mature enough for my
+needs (even though I end up choosing SQLite most of the time), and what I'm
+after is a database to be embedded on client-side applications itself, be it
+desktop, browser, mobile, _etc._
+
+The purpose of such a database is not to keep some local cache of data in case of
+lost connectivity: we have good solutions for that already. It should serve as
+the source of truth, and allow the application to work on top of it.
+
+{sqlite}[*SQLite*] is a great example of that: it is a very powerful relational
+database that runs {sqlite-whentouse}[almost anywhere]. What it doesn't provide
+is the ability to run it on the browser: even though
+you could compile it to WebAssembly, [line-through]#it assumes a POSIX
+filesystem that would have to be
+emulated#{empty}footnote:posix-sqlite[
+ It was {pointed-out}[pointed out to me] that SQLite doesn't assume the
+ existence of a POSIX filesystem, as I wrongly stated. Thanks for the
+ correction.
+pass:[</p><p>]
+ This makes me consider it as a storage backend all by itself. I initially
+ considered having an SQLite storage backend as one implementation of the POSIX
+ filesystem storage API that I mentioned. My goal was to rely on it so I could
+ validate the correctness of the actual implementation, given SQLite's
+ robustness.
+pass:[</p><p>]
+ However it may be even better to just use SQLite, and get an ACID backend without
+ recreating a big part of SQLite from scratch. In fact, both Datomic and
+ PouchDB didn't create a storage backend for themselves, they just plugged into
+ what already existed and already worked. I'm beginning to think that it would
+ be wiser to just do the same, and drop entirely the from scratch
+ implementation that I mentioned.
+pass:[</p><p>]
+ That's not to say that adding an IndexedDB compatibility layer to SQLite would
+ be enough to make it fit the other requirements I mention on this page. SQLite
+ still is an implementation of an update-in-place, SQL, table-oriented database.
+ It is probably true that cherry-picking the relevant parts of SQLite (like
+ storage access, consistency, crash recovery, parser generator, *etc.*) and
+ leaving out the unwanted parts (SQL, tables, threading, *etc.*) would be
+ better than including the full SQLite stack, that's simply an optimization.
+ Both could even coexist, if desired.
+pass:[</p><p>]
+ SQLite would have to be treated similarly to how Datomic treats SQL databases:
+ instead of having a table for each entities, spread attributes over the
+ tables, *etc.*, it treats SQL databases as a key-value storage so it doesn't
+ have to re-implement interacting with the disk that other databases do well.
+pass:[</p><p>]
+ The tables would contain blocks of binary data, so there isn't a difference on
+ how the SQLite storage backend behaves and how the IndexedDB storage backend
+ behaves, much like how Datomic works the same regardless of the storage
+ backend, same for PouchDB.
+pass:[</p><p>]
+ I welcome corrections on what I said above, too.
+].
+
+{pouchdb}[*PouchDB*] is another great example: it's a full reimplementation of
+{couchdb}[CouchDB] that targets JavaScript environments, mainly the browser and
+Node.js. However I want a tool that can be deployed anywhere, and not limit its
+applications to places that already have a JavaScript runtime environment, or
+force the developer to bundle a JavaScript runtime environment with their
+application. This is true for GTK+ applications, command line programs, Android
+apps, _etc._
+
+{mentat}[*Mentat*] was an interesting project, but its reliance on SQLite makes
+it inherit most of the downsides (and benefits too) of SQLite itself.
+
+Having such a requirement imposes a different approach to storage: we have to
+decouple the knowledge about the intricacies of storage from the usage of
+storage itself, so that a module (say query processing) can access storage
+through an API without needing to know about its implementation. This allows
+the database to target a POSIX filesystems storage API and an IndexedDB storage
+API, and make the rest of the code agnostic about storage. PouchDB has such
+mechanism (called {pouchdb-adapters}[adapters]) and Datomic has them too (called
+{datomic-storage-services}[storage services]).
+
+This would allow the database to adapt to where it is embedded: when targeting
+the browser the IndexedDB storage API would provide the persistence layer that
+the database requires, and similarly the POSIX filesystem storage API would
+provide the persistence layer when targeting POSIX systems (like desktops,
+mobile, _etc._).
+
+But there's also an extra restriction that comes from being embedded: it
+needs to provide an embeddable artifact, most likely a binary library object
+that exposes a C compatible FFI, similar to {sqlite-amalgamation}[how SQLite
+does]. Bundling a full runtime environment is possible, but doesn't make it a
+compelling solution for embedding. This rules out most languages, and leaves
+us with C, Rust, Zig, and similar options that can target POSIX systems and
+WebAssembly.
+
+=== Immutable
+
+:datomic: https://www.datomic.com/
+:day-of-datomic: https://vimeo.com/116315075
+:git: https://git-scm.com/
+:sqlite-limits: https://sqlite.org/limits.html
+:datomic-no-history: https://docs.datomic.com/cloud/best.html#nohistory-for-high-churn
+
+Being immutable means that only new information is added, no in-place update
+ever happens, and nothing is ever deleted.
+
+Having an immutable database presents us with similar trade-offs found in
+persistent data structures, like lack of coordination when doing reads, caches
+being always coherent, and more usage of space.
+
+{datomic}[*Datomic*] is the go to database example of this: it will only add
+information (datoms) and allows you to query them in a multitude of ways.
+Stuart Halloway calls it "accumulate-only" over
+"append-only"{empty}footnote:accumulate-only[
+ Video "{day-of-datomic}[Day of Datomic Part 2]" on Datomic's information
+ model, at time 12:28.
+]:
+
+____
+It's accumulate-only, it is not append-only. So append-only, most people when
+they say that they're implying something physical about what happens.
+____
+
+Also a database can be append-only and overwrite existing information with new
+information, by doing clean-ups of "stale" data. I prefer to adopt the
+"accumulate-only" naming and approach.
+
+{git}[*Git*] is another example of this: new commits are always added on top of
+the previous data, and it grows by adding commits instead of replacing existing
+ones.
+
+Git repositories can only grow in size, and that is not only an acceptable
+condition, but also one of the reasons to use it.
+
+All this means that no in-place updates happen on data, and the database will
+be much more concerned about how compact and efficiently it stores data than how
+fast it does writes to disk. Being embedded, the storage limitation is either
+a) how much storage the device has or b) how much storage was designed for the
+application to consume. So even though the database could theoretically operate
+with hundreds of TBs, a browser page or mobile application wouldn't have access
+to this amount of storage. SQLite even {sqlite-limits}[says] that it does
+support approximately 280 TBs of data, but those limits are untested.
+
+The upside of keeping everything is that you can have historical views of your
+data, which is very powerful. This also means that applications should turn
+this off when not
+relevant{empty}footnote:no-history[
+ Similar to {datomic-no-history}[Datomic's `:db/noHistory`].
+].
+
+=== Syncable
+
+:3-way-merge: https://en.wikipedia.org/wiki/Merge_(version_control)
+:git-remote-gcrypt: https://spwhitton.name/tech/code/git-remote-gcrypt/
+
+This is a frequent topic when talking about offline-first solutions. When
+building applications that:
+
+* can fully work offline,
+* store data,
+* propagate that data to other application instances,
+
+then you'll need a conflict resolution strategy to handle all the situations
+where different application instances disagree. Those application instances
+could be a desktop and a browser version of the same application, or the same
+mobile app in different devices.
+
+A three-way merge seems to be the best approach, on top of which you could add
+application specific conflict resolution functions, like:
+
+* pick the change with higher timestamp;
+* if one change is a delete, pick it;
+* present the diff on the screen and allow the user to merge them.
+
+Some databases try to make this "easy", by choosing a strategy for you, but I've
+found that different applications require different conflict resolution
+strategies. Instead, the database should leave this up to the user to decide,
+and provide tools for them to do it.
+
+{3-way-merge}[*Three-way merges in version control*] are the best example,
+performing automatic merges when possible and asking the user to resolve
+conflicts when they appear.
+
+The unit of conflict for a version control system is a line of text. The
+database equivalent would probably be a single attribute, not a full entity or a
+full row.
+
+Making all the conflict resolution logic be local should allow the database to
+have encrypted remotes similar to how {git-remote-gcrypt}[git-remote-gcrypt]
+adds this functionality to Git. This would enable users to sync the application
+data across devices using an untrusted intermediary.
+
+=== Relational
+
+:datomic-datalog: https://docs.datomic.com/on-prem/query.html
+:datomic-model: https://docs.datomic.com/cloud/whatis/data-model.html#datoms
+
+I want the power of relational queries on the client applications.
+
+Most of the arguments against traditional table-oriented relational databases
+are related to write performance, but those don't apply here. The bottlenecks
+for client applications usually aren't write throughput. Nobody is interested
+in differentiating between 1 MB/s or 10 MB/s when you're limited to 500 MB
+total.
+
+The relational model of the database could either be based on SQL and tables
+like in SQLite, or maybe {datomic-datalog}[datalog] and {datomic-model}[datoms]
+like in Datomic.
+
+== From aspects to values
+
+Now let's try to translate the aspects above into values, as suggested by Bryan
+Cantrill.
+
+=== Portability
+
+Being able to target so many different platforms is a bold goal, and the
+embedded nature of the database demands portability to be a core value.
+
+=== Integrity
+
+When the local database becomes the source of truth of the application, it must
+provide consistency guarantees that enable applications to rely on it.
+
+=== Expressiveness
+
+The database should empower applications to slice and dice the data in any way
+they want to.
+
+== Next steps
+
+Since I can't find any database that fits these requirements, I've finally come
+to terms with doing it myself.
+
+It's probably going to take me a few years to do it, and making it portable
+between POSIX and IndexedDB will probably be the biggest challenge. I got
+myself a few books on databases to start.
+
+I wonder if I'll ever be able to get this done.
+
+== External links
+
+:reddit: https://old.reddit.com/r/programming/comments/ijwz5b/the_database_i_wish_i_had/
+:lobsters: https://lobste.rs/s/m9vkg4/database_i_wish_i_had
+:hn: https://news.ycombinator.com/item?id=24337244
+:list: https://lists.sr.ht/~euandreh/public-inbox/%3C010101744a592b75-1dce9281-f0b8-4226-9d50-fd2c7901fa72-000000%40us-west-2.amazonses.com%3E
+
+See discussions on {reddit}[Reddit], {lobsters}[lobsters], {hn}[HN] and {list}[a
+lengthy email exchange].
diff --git a/src/content/en/blog/2020/10/05/cargo2nix-demo.tar.gz b/src/content/en/blog/2020/10/05/cargo2nix-demo.tar.gz
new file mode 100644
index 0000000..43677ec
--- /dev/null
+++ b/src/content/en/blog/2020/10/05/cargo2nix-demo.tar.gz
Binary files differ
diff --git a/_articles/2020-10-05-cargo2nix-dramatically-simpler-rust-in-nix.md b/src/content/en/blog/2020/10/05/cargo2nix.adoc
index 368b62a..a2d478e 100644
--- a/_articles/2020-10-05-cargo2nix-dramatically-simpler-rust-in-nix.md
+++ b/src/content/en/blog/2020/10/05/cargo2nix.adoc
@@ -1,31 +1,20 @@
----
+= cargo2nix: Dramatically simpler Rust in Nix
+:sort: 1
-title: "cargo2nix: Dramatically simpler Rust in Nix"
+:empty:
+:swift2nix: link:swift2nix.html
+:cargo2nix: link:cargo2nix-demo.tar.gz
-date: 2020-10-05 2
+In the same vein of my earlier post on {swift2nix}[swift2nix], I was able to
+quickly prototype a Rust and Cargo variation of it: {cargo2nix}[cargo2nix].
-layout: post
-
-lang: en
-
-ref: cargo2nix-dramatically-simpler-rust-in-nix
-
----
-
-In the same vein of my earlier post on
-[swift2nix]({% link _articles/2020-10-05-swift2nix-run-swift-inside-nix-builds.md %}), I
-was able to quickly prototype a Rust and Cargo variation of it:
-[cargo2nix].
-
-
-The initial prototype is even smaller than swift2nix: it has only
-37 lines of code.
-
-[cargo2nix]: https://euandre.org/static/attachments/cargo2nix.tar.gz
+The initial prototype is even smaller than swift2nix: it has only 37 lines of
+code.
Here's how to use it (snippet taken from the repo's README):
-```nix
+[source,nix]
+----
let
niv-sources = import ./nix/sources.nix;
mozilla-overlay = import niv-sources.nixpkgs-mozilla;
@@ -50,33 +39,34 @@ in pkgs.stdenv.mkDerivation {
touch $out
'';
}
-```
+----
That `cargo test` part on line 20 is what I have been fighting with every
-"\*2nix" available for Rust out there. I don't want to bash any of them. All I
+"*2nix" available for Rust out there. I don't want to bash any of them. All I
want is to have full control of what Cargo commands to run, and the "*2nix" tool
-should only setup the environment for me. Let me drive Cargo myself, no need to
+should only setup the environment for me. Let me drive Cargo myself, no need to
parameterize how the tool runs it for me, or even replicate its internal
behaviour by calling the Rust compiler directly.
Sure it doesn't support private registries or Git dependencies, but how much
-bigger does it has to be to support them? Also, it doesn't support those **yet**,
-there's no reason it can't be extended. I just haven't needed it yet, so I
-haven't added. Patches welcome.
+bigger does it have to be to support them? Also, it doesn't support those *yet*,
+there's no reason it can't be extended. I just haven't needed it yet, so I
+haven't added it. Patches welcome.
The layout of the `vendor/` directory is more explicit and public than what
-swift2nix does: it is whatever the command `cargo vendor` returns. However I
+swift2nix does: it is whatever the command `cargo vendor` returns. However I
haven't checked if the shape of the `.cargo-checksum.json` is specified, or
internal to Cargo.
Try out the demo (also taken from the repo's README):
-```shell
+[source,sh]
+----
pushd "$(mktemp -d)"
wget -O- https://euandre.org/static/attachments/cargo2nix-demo.tar.gz |
tar -xv
cd cargo2nix-demo/
nix-build
-```
+----
Report back if you wish.
diff --git a/src/content/en/blog/2020/10/05/cargo2nix.tar.gz b/src/content/en/blog/2020/10/05/cargo2nix.tar.gz
new file mode 100644
index 0000000..d7224d9
--- /dev/null
+++ b/src/content/en/blog/2020/10/05/cargo2nix.tar.gz
Binary files differ
diff --git a/src/content/en/blog/2020/10/05/swift2nix-demo.tar.gz b/src/content/en/blog/2020/10/05/swift2nix-demo.tar.gz
new file mode 100644
index 0000000..cc8b4f1
--- /dev/null
+++ b/src/content/en/blog/2020/10/05/swift2nix-demo.tar.gz
Binary files differ
diff --git a/_articles/2020-10-05-swift2nix-run-swift-inside-nix-builds.md b/src/content/en/blog/2020/10/05/swift2nix.adoc
index c922589..9a3c6fe 100644
--- a/_articles/2020-10-05-swift2nix-run-swift-inside-nix-builds.md
+++ b/src/content/en/blog/2020/10/05/swift2nix.adoc
@@ -1,28 +1,22 @@
----
+= swift2nix: Run Swift inside Nix builds
+:sort: 0
-title: "swift2nix: Run Swift inside Nix builds"
-
-date: 2020-10-05 1
-
-layout: post
-
-lang: en
-
-ref: swift2nix-run-swift-inside-nix-builds
-
----
+:empty:
+:nix: https://nixos.org/
+:swift2nix: link:swift2nix.tar.gz
While working on a Swift project, I didn't find any tool that would allow Swift
-to run inside [Nix][nix] builds. Even thought you *can* run Swift, the real
-problem arises when using the package manager. It has many of the same problems
+to run inside {nix}[Nix] builds. Even though you _can_ run Swift, the real
+problem arises when using the package manager. It has many of the same problems
that other package managers have when trying to integrate with Nix, more on this
below.
-I wrote a simple little tool called [swift2nix] that allows you trick
-Swift's package manager into assuming everything is set up. Here's the example
+I wrote a simple little tool called {swift2nix}[swift2nix] that allows you to
+Swift's package manager into assuming everything is set up. Here's the example
from swift2nix's README file:
-```
+[source,nix]
+----
let
niv-sources = import ./nix/sources.nix;
pkgs = import niv-sources.nixpkgs { };
@@ -46,7 +40,7 @@ in pkgs.stdenv.mkDerivation {
touch $out
'';
}
-```
+----
The key parts are lines 15~17: we just fake enough files inside `.build/` that
Swift believes it has already downloaded and checked-out all dependencies, and
@@ -55,85 +49,84 @@ just moves on to building them.
I've worked on it just enough to make it usable for myself, so beware of
unimplemented cases.
-[nix]: https://nixos.org/
-[swift2nix]: https://euandre.org/static/attachments/swift2nix.tar.gz
-
-## Design
+== Design
What swift2nix does is just provide you with the bare minimum that Swift
requires, and readily get out of the way:
-1. I explicitly did not want to generated a `Package.nix` file, since
- `Package.resolved` already exists and contains the required information;
-2. I didn't want to have an "easy" interface right out of the gate, after
- fighting with "*2nix" tools that focus too much on that.
+. I explicitly did not want to generate a `Package.nix` file, since
+ `Package.resolved` already exists and contains the required information;
+. I didn't want to have an "easy" interface right out of the gate, after
+ fighting with "*2nix" tools that focus too much on that.
-The final actual code was so small (46 lines) that it made me
-think about package managers, "*2nix" tools and some problems with many of them.
+The final actual code was so small (46 lines) that it made me think about
+package managers, "*2nix" tools and some problems with many of them.
-## Problems with package managers
+== Problems with package managers
-I'm going to talk about solely language package managers. Think npm and cargo,
+I'm going to talk about solely language package managers. Think npm and cargo,
not apt-get.
Package managers want to do too much, or assume too much, or just want to take
control of the entire build of the dependencies.
This is a recurrent problem in package managers, but I don't see it as an
-intrinsic one. There's nothing about a "package manager" that prevents it from
-*declaring* what it expects to encounter and in which format. The *declaring*
+intrinsic one. There's nothing about a "package manager" that prevents it from
+_declaring_ what it expects to encounter and in which format. The _declaring_
part is important: it should be data, not code, otherwise you're back in the
-same problem, just like lockfiles are just data. Those work in any language, and
-tools can cooperate happily.
+same problem, just like lockfiles are just data. Those work in any language,
+and tools can cooperate happily.
There's no need for this declarative expectation to be standardized, or be made
-compatible across languages. That would lead to a poor format that no package
-manager really likes. Instead, If every package manager could say out loud what
+compatible across languages. That would lead to a poor format that no package
+manager really likes. Instead, if every package manager could say out loud what
it wants to see exactly, then more tools like swift2nix could exist, and they
would be more reliable.
This could even work fully offline, and be simply a mapping from the lockfile
-(the `Package.resolved` in Swift's case) to the filesystem representation. For
+(the `Package.resolved` in Swift's case) to the filesystem representation. For
Swift, the `.build/dependencies-state.json` comes very close, but it is internal
to the package manager.
Even though this pain only exists when trying to use Swift inside Nix, it sheds
-light into this common implicit coupling that package managers have. They
+light into this common implicit coupling that package managers have. They
usually have fuzzy boundaries and tight coupling between:
-1. resolving the dependency tree and using some heuristic to pick a package
- version;
-2. generating a lockfile with the exact pinned versions;
-3. downloading the dependencies present on the lockfile into some local cache;
-4. arranging the dependencies from the cache in a meaningful way for itself inside
- the project;
-5. work using the dependencies while *assuming* that step 4 was done.
+. resolving the dependency tree and using some heuristic to pick a package
+ version;
+. generating a lockfile with the exact pinned versions;
+. downloading the dependencies present on the lockfile into some local cache;
+. arranging the dependencies from the cache in a meaningful way for itself
+ inside the project;
+. work using the dependencies while _assuming_ that step 4 was done.
-When you run `npm install` in a repository with no lockfile, it does 1~4. If you
-do the same with `cargo build`, it does 1~5. That's too much: many of those
+When you run `npm install` in a repository with no lockfile, it does 1~4. If
+you do the same with `cargo build`, it does 1~5. That's too much: many of those
assumptions are implicit and internal to the package manager, and if you ever
-need to rearrange them, you're on your own. Even though you can perform some of
+need to rearrange them, you're on your own. Even though you can perform some of
those steps, you can't compose or rearrange them.
Instead a much saner approach could be:
-1. this stays the same;
-2. this also stays the same;
-3. be able to generate some JSON/TOML/edn which represents the local expected
- filesystem layout with dependencies (i.e. exposing what the package manager
- expects to find), let's call it `local-registry.json`;
-4. if a `local-registry.json` was provided, do a build using that. Otherwise
- generate its own, by downloading the dependencies, arranging them, *etc.*
+. this stays the same;
+. this also stays the same;
+. be able to generate some JSON/TOML/edn which represents the local expected
+ filesystem layout with dependencies (i.e. exposing what the package manager
+ expects to find), let's call it `local-registry.json`;
+. if a `local-registry.json` was provided, do a build using that. Otherwise
+ generate its own, by downloading the dependencies, arranging them, _etc._
The point is just making what the package manager requires visible to the
-outside world via some declarative data. If this data wasn't provided, it can
+outside world via some declarative data. If this data wasn't provided, it can
move on to doing its own automatic things.
-By making the expectation explicit and public, one can plug tools *à la carte*
+By making the expectation explicit and public, one can plug tools _à la carte_
if desired, but doesn't prevent the default code path of doing things the exact
same way they are now.
-## Problems with "*2nix" tools
+== Problems with "*2nix" tools
+
+:node2nix: https://github.com/svanderburg/node2nix
I have to admit: I'm unhappy with most of them.
@@ -141,61 +134,61 @@ They conflate "using Nix" with "replicating every command of the package manager
inside Nix".
The avoidance of an "easy" interface that I mentioned above comes from me
-fighting with some of the "\*2nix" tools much like I have to fight with package
+fighting with some of the "*2nix" tools much like I have to fight with package
managers: I don't want to offload all build responsibilities to the "*2nix"
tool, I just want to let it download some of the dependencies and get out of the
-way. I want to stick with `npm test` or `cargo build`, and Nix should only
+way. I want to stick with `npm test` or `cargo build`, and Nix should only
provide the environment.
-This is something that [node2nix] does right. It allows you to build
+This is something that {node2nix}[node2nix] does right. It allows you to build
the Node.js environment to satisfy NPM, and you can keep using NPM for
everything else:
-```shell
+[source,sh]
+----
ln -s ${node2nix-package.shell.nodeDependencies}/lib/node_modules ./node_modules
npm test
-```
+----
It's natural to want to put as many things into Nix as possible to benefit from
-Nix's advantages. Isn't that how NixOS itself was born?
+Nix's advantages. Isn't that how NixOS itself was born?
-But a "*2nix" tool should leverage Nix, not be coupled with it. The above
+But a "*2nix" tool should leverage Nix, not be coupled with it. The above
example lets you run any arbitrary NPM command while profiting from isolation
-and reproducibility that Nix provides. It is even less brittle: any changes to
+and reproducibility that Nix provides. It is even less brittle: any changes to
how NPM runs some things will be future-compatible, since node2nix isn't trying
to replicate what NPM does, or fiddling with NPM's internal.
**A "*2nix" tool should build the environment, preferably from the lockfile
-directly and offload everything else to the package manager**. The rest is just
+directly and offload everything else to the package manager**. The rest is just
nice-to-have.
swift2nix itself could provide an "easy" interface, something that allows you to
write:
-```shell
+[source,sh]
+----
nix-build -A swift2nix.release
nix-build -A swift2nix.test
-```
+----
The implementation of those would be obvious: create a new
`pkgs.stdenv.mkDerivation` and call `swift build -c release` and `swift test`
while using `swift2nix.env` under the hood.
-[node2nix]: https://github.com/svanderburg/node2nix
-
-## Conclusion
+== Conclusion
Package managers should provide exact dependencies via a data representation,
i.e. lockfiles, and expose via another data representation how they expect those
-dependencies to appear on the filesystem, i.e. `local-registry.json`. This
+dependencies to appear on the filesystem, i.e. `local-registry.json`. This
allows package managers to provide an API so that external tools can create
-mirrors, offline builds, other registries, isolated builds, *etc.*
-
-"\*2nix" tools should build simple functions that leverage that
-`local-registry.json`[^local-registry] data and offload all the rest back to the
-package manager itself. This allows the "*2nix" to not keep chasing the package
-manager evolution, always trying to duplicate its behaviour.
-
-[^local-registry]: This `local-registry.json` file doesn't have to be checked-in
- the repository at all. It could be always generated on the fly, much like
- how Swift's `dependencies-state.json` is.
+mirrors, offline builds, other registries, isolated builds, _etc._
+
+"*2nix" tools should build simple functions that leverage that
+`local-registry.json`{empty}footnote:local-registry[
+ This `local-registry.json` file doesn't have to be checked-in the repository
+ at all. It could be always generated on the fly, much like how Swift's
+ `dependencies-state.json` is.
+] data and offload all the rest back to the package manager itself. This allows
+the "*2nix" to not keep chasing the package manager evolution, always trying to
+duplicate its behaviour.
diff --git a/src/content/en/blog/2020/10/05/swift2nix.tar.gz b/src/content/en/blog/2020/10/05/swift2nix.tar.gz
new file mode 100644
index 0000000..a22aaa0
--- /dev/null
+++ b/src/content/en/blog/2020/10/05/swift2nix.tar.gz
Binary files differ
diff --git a/src/content/en/blog/2020/10/19/feature-flags.adoc b/src/content/en/blog/2020/10/19/feature-flags.adoc
new file mode 100644
index 0000000..8788407
--- /dev/null
+++ b/src/content/en/blog/2020/10/19/feature-flags.adoc
@@ -0,0 +1,306 @@
+= Feature flags: differences between backend, frontend and mobile
+:categories: presentation
+:updatedat: 2020-11-03
+
+:empty:
+:slides: link:../../../../slide/2020/10/19/feature-flags.pdf
+:fowler-article: https://martinfowler.com/articles/feature-toggles.html
+
+_This article is derived from a {slides}[presentation] on the same subject._
+
+When discussing about feature flags, I find that their costs and benefits are
+often well exposed and addressed. Online articles like
+"{fowler-article}[Feature Toggle (aka Feature Flags)]" do a great job of
+explaining them in detail, giving great general guidance of how to apply
+techniques to adopt it.
+
+However the weight of those costs and benefits apply differently on backend,
+frontend or mobile, and those differences aren't covered. In fact, many of them
+stop making sense, or the decision of adopting a feature flag or not may change
+depending on the environment.
+
+In this article I try to make the distinction between environments and how
+feature flags apply to them, with some final best practices I've acquired when
+using them in production.
+
+== Why feature flags
+
+:atlassian-cicd: https://www.atlassian.com/continuous-delivery/principles/continuous-integration-vs-delivery-vs-deployment
+
+Feature flags in general tend to be cited on the context of
+{atlassian-cicd}[continuous deployment]:
+
+____
+A: With continuous deployment, you deploy to production automatically
+
+B: But how do I handle deployment failures, partial features, _etc._?
+
+A: With techniques like canary, monitoring and alarms, feature flags, _etc._
+____
+
+Though adopting continuous deployment doesn't force you to use feature flags, it
+creates a demand for it. The inverse is also true: using feature flags on the
+code points you more obviously to continuous deployment. Take the following
+code sample for example, that we will reference later on the article:
+
+[source,javascript]
+----
+function processTransaction() {
+ validate();
+ persist();
+ // TODO: add call to notifyListeners()
+}
+----
+
+While being developed, being tested for suitability or something similar,
+`notifyListeners()` may not be included in the code at once. So instead of
+keeping it on a separate, long-lived branch, a feature flag can decide when the
+new, partially implemented function will be called:
+
+[source,javascript]
+----
+function processTransaction() {
+ validate();
+ persist();
+ if (featureIsEnabled("activate-notify-listeners")) {
+ notifyListeners();
+ }
+}
+----
+
+This allows your code to include `notifyListeners()`, and decide when to call it
+at runtime. For the price of extra things around the code, you get more
+dynamicity.
+
+So the fundamental question to ask yourself when considering adding a feature
+flag should be:
+
+____
+Am I willing to pay with code complexity to get dynamicity?
+____
+
+It is true that you can make the management of feature flags as straightforward
+as possible, but having no feature flags is simpler than having any. What you
+get in return is the ability to parameterize the behaviour of the application at
+runtime, without doing any code changes.
+
+Sometimes this added complexity may tilt the balance towards not using a feature
+flag, and sometimes the flexibility of changing behaviour at runtime is
+absolutely worth the added complexity. This can vary a lot by code base,
+feature, but fundamentally by environment: it's much cheaper to deploy a new
+version of a service than to release a new version of an app.
+
+So the question of which environment is being targeted is key when reasoning
+about costs and benefits of feature flags.
+
+== Control over the environment
+
+:fdroid: https://f-droid.org/
+:bad-apple: https://www.paulgraham.com/apple.html
+
+The key differentiator that makes the trade-offs apply differently is how much
+control you have over the environment.
+
+When running a *backend* service, you usually are paying for the servers
+themselves, and can tweak them as you wish. This means you have full control to
+do code changes as you wish. Not only that, you decide when to do it, and for
+how long the transition will last.
+
+On the *frontend* you have less control: even though you can choose to make a
+new version available any time you wish, you can't
+force{empty}footnote:force[
+ Technically you could force a reload with JavaScript using
+ `window.location.reload()`, but that not only is invasive and impolite, but
+ also gives you the illusion that you have control over the client when you
+ actually don't: clients with disabled JavaScript would be immune to such
+ tactics.
+] clients to immediately switch to the new version. That means that a) clients
+could skip upgrades at any time and b) you always have to keep backward and
+forward compatibility in mind.
+
+Even though I'm mentioning frontend directly, it applies to other environments
+with similar characteristics: desktop applications, command-line programs,
+_etc_.
+
+On *mobile* you have even less control: app stores need to allow your app to be
+updated, which could bite you when least desired. Theoretically you could make
+your APK available on third party stores like {fdroid}[F-Droid], or even make the
+APK itself available for direct download, which would give you the same
+characteristics of a frontend application, but that happens less often.
+
+On iOS you can't even do that. You have to get Apple's blessing on every single
+update. Even though we already know that is a {bad-apple}[bad idea] for over a
+decade now, there isn't a way around it. This is where you have the least
+control.
+
+In practice, the amount of control you have will change how much you value
+dynamicity: the less control you have, the more valuable it is. In other words,
+having a dynamic flag on the backend may or may not be worth it since you could
+always update the code immediately after, but on iOS it is basically always
+worth it.
+
+== Rollout
+
+:kubernetes-deployment: https://kubernetes.io/docs/concepts/workloads/controllers/deployment/#creating-a-deployment
+:play-store-rollout: https://support.google.com/googleplay/android-developer/answer/6346149?hl=en
+:app-store-rollout: https://help.apple.com/app-store-connect/#/dev3d65fcee1
+
+A rollout is used to _roll out_ a new version of software.
+
+They are usually short-lived, being relevant as long as the new code is being
+deployed. The most common rule is percentages.
+
+On the *backend*, it is common to find it on the deployment infrastructure
+itself, like canary servers, blue/green deployments, {kubernetes-deployment}[a
+kubernetes deployment rollout], _etc_. You could do those manually, by having a
+dynamic control on the code itself, but rollbacks are cheap enough that people
+usually do a normal deployment and just give some extra attention to the metrics
+dashboard.
+
+Any time you see a blue/green deployment, there is a rollout happening: most
+likely a load balancer is starting to direct traffic to the new server, until
+reaching 100% of the traffic. Effectively, that is a rollout.
+
+On the *frontend*, you can selectively pick which users will be able to
+download the new version of a page. You could use geographical region, IP,
+cookie or something similar to make this decision.
+
+CDN propagation delays and people not refreshing their web pages are also
+rollouts by themselves, since old and new versions of the software will coexist.
+
+On *mobile*, the Play Store allows you to perform fine-grained
+{play-store-rollout}[staged rollouts], and the App Store allows you to perform
+limited {app-store-rollout}[phased releases].
+
+Both for Android and iOS, the user plays the role of making the download.
+
+In summary: since you control the servers on the backend, you can do rollouts at
+will, and those are often found automated away in base infrastructure. On the
+frontend and on mobile, there are ways to make new versions available, but users
+may not download them immediately, and many different versions of the software
+end up coexisting.
+
+== Feature flag
+
+A feature flag is a _flag_ that tells the application on runtime to turn on or
+off a given _feature_. That means that the actual production code will have
+more than one possible code paths to go through, and that a new version of a
+feature coexists with the old version. The feature flag tells which part of the
+code to go through.
+
+They are usually medium-lived, being relevant as long as the new code is being
+developed. The most common rules are percentages, allow/deny lists, A/B groups
+and client version.
+
+On the *backend*, those are useful for things that have a long development
+cycle, or that needs to be done in steps. Consider loading the feature flag rules
+in memory when the application starts, so that you avoid querying a database or
+an external service for applying a feature flag rule and avoid flakiness on the
+result due to intermittent network failures.
+
+Since on the *frontend* you don't control when to update the client software,
+you're left with applying the feature flag rule on the server, and exposing the
+value through an API for maximum dynamicity. This could be in the frontend code
+itself, and fallback to a "just refresh the page"/"just update to the latest
+version" strategy for less dynamic scenarios.
+
+On *mobile* you can't even rely on a "just update to the latest version"
+strategy, since the code for the app could be updated to a new feature and be
+blocked on the store. Those cases aren't recurrent, but you should always
+assume the store will deny updates on critical moments so you don't find
+yourself with no cards to play. That means the only control you actually have
+is via the backend, by parameterizing the runtime of the application using the
+API. In practice, you should always have a feature flag to control any relevant
+piece of code. There is no such thing as "too small code change for a feature
+flag". What you should ask yourself is:
+
+____
+If the code I'm writing breaks and stays broken for around a month, do I care?
+____
+
+If you're doing an experimental screen, or something that will have a very small
+impact you might answer "no" to the above question. For everything else, the
+answer will be "yes": bug fixes, layout changes, refactoring, new screen,
+filesystem/database changes, _etc_.
+
+== Experiment
+
+An experiment is a feature flag where you care about analytical value of the
+flag, and how it might impact user's behaviour. A feature flag with analytics.
+
+They are also usually medium-lived, being relevant as long as the new code is
+being developed. The most common rule is A/B test.
+
+On the *backend*, an experiment relies on an analytical environment that will pick
+the A/B test groups and distributions, which means those can't be held in memory
+easily. That also means that you'll need a fallback value in case fetching the
+group for a given customer fails.
+
+On the *frontend* and on *mobile* they are no different from feature flags.
+
+== Operational toggle
+
+An operational toggle is like a system-level manual circuit breaker, where you
+turn on/off a feature, fail over the load to a different server, _etc_. They
+are useful switches to have during an incident.
+
+They are usually long-lived, being relevant as long as the code is in
+production. The most common rule is percentages.
+
+They can be feature flags that are promoted to operational toggles on the
+*backend*, or may be purposefully put in place preventively or after a
+postmortem analysis.
+
+On the *frontend* and on *mobile* they are similar to feature flags, where the
+"feature" is being turned on and off, and the client interprets this value to
+show if the "feature" is available or unavailable.
+
+== Best practices
+
+=== Prefer dynamic content
+
+Even though feature flags give you more dynamicity, they're still somewhat
+manual: you have to create one for a specific feature and change it by hand.
+
+If you find yourself manually updating a feature flag every other day, or
+tweaking the percentages frequently, consider making it fully dynamic. Try
+using a dataset that is generated automatically, or computing the content on the
+fly.
+
+Say you have a configuration screen with a list of options and sub-options, and
+you're trying to find how to better structure this list. Instead of using a
+feature flag for switching between 3 and 5 options, make it fully dynamic. This
+way you'll be able to perform other tests that you didn't plan, and get more
+flexibility out of it.
+
+=== Use the client version to negotiate feature flags
+
+After effectively finishing a feature, the old code that coexisted with the new
+one will be deleted, and all traces of the transition will vanish from the code
+base. However if you just remove the feature flags from the API, all of the old
+versions of clients that relied on that value to show the new feature will
+downgrade to the old feature.
+
+This means that you should avoid deleting client-facing feature flags, and
+retire them instead: use the client version to decide when the feature is
+stable, and return `true` for every client with a version greater or equal to
+that. This way you can stop thinking about the feature flag, and you don't
+break or downgrade clients that didn't upgrade past the transition.
+
+=== Beware of many nested feature flags
+
+Nested flags combine exponentially.
+
+Pick strategic entry points or transitions eligible for feature flags, and
+beware of their nesting.
+
+=== Include feature flags in the development workflow
+
+Add feature flags to the list of things to think about during whiteboarding, and
+deleting/retiring a feature flag at the end of the development.
+
+=== Always rely on a feature flag on the app
+
+Again, there is no such thing as "too small for a feature flag". Too many feature
+flags is a good problem to have, not the opposite. Automate the process of
+creating a feature flag to lower its cost.
diff --git a/src/content/en/blog/2020/10/20/wrong-interviewing.adoc b/src/content/en/blog/2020/10/20/wrong-interviewing.adoc
new file mode 100644
index 0000000..4b8d855
--- /dev/null
+++ b/src/content/en/blog/2020/10/20/wrong-interviewing.adoc
@@ -0,0 +1,340 @@
+= How not to interview engineers
+:updatedat: 2020-10-24
+
+:bad-article: https://defmacro.substack.com/p/how-to-interview-engineers
+:satire-comment: https://defmacro.substack.com/p/how-to-interview-engineers/comments#comment-599996
+:double-down: https://twitter.com/spakhm/status/1315754730740617216
+:poes-law: https://en.wikipedia.org/wiki/Poe%27s_law
+:hn-comment-1: https://news.ycombinator.com/item?id=24757511
+
+This is a response to Slava's "{bad-article}[How to interview engineers]"
+article. I initially thought it was a satire, {satire-comment}[as have others],
+but he has {double-down}[doubled down on it]:
+
+____
+(...) Some parts are slightly exaggerated for sure, but the essay isn't meant as
+a joke.
+____
+
+That being true, he completely misses the point on how to improve hiring, and
+proposes a worse alternative on many aspects. It doesn't qualify as
+provocative, it is just wrong.
+
+I was comfortable taking it as a satire, and I would just ignore the whole thing
+if it wasn't (except for the technical memo part), but friends of mine
+considered it to be somewhat reasonable. This is an adapted version of parts of
+the discussions we had, risking becoming a gigantic showcase of {poes-law}[Poe's
+law].
+
+In this piece, I will argue against his view, and propose an alternative
+approach to improve hiring.
+
+It is common to find people saying how broken technical hiring is, as well put
+in words by a phrase on {hn-comment-1}[this comment]:
+
+____
+Everyone loves to read and write about how developer interviewing is flawed, but
+no one wants to go out on a limb and make suggestions about how to improve it.
+____
+
+I guess Slava was trying to not fall on this trap, and make a suggestion on how
+to improve instead, which all went terribly wrong.
+
+== What not to do
+
+=== Time candidates
+
+:hammock-driven-talk: https://www.youtube.com/watch?v=f84n5oFoZBc
+
+Timing the candidate shows up on the "talent" and "judgment" sections, and they
+are both bad ideas for the same reason: programming is not a performance.
+
+What do e-sports, musicians, actors and athletes have in common: performance
+psychologists.
+
+For a pianist, their state of mind during concerts is crucial: they not only
+must be able to deal with stage anxiety, but to become really successful they
+will have to learn how to exploit it. The time window of the concert is what
+people practice thousands of hours for, and it is what defines one's career,
+since how well all the practice went is irrelevant to the nature of the
+profession. Being able to leverage stage anxiety is an actual goal of them.
+
+That is also applicable to athletes, where the execution during a competition
+makes them sink or swim, regardless of how all the training was.
+
+The same cannot be said about composers, though. They are more like book
+writers, where the value is not on very few moments with high adrenaline, but on
+the aggregate over hours, days, weeks, months and years. A composer may have a
+deadline to finish a song in five weeks, but it doesn't really matter if it is
+done on a single night, every morning between 6 and 9, at the very last week, or
+any other way. No rigid time structure applies, only whatever fits best to the
+composer.
+
+Programming is more like composing than doing a concert, which is another way of
+saying that programming is not a performance. People don't practice algorithms
+for months to keep them at their fingertips, so that finally in a single
+afternoon they can sit down and write everything at once in a rigid 4 hours
+window, and launch it immediately after.
+
+Instead software is built iteratively, by making small additions, then
+refactoring the implementation, fixing bugs, writing a lot at once, _etc_. all
+while they get a firmer grasp of the problem, stop to think about it, come up
+with new ideas, _etc_.
+
+Some specifically plan for including spaced pauses, and call it
+"{hammock-driven-talk}[Hammock Driven Development]", which is just artist's
+"creative idleness" for hackers.
+
+Unless you're hiring for a live coding group, a competitive programming team, or
+a professional live demoer, timing the candidate that way is more harmful than
+useful. This type of timing doesn't find good programmers, it finds performant
+programmers, which isn't the same thing, and you'll end up with people who can
+do great work on small problems but who might be unable to deal with big
+problems, and loose those who can very well handle huge problems, slowly. If
+you are lucky you'll get performant people who can also handle big problems on
+the long term, but maybe not.
+
+An incident is the closest to a "performance" that it gets, and yet it is still
+dramatically different. Surely it is a high stress scenario, but while people
+are trying to find a root cause and solve the problem, only the downtime itself
+is visible to the exterior. It is like being part of the support staff
+backstage during a play: even though execution matters, you're still not on the
+spot. During an incident you're doing debugging in anger rather than live
+coding.
+
+Although giving a candidate the task to write a "technical memo" has potential
+to get a measure of the written communication skills of someone, doing so in a
+hard time window also misses the point for the same reasons.
+
+=== Pay attention to typing speed
+
+:dijkstra-typing: https://www.cs.utexas.edu/users/EWD/transcriptions/EWD05xx/EWD512.html
+:speech-to-text: https://www.youtube.com/watch?v=Mz3JeYfBTcY
+:j-lang: https://www.jsoftware.com/#/
+
+Typing speed is never the bottleneck of a programmer, no matter how great
+they are.
+
+As {dijkstra-typing}[Dijkstra said]:
+
+____
+But programming, when stripped of all its circumstantial irrelevancies, boils
+down to no more and no less than very effective thinking so as to avoid
+unmastered complexity, to very vigorous separation of your many different
+concerns.
+____
+
+In other words, programming is not about typing, it is about thinking.
+
+Otherwise, the way to get those star programmers that can't type fast enough a
+huge productivity boost is to give them a touch typing course. If they are so
+productive with typing speed being a limitation, imagine what they could
+accomplish if they had razor sharp touch typing skills?
+
+Also, why stop there? A good touch typist can do 90 WPM (words per minute), and
+a great one can do 120 WPM, but with a stenography keyboard they get to 200
+WPM+. That is double the productivity! Why not try
+{speech-to-text}[speech-to-text]? Make them all use {j-lang}[J] so they all
+need to type less! How come nobody thought of that?
+
+And if someone couldn't solve the programming puzzle in the given time window,
+but could come back in the following day with an implementation that is not only
+faster, but uses less memory, was simpler to understand and easier to read than
+anybody else? You'd be losing that person too.
+
+=== IQ
+
+:determination-article: https://www.paulgraham.com/determination.html
+:scihub-article: https://sci-hub.do/https://psycnet.apa.org/doiLanding?doi=10.1037%2F1076-8971.6.1.33
+
+For "building an extraordinary team at a hard technology startup",
+intelligence is not the most important,
+{determination-article}[determination is].
+
+And talent isn't "IQ specialized for engineers". IQ itself isn't a measure of
+how intelligent someone is. Ever since Alfred Binet with Théodore Simon started
+to formalize what would become IQ tests years later, they already acknowledged
+limitations of the technique for measuring intelligence, which is
+{scihub-article}[still true today].
+
+So having a high IQ tells only how smart people are for a particular aspect of
+intelligence, which is not representative of programming. There are numerous
+aspects of programming that are not covered by IQ measurement: how to name variables
+and functions, how to create models which are compatible with schema evolution,
+how to make the system dynamic for runtime parameterization without making it
+fragile, how to measure and observe performance and availability, how to pick
+between acquiring and paying technical debt, _etc_.
+
+Not to say about everything else that a programmer does that is not purely
+programming. Saying high IQ correlates with great programming is a stretch, at
+best.
+
+=== Ditch HR
+
+Slava tangentially picks on HR, and I will digress on that a bit:
+
+____
+A good rule of thumb is that if a question could be asked by an intern in HR,
+it's a non-differential signaling question.
+____
+
+Stretching it, this is a rather snobbish view of HR. Why is it that an intern
+in HR can't make signaling questions? Could the same be said of an intern in
+engineering?
+
+In other words: is the question not signaling because the one asking is from HR,
+or because the one asking is an intern?  If the latter, then he's just arguing
+that interns have no place in interviewing, but if the former then he was
+picking on HR.
+
+Extrapolating that, it is common to find people who don't value HR's work, and
+only see them as inferiors doing unpleasant work, and who aren't capable enough
+(or _smart_ enough) to learn programming.
+
+This is equivalent to people who work primarily on backend, and see others
+working on frontend struggling and say: "isn't it just building views and
+showing them on the browser? How could it possibly be that hard? I bet I could
+do it better, with 20% of code". As you already know, the answer to it is
+"well, why don't you go do it, then?".
+
+This sense of superiority ignores the fact that HR have actual professionals
+doing actual hard work, not unlike programmers. If HR is inferior and so easy,
+why not automate everything away and get rid of a whole department?
+
+I don't attribute this world view to Slava, this is only an extrapolation of a
+snippet of the article.
+
+=== Draconian mistreating of candidates
+
+:bad-apple: https://www.paulgraham.com/apple.html
+:be-good: https://www.paulgraham.com/good.html
+
+If I found out that people employed theatrics in my interview so that I could
+feel I've "earned the privilege to work at your company", I would quit.
+
+If your moral compass is so broken that you are comfortable mistreating me while
+I'm a candidate, I immediately assume you will also mistreat me as an employee,
+and that the company is not a good place to work, as {bad-apple}[evil begets
+stupidity]:
+
+____
+But the other reason programmers are fussy, I think, is that evil begets
+stupidity. An organization that wins by exercising power starts to lose the
+ability to win by doing better work. And it's not fun for a smart person to
+work in a place where the best ideas aren't the ones that win. I think the
+reason Google embraced "Don't be evil" so eagerly was not so much to impress the
+outside world as to inoculate themselves against arrogance.
+____
+
+Paul Graham goes beyond "don't be evil" with a better motto:
+"{be-good}[be good]".
+
+Abusing the asymmetric nature of an interview to increase the chance that the
+candidate will accept the offer is, well, abusive. I doubt a solid team can
+actually be built on such poor foundations, surrounded by such evil measures.
+
+And if you really want to give engineers "the measure of whoever they're going
+to be working with", there are plenty of reasonable ways of doing it that don't
+include performing fake interviews.
+
+=== Personality tests
+
+Personality tests around the world need to be a) translated, b) adapted and c)
+validated. Even though a given test may be applicable and useful in a country,
+this doesn't imply it will work for other countries.
+
+Not only tests usually come with translation guidelines, but also its
+applicability needs to be validated again after the translation and adaptation
+is done to see if the test still measures what it is supposed to.
+
+That is also true within the same language. If a test is shown to work in
+England, it may not work in New Zealand, in spite of both speaking English. The
+cultural context difference is influential to the point of invalidating a test,
+making it no longer valid.
+
+Regardless of the validity of the proposed "big five" personality test, saying
+"just use attributes x, y and z of this test and you'll be fine" is a rough
+simplification, much like saying "just use Raft for distributed systems, after
+all it has been proven to work" shows he throws all of that background away.
+
+So much as applying personality tests themselves is not a trivial task, and
+psychologists do need special training to become able to effectively apply one.
+
+=== More cargo culting
+
+:cult: https://calteches.library.caltech.edu/51/2/CargoCult.htm
+:cult-archived: https://web.archive.org/web/20201003090303/https://calteches.library.caltech.edu/51/2/CargoCult.htm
+
+He calls the ill-defined "industry standard" to be cargo-culting, but his
+proposal isn't sound enough to not become one.
+
+Even if the ideas were good, they aren't solid enough, or based on solid enough
+things to make them stand out by themselves. Why is it that talent, judgment
+and personality are required to determine the fitness of a good candidate? Why
+not 2, 5, or 20 things? Why those specific 3? Why is talent defined like that?
+Is it just because he found talent to be like that?
+
+Isn't that definitionally also
+{cult}[cargo-culting]footnote:cargo-cult[
+ {cult-archived}[Archived version].
+]? Isn't he just repeating whatever he found to work for him, without
+understanding why?
+
+What Feynman proposes is actually the opposite:
+
+____
+In summary, the idea is to try to give *all* of the information to help others
+to judge the value of your contribution; not just the information that leads to
+judgment in one particular direction or another.
+____
+
+What Slava did was just another form of cargo culting, but this was one that he
+believed to work.
+
+== What to do
+
+I will not give you a list of things that "worked for me, thus they are
+correct". I won't either critique the current "industry standard", nor what
+I've learned from interviewing engineers.
+
+Instead, I'd like to invite you to learn from history, and from what other
+professionals have to teach us.
+
+Programming isn't an odd profession, where everything about it is different from
+anything else. It is just another episode in the "technology" series, which has
+seasons since before recorded history. It may be an episode where things move a
+bit faster, but it is fundamentally the same.
+
+So here is the key idea: what people did _before_ software engineering?
+
+What hiring is like for engineers in other areas? Don't civil, electrical and
+other types of engineering exist for much, much longer than software engineering
+does? What have those centuries of accumulated experience taught the world
+about technical hiring?
+
+What studies were performed on the different success rate of interviewing
+strategies? What have they done right and what have they done wrong?
+
+What is the purpose of HR? Why do they even exist? Do we need them, and if so,
+what for? What is the value they bring, since everybody insist on building an
+HR department in their companies? Is the existence of HR another form of cargo
+culting?
+
+What is industrial and organizational psychology? What is that field of study?
+What do they specialize in? What have they learned since the discipline
+appeared? What have they done right and wrong over history? What is the current
+academic consensus on that area? What is a hot debate topic in academia on that
+area? What is the current bleeding edge of research? What can they teach us
+about hiring? What can they teach us about technical hiring?
+
+== Conclusion
+
+If all I've said makes me a "no hire" in the proposed framework, I'm really
+glad.
+
+This says less about my programming skills, and more about the employer's world
+view, and I hope not to be fooled into applying for a company that adopts this
+one.
+
+Claiming to be selecting "extraordinary engineers" isn't an excuse to reinvent
+the wheel, poorly.
diff --git a/src/content/en/blog/2020/11/07/diy-bugs.adoc b/src/content/en/blog/2020/11/07/diy-bugs.adoc
new file mode 100644
index 0000000..5fbc920
--- /dev/null
+++ b/src/content/en/blog/2020/11/07/diy-bugs.adoc
@@ -0,0 +1,93 @@
+= DIY an offline bug tracker with text files, Git and email
+:updatedat: 2021-08-14
+
+:attack-on-ytdl: https://github.com/github/dmca/blob/master/2020/10/2020-10-23-RIAA.md
+:list-discussions: https://sourcehut.org/blog/2020-10-29-how-mailing-lists-prevent-censorship/
+:docs-in-repo: https://podcast.writethedocs.org/2017/01/25/episode-3-trends/
+:ci-in-notes: link:../../../../til/2020/11/30/git-notes-ci.html
+:todos-mui: https://man.sr.ht/todo.sr.ht/#email-access
+:git-bug-bridges: https://github.com/MichaelMure/git-bug#bridges
+
+When {attack-on-ytdl}[push comes to shove], the operational aspects of
+governance of a software project matter a lot. And everybody likes to chime in
+with their alternative of how to avoid single points of failure in project
+governance, just like I'm doing right now.
+
+The most valuable assets of a project are:
+
+. source code
+. discussions
+. documentation
+. builds
+. tasks and bugs
+
+For *source code*, Git and other DVCS solve that already: everybody gets a full
+copy of the entire source code.
+
+If your code forge is compromised, moving it to a new one takes a couple of
+minutes, if there isn't a secondary remote serving as mirror already. In this
+case, no action is required.
+
+If you're having your *discussions* by email, "{list-discussions}[taking this
+archive somewhere else and carrying on is effortless]".
+
+Besides, make sure to backup archives of past discussions so that the history is
+also preserved when this migration happens.
+
+The *documentation* should {docs-in-repo}[live inside the repository
+itself]footnote:writethedocs-in-repo[
+ Described as "the ultimate marriage of the two". Starts at time 31:50.
+], so that not only it gets first class treatment, but also gets distributed to
+everybody too. Migrating the code to a new forge already migrates the
+documentation with it.
+
+As long as you keep the *builds* vendor neutral, the migration should only
+involve adapting how you call your `tests.sh` from the format of
+`provider-1.yml` uses to the format that `provider-2.yml` accepts. It isn't
+valuable to carry the build history with the project, as this data quickly
+decays in value as weeks and months go by, but for simple text logs
+{ci-in-notes}[using Git notes] may be just enough, and they would be replicated
+with the rest of the repository.
+
+But for *tasks and bugs* many rely on a vendor-specific service, where
+you register and manage those issues via a web browser. Some provide an
+{todos-mui}[interface for interacting via email] or an API for
+{git-bug-bridges}[bridging local bugs with vendor-specific services]. But
+they're all layers around the service, that disguises it as being a central
+point of failure, which when compromised would lead to data loss. When push
+comes to shove, you'd lose data.
+
+== Alternative: text files, Git and email
+
+:todos-example: https://euandre.org/git/remembering/tree/TODOs.md?id=3f727802cb73ab7aa139ca52e729fd106ea916d0
+:todos-script: https://euandre.org/git/remembering/tree/aux/workflow/TODOs.sh?id=3f727802cb73ab7aa139ca52e729fd106ea916d0
+:todos-html: https://euandreh.xyz/remembering/TODOs.html
+:fossil-tickets: https://fossil-scm.org/home/doc/trunk/www/bugtheory.wiki
+
+Why not do the same as documentation, and move tasks and bugs into the
+repository itself?
+
+It requires no extra tool to be installed, and fits right in the already
+existing workflow for source code and documentation.
+
+I like to keep a {todos-example}[`TODOs.md`] file at the repository top-level,
+with two relevant sections: "tasks" and "bugs". Then when building the
+documentation I'll just {todos-script}[generate an HTML file from it], and
+{todos-html}[publish] it alongside the static website. All that is done on the
+main branch.
+
+Any issues discussions are done in the mailing list, and a reference to a
+discussion could be added to the ticket itself later on. External contributors
+can file tickets by sending a patch.
+
+The good thing about this solution is that it works for 99% of projects out
+there.
+
+For the other 1%, having Fossil's "{fossil-tickets}[tickets]" could be an
+alternative, but you may not want to migrate your project to Fossil to get those
+niceties.
+
+Even though I keep a `TODOs.md` file on the main branch, you can have a `tasks`
+branch with a `task-n.md` file for each task, or any other way you like.
+
+These tools are familiar enough that you can adjust it to fit your workflow.
diff --git a/src/content/en/blog/2020/11/08/paradigm-shift-review.adoc b/src/content/en/blog/2020/11/08/paradigm-shift-review.adoc
new file mode 100644
index 0000000..1110085
--- /dev/null
+++ b/src/content/en/blog/2020/11/08/paradigm-shift-review.adoc
@@ -0,0 +1,154 @@
+= The Next Paradigm Shift in Programming - video review
+:categories: video-review
+
+:reviewed-video: https://www.youtube.com/watch?v=6YbK8o9rZfI
+
+This is a review with comments of "{reviewed-video}[The Next Paradigm Shift in
+Programming]", by Richard Feldman.
+
+This video was _strongly_ suggested to me by a colleague. I wanted to discuss
+it with her, and when drafting my response I figured I could publish it publicly
+instead.
+
+Before anything else, let me just be clear: I really like the talk, and I think
+Richard is a great public speaker. I've watched several of his talks over the
+years, and I feel I've followed his career at a distance, with much respect.
+This isn't a piece criticizing him personally, and I agree with almost
+everything he said. These are just some comments but also nitpicks on a few
+topics I think he missed, or that I view differently.
+
+== Structured programming
+
+:forgotten-art-video: https://www.youtube.com/watch?v=SFv8Wm2HdNM
+
+The historical overview at the beginning is very good. In fact, the very video
+I watched previously was about structured programming!
+
+Kevlin Henney on "{forgotten-art-video}[The Forgotten Art of Structured
+Programming]" does a deep-dive on the topic of structured programming, and how
+on his view it is still hidden in our code, when we do a `continue` or a `break`
+in some ways. Even though it is less common to see an explicit `goto` in code
+these days, many of the original arguments of Dijkstra against explicit `goto`s
+is applicable to other constructs, too.
+
+This is a very mature view, and I like how he goes beyond the "don't use
+`goto`s" heuristic and proposes a much more nuanced understanding of what
+"structured programming" means.
+
+In a few minutes, Richard is able to condense most of the significant bits of
+Kevlin's talk in a didactical way. Good job.
+
+== OOP like a distributed system
+
+:joe-oop: https://www.infoq.com/interviews/johnson-armstrong-oop/
+:rich-hickey-oop: https://www.youtube.com/watch?v=ROor6_NGIWU
+
+Richard extrapolates Alan Kay's original vision of OOP, and he concludes that it
+is more like a distributed system than how people think about OOP these days.
+But he then states that this is a rather bad idea, and we shouldn't pursue it,
+given that distributed systems are known to be hard.
+
+However, his extrapolation isn't really impossible, bad or an absurd. In fact,
+it has been followed through by Erlang. Joe Armstrong used to say that
+"{joe-oop}[Erlang might be the only OOP language]", since it actually adopted
+this
+paradigm.
+
+But Erlang is a functional language. So this "OOP as a distributed system" view
+is more about designing systems in the large than programs in the small.
+
+There is a switch of levels in this comparison I'm making, as can be done with
+any language or paradigm: you can have a functional-like system that is built
+with an OOP language (like a compiler, that given the same input will produce
+the same output), or an OOP-like system that is built with a functional
+language (Rich Hickey calls it "{rich-hickey-oop}[OOP in the
+large]"footnote:langsys[
+ From 24:05 to 27:45.
+]).
+
+So this jump from in-process paradigm to distributed paradigm is rather a big
+one, and I don't think he can argue that OOP has anything to say about
+software distribution across nodes. You can still have Erlang actors that run
+independently and send messages to each other without a network between them.
+Any OTP application deployed on a single node effectively works like that.
+
+I think he went a bit too far with this extrapolation. Even though I agree it
+is a logical and fair one, it isn't as evidently bad as he painted. I would be
+fine
+working with a single-node OTP application and seeing someone call it "a _real_
+OOP program".
+
+== First class immutability
+
+:immer: https://sinusoid.es/immer/
+:immutable-js: https://immutable-js.github.io/immutable-js/
+
+I agree with his view of languages moving towards the functional paradigm. But
+I think you can narrow down the "first-class immutability" feature he points out
+as present on modern functional programming languages to "first-class immutable
+data structures".
+
+I wouldn't categorize a language as "supporting functional programming style"
+without a library for functional data structures in it. By discipline you can
+avoid side-effects, write pure functions as much as possible, and pass functions
+as arguments around in almost every language these days, but if changing an
+element of a vector mutates things in-place, that is still not functional
+programming.
+
+To avoid that, you end-up needing to make clones of objects to pass to a
+function, using freezes or other workarounds. All those cases are when the
+underlying mix of OOP and functional programming fail.
+
+There are some languages with third-party libraries that provide functional data
+structures, like {immer}[immer] for C++, or {immutable-js}[ImmutableJS] for
+JavaScript.
+
+But functional programming is more easily achievable in languages that have them
+built-in, like Erlang, Elm and Clojure.
+
+== Managed side-effects
+
+:redux: https://redux.js.org/
+:re-frame: https://github.com/Day8/re-frame
+
+His proposal of adopting managed side-effects as a first-class language concept
+is really intriguing.
+
+This is something you can achieve with a library, like {redux}[Redux] for
+JavaScript or {re-frame}[re-frame] for Clojure.
+
+I haven't worked with a language with managed side-effects at scale, and I don't
+feel this is a problem with Clojure or Erlang. But is this me finding a flaw in
+his argument or not acknowledging a benefit unknown to me? This is a
+provocative question I ask myself.
+
+Also all FP languages with managed side-effects I know are statically-typed, and
+all dynamically-typed FP languages I know don't have managed side-effects baked
+in.
+
+== What about declarative programming?
+
+:tarpit-article: https://curtclifton.net/papers/MoseleyMarks06a.pdf
+
+In "{tarpit-article}[Out of the Tar Pit]", B. Moseley and P. Marks go beyond his
+view of functional programming as the basis, and name a possible "functional
+relational programming" as an even better solution. They explicitly call out
+some flaws in most of the modern functional programming languages, and instead
+pick declarative programming as an even better starting paradigm.
+
+If the next paradigm shift is towards functional programming, will the following
+shift be towards declarative programming?
+
+== Conclusion
+
+:simple-made-easy: https://www.infoq.com/presentations/Simple-Made-Easy/
+
+Beyond all Richard said, I also often hear people bring up functional
+programming when
+talking about utilizing all cores of a computer, and how FP can help with that.
+
+Rich Hickey makes a great case for single-process FP on his famous talk
+"{simple-made-easy}[Simple Made Easy]".
+
+////
+I find this conclusion too short, and it doesn't revisit the main points
+presented on the body of the article. I won't rewrite it now, but it would be
+an improvement to extend it to do so.
+////
diff --git a/src/content/en/blog/2020/11/12/database-parsers-trees.adoc b/src/content/en/blog/2020/11/12/database-parsers-trees.adoc
new file mode 100644
index 0000000..47595e8
--- /dev/null
+++ b/src/content/en/blog/2020/11/12/database-parsers-trees.adoc
@@ -0,0 +1,226 @@
+= Durable persistent trees and parser combinators - building a database
+:categories: mediator
+:updatedat: 2021-02-09
+
+:empty:
+:db-article: link:../../08/31/database-i-wish-i-had.html
+
+I've received with certain frequency messages from people wanting to know if
+I've made any progress on the database project {db-article}[I've written about].
+
+There are a few areas where I've made progress, and here's a public post on it.
+
+== Proof-of-concept: DAG log
+
+:mediator-permalink: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n1
+
+The main thing I wanted to validate with a concrete implementation was the
+concept of modeling a DAG on a sequence of datoms.
+
+The notion of a _datom_ is a rip-off from Datomic, which models data with time
+aware _facts_, which come from RDF. RDF's fact is a triple of
+subject-predicate-object, and Datomic's datoms add a time component to it:
+subject-predicate-object-time, A.K.A. entity-attribute-value-transaction:
+
+[source,clojure]
+----
+[[person :likes "pizza" 0 true]
+ [person :likes "bread" 1 true]
+ [person :likes "pizza" 1 false]]
+----
+
+The above datoms say: - at time 0, `person` likes pizza; - at time 1, `person`
+stopped liking pizza, and started to like bread.
+
+Datomic ensures total consistency of this ever growing log by having a single
+writer, the transactor, that will enforce it when writing.
+
+In order to support disconnected clients, I needed a way to allow multiple
+writers, and I chose to do it by making the log not a list, but a directed
+acyclic graph (DAG):
+
+[source,clojure]
+----
+[[person :likes "pizza" 0 true]
+ [0 :parent :db/root 0 true]
+ [person :likes "bread" 1 true]
+ [person :likes "pizza" 1 false]
+ [1 :parent 0 1 true]]
+----
+
+The extra datoms above add more information to build the directionality to the
+log, and instead of a single consistent log, the DAG could have multiple leaves
+that coexist, much like how different Git branches can have different "latest"
+commits.
+
+In order to validate this idea, I started with a Clojure implementation. The
+goal was not to write the actual final code, but to make a proof-of-concept that
+would allow me to test and stretch the idea itself.
+
+This code {mediator-permalink}[already exists], but is yet fairly incomplete:
+
+:commented-code: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n295
+:more: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n130
+:than: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n146
+:one: https://euandre.org/git/mediator/tree/src/core/clojure/src/mediator.clj?id=db4a727bc24b54b50158827b34502de21dbf8948#n253
+
+* the building of the index isn't done yet (with some {commented-code}[commented
+ code] on the next step to be implemented)
+* the indexing is extremely inefficient, with {more}[more] {than}[than]
+ {one}[one] occurrence of `O²` functions;
+* no query support yet.
+
+== Top-down _and_ bottom-up
+
+However, as time passed and I started looking at what the final implementation
+would look like, I started to consider keeping the PoC around.
+
+The top-down approach (Clojure PoC) was in fact helping guide me with the
+bottom-up, and I now have "promoted" the Clojure PoC into a "reference
+implementation". It should now be a finished implementation that says what the
+expected behaviour is, and the actual code should match the behaviour.
+
+The good thing about a reference implementation is that it has no performance or
+resources boundary, so if it ends up being 1000× slower and using 500× more
+memory, it should be fine. The code can be also 10× or 100× simpler, too.
+
+== Top-down: durable persistent trees
+
+:pavlo-videos: https://www.youtube.com/playlist?list=PLSE8ODhjZXjbohkNBWQs_otTrBTrjyohi
+:db-book: https://www.databass.dev/
+
+In promoting the PoC into a reference implementation, this top-down approach now
+needs to go beyond doing everything in memory, and the index data structure now
+needs to be disk-based.
+
+Roughly speaking, most storage engines out there are based either on B-Trees or
+LSM Trees, or some variations of those.
+
+But when building an immutable database, update-in-place B-Trees aren't an
+option, as it doesn't accommodate keeping historical views of the tree. LSM
+Trees may seem a better alternative, but duplication on the files with
+compaction are also ways to delete old data which is indeed useful for a
+historical view.
+
+I think the thing I'm after is a mix of a Copy-on-Write B-Tree, which would keep
+historical versions with the write IO cost amortization of memtables of LSM
+Trees. I don't know of any B-Tree variant out there that resembles this, so
+I'll call it "Flushing Copy-on-Write B-Tree".
+
+I haven't written any code for this yet, so all I have is a high-level view of
+what it will look like:
+
+. like Copy-on-Write B-Trees, changing a leaf involves creating a new leaf and
+ building a new path from root to the leaf. The upside is that writes are lock
+ free, and no coordination is needed between readers and writers, ever;
+. the downside is that a single leaf update means at least `H` new nodes that
+ will have to be flushed to disk, where `H` is the height of the tree. To
+ avoid that, the writer creates these nodes exclusively on the in-memory
+ memtable, to avoid flushing to disk on every leaf update;
+. a background job will consolidate the memtable data every time it hits X MB,
+ and persist it to disk, amortizing the cost of the Copy-on-Write B-Tree;
+. readers then will have the extra job of getting the latest relevant
+ disk-resident value and merge it with the memtable data.
+
+The key difference to existing Copy-on-Write B-Trees is that the new trees are
+only periodically written to disk, and the intermediate values are kept in
+memory. Since no node is ever updated, the page utilization is maximum as it
+doesn't need to keep space for future inserts and updates.
+
+And the key difference to existing LSM Trees is that no compaction is run:
+intermediate values are still relevant as the database grows. So this leaves
+out tombstones and value duplication done for write performance.
+
+One can delete intermediate index values to reclaim space, but no data is lost
+on the process, only old B-Tree values. And if the database ever comes back to
+that point (like when doing a historical query), the B-Tree will have to be
+rebuilt from a previous value. After all, the database _is_ a set of datoms,
+and everything else is just derived data.
+
+Right now I'm still reading about other data structures that storage engines
+use, and I'll start implementing the "Flushing Copy-on-Write B-Tree" as I learn
+more{empty}footnote:learn-more-db[
+ If you are interested in learning more about this too, the very best two
+ resources on this subject are Andy Pavlo's "{pavlo-videos}[Intro to Database
+ Systems]" course and Alex Petrov's "{db-book}[Database Internals]" book.
+] and mature it more.
+
+== Bottom-up: parser combinators and FFI
+
+:cbindgen: https://github.com/eqrion/cbindgen
+:cbindgen-next: https://blog.eqrion.net/future-directions-for-cbindgen/
+:syn-crate: https://github.com/dtolnay/syn
+:libedn: https://euandre.org/git/libedn/
+
+I chose Rust as it has the best WebAssembly tooling support.
+
+My goal is not to build a Rust database, but a database that happens to be in
+Rust. In order to reach client platforms, the primary API is the FFI one.
+
+I'm not very happy with current tools for exposing Rust code via FFI to the
+external world: they either mix C with C++, which I don't want to do, or
+provide no access to the intermediate representation of the FFI, which would be
+useful for generating binding for any language that speaks FFI.
+
+I like better the path that the author of {cbindgen}[cbindgen] crate
+{cbindgen-next}[proposes]: emitting a data representation of the Rust C API
+(the author calls it a `ffi.json` file), and then building transformers from the
+data representation to the target language. This way you could generate a C API
+_and_ the node-ffi bindings for JavaScript automatically from the Rust code.
+
+So the first thing to be done before moving on is an FFI exporter that doesn't
+mix C and C++, and generates said `ffi.json`, and then build a few transformers
+that take this `ffi.json` and generate the language bindings, be it C, C++,
+JavaScript, TypeScript, Kotlin, Swift, Dart,
+_etc_footnote:ffi-langs[
+ Those are, specifically, the languages I'm more interested on. My goal is
+ supporting client applications, and those languages are the most relevant for
+ doing so: C for GTK, C++ for Qt, JavaScript and TypeScript for Node.js and
+ browser, Kotlin for Android and Swing, Swift for iOS, and Dart for Flutter.
+].
+
+I think the best way to get there is by taking the existing code for cbindgen,
+which uses the {syn-crate}[syn] crate to parse the Rust
+code{empty}footnote:rust-syn[
+ The fact that syn is an external crate to the Rust compiler points to a big
+ warning: procedural macros are not first class in Rust. They are just like
+ Babel plugins in JavaScript land, with the extra shortcoming that there is no
+ specification for the Rust syntax, unlike JavaScript.
+pass:[</p><p>]
+ As flawed as this may be, it seems to be generally acceptable and adopted,
+ which works against building a solid ecosystem for Rust.
+pass:[</p><p>]
+ The alternative that rust-ffi implements relies on internals of the Rust
+ compiler, which isn't actually worst, just less common and less accepted.
+], and adapt it to emit the metadata.
+
+I've started a fork of cbindgen:
+[line-through]#x-bindgen#{empty}footnote:x-bindgen[
+ _EDIT_: now archived, the experimentation was fun. I've started to move more
+ towards C, so this effort became deprecated.
+]. Right now it is just a copy of cbindgen verbatim, and I plan to remove all C
+and C++ emitting code from it, and add IR emitting code instead.
+
+When starting working on x-bindgen, I realized I didn't know what to look for in
+a header file, as I haven't written any C code in many years. So as I was
+writing {libedn}[libedn], I didn't know how to build a good C API to expose. So
+I tried porting the code to C, and right now I'm working on building a _good_ C
+API for a JSON parser using parser combinators:
+[line-through]#ParsecC#{empty}footnote:parsecc[
+ _EDIT_: now also archived.
+].
+
+After "finishing" ParsecC I'll have a good notion of what a good C API is, and
+I'll have a better direction towards how to expose code from libedn to other
+languages, and work on x-bindgen then.
+
+What both libedn and ParsecC are missing right now are proper error reporting,
+and property-based testing for libedn.
+
+== Conclusion
+
+I've learned a lot already, and I feel the journey I'm on is worth going
+through.
+
+If any of those topics interest you, message me to discuss more or contribute!
+Patches welcome!
diff --git a/_articles/2020-11-14-local-first-software-you-own-your-data-in-spite-of-the-cloud-article-review.md b/src/content/en/blog/2020/11/14/local-first-review.adoc
index 68ae03c..2036069 100644
--- a/_articles/2020-11-14-local-first-software-you-own-your-data-in-spite-of-the-cloud-article-review.md
+++ b/src/content/en/blog/2020/11/14/local-first-review.adoc
@@ -1,27 +1,18 @@
----
+= Local-First Software: article review
+:categories: presentation article-review
-title: "Local-First Software: You Own Your Data, in spite of the Cloud - article review"
+:empty:
+:presentation: link:../../../../slide/2020/11/14/local-first-hype.pdf
+:reviewed-article: https://martin.kleppmann.com/papers/local-first.pdf
-date: 2020-11-14
+_This article is derived from a {presentation}[presentation] given at a Papers
+We Love meetup on the same subject._
-layout: post
+This is a review of the article "{reviewed-article}[Local-First Software: You
+Own Your Data, in spite of the Cloud]", by M. Kleppmann, A. Wiggins, P. Van
+Hardenberg and M. F. McGranaghan.
-lang: en
-
-ref: local-first-software-you-own-your-data-in-spite-of-the-cloud-article-review
-
-eu_categories: presentation,article review
-
----
-
-*This article is derived from a [presentation][presentation] given at a Papers
-We Love meetup on the same subject.*
-
-This is a review of the article
-"[Local-First Software: You Own Your Data, in spite of the Cloud][article-pdf]",
-by M. Kleppmann, A. Wiggins, P. Van Hardenberg and M. F. McGranaghan.
-
-### Offline-first, local-first
+== Offline-first, local-first
The "local-first" term they use isn't new, and I have used it myself in the past
to refer to this types of application, where the data lives primarily on the
@@ -29,34 +20,34 @@ client, and there are conflict resolution algorithms that reconcile data created
on different instances.
Sometimes I see confusion with this idea and "client-side", "offline-friendly",
-"syncable", etc. I have myself used this terms, also.
+"syncable", etc.  I have myself used these terms, also.
There exists, however, already the "offline-first" term, which conveys almost
-all of that meaning. In my view, "local-first" doesn't extend "offline-first" in
-any aspect, rather it gives a well-defined meaning to it instead. I could say
-that "local-first" is just "offline-first", but with 7 well-defined ideals
+all of that meaning. In my view, "local-first" doesn't extend "offline-first"
+in any aspect, rather it gives a well-defined meaning to it instead. I could
+say that "local-first" is just "offline-first", but with 7 well-defined ideals
instead of community best practices.
It is a step forward, and given the number of times I've seen the paper shared
around I think there's a chance people will prefer saying "local-first" in
-*lieu* of "offline-first" from now on.
-
-[presentation]: {% link _slides/2020-11-14-on-local-first-beyond-the-crdt-silver-bullet.slides %}
-[article-pdf]: https://martin.kleppmann.com/papers/local-first.pdf
+_lieu_ of "offline-first" from now on.
-### Software licenses
+== Software licenses
On a footnote of the 7th ideal ("You Retain Ultimate Ownership and Control"),
the authors say:
-> In our opinion, maintaining control and ownership of data does not mean that
-> the software must necessarily be open source. (...) as long as it does not
-> artificially restrict what users can do with their files.
+____
+In our opinion, maintaining control and ownership of data does not mean that the
+software must necessarily be open source. (...) as long as it does not
+artificially restrict what users can do with their files.
+____
They give examples of artificial restrictions, like this artificial restriction
I've come up with:
-```bash
+[source,sh]
+----
#!/bin/sh
TODAY=$(date +%s)
@@ -68,31 +59,33 @@ if [ $TODAY -ge $LICENSE_EXPIRATION ]; then
fi
echo $((2 + 2))
-```
+----
Now when using this very useful program:
-```bash
+[source,sh]
+----
# today
$ ./useful-adder.sh
4
# tomorrow
$ ./useful-adder.sh
License expired!
-```
+----
This is obviously an intentional restriction, and it goes against the 5th ideal
-("The Long Now"). This software would only be useful as long as the embedded
-license expiration allowed. Sure you could change the clock on the computer, but
-there are many other ways that this type of intentional restriction is in
+("The Long Now"). This software would only be useful as long as the embedded
+license expiration allowed. Sure you could change the clock on the computer,
+but there are many other ways that this type of intentional restriction is in
conflict with that ideal.
-However, what about unintentional restrictions? What if a software had an equal
-or similar restriction, and stopped working after days pass? Or what if the
+However, what about unintentional restrictions? What if a software had an equal
+or similar restriction, and stopped working after days pass? Or what if the
programmer added a constant to make the development simpler, and this led to
unintentionally restricting the user?
-```bash
+[source,sh]
+----
# today
$ useful-program
# ...useful output...
@@ -100,81 +93,86 @@ $ useful-program
# tomorrow, with more data
$ useful-program
ERROR: Panic! Stack overflow!
-```
+----
Just as easily as I can come up with ways to intentionally restrict users, I can
-do the same for unintentionally restrictions. A program can stop working for a
+do the same for unintentional restrictions.  A program can stop working for a
variety of reasons.
-If it stops working due do, say, data growth, what are the options? Reverting to
-an earlier backup, and making it read-only? That isn't really a "Long Now", but
-rather a "Long Now as long as the software keeps working as expected".
+If it stops working due to, say, data growth, what are the options?  Reverting
+to an earlier backup, and making it read-only? That isn't really a "Long Now",
+but rather a "Long Now as long as the software keeps working as expected".
The point is: if the software isn't free, "The Long Now" isn't achievable
-without a lot of wishful thinking. Maybe the authors were trying to be more
-friendly towards business who don't like free software, but in doing so they've proposed
-a contradiction by reconciling "The Long Now" with proprietary software.
+without a lot of wishful thinking. Maybe the authors were trying to be more
+friendly towards businesses who don't like free software, but in doing so
+they've
+proposed a contradiction by reconciling "The Long Now" with proprietary
+software.
-It isn't the same as saying that any free software achieves that ideal,
-either. The license can still be free, but the source code can become
-unavailable due to cloud rot. Or maybe the build is undocumented, or the build
-tools had specific configuration that one has to guess. A piece of free
-software can still fail to achieve "The Long Now". Being free doesn't guarantee
-it, just makes it possible.
+It isn't the same as saying that any free software achieves that ideal, either.
+The license can still be free, but the source code can become unavailable due to
+cloud rot. Or maybe the build is undocumented, or the build tools had specific
+configuration that one has to guess. A piece of free software can still fail to
+achieve "The Long Now". Being free doesn't guarantee it, just makes it
+possible.
A colleague has challenged my view, arguing that the software doesn't really
-need to be free, as long as there is an specification of the file format. This
+need to be free, as long as there is a specification of the file format.  This
way if the software stops working, the format can still be processed by other
-programs. But this doesn't apply in practice: if you have a document that you
+programs. But this doesn't apply in practice: if you have a document that you
write to, and software stops working, you still want to write to the document.
An external tool that navigates the content and shows it to you won't allow you
to keep writing, and when it does that tool is now starting to re-implement the
software.
An open specification could serve as a blueprint to other implementations,
-making the data format more friendly to reverse-engineering. But the
-re-implementation still has to exist, at which point the original software failed
-to achieve "The Long Now".
+making the data format more friendly to reverse-engineering. But the
+re-implementation still has to exist, at which point the original software
+failed to achieve "The Long Now".
It is less bad, but still not quite there yet.
-### Denial of existing solutions
+== Denial of existing solutions
-When describing "Existing Data Storage and Sharing Models", on a
-footnote[^devil] the authors say:
+:distgit: https://drewdevault.com/2018/07/23/Git-is-already-distributed.html
-[^devil]: This is the second aspect that I'm picking on the article from a
- footnote. I guess the devil really is on the details.
+When describing "Existing Data Storage and Sharing Models", on a
+footnote{empty}footnote:devil[
+ This is the second aspect that I'm picking on the article from a footnote. I
+ guess the devil really is on the details.
+] the authors say:
-> In principle it is possible to collaborate without a repository service,
-> e.g. by sending patch files by email, but the majority of Git users rely
-> on GitHub.
+____
+In principle it is possible to collaborate without a repository service, e.g. by
+sending patch files by email, but the majority of Git users rely on GitHub.
+____
The authors go to a great length to talk about usability of cloud apps, and even
point to research they've done on it, but they've missed learning more from
local-first solutions that already exist.
Say the automerge CRDT proves to be even more useful than what everybody
-imagined. Say someone builds a local-first repository service using it. How will
-it change anything of the Git/GitHub model? What is different about it that
-prevents people in the future writing a paper saying:
+imagined. Say someone builds a local-first repository service using it. How
+will it change anything of the Git/GitHub model? What is different about it
+that prevents people in the future writing a paper saying:
-> In principle it is possible to collaborate without a repository service,
-> e.g. by using automerge and platform X,
-> but the majority of Git users rely on GitHub.
+____
+In principle it is possible to collaborate without a repository service, e.g. by
+using automerge and platform X, but the majority of Git users rely on GitHub.
+____
How is this any better?
-If it is already [possible][git-local-first] to have a local-first development
-workflow, why don't people use it? Is it just fashion, or there's a fundamental
-problem with it? If so, what is it, and how to avoid it?
+If it is already {distgit}[possible] to have a local-first development workflow,
+why don't people use it? Is it just fashion, or there's a fundamental problem
+with it? If so, what is it, and how to avoid it?
If sending patches by emails is perfectly possible but out of fashion, why even
-talk about Git/GitHub? Isn't this a problem that people are putting themselves
-in? How can CRDTs possibly prevent people from doing that?
+talk about Git/GitHub? Isn't this a problem that people are putting themselves
+in? How can CRDTs possibly prevent people from doing that?
My impression is that the authors envision a better future, where development is
-fully decentralized unlike today, and somehow CRDTs will make that happen. If
+fully decentralized unlike today, and somehow CRDTs will make that happen. If
more people think this way, "CRDT" is next in line to the buzzword list that
solves everything, like "containers", "blockchain" or "machine learning".
@@ -182,56 +180,56 @@ Rather than picturing an imaginary service that could be described like
"GitHub+CRDTs" and people would adopt it, I'd rather better understand why
people don't do it already, since Git is built to work like that.
-[git-local-first]: https://drewdevault.com/2018/07/23/Git-is-already-distributed.html
+== Ditching of web applications
-### Ditching of web applications
+:pouchdb: https://pouchdb.com/
+:instant-apps: https://developer.android.com/topic/google-play-instant
The authors put web application in a worse position for building local-first
application, claiming that:
-> (...) the architecture of web apps remains fundamentally server-centric.
-> Offline support is an afterthought in most web apps, and the result is
-> accordingly fragile.
+____
+(...) the architecture of web apps remains fundamentally server-centric.
+Offline support is an afterthought in most web apps, and the result is
+accordingly fragile.
+____
Well, I disagree.
The problem isn't inherent in the web platform, but instead how people use it.
I have myself built offline-first applications, leveraging IndexedDB, App Cache,
-*etc*. I wanted to build an offline-first application on the web, and so I did.
+_etc_. I wanted to build an offline-first application on the web, and so I did.
-In fact, many people choose [PouchDB][pouchdb] *because* of that, since it is a
-good tool for offline-first web applications. The problem isn't really the
+In fact, many people choose {pouchdb}[PouchDB] _because_ of that, since it is a
+good tool for offline-first web applications. The problem isn't really the
technology, but how much people want their application to be local-first.
-Contrast it with Android [Instant Apps][instant-apps], where applications are
-sent to the phone in small parts. Since this requires an internet connection to
+Contrast it with Android {instant-apps}[Instant Apps], where applications are
+sent to the phone in small parts. Since this requires an internet connection to
move from a part of the app bundle to another, a subset of the app isn't
local-first, despite being an app.
-The point isn't the technology, but how people are using it. Local-first web
+The point isn't the technology, but how people are using it. Local-first web
applications are perfectly possible, just like non-local-first native
applications are possible.
-[pouchdb]: https://pouchdb.com/
-[instant-apps]: https://developer.android.com/topic/google-play-instant
-
-### Costs are underrated
+== Costs are underrated
I think the costs of "old-fashioned apps" over "cloud apps" are underrated,
mainly regarding storage, and that these costs can vary a lot by application.
Say a person writes online articles for their personal website, and puts
-everything into Git. Since there isn't supposed to be any collaboration, all
-of the relevant ideals of local-first are achieved.
+everything into Git. Since there isn't supposed to be any collaboration, all of
+the relevant ideals of local-first are achieved.
-Now another person creates videos instead of articles. They could try keeping
+Now another person creates videos instead of articles. They could try keeping
everything local, but after some time the storage usage fills the entire disk.
This person's local-first setup would be much more complex, and would cost much
more on maintenance, backup and storage.
Even though both have similar needs, a local-first video repository is much more
-demanding. So the local-first thinking here isn't "just keep everything local",
+demanding. So the local-first thinking here isn't "just keep everything local",
but "how much time and money am I willing to spend to keep everything local".
The convenience of "cloud apps" becomes so attractive that many don't even have
@@ -241,60 +239,61 @@ maintain, backup and store their content.
The dial measuring "cloud apps" and "old-fashioned apps" needs to be specific to
use-cases.
-### Real-time collaboration is optional
+== Real-time collaboration is optional
If I were the one making the list of ideals, I wouldn't focus so much on
real-time collaboration.
Even though seamless collaboration is desired, it being real-time depends on the
-network being available for that. But ideal 3 states that
-"The Network is Optional", so real-time collaboration is also optional.
+network being available for that. But ideal 3 states that "The Network is
+Optional", so real-time collaboration is also optional.
The fundamentals of a local-first system should enable real-time collaboration
when network is available, but shouldn't focus on it.
On many places when discussing applications being offline, it is common for me
-to find people saying that their application works
-"even on a plane, subway or elevator". That is a reflection of when said
-developers have to deal with networks being unavailable.
+to find people saying that their application works "even on a plane, subway or
+elevator". That is a reflection of when said developers have to deal with
+networks being unavailable.
But this leaves out a big chunk of the world where internet connection is
intermittent, or only works every other day or only once a week, or stops
-working when it rains, *etc*. For this audience, living without network
-connectivity isn't such a discrete moment in time, but part of every day life. I
-like the fact that the authors acknowledge that.
+working when it rains, _etc_. For this audience, living without network
+connectivity isn't such a discrete moment in time, but part of everyday life.
+I like the fact that the authors acknowledge that.
When discussing "working offline", I'd rather keep this type of person in mind,
then the subset of people who are offline when on the elevator will naturally be
included.
-### On CRDTs and developer experience
+== On CRDTs and developer experience
+
+:archived-article: https://web.archive.org/web/20130116163535/https://labs.oracle.com/techrep/1994/smli_tr-94-29.pdf
When discussing developer experience, the authors bring up some questions to be
answered further, like:
-> For an app developer, how does the use of a CRDT-based data layer compare to
-> existing storage layers like a SQL database, a filesystem, or CoreData? Is a
-> distributed system harder to write software for?
+____
+For an app developer, how does the use of a CRDT-based data layer compare to
+existing storage layers like a SQL database, a filesystem, or CoreData? Is a
+distributed system harder to write software for?
+____
That is an easy one: yes.
-A distributed system *is* harder to write software for, being a distributed
+A distributed system _is_ harder to write software for, being a distributed
system.
Adding a large layer of data structures and algorithms will make it more complex
-to write software for, naturally. And if trying to make this layer transparent
+to write software for, naturally. And if trying to make this layer transparent
to the programmer, so they can pretend that layer doesn't exist is a bad idea,
as RPC frameworks have tried, and failed.
-See "[A Note on Distributed Computing][note-dist-comp]" for a critique on RPC
+See "{archived-article}[A Note on Distributed Computing]" for a critique on RPC
frameworks trying to make the network invisible, which I think also applies in
equivalence for making the CRDTs layer invisible.
-[rmi-wiki]: https://en.wikipedia.org/wiki/Java_remote_method_invocation
-[note-dist-comp]: https://web.archive.org/web/20130116163535/http://labs.oracle.com/techrep/1994/smli_tr-94-29.pdf
-
-## Conclusion
+== Conclusion
I liked a lot the article, as it took the "offline-first" philosophy and ran
with it.
diff --git a/src/content/en/blog/2021/01/26/remembering-ann.adoc b/src/content/en/blog/2021/01/26/remembering-ann.adoc
new file mode 100644
index 0000000..6786b3c
--- /dev/null
+++ b/src/content/en/blog/2021/01/26/remembering-ann.adoc
@@ -0,0 +1,216 @@
+= ANN: remembering - Add memory to dmenu, fzf and similar tools
+:categories: ann
+
+:remembering: https://euandreh.xyz/remembering/
+:dmenu: https://tools.suckless.org/dmenu/
+:fzf: https://github.com/junegunn/fzf
+
+Today I pushed v0.1.0 of {remembering}[remembering], a tool to enhance the
+interactive usability of menu-like tools, such as {dmenu}[dmenu] and {fzf}[fzf].
+
+== Previous solution
+
+:yeganesh: https://dmwit.com/yeganesh/
+
+I previously used {yeganesh}[yeganesh] to fill this gap, but as I started to
+rely less on Emacs, I added fzf as my go-to tool for doing fuzzy searching on
+the terminal. But I didn't like that fzf always showed the same order of
+things, when I would only need 3 or 4 commonly used files.
+
+For those who don't know: yeganesh is a wrapper around dmenu that will remember
+your most used programs and put them on the beginning of the list of
+executables. This is very convenient for interactive prolonged use, as with
+time the things you usually want are right at the very beginning.
+
+But now I had this thing, yeganesh, that solved this problem for dmenu, but
+didn't for fzf.
+
+I initially considered patching yeganesh to support it, but I found it more
+coupled to dmenu than I would desire. I'd rather have something that knows
+nothing about dmenu, fzf or anything, but enhances tools like those in a useful
+way.
+
+== Implementation
+
+:v-010: https://euandre.org/git/remembering/tree/remembering?id=v0.1.0
+:getopts: https://www.opengroup.org/onlinepubs/9699919799/utilities/getopts.html
+:sort: https://www.opengroup.org/onlinepubs/9699919799/utilities/sort.html
+:awk: https://www.opengroup.org/onlinepubs/9699919799/utilities/awk.html
+:spencer-quote: https://en.wikipedia.org/wiki/Henry_Spencer#cite_note-3
+
+Other than being decoupled from dmenu, another improvement I thought could
+be made on top of yeganesh is the programming language choice. Instead of
+Haskell, I went with POSIX sh. Sticking to POSIX sh makes it require less
+build-time dependencies. There aren't any, actually. Packaging is made much
+easier due to that.
+
+The good thing is that the program itself is small enough ({v-010}[119 lines] on
+v0.1.0) that POSIX sh does the job just fine, combined with other POSIX
+utilities such as {getopts}[getopts], {sort}[sort] and {awk}[awk].
+
+The behaviour is: given a program that will read from STDIN and write a single
+entry to STDOUT, `remembering` wraps that program, and rearranges STDIN so that
+previous choices appear at the beginning.
+
+Where you would do:
+
+[source,sh]
+----
+$ seq 5 | fzf
+
+ 5
+ 4
+ 3
+ 2
+> 1
+ 5/5
+>
+----
+
+And every time get the same order of numbers, now you can write:
+
+[source,sh]
+----
+$ seq 5 | remembering -p seq-fzf -c fzf
+
+ 5
+ 4
+ 3
+ 2
+> 1
+ 5/5
+>
+----
+
+On the first run, everything is the same. If you picked 4 on the previous
+example, the following run would be different:
+
+[source,sh]
+----
+$ seq 5 | remembering -p seq-fzf -c fzf
+
+ 5
+ 3
+ 2
+ 1
+> 4
+ 5/5
+>
+----
+
+As time passes, the list would adjust based on the frequency of your choices.
+
+I aimed for reusability, so that I could wrap diverse commands with
+`remembering` and it would be able to work. To accomplish that, a "profile"
+(the `-p something` part) stores data about different runs separately.
+
+I took the idea of building something small with few dependencies to other
+places too: the manpages are written in troff directly; the tests are just
+more POSIX sh files; and a POSIX Makefile to `check` and `install`.
+
+I was aware of the value of sticking to coding to standards, but I had past
+experience mostly with programming language standards, such as ECMAScript,
+Common Lisp, Scheme, or with IndexedDB or DOM APIs. It felt good to rediscover
+these nice POSIX tools, which makes me remember of a quote by
+{spencer-quote}[Henry Spencer]:
+
+____
+Those who do not understand Unix are condemned to reinvent it, poorly.
+____
+
+== Usage examples
+
+Here are some functions I wrote myself that you may find useful:
+
+=== Run a command with fzf on `$PWD`
+
+[source,sh]
+----
+f() {
+ profile="$f-shell-function(pwd | sed -e 's_/_-_g')"
+ file="$(git ls-files | \
+ remembering -p "$profile" \
+ -c "fzf --select-1 --exit -0 --query \"$2\" --preview 'cat {}'")"
+ if [ -n "$file" ]; then
+ # shellcheck disable=2068
+ history -s f $@
+ history -s "$1" "$file"
+ "$1" "$file"
+fi
+}
+----
+
+This way I can run `f vi` or `f vi config` at the root of a repository, and the
+list of files will always appear on the most used order. Adding `pwd` to the
+profile allows it to not mix data for different repositories.
+
+=== Copy password to clipboard
+
+:pass: https://www.passwordstore.org/
+
+[source,sh]
+----
+choice="$(find "$HOME/.password-store" -type f | \
+ grep -Ev '(.git|.gpg-id)' | \
+ sed -e "s|$HOME/.password-store/||" -e 's/\.gpg$//' | \
+ remembering -p password-store \
+ -c 'dmenu -l 20 -i')"
+
+
+if [ -n "$choice" ]; then
+ pass show "$choice" -c
+fi
+----
+
+Adding the above to a file and binding it to a keyboard shortcut, I can access
+the contents of my {pass}[password store], with the entries ordered by usage.
+
+=== Replacing yeganesh
+
+Where I previously had:
+
+[source,sh]
+----
+exe=$(yeganesh -x) && exec $exe
+----
+
+Now I have:
+
+[source,sh]
+----
+exe=$(dmenu_path | remembering -p dmenu-exec -c dmenu) && exec $exe
+----
+
+This way, the executables appear on order of usage.
+
+If you don't have `dmenu_path`, you can get just the underlying `stest` tool
+that looks at the executables available in your `$PATH`. Here's a juicy
+one-liner to do it:
+
+[source,sh]
+----
+$ wget -O- https://dl.suckless.org/tools/dmenu-5.0.tar.gz | \
+ tar Ozxf - dmenu-5.0/arg.h dmenu-5.0/stest.c | \
+ sed 's|^#include "arg.h"$|// #include "arg.h"|' | \
+ cc -xc - -o stest
+----
+
+With the `stest` utility you'll be able to list executables in your `$PATH` and
+pipe them to dmenu or something else yourself:
+
+[source,sh]
+----
+$ (IFS=:; ./stest -flx $PATH;) | sort -u | remembering -p another-dmenu-exec -c dmenu | sh
+----
+
+In fact, the code for `dmenu_path` is almost just like that.
+
+== Conclusion
+
+:packaged: https://euandre.org/git/package-repository/
+
+For my personal use, I've {packaged}[packaged] `remembering` for GNU Guix and
+Nix. Packaging it to any other distribution should be trivial, or just
+downloading the tarball and running `[sudo] make install`.
+
+Patches welcome!
diff --git a/src/content/en/blog/2021/02/17/fallible.adoc b/src/content/en/blog/2021/02/17/fallible.adoc
new file mode 100644
index 0000000..1f2f641
--- /dev/null
+++ b/src/content/en/blog/2021/02/17/fallible.adoc
@@ -0,0 +1,285 @@
+= ANN: fallible - Fault injection library for stress-testing failure scenarios
+:updatedat: 2022-03-06
+
+:fallible: https://euandreh.xyz/fallible/
+
+Yesterday I pushed v0.1.0 of {fallible}[fallible], a minuscule library for
+fault-injection and stress-testing C programs.
+
+== _EDIT_
+
+:changelog: https://euandreh.xyz/fallible/CHANGELOG.html
+:tarball: https://euandre.org/static/attachments/fallible.tar.gz
+
+2021-06-12: As of {changelog}[0.3.0] (and beyond), the macro interface improved
+and is a bit different from what is presented in this article. If you're
+interested, I encourage you to take a look at it.
+
+2022-03-06: I've {tarball}[archived] the project for now. It still needs some
+maturing before being usable.
+
+== Existing solutions
+
+:gnu-std: https://www.gnu.org/prep/standards/standards.html#Semantics
+:valgrind: https://www.valgrind.org/
+:so-alloc: https://stackoverflow.com/questions/1711170/unit-testing-for-failed-malloc
+
+Writing robust code can be challenging, and tools like static analyzers, fuzzers
+and friends can help you get there with more certainty. As I would try to
+improve some of my C code and make it more robust, in order to handle system
+crashes, filled disks, out-of-memory and similar scenarios, I didn't find
+existing tooling to help me get there as I expected to find. I couldn't find
+existing tools to help me explicitly stress-test those failure scenarios.
+
+Take the "{gnu-std}[Writing Robust Programs]" section of the GNU Coding
+Standards:
+
+____
+Check every system call for an error return, unless you know you wish to ignore
+errors. (...) Check every call to malloc or realloc to see if it returned NULL.
+____
+
+From a robustness standpoint, this is a reasonable stance: if you want to have a
+robust program that knows how to fail when you're out of memory and `malloc`
+returns `NULL`, then you ought to check every call to `malloc`.
+
+Take a sample code snippet for clarity:
+
+[source,c]
+----
+void a_function() {
+ char *s1 = malloc(A_NUMBER);
+ strcpy(s1, "some string");
+
+ char *s2 = malloc(A_NUMBER);
+ strcpy(s2, "another string");
+}
+----
+
+At a first glance, this code is unsafe: if any of the calls to `malloc` returns
+`NULL`, `strcpy` will be given a `NULL` pointer.
+
+My first instinct was to change this code to something like this:
+
+[source,diff]
+----
+@@ -1,7 +1,15 @@
+ void a_function() {
+ char *s1 = malloc(A_NUMBER);
++ if (!s1) {
++ fprintf(stderr, "out of memory, exitting\n");
++ exit(1);
++ }
+ strcpy(s1, "some string");
+
+ char *s2 = malloc(A_NUMBER);
++ if (!s2) {
++ fprintf(stderr, "out of memory, exitting\n");
++ exit(1);
++ }
+ strcpy(s2, "another string");
+ }
+----
+
+As I later found out, there are at least 2 problems with this approach:
+
+. *it doesn't compose*: this could arguably work if `a_function` was `main`.
+ But if `a_function` lives inside a library, an `exit(1);` is an inelegant way
+ of handling failures, and will catch the top-level `main` consuming the
+ library by surprise;
+. *it gives up instead of handling failures*: the actual handling goes a bit
+ beyond stopping. What about open file handles, in-memory caches, unflushed
+ bytes, etc.?
+
+If you could force only the second call to `malloc` to fail,
+{valgrind}[Valgrind] would correctly complain that the program exited with
+unfreed memory.
+
+So the last change to make the best version of the above code is:
+
+[source,diff]
+----
+@@ -1,15 +1,14 @@
+-void a_function() {
++bool a_function() {
+ char *s1 = malloc(A_NUMBER);
+ if (!s1) {
+- fprintf(stderr, "out of memory, exitting\n");
+- exit(1);
++ return false;
+ }
+ strcpy(s1, "some string");
+
+ char *s2 = malloc(A_NUMBER);
+ if (!s2) {
+- fprintf(stderr, "out of memory, exitting\n");
+- exit(1);
++ free(s1);
++ return false;
+ }
+ strcpy(s2, "another string");
+ }
+----
+
+Instead of returning `void`, `a_function` now returns `bool` to indicate whether
+an error occurred during its execution.  If `a_function` returned a pointer to
+something, the return value could be `NULL`, or an `int` that represents an
+error code.
+
+The code is now a) safe and b) failing gracefully, returning the control to the
+caller to properly handle the error case.
+
+After seeing similar patterns on well designed APIs, I adopted this practice for
+my own code, but was still left with manually verifying the correctness and
+robustness of it.
+
+How could I add assertions around my code that would help me make sure the
+`free(s1);` exists, before getting an error report? How do other people and
+projects solve this?
+
+From what I could see, either people a) hope for the best, b) write safe code
+but don't stress-test it or c) write ad-hoc code to stress it.
+
+The most prominent case of c) is SQLite: it has a few wrappers around the
+familiar `malloc` to do fault injection, check for memory limits, add warnings,
+create shim layers for other environments, etc. All of that, however, is
+tightly coupled to SQLite itself, and couldn't easily be pulled out for use
+somewhere else.
+
+When searching for it online, an {so-alloc}[interesting thread] caught my
+attention: fail the call to `malloc` for each time it is called, and when the
+same stacktrace appears again, allow it to proceed.
+
+== Implementation
+
+:mallocfail: https://github.com/ralight/mallocfail
+:should-fail-fn: https://euandre.org/git/fallible/tree/src/fallible.c?id=v0.1.0#n16
+
+A working implementation of that already exists: {mallocfail}[mallocfail]. It
+uses `LD_PRELOAD` to replace `malloc` at run-time, computes the SHA of the
+stacktrace and fails once for each SHA.
+
+I initially envisioned and started implementing something very similar to
+mallocfail. However I wanted it to go beyond out-of-memory scenarios, and using
+`LD_PRELOAD` for every possible corner that could fail wasn't a good idea on the
+long run.
+
+Also, mallocfail won't work together with tools such as Valgrind, who want to do
+their own override of `malloc` with `LD_PRELOAD`.
+
+I instead went with less automatic things: starting with a
+`fallible_should_fail(char *filename, int lineno)` function that fails once for
+each `filename`+`lineno` combination, I created macro wrappers around common
+functions such as `malloc`:
+
+[source,c]
+----
+void *fallible_malloc(size_t size, const char *const filename, int lineno) {
+#ifdef FALLIBLE
+ if (fallible_should_fail(filename, lineno)) {
+ return NULL;
+ }
+#else
+ (void)filename;
+ (void)lineno;
+#endif
+ return malloc(size);
+}
+
+#define MALLOC(size) fallible_malloc(size, __FILE__, __LINE__)
+----
+
+With this definition, I could replace the calls to `malloc` with `MALLOC` (or
+any other name that you want to `#define`):
+
+[source,diff]
+----
+--- 3.c 2021-02-17 00:15:38.019706074 -0300
++++ 4.c 2021-02-17 00:44:32.306885590 -0300
+@@ -1,11 +1,11 @@
+ bool a_function() {
+- char *s1 = malloc(A_NUMBER);
++ char *s1 = MALLOC(A_NUMBER);
+ if (!s1) {
+ return false;
+ }
+ strcpy(s1, "some string");
+
+- char *s2 = malloc(A_NUMBER);
++ char *s2 = MALLOC(A_NUMBER);
+ if (!s2) {
+ free(s1);
+ return false;
+----
+
+With this change, if the program gets compiled with the `-DFALLIBLE` flag the
+fault-injection mechanism will run, and `MALLOC` will fail once for each
+`filename`+`lineno` combination. When the flag is missing, `MALLOC` is a very
+thin wrapper around `malloc`, which compilers could remove entirely, and the
+`-lfallible` flags can be omitted.
+
+This applies not only to `malloc` or other `stdlib.h` functions. If
+`a_function` is important or relevant, I could add a wrapper around it too, that
+calls `fallible_should_fail` to exercise if its callers are also doing the
+proper clean-up.
+
+The actual code is just this single function,
+{should-fail-fn}[`fallible_should_fail`], which ended-up taking only ~40 lines.
+In fact, there are more lines of either Makefile (111), README.md (82) or troff
+(306) on this first version.
+
+The price for such fine-grained control is that this approach requires more
+manual work.
+
+== Usage examples
+
+=== `MALLOC` from the `README.md`
+
+:fallible-check: https://euandreh.xyz/fallible/fallible-check.1.html
+
+[source,c]
+----
+// leaky.c
+#include <string.h>
+#include <fallible_alloc.h>
+
+int main() {
+ char *aaa = MALLOC(100);
+ if (!aaa) {
+ return 1;
+ }
+ strcpy(aaa, "a safe use of strcpy");
+
+ char *bbb = MALLOC(100);
+ if (!bbb) {
+ // free(aaa);
+ return 1;
+ }
+ strcpy(bbb, "not unsafe, but aaa is leaking");
+
+ free(bbb);
+ free(aaa);
+ return 0;
+}
+----
+
+Compile with `-DFALLIBLE` and run {fallible-check}[`fallible-check.1`]:
+
+[source,sh]
+----
+$ c99 -DFALLIBLE -o leaky leaky.c -lfallible
+$ fallible-check ./leaky
+Valgrind failed when we did not expect it to:
+(...suppressed output...)
+# exit status is 1
+----
+
+== Conclusion
+
+:package: https://euandre.org/git/package-repository/
+
+For my personal use, I'll {package}[package] them for GNU Guix and Nix.
+Packaging it to any other distribution should be trivial, or just downloading
+the tarball and running `[sudo] make install`.
+
+Patches welcome!
diff --git a/src/content/en/blog/2021/02/17/fallible.tar.gz b/src/content/en/blog/2021/02/17/fallible.tar.gz
new file mode 100644
index 0000000..211cadd
--- /dev/null
+++ b/src/content/en/blog/2021/02/17/fallible.tar.gz
Binary files differ
diff --git a/src/content/en/blog/2021/04/29/relational-review.adoc b/src/content/en/blog/2021/04/29/relational-review.adoc
new file mode 100644
index 0000000..4b53737
--- /dev/null
+++ b/src/content/en/blog/2021/04/29/relational-review.adoc
@@ -0,0 +1,144 @@
+= A Relational Model of Data for Large Shared Data Banks - article-review
+
+:empty:
+:reviewed-article: https://www.seas.upenn.edu/~zives/03f/cis550/codd.pdf
+
+This is a review of the article "{reviewed-article}[A Relational Model of Data
+for Large Shared Data Banks]", by E. F. Codd.
+
+== Data Independence
+
+Codd brings the idea of _data independence_ as a better approach to use on
+databases.  This is in contrast with the existing approaches, namely hierarchical
+(tree-based) and network-based.
+
+His main argument is that queries in applications shouldn't depend on and be
+coupled with how the data is represented internally by the database system.
+This key idea is very powerful, and something that we strive for in many other
+places: decoupling the interface from the implementation.
+
+If the database system has this separation, it can keep the querying interface
+stable, while having the freedom to change its internal representation at will,
+for better performance, less storage, etc.
+
+This is true for most modern database systems. They can change from B-Trees
+with leaves containing pointers to data, to B-Trees with leaves containing the raw
+data, to hash tables.  All that without changing the query interface, only its
+performance.
+
+Codd mentions that, from an information representation standpoint, any index is
+a duplication, but useful for performance.
+
+This data independence also impacts ordering (a _relation_ doesn't rely on the
+insertion order).
+
+== Duplicates
+
+His definition of relational data is a bit different from most modern database
+systems, namely *no duplicate rows*.
+
+I couldn't find a reason behind this restriction, though. For practical
+purposes, I find it useful to have it.
+
+== Relational Data
+
+:edn: https://github.com/edn-format/edn
+
+In the article, Codd doesn't try to define a language, and today's most popular
+one is SQL.
+
+However, there is no restriction that says that "SQL database" and "relational
+database" are synonyms. One could have a relational database without using SQL
+at all, and it would still be a relational one.
+
+The main one that I have in mind, and the reason that led me to reading this
+paper in the first place, is Datomic.
+
+It uses an {edn}[edn]-based representation for datalog
+queries{empty}footnote:edn-queries[
+ You can think of it as JSON, but with a Clojure taste.
+], and a particular schema used to represent data.
+
+Even though it looks very weird when coming from SQL, I'd argue that it ticks
+all the boxes (except for "no duplicates") that defines a relational database,
+since building relations and applying operations on them is possible.
+
+Compare and contrast a contrived example of possible representations of SQL and
+datalog of the same data:
+
+[source,sql]
+----
+-- create schema
+CREATE TABLE people (
+ id UUID PRIMARY KEY,
+ name TEXT NOT NULL,
+ manager_id UUID,
+ FOREIGN KEY (manager_id) REFERENCES people (id)
+);
+
+-- insert data
+INSERT INTO people (id, name, manager_id) VALUES
+ ("d3f29960-ccf0-44e4-be66-1a1544677441", "Foo", "076356f4-1a0e-451c-b9c6-a6f56feec941"),
+ ("076356f4-1a0e-451c-b9c6-a6f56feec941", "Bar");
+
+-- query data, make a relation
+
+SELECT employees.name AS 'employee-name',
+ managers.name AS 'manager-name'
+FROM people employees
+INNER JOIN people managers ON employees.manager_id = managers.id;
+----
+
+[source,clojure]
+----
+;; create schema
+#{{:db/ident :person/id
+ :db/valueType :db.type/uuid
+ :db/cardinality :db.cardinality/one
+ :db/unique :db.unique/value}
+ {:db/ident :person/name
+ :db/valueType :db.type/string
+ :db/cardinality :db.cardinality/one}
+ {:db/ident :person/manager
+ :db/valueType :db.type/ref
+ :db/cardinality :db.cardinality/one}}
+
+;; insert data
+#{{:person/id #uuid "d3f29960-ccf0-44e4-be66-1a1544677441"
+ :person/name "Foo"
+ :person/manager [:person/id #uuid "076356f4-1a0e-451c-b9c6-a6f56feec941"]}
+ {:person/id #uuid "076356f4-1a0e-451c-b9c6-a6f56feec941"
+ :person/name "Bar"}}
+
+;; query data, make a relation
+{:find [?employee-name ?manager-name]
+ :where [[?person :person/name ?employee-name]
+ [?person :person/manager ?manager]
+ [?manager :person/name ?manager-name]]}
+----
+
+(forgive any errors on the above SQL and datalog code, I didn't run them to
+check. Patches welcome!)
+
+This employee example comes from the paper, and both SQL and datalog
+representations match the paper definition of "relational".
+
+Both "Foo" and "Bar" are employees, and the data is normalized. SQL represents
+data as tables, and Datomic as datoms, but relations could be derived from both,
+which we could view as:
+
+[source,sql]
+----
+employee_name | manager_name
+----------------------------
+"Foo" | "Bar"
+----
+
+== Conclusion
+
+The article also talks about operators, consistency and normalization, which are
+now so widespread and well-known that it feels a bit weird seeing someone
+advocating for them.
+
+I also establish that `relational != SQL`, and other databases such as Datomic
+are also relational, following Codd's original definition.
diff --git a/src/content/en/blog/categories.adoc b/src/content/en/blog/categories.adoc
new file mode 100644
index 0000000..feb64ff
--- /dev/null
+++ b/src/content/en/blog/categories.adoc
@@ -0,0 +1,2 @@
+= Articles by category
+:type: categories
diff --git a/src/content/en/blog/index.adoc b/src/content/en/blog/index.adoc
new file mode 100644
index 0000000..afd64d4
--- /dev/null
+++ b/src/content/en/blog/index.adoc
@@ -0,0 +1 @@
+= Blog
diff --git a/src/content/en/index.adoc b/src/content/en/index.adoc
new file mode 100644
index 0000000..275f7c4
--- /dev/null
+++ b/src/content/en/index.adoc
@@ -0,0 +1 @@
+= index
diff --git a/_pastebins/2016-04-05-rpn-macro-setup.md b/src/content/en/pastebin/2016/04/05/rpn.adoc
index 25ca6ba..c567d0d 100644
--- a/_pastebins/2016-04-05-rpn-macro-setup.md
+++ b/src/content/en/pastebin/2016/04/05/rpn.adoc
@@ -1,18 +1,7 @@
----
+= RPN macro setup
-title: RPN macro setup
-
-date: 2016-04-05
-
-layout: post
-
-lang: en
-
-ref: rpn-macro-setup
-
----
-
-```lisp
+[source,lisp]
+----
(defmacro rpn (body)
(rpn-expander body))
@@ -31,4 +20,4 @@ Just a quick stub.
One could easily improve #'RPN-EXPANDER in order to better suit one's needs.
|#
-```
+----
diff --git a/_pastebins/2018-07-11-nix-pinning.md b/src/content/en/pastebin/2018/07/11/nix-pinning.adoc
index 2d35e09..63b1ac9 100644
--- a/_pastebins/2018-07-11-nix-pinning.md
+++ b/src/content/en/pastebin/2018/07/11/nix-pinning.adoc
@@ -1,20 +1,8 @@
----
+= Nix pinning
+:categories: nix
-title: Nix pinning
-
-date: 2018-07-11
-
-layout: post
-
-lang: en
-
-eu_categories: nix
-
-ref: nix-pinning
-
----
-
-```nix
+[source,nix]
+----
let
# Pin the nixpkgs version
stdenv = pkgs.stdenv;
@@ -35,4 +23,4 @@ in rec {
patches = [];
};
}
-```
+----
diff --git a/_pastebins/2018-07-13-gnu-guix-systemd-daemon-for-nixos.md b/src/content/en/pastebin/2018/07/13/guix-nixos-systemd.adoc
index c2b8b62..fa4226a 100644
--- a/_pastebins/2018-07-13-gnu-guix-systemd-daemon-for-nixos.md
+++ b/src/content/en/pastebin/2018/07/13/guix-nixos-systemd.adoc
@@ -1,20 +1,9 @@
----
+= GNU Guix systemd daemon for NixOS
+:categories: nix, guix
+:sort: 3
-title: GNU Guix systemd daemon for NixOS
-
-date: 2018-07-13
-
-layout: post
-
-lang: en
-
-eu_categories: nix,guix
-
-ref: gnu-guix-systemd-daemon-for-nixos
-
----
-
-```nix
+[source,nix]
+----
# Derived from Guix guix-daemon.service.in
# https://git.savannah.gnu.org/cgit/guix.git/tree/etc/guix-daemon.service.in?id=00c86a888488b16ce30634d3a3a9d871ed6734a2
systemd.services.guix-daemon = {
@@ -30,4 +19,4 @@ ref: gnu-guix-systemd-daemon-for-nixos
};
wantedBy = [ "multi-user.target" ];
};
-```
+----
diff --git a/_pastebins/2018-07-13-guix-users-in-nixos-system-configuration.md b/src/content/en/pastebin/2018/07/13/guixbuilder-nixos.adoc
index 880d347..65dbcc7 100644
--- a/_pastebins/2018-07-13-guix-users-in-nixos-system-configuration.md
+++ b/src/content/en/pastebin/2018/07/13/guixbuilder-nixos.adoc
@@ -1,20 +1,9 @@
----
+= Guix users in NixOS system configuration
+:categories: nix guix
+:sort: 2
-title: Guix users in NixOS system configuration
-
-date: 2018-07-13
-
-layout: post
-
-lang: en
-
-eu_categories: nix,guix
-
-ref: guix-users-in-nixos-system-configuration
-
----
-
-```nix
+[source,nix]
+----
users = {
mutableUsers = false;
@@ -50,4 +39,4 @@ ref: guix-users-in-nixos-system-configuration
name = "guixbuild";
};
};
-```
+----
diff --git a/_pastebins/2018-07-13-guix-builder-user-creation-commands.md b/src/content/en/pastebin/2018/07/13/guixbuilder.adoc
index 82204a8..35057f9 100644
--- a/_pastebins/2018-07-13-guix-builder-user-creation-commands.md
+++ b/src/content/en/pastebin/2018/07/13/guixbuilder.adoc
@@ -1,20 +1,8 @@
----
+= Guix builder user creation commands
+:categories: guix
-title: Guix builder user creation commands
-
-date: 2018-07-13
-
-layout: post
-
-lang: en
-
-eu_categories: guix
-
-ref: guix-builder-user-creation-commands
-
----
-
-```shell
+[source,sh]
+----
groupadd --system guixbuild
for i in `seq -w 1 10`;
do
@@ -23,4 +11,4 @@ do
-c "Guix build user $i" --system \
guixbuilder$i;
done
-```
+----
diff --git a/src/content/en/pastebin/2018/07/13/nix-strpad.adoc b/src/content/en/pastebin/2018/07/13/nix-strpad.adoc
new file mode 100644
index 0000000..71e8168
--- /dev/null
+++ b/src/content/en/pastebin/2018/07/13/nix-strpad.adoc
@@ -0,0 +1,8 @@
+= Nix string padding
+:categories: nix
+:sort: 1
+
+[source,nix]
+----
+padString = (n: if n < 10 then "0" + toString n else toString n)
+----
diff --git a/_pastebins/2018-07-25-nix-exps.md b/src/content/en/pastebin/2018/07/25/nix-exps.adoc
index 23d75b6..04cb7f4 100644
--- a/_pastebins/2018-07-25-nix-exps.md
+++ b/src/content/en/pastebin/2018/07/25/nix-exps.adoc
@@ -1,20 +1,8 @@
----
+= Nix exps
+:categories: nix
-title: Nix exps
-
-date: 2018-07-25
-
-layout: post
-
-lang: en
-
-eu_categories: nix
-
-ref: nix-exps
-
----
-
-```nix
+[source,nix]
+----
let
pkgsOriginal = import <nixpkgs> {};
pkgsSrc = pkgsOriginal.fetchzip {
@@ -55,4 +43,4 @@ in rec {
'';
};
}
-```
+----
diff --git a/_pastebins/2018-07-25-nix-show-derivation-sample-output.md b/src/content/en/pastebin/2018/07/25/nix-showdrv.adoc
index 813965d..b62e526 100644
--- a/_pastebins/2018-07-25-nix-show-derivation-sample-output.md
+++ b/src/content/en/pastebin/2018/07/25/nix-showdrv.adoc
@@ -1,20 +1,9 @@
----
+= nix show-derivation sample output
+:categories: nix
+:sort: 1
-title: nix show-derivation sample output
-
-date: 2018-07-25
-
-layout: post
-
-lang: en
-
-eu_categories: nix
-
-ref: nix-show-derivation-sample-output
-
----
-
-```nix
+[source,nix]
+----
$ nix show-derivation /nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.drv
{
"/nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.drv": {
@@ -83,4 +72,4 @@ $ nix show-derivation /nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.
}
}
}
-```
+----
diff --git a/_pastebins/2019-06-08-inconsistent-hash-of-buildgomodule.md b/src/content/en/pastebin/2019/06/08/inconsistent-hash.adoc
index 51d8ad3..8dc5794 100644
--- a/_pastebins/2019-06-08-inconsistent-hash-of-buildgomodule.md
+++ b/src/content/en/pastebin/2019/06/08/inconsistent-hash.adoc
@@ -1,35 +1,25 @@
----
+= Inconsistent hash of buildGoModule
+:categories: nix
-title: Inconsistent hash of buildGoModule
+:commit: https://euandre.org/git/servers/commit?id=6ba76140238b5e3c7009c201f9f80ac86063f438
-date: 2019-06-08
+''''
-layout: post
+FIXED: The `<nixpkgs>` was different on different environments. See
+https://discourse.nixos.org/t/inconsistent-hash-of-buildgomodule/3127/2.
-lang: en
+''''
-eu_categories: nix
+The {commit}[commit that made this visible].
-ref: inconsistent-hash-of-buildgomodule
+== Offending derivation:
----
+:orig-src: https://euandre.org/git/servers/tree/default.nix?id=6ba76140238b5e3c7009c201f9f80ac86063f438#n3
-FIXED: The `<nixpkgs>` was different on different environments.
-See <https://discourse.nixos.org/t/inconsistent-hash-of-buildgomodule/3127/2>.
+{orig-src}[Full source code on the repository]:
----
-
-The [commit that made this visible][0].
-
-[0]: https://euandre.org/git/servers/commit?id=6ba76140238b5e3c7009c201f9f80ac86063f438
-
-## Offending derivation:
-
-[Full source code on the repository][1]:
-
-[1]: https://euandre.org/git/servers/tree/default.nix?id=6ba76140238b5e3c7009c201f9f80ac86063f438#n3
-
-```nix
+[source,nix]
+----
terraform-godaddy = pkgs.buildGoModule rec {
name = "terraform-godaddy-${version}";
version = "1.6.4";
@@ -43,11 +33,12 @@ terraform-godaddy = pkgs.buildGoModule rec {
postInstall =
"mv $out/bin/terraform-godaddy $out/bin/terraform-provider-godaddy";
};
-```
+----
-## Local build:
+== Local build:
-```shell
+[source,sh]
+----
$ nix-build -A terraform-godaddy
these derivations will be built:
/nix/store/3hs274i9qdsg3hsgp05j7i5cqxsvpcqx-terraform-godaddy-1.6.4-go-modules.drv
@@ -193,14 +184,15 @@ hash mismatch in fixed-output derivation '/nix/store/jgbfkhlsz6bmq724p5cqqcgfyc7
got: sha256:10n2dy7q9kk1ly58sw965n6qa8l0nffh8vyd1vslx0gdlyj25xxs
cannot build derivation '/nix/store/y5961vv6y9c0ps2sbd8xfnpqvk0q7qhq-terraform-godaddy-1.6.4.drv': 1 dependencies couldn't be built
error: build of '/nix/store/y5961vv6y9c0ps2sbd8xfnpqvk0q7qhq-terraform-godaddy-1.6.4.drv' failed
-```
+----
-## Build [on CI](https://builds.sr.ht/~euandreh/job/67836#task-setup-0):
+== Build https://builds.sr.ht/~euandreh/job/67836#task-setup-0[on CI]:
-The `setup.sh` script contains a call to `nix-shell` which in turns
-build the same `terraform-godaddy` derivation:
+The `setup.sh` script contains a call to `nix-shell` which in turns build the
+same `terraform-godaddy` derivation:
-```shell
+[source,sh]
+----
$ cd vps/
$ ./scripts/ci/setup.sh
warning: Nix search path entry '/nix/var/nix/profiles/per-user/root/channels' does not exist, ignoring
@@ -1058,4 +1050,4 @@ hash mismatch in fixed-output derivation '/nix/store/q8y0mzjl78hfhazjgq2sc84i7dp
cannot build derivation '/nix/store/w4ghinrmpq524k3617ikfc8i42aa0dbb-terraform-godaddy-1.6.4.drv': 1 dependencies couldn't be built
copying path '/nix/store/63gjp25l4cmdkl63zy0rcgmsvd2p2p34-terraform-0.11.14' from 'https://cache.nixos.org'...
error: build of '/nix/store/9drkn1qxkkcrz5g3413lpmbc2xysa582-terraform-0.11.14.drv', '/nix/store/w4ghinrmpq524k3617ikfc8i42aa0dbb-terraform-godaddy-1.6.4.drv' failed
-```
+----
diff --git a/_pastebins/2019-12-29-raku-tuple-type-annotation.md b/src/content/en/pastebin/2019/12/29/raku-tuple-type.adoc
index 3d5ff34..50dd841 100644
--- a/_pastebins/2019-12-29-raku-tuple-type-annotation.md
+++ b/src/content/en/pastebin/2019/12/29/raku-tuple-type.adoc
@@ -1,18 +1,8 @@
----
+= Raku tuple type annotation
+:categories: raku programming-languages
-title: Raku tuple type annotation
-
-date: 2019-12-29
-
-layout: post
-
-lang: en
-
-ref: raku-tuple-type-annotation
-
----
-
-```perl
+[source,raku]
+----
# Single Str return value: this works
sub f1(Str $in --> Str) {
$in;
@@ -27,11 +17,12 @@ sub f2(Str $in) {
sub f2(Str $in --> (Str, Str)) {
($in, $in);
}
-```
+----
Error log is:
-```perl
+[source,raku]
+----
===SORRY!=== Error while compiling /path/to/my/file
Malformed return value
-```
+----
diff --git a/_pastebins/2020-01-04-failure-on-guix-tex-live-importer.md b/src/content/en/pastebin/2020/01/04/guix-import-failure.adoc
index 3388a8d..5896645 100644
--- a/_pastebins/2020-01-04-failure-on-guix-tex-live-importer.md
+++ b/src/content/en/pastebin/2020/01/04/guix-import-failure.adoc
@@ -1,20 +1,8 @@
----
+= Failure on Guix TeX Live importer
+:categories: guix
-title: Failure on Guix TeX Live importer
-
-date: 2020-01-04
-
-layout: post
-
-lang: en
-
-eu_categories: guix
-
-ref: failure-on-guix-tex-live-importer
-
----
-
-```shell
+[source,sh]
+----
$ guix import texlive fontspec
redirection vers « https://ctan.org/xml/1.2/pkg/fontspec »...
Backtrace:
@@ -44,4 +32,4 @@ In guix/build/utils.scm:
guix/build/utils.scm:652:6: In procedure invoke:
Throw to key `srfi-34' with args `(#<condition &invoke-error [program: "svn" arguments: ("export" "--non-interactive" "--trust-server-cert" "-r" "49435" "svn://www.tug.org/texlive/tags/texlive-2018.2/Master/texmf-dist/source/latex/fontspec" "/tmp/guix-directory.WtLohP") exit-status: 1 term-signal: #f stop-signal: #f] 7fe80d229c80>)'.
-```
+----
diff --git a/src/content/en/pastebin/2020/02/14/guix-shebang.adoc b/src/content/en/pastebin/2020/02/14/guix-shebang.adoc
new file mode 100644
index 0000000..d415d36
--- /dev/null
+++ b/src/content/en/pastebin/2020/02/14/guix-shebang.adoc
@@ -0,0 +1,11 @@
+= Guix shebang
+:categories: guix
+
+[source,sh]
+----
+#!/usr/bin/env -S guix environment --ad-hoc bash -- bash
+set -Eeuo pipefail
+cd "$(dirname "${BASH_SOURCE[0]}")"
+
+pwd
+----
diff --git a/_pastebins/2020-11-27-guix-build-local-module.md b/src/content/en/pastebin/2020/11/27/guix-build-local.adoc
index 350d50f..1a18d4b 100644
--- a/_pastebins/2020-11-27-guix-build-local-module.md
+++ b/src/content/en/pastebin/2020/11/27/guix-build-local.adoc
@@ -1,25 +1,15 @@
----
+= Guix build local module
+:categories: guix
-title: Guix build local module
+FIXED: rename `name` on line 9 of the first snippet, and use `"my-hello"`
+instead of `"hello"`.
-date: 2020-11-27
-
-layout: post
-
-lang: en
-
-eu_categories: guix
-
-ref: guix-build-local-module
-
----
-
-FIXED: rename `name` on line 9 of the first snippet, and use `"my-hello"` instead of `"hello"`.
-
----
+'''''
Inside a file named `build.scm`:
-```scheme
+
+[source,scheme]
+----
(define-module (build)
#:use-module (guix packages)
#:use-module (guix download)
@@ -42,19 +32,20 @@ Inside a file named `build.scm`:
(description "")
(home-page "")
(license gpl3+)))
-
-```
+----
A plain build command didn't work:
-```shell
+[source,sh]
+----
$ guix build -L. my-hello
guix build: error: my-hello : paquet inconnu
-```
+----
But with an eval expression it did:
-```shell
+[source,sh]
+----
$ guix build -L. -e '(@ (build) my-hello)'
# works
-```
+----
diff --git a/_pastebins/2020-12-15-failure-with-relocatable-guix-pack-tarball.md b/src/content/en/pastebin/2020/12/15/guix-pack-fail.adoc
index 2834f90..3631fbc 100644
--- a/_pastebins/2020-12-15-failure-with-relocatable-guix-pack-tarball.md
+++ b/src/content/en/pastebin/2020/12/15/guix-pack-fail.adoc
@@ -1,28 +1,16 @@
----
+= Failure with relocatable Guix pack tarball
+:categories: guix
-title: Failure with relocatable Guix pack tarball
-
-date: 2020-12-15
-
-layout: post
-
-lang: en
-
-eu_categories: guix
-
-ref: failure-with-relocatable-guix-pack-tarball
-
----
+:post: https://guix.gnu.org/blog/2018/tarballs-the-ultimate-container-image-format/
FIXED: Use `GUIX_PROFILE= source etc/profile`
----
+'''''
-The example from the [blog post][guix-tarball-article] fails.
+The example from the {post}[blog post] fails.
-[guix-tarball-article]: https://guix.gnu.org/blog/2018/tarballs-the-ultimate-container-image-format/
-
-```shell
+[source,sh]
+----
$ tar xf `guix pack --relocatable -S /bin=bin -S /etc=etc guile gnutls guile-json`
$ source etc/profile
$ bin/guile -c '(use-modules (json))'
@@ -83,14 +71,16 @@ In ice-9/boot-9.scm:
ice-9/boot-9.scm:3300:6: In procedure resolve-interface:
no code for module (gnutls)
-```
+----
My Guix version if fairly recent:
-```shell
+
+[source,sh]
+----
$ guix describe
Génération 83 14 déc. 2020 00:28:16 (actuelle)
guix 41807eb
URL du dépôt : https://git.savannah.gnu.org/git/guix.git
branche: master
commit : 41807eb5329299b8c45cd49356a4ead01ce0d469
-```
+----
diff --git a/_pastebins/2021-04-03-javascript-naive-slugify.md b/src/content/en/pastebin/2021/04/03/naive-slugify-js.adoc
index f765495..184ce97 100644
--- a/_pastebins/2021-04-03-javascript-naive-slugify.md
+++ b/src/content/en/pastebin/2021/04/03/naive-slugify-js.adoc
@@ -1,20 +1,8 @@
----
+= JavaScript naive slugify
+:updatedat: 2021-08-15
-title: JavaScript naive slugify
-
-date: 2021-04-03
-
-updated_at: 2021-08-15
-
-layout: post
-
-lang: en
-
-ref: javascript-naive-slugify
-
----
-
-```javascript
+[source,javascript]
+----
const s = "Pézão: açaí, saci-pererê.";
const slugify = s =>
@@ -37,4 +25,4 @@ const slugify = s =>
.replaceAll("ç", "c");
console.log(slugify(s));
-```
+----
diff --git a/_pastebins/2021-06-08-debit-reading-session-sicp-solutions-pt-1.md b/src/content/en/pastebin/2021/06/08/reading-session-pt1.adoc
index b97ef08..d9f1f91 100644
--- a/_pastebins/2021-06-08-debit-reading-session-sicp-solutions-pt-1.md
+++ b/src/content/en/pastebin/2021/06/08/reading-session-pt1.adoc
@@ -1,18 +1,7 @@
----
+= Debit Reading Session - SICP solutions pt.1
-title: Debit Reading Session - SICP solutions pt.1
-
-date: 2021-06-08
-
-layout: post
-
-lang: en
-
-ref: debit-reading-session-sicp-solutions-pt-1
-
----
-
-```scheme
+[source,scheme]
+----
;; 1.41
(define (double f)
(lambda (x)
@@ -72,6 +61,6 @@ ref: debit-reading-session-sicp-solutions-pt-1
;;; 2.32
TODO
-```
+----
FYI: I just typed those in, I didn't yet test them yet.
diff --git a/_pastebins/2021-06-22-cloc-curl-and-wget.md b/src/content/en/pastebin/2021/06/22/curl-wget.adoc
index 1030c7b..97f55c7 100644
--- a/_pastebins/2021-06-22-cloc-curl-and-wget.md
+++ b/src/content/en/pastebin/2021/06/22/curl-wget.adoc
@@ -1,20 +1,9 @@
----
-
-title: "cloc: curl and wget"
-
-date: 2021-06-22
-
-layout: post
-
-lang: en
-
-ref: cloc-curl-and-wget
-
----
+= "cloc: curl and wget"
`curl`:
-```shell
+[source,sh]
+----
$ pushd `mktemp -d`
/tmp/tmp.AZkwvk7azD ~/
$ git clone git://github.com/curl/curl .
@@ -62,11 +51,12 @@ TOML 1 0 0
------------------------------------------------------------------------------------
SUM: 1309 39501 56083 254078
------------------------------------------------------------------------------------
-```
+----
`wget`:
-```shell
+[source,sh]
+----
$ pushd `mktemp -d`
/tmp/tmp.NX0udlJMiz ~/
$ git clone git://git.savannah.gnu.org/wget.git .
@@ -99,4 +89,4 @@ lex 1 29 65 73
--------------------------------------------------------------------------------
SUM: 342 11400 14185 52018
--------------------------------------------------------------------------------
-```
+----
diff --git a/_pastebins/2021-08-11-spaces-around-h1-tags.md b/src/content/en/pastebin/2021/08/11/h1-spacing.adoc
index 9a00ece..38dbb2e 100644
--- a/_pastebins/2021-08-11-spaces-around-h1-tags.md
+++ b/src/content/en/pastebin/2021/08/11/h1-spacing.adoc
@@ -1,29 +1,16 @@
----
+= Spaces around h1 tags
+:updatedat: 2021-08-15
-title: Spaces around h1 tags
+_EDIT_: Apparently, the behaviour below is consistent between Firefox and
+Chromium for links, but not for `<h1>`. My conclusion is that the `<h1>`
+behaviour is a Firefox quirk, but the `<a>` is expected.
-date: 2021-08-11
-
-updated_at: 2021-08-15
-
-layout: post
-
-lang: en
-
-ref: spaces-around-h1-tags
-
----
-
-*EDIT*: Apparently, the behaviour below is consistent between Firefox and
-Chromium for links, but not for `<h1>`.
-My conclusion is that the `<h1>` behaviour is a Firefox quirk, but the `<a>` is
-expected.
-
----
+'''''
The HTML below has selectable extra spaces after `<h1>` tags:
-```html
+[source,html]
+----
<!DOCTYPE html>
<html lang="en">
<head>
@@ -43,23 +30,22 @@ The HTML below has selectable extra spaces after `<h1>` tags:
</main>
</body>
</html>
-```
+----
The rendered output is:
-<h1>
- With spaces around when selecting this heading
-</h1>
-<h1>Without spaces around</h1>
-<p>
- Is this expected behaviour?
-</p>
+With spaces around when selecting this heading
+
+Without spaces around
----
+Is this expected behaviour?
+
+'''''
The same with links:
-```html
+[source,html]
+----
<!DOCTYPE html>
<html lang="en">
<head>
@@ -80,17 +66,10 @@ The same with links:
</main>
</body>
</html>
-```
+----
The rendered output is:
-<p>
- <a href="#">
- With extra underlined space
- </a>
- after the link.
-</p>
-<p>
- <a href="#">Without extra underlined space</a>
- after the link.
-</p>
+With extra underlined space after the link.
+
+Without extra underlined space after the link.
diff --git a/_pastebins/2021-09-02-sicp-exercise-3-19.md b/src/content/en/pastebin/2021/09/02/sicp-3-19.adoc
index 75ee346..166170f 100644
--- a/_pastebins/2021-09-02-sicp-exercise-3-19.md
+++ b/src/content/en/pastebin/2021/09/02/sicp-3-19.adoc
@@ -1,18 +1,8 @@
----
+= SICP exercise 3.19
+:categories: lisp programming-languages
-title: SICP exercise 3.19
-
-date: 2021-09-02
-
-layout: post
-
-lang: en
-
-ref: sicp-exercise-3-19
-
----
-
-```scheme
+[source,scheme]
+----
(define (cycle? l)
(define (rec l x)
(cond
@@ -20,11 +10,12 @@ ref: sicp-exercise-3-19
((eq? l x) true)
(true (rec l (cdr x)))))
(rec l (cdr l)))
-```
+----
Sample interactive session:
-```scheme
+[source,scheme]
+----
scheme@(guile-user)> (define true #t)
scheme@(guile-user)> (define false #f)
scheme@(guile-user)>
@@ -39,4 +30,4 @@ scheme@(guile-user)> (cycle? '(1 2 3))
$9 = #f
scheme@(guile-user)> (cycle? (make-cycle '(1 2 3)))
$10 = #t
-```
+----
diff --git a/_pastebins/2021-09-03-sicp-persistent-amortized-o1-queue.md b/src/content/en/pastebin/2021/09/03/sicp-persistent-queue.adoc
index 8cf7ea2..2b4a8a2 100644
--- a/_pastebins/2021-09-03-sicp-persistent-amortized-o1-queue.md
+++ b/src/content/en/pastebin/2021/09/03/sicp-persistent-queue.adoc
@@ -1,36 +1,25 @@
----
+= SICP persistent amortized O(1) queue
-title: SICP persistent amortized O(1) queue
-
-date: 2021-09-03
-
-layout: post
-
-lang: en
-
-ref: sicp-persistent-amortized-o1-queue
-
----
-
-```scheme
+[source,scheme]
+----
(define (queue)
(cons '()
- '()))
+ '()))
(define (enqueue x q)
(cons (car q)
- (cons x (cdr q))))
+ (cons x (cdr q))))
(define (flush q)
(cons (reverse (cdr q))
- '()))
+ '()))
(define (dequeue q)
(if (null? (car q))
(dequeue (flush q))
(cons (caar q)
- (cons (cdar q)
- (cdr q)))))
+ (cons (cdar q)
+ (cdr q)))))
(define (empty? q)
(and (null? (car q))
@@ -43,7 +32,7 @@ ref: sicp-persistent-amortized-o1-queue
(define (rec l leading-space?)
(when (not (null? l))
(when leading-space?
- (display " "))
+ (display " "))
(display (car l))
(rec (cdr l) #t)))
@@ -52,10 +41,12 @@ ref: sicp-persistent-amortized-o1-queue
(rec (reverse (cdr q)) (not (null? (car q))))
(display ")")
(newline))
-```
+----
Sample interactive session:
-```scheme
+
+[source,scheme]
+----
scheme@(guile-user)> (define true #t)
scheme@(guile-user)> (define false #f)
scheme@(guile-user)> (define q (queue))
@@ -82,4 +73,4 @@ scheme@(guile-user)> (print-queue q)
#q(b c d e)
scheme@(guile-user)> (print-queue (cdr (dequeue (cdr (dequeue (enqueue 'g (enqueue 'f q)))))))
#q(d e f g)
-```
+----
diff --git a/_pastebins/2022-07-14-git-cleanup-command.md b/src/content/en/pastebin/2022/07/14/git-cleanup.adoc
index 52cd17f..b223f86 100644
--- a/_pastebins/2022-07-14-git-cleanup-command.md
+++ b/src/content/en/pastebin/2022/07/14/git-cleanup.adoc
@@ -1,18 +1,7 @@
----
+= git-cleanup command
-title: git-cleanup command
-
-date: 2022-07-14
-
-layout: post
-
-lang: en
-
-ref: git-cleanup-command
-
----
-
-```
+[source,sh]
+----
#!/bin/sh
set -eu
@@ -67,4 +56,4 @@ shift $((OPTIND - 1))
git branch --merged |
grep -v -e '^\*' -e '^. main$' |
xargs git branch -d
-```
+----
diff --git a/_pastebins/2023-07-22-funcallable-amop.md b/src/content/en/pastebin/2023/07/22/funcallable-amop.adoc
index 37c79fe..47a8089 100644
--- a/_pastebins/2023-07-22-funcallable-amop.md
+++ b/src/content/en/pastebin/2023/07/22/funcallable-amop.adoc
@@ -1,22 +1,10 @@
----
-
-title: Funcallable AMOP
-
-date: 2023-07-22
-
-layout: post
-
-lang: en
-
-ref: funcallable-amop
-
----
-
+= Funcallable AMOP
Using `macrolet` to allow a `funcallable-standard-class` to be invoked without
using `funcall` directly, and let the macroexpansion do that instead:
-```
+[source,lisp]
+----
#!/usr/bin/env li
(asdf:load-system :closer-mop)
@@ -40,4 +28,4 @@ using `funcall` directly, and let the macroexpansion do that instead:
`(funcall c ,@body)))
(funcall c 2)
(c 3)))
-```
+----
diff --git a/src/content/en/pastebin/categories.adoc b/src/content/en/pastebin/categories.adoc
new file mode 100644
index 0000000..feb64ff
--- /dev/null
+++ b/src/content/en/pastebin/categories.adoc
@@ -0,0 +1,2 @@
+= Articles by category
+:type: categories
diff --git a/src/content/en/pastebin/index.adoc b/src/content/en/pastebin/index.adoc
new file mode 100644
index 0000000..433a2c1
--- /dev/null
+++ b/src/content/en/pastebin/index.adoc
@@ -0,0 +1 @@
+= Pastebins
diff --git a/_podcasts/2020-12-19-a-test-entry.md b/src/content/en/podcast/2020/12/19/test-entry.adoc
index a9235eb..9e7eb00 100644
--- a/_podcasts/2020-12-19-a-test-entry.md
+++ b/src/content/en/podcast/2020/12/19/test-entry.adoc
@@ -1,32 +1,18 @@
----
-
-title: A test entry
-
-date: 2020-12-19
-
-layout: post
-
-lang: en
-
-ref: a-test-entry
-
-slug: a-test-entry
+= A test entry
+:categories: test
audio: true
----
-
After.
-A link to [home][home].
+A link to https://euandre.org[home].
Another link to home: https://euandre.org
-[home]: https://euandre.org
-
A code block:
-```shell
+[source,sh]
+----
$ l
total 372K
drwxr-xr-x 23 andreh users 4,0K déc. 19 10:44 ./
@@ -113,4 +99,4 @@ drwxr-xr-x 2 andreh users 4,0K déc. 19 09:04 static/
-rw-r--r-- 1 andreh users 265 nov. 15 20:01 til.md
drwxr-xr-x 2 andreh users 4,0K déc. 16 04:03 _tils/
drwxr-xr-x 3 andreh users 4,0K oct. 10 09:20 vendor/
-```
+----
diff --git a/resources/podcasts/2020-12-19-a-test-entry.flac b/src/content/en/podcast/2020/12/19/test-entry.flac
index 786ab59..786ab59 100644
--- a/resources/podcasts/2020-12-19-a-test-entry.flac
+++ b/src/content/en/podcast/2020/12/19/test-entry.flac
Binary files differ
diff --git a/src/content/en/podcast/2020/12/19/test-entry.ogg b/src/content/en/podcast/2020/12/19/test-entry.ogg
new file mode 100644
index 0000000..9bf5500
--- /dev/null
+++ b/src/content/en/podcast/2020/12/19/test-entry.ogg
Binary files differ
diff --git a/src/content/en/podcast/categories.adoc b/src/content/en/podcast/categories.adoc
new file mode 100644
index 0000000..feb64ff
--- /dev/null
+++ b/src/content/en/podcast/categories.adoc
@@ -0,0 +1,2 @@
+= Articles by category
+:type: categories
diff --git a/src/content/en/podcast/index.adoc b/src/content/en/podcast/index.adoc
new file mode 100644
index 0000000..1f17da7
--- /dev/null
+++ b/src/content/en/podcast/index.adoc
@@ -0,0 +1 @@
+= Podcasts
diff --git a/src/content/en/screencast/2021/02/07/autoqemu.adoc b/src/content/en/screencast/2021/02/07/autoqemu.adoc
new file mode 100644
index 0000000..b20b092
--- /dev/null
+++ b/src/content/en/screencast/2021/02/07/autoqemu.adoc
@@ -0,0 +1,53 @@
+= AutoQEMU - automate installation and SSH setup of ISO OS images
+:updatedat: 2022-03-06
+:categories: first
+
+:empty:
+:begriffs-tips: https://begriffs.com/posts/2020-08-31-portable-stable-software.html
+:public-thread: https://talk.begriffs.com/pipermail/friends/2021-February/001263.html
+:archived: https://euandre.org/static/attachments/autoqemu.tar.gz
+:expect: https://core.tcl-lang.org/expect/index
+:script: https://www.man7.org/linux/man-pages/man1/script.1.html
+:klaatu: https://gnuworldorder.info/
+
+video: true
+
+After reading begriffs "{begriffs-tips}[Tips for stable and portable software]",
+the "Begriffs Buildfarm?" section caught my attention, as this is something I
+would be interested in.
+
+After emailing the author, a {public-thread}[public thread] began on the
+subject.
+
+As we discussed how it could be done, I decided to experiment with the idea of
+automating the setup of virtual environments with QEMU.
+
+This screencast is a simple demo of automating the installation of Alpine Linux
+3.12.3 standard x86_64 with
+AutoQEMU{empty}footnote:autoqemu[
+ The solution was a little too brittle to scale, and some distributions proved
+ to be particularly problematic. I've {archived}[archived] my progress if
+ you're interested in what I've done, and maybe wish to continue.
+], which is nothing more than POSIX sh, {expect}[expect] scripts and Makefiles
+glued together.
+
+As of this writing, I just worked on it for 2~3 days, so everything is still
+pretty ad-hoc.
+
+The commands from the screencast
+were{empty}footnote:script-command[
+ Only now, writing again what I ran on the screencast I thought that I should
+ have tried something like {script}[script]. Maybe next time (thanks
+ {klaatu}[klaatu] for the tip!).
+]:
+
+[source,sh]
+----
+pushd `mktemp -d`
+git clone https://euandre.org/git/autoqemu .
+make
+make install PREFIX=$HOME/.local
+autoqemu ssh alpine
+----
+
+It assumes that `$HOME/.local/bin` is in `$PATH`.
diff --git a/src/content/en/screencast/2021/02/07/autoqemu.tar.gz b/src/content/en/screencast/2021/02/07/autoqemu.tar.gz
new file mode 100644
index 0000000..c2fa042
--- /dev/null
+++ b/src/content/en/screencast/2021/02/07/autoqemu.tar.gz
Binary files differ
diff --git a/resources/screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.webm b/src/content/en/screencast/2021/02/07/autoqemu.webm
index f553efb..f553efb 100644
--- a/resources/screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.webm
+++ b/src/content/en/screencast/2021/02/07/autoqemu.webm
Binary files differ
diff --git a/src/content/en/screencast/categories.adoc b/src/content/en/screencast/categories.adoc
new file mode 100644
index 0000000..feb64ff
--- /dev/null
+++ b/src/content/en/screencast/categories.adoc
@@ -0,0 +1,2 @@
+= Articles by category
+:type: categories
diff --git a/src/content/en/screencast/index.adoc b/src/content/en/screencast/index.adoc
new file mode 100644
index 0000000..083adac
--- /dev/null
+++ b/src/content/en/screencast/index.adoc
@@ -0,0 +1 @@
+= Screencasts
diff --git a/src/content/en/slide/2020/10/19/feature-flags.adoc b/src/content/en/slide/2020/10/19/feature-flags.adoc
new file mode 100644
index 0000000..553cf4c
--- /dev/null
+++ b/src/content/en/slide/2020/10/19/feature-flags.adoc
@@ -0,0 +1,230 @@
+---
+# Rollout, feature flag, experiment, operational toggle
+
+@Different use cases for backend, frontend and mobile
+
+---
+.
+.
+.
+# "Feature flags" tend to come up when talking about continuous deployment
+
+---
+.
+.
+.
+.
+@CI: continuous integration
+.
+@CD: continuous delivery
+.
+@CD: continuous deployment
+
+---
+## Types
+.
+.
+.
+1. rollout
+2. feature flag
+3. experiment
+4. operational toggle
+
+% {favicon.svg}
+---
+## Rollout
+
+# For *rolling out* a new version of software
+
+Short-lived using percentages
+
+% FIXME: links
+- a new deployment of kubernetes
+- new APK released to the Play Store
+
+---
+## Feature flag
+
+# For turning a feature *on* or *off*
+
+Medium-lived using allow list, A/B test, percentage, app version, etc.
+
+- :new-chargeback-flow
+- :new-debit-card-activation-screen
+
+---
+## Experiment
+
+# For analysing behaviour
+
+Medium-lived using allow list and A/B test
+
+- :debit-withdrawal-test
+
+---
+## Operational toggle
+
+# For disabling features in #crash-like situations
+
+Long-lived using percentage
+
+- :bank-barcode-payment
+- :savings-bank-barcode-query-provider
+
+---
+.
+.
+@We now know about the types
+# But they have different relevance for backend, frontend and mobile
+
+---
+## backend
+.
+.
+1. rollout: k8s blue/green, canary and ~common-rollout~ common-xp
+2. feature flag: ~common-rollout~ common-xp and datasets
+3. experiment: common-xp
+4. operational toggle: ~common-rollout~ common-xp
+
+---
+## frontend
+.
+.
+1. rollout: CDN and page refreshes
+2. feature flag: percentages and maybe IPs (no :customer/id on the website)
+3. experiment: via dynamic backend control
+4. operational toggle: via dynamic backend control
+
+---
+## mobile
+.
+.
+1. rollout: app stores
+2. feature flag: via dynamic backend control
+3. experiment: via dynamic backend control
+4. operational toggle: via dynamic backend control
+
+---
+.
+.
+@Key differentiator is
+# how much *control* we have over the environment
+
+---
+## backend
+# full control
+% FIXME: emoji
+% 🎉
+
+---
+## frontend
+# partial control
+We choose when to make a new version available
+
+---
+## mobile
+# very limited control
+- app stores can restrict updates (worse for iOS)
+- customers still have to download new versions
+
+---
+# Costs
+- more complex code
+- compatibility with old app versions
+- nesting is exponential
+
+---
+# Benefits
+- dynamicity
+
+---
+## weighting costs × benefits
+# The less control we have, the more we value dynamicity
+
+---
+## weighting costs × benefits
+.
+.
+.
+- backend: sometimes worth the cost
+- frontend: almost always worth the cost
+- mobile: *always* worth the cost
+
+---
+.
+.
+.
+# Best practices
+
+---
+# dynamic content > feature flag
+Always true for mobile, almost always for frontend
+
+---
+# Use :include-list for named groups
+Always true for backend, frontend and mobile
+
+ {:rules
+ #{{:types :include-list
+ :content {:filename "debit-team-members.txt"}}}}
+
+---
+# Always use :app-version
+Only for mobile
+
+ {:rules
+ #{{:types :app-version
+ :content {:min-version #{{:platform :android
+ :code 1000000}
+ {:platform :ios
+ :code 2000000}}}}}}
+
+---
+# Extend ~common-rollout~ common-xp if required
+
+That's how :include-list, :app-version, etc. were born
+
+---
+# Beware of many nested feature flags
+True for backend, frontend and mobile
+
+---
+# Don't delete app-facing feature flags
+True for mobile
+
+---
+.
+.
+.
+# Include a feature flag on the whiteboarding phase
+
+---
+.
+.
+.
+# Include deleting/retiring the feature flag at the end
+
+---
+# Avoid renaming a feature flag
+Use :app-version with :min-version instead
+
+---
+.
+.
+.
+# And most importantly...
+
+---
+# *Always* rely on a feature flag on the app
+Never do a hotfix, avoid expedited releases at all costs
+
+---
+## References
+.
+% FIXME: links
+1. "Feature Toggles (aka Feature Flags)", by Pete Hodgson
+2. "Continuous integration vs. delivery vs. deployment", by Sten Pittet
+3. Accelerate, by N. Forsgren, J. Humble and G. Kim
+4. these slides: euandre.org/slide/
+5. prose version of this presentation
+6. view source
diff --git a/src/content/en/slide/2020/11/14/local-first-hype.adoc b/src/content/en/slide/2020/11/14/local-first-hype.adoc
new file mode 100644
index 0000000..fd83115
--- /dev/null
+++ b/src/content/en/slide/2020/11/14/local-first-hype.adoc
@@ -0,0 +1,204 @@
+# On "local-first"
+
+@Beyond the CRDT silver bullet
+
+---
+## Part 1
+# Exposition
+
+---
+## Target
+...
+- documents
+- files
+- personal data repositories
+..
+Not: banking services, e-commerce, social networking, ride-sharing, etc.
+
+---
+...
+# 7 ideals for local-first software
+
+---
+...
+# 1 - no spinners: your work at your fingertips
+
+---
+...
+# 2 - your work is not trapped on one device
+
+---
+...
+# 3 - the network is optional
+
+---
+...
+# 4 - seamless collaboration with your colleagues
+
+---
+...
+# 5 - the long now
+
+---
+...
+# 6 - security and privacy by default
+
+---
+...
+# 7 - you retain ultimate ownership and control
+
+---
+## Towards a better future
+....
+@ CRDTs (Conflict-free Replicated Data Types) as a Foundational Technology
+
+---
+## Use case
+.
+ # in node A and node B
+ s = "Hello, World"
+
+ # in node A
+ s = "Hello, Alice"
+
+ # in node B
+ s = "Hello, Bob"
+
+How to reconcile those?
+- Hello, ABloibce
+- Hello, AliceBob
+- Hello, BobAlice
+- Hello, Alice
+\...
+
+---
+# Existing CRDTs differ
+- performance
+- storage
+- compression
+- metadata overhead
+
+---
+...
+# Hint towards the "automerge" CRDT
+
+---
+......
+@@show comparison table, page 9
+
+---
+## Part 2
+# Critique
+
+---
+## Software license
+...
+@@"In our opinion, maintaining control and ownership of data does not mean that the software must necessarily be open source."
+
+---
+## Example 1 - intentional restriction
+.
+ #!/bin/sh
+ TODAY="$(date +%s)"
+ LICENSE_EXPIRATION="$(date -d 2020-10-27 +%s)"
+ if [ "$TODAY" -ge "$LICENSE_EXPIRATION" ]; then
+ echo 'License expired!'
+ exit 1
+ fi
+ echo $((2 + 2))
+.
+ # today
+ $ ./useful-adder.sh
+ 4
+
+ # tomorrow
+ $ ./useful-adder.sh
+ License expired!
+---
+## Example 2 - unintentional restriction
+.
+ # today
+ $ useful-program
+ # ... useful output ...
+
+ # tomorrow, with more data
+ $ useful-program
+ ERROR: Panic! Stack overflow!
+
+---
+..
+# local-first *requires* free software
+
+Otherwise "The Long Now" (ideal nº5) is lost
+
+---
+## Denial of existing solutions
+..
+@@"In principle it is possible to collaborate without a repository service, e.g. by sending patch files by email, but the majority of Git users rely on GitHub."
+.
+Solution: either GitHub+CRDTs or git send-email
+
+---
+## Plain-text formats
+.
+@@"Git is highly optimized for code and similar line-based text file"
+.
+It even pulls software to the plain-text direction, e.g.:
+- delivery-templates
+- common-core.protocols.config
+.
+Why not exploit that more?
+
+---
+## Ditching of web applications
+..
+@@"The architecture of web apps remains fundamentally server-centric"
+.
+% FIXME: links
+Disagree. Contrast PouchDB with Android Instant Apps
+
+---
+## Costs are underrated
+.
+- storage
+- backups
+- maintenance
+.
+Example: blog vs vlog
+
+---
+## Real-time collaboration a bit overrated
+.
+It is only possible in the presence of a reliable, medium-quality network connection
+..
+@@"X also works when inside an elevator, subway or plane!"
+
+---
+## On CRDTs and developer experience
+.
+@@"For an app developer, how does the use of a CRDT-based data layer compare to existing storage layers like a SQL database, a filesystem, or CoreData? Is a distributed system harder to write software for?"
+.
+@YES.
+.
+% FIXME: link
+See "A Note on Distributed Computing"
+
+---
+## Conclusion
+.
+Why this is a "paper I love": it took offline-first and ran with it.
+.
+But a pinch of CRDT won't make the world local-first.
+.
+The tricky part is the end of the sentence: "in spite of the Cloud".
+
+---
+## References
+.
+% FIXME: links
+1. "Local-First Software: You Own Your Data, in spite of the Cloud", by M. Kleppmann, A. Wiggins, P. Van Hardenberg and M. F. McGranaghan
+2. The Morning Paper article
+3. "A Note on Distributed Computing", by J. Waldo, G. Wyant, A. Wollrath and S. Kendall
+4. these slides: euandre.org/slide/
+5. prose version of this presentation
+6. view source
diff --git a/_tils/2020-08-12-simple-filename-timestamp.md b/src/content/en/til/2020/08/12/filename-timestamp.adoc
index 7495fc9..aa8d63b 100644
--- a/_tils/2020-08-12-simple-filename-timestamp.md
+++ b/src/content/en/til/2020/08/12/filename-timestamp.adoc
@@ -1,44 +1,31 @@
----
-
-title: Simple filename timestamp
-
-date: 2020-08-12
-
-updated_at:
-
-layout: post
-
-lang: en
-
-ref: simple-filename-timestamp
-
-eu_categories: shell
-
----
+= Simple filename timestamp
+:categories: shell
When writing Jekyll posts or creating log files with dates on them, I usually
-struggle with finding a direct way of accomplishing that. There's a simple
+struggle with finding a direct way of accomplishing that. There's a simple
solution: `date -I`.
-```shell
+[source,sh]
+----
./my-program.sh > my-program.$(date -I).log
cp post-template.md _posts/$(date -I)-post-slug.md
-```
+----
-Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to readily
-create a `2020-08-12.md` file.
+Using this built-in GNU/Linux tool allows you to `touch $(date -I).md` to
+readily create a `2020-08-12.md` file.
I always had to read `man date` or search the web over and over, and after doing
this repeatedly it became clear that both `date -I` and `date -Is` (`s` here
stands for seconds) are the thing that I'm looking for 95% of the time:
-```shell
+[source,sh]
+----
# inside my-program.sh
echo "Program started at $(date -Is)"
# output is:
# Program started at 2020-08-12T09:04:58-03:00
-```
+----
Both date formats are hierarchical, having the bigger time intervals to the
-left. This means that you can easily sort them (and even tab-complete them) with
-no extra effort or tool required.
+left. This means that you can easily sort them (and even tab-complete them)
+with no extra effort or tool required.
diff --git a/_tils/2020-08-13-anchor-headers-and-code-lines-in-jekyll.md b/src/content/en/til/2020/08/13/code-jekyll.adoc
index 6566928..6bd90b0 100644
--- a/_tils/2020-08-13-anchor-headers-and-code-lines-in-jekyll.md
+++ b/src/content/en/til/2020/08/13/code-jekyll.adoc
@@ -1,33 +1,35 @@
----
-title: Anchor headers and code lines in Jekyll
-date: 2020-08-13
-layout: post
-lang: en
-ref: anchor-headers-and-code-lines-in-jekyll
----
-The default Jekyll toolbox ([Jekyll][0], [kramdown][1] and [rouge][2]) doesn't
-provide with a configuration option to add anchors to headers and code blocks.
-
-[0]: https://jekyllrb.com/
-[1]: https://kramdown.gettalong.org/
-[2]: http://rouge.jneen.net/
+= Anchor headers and code lines in Jekyll
-The best way I found of doing this is by creating a simple Jekyll plugin, more
-specifically, a [Jekyll hook][3]. These allow you to jump in to the Jekyll build
-and add a processing stage before of after Jekyll performs something.
+:empty:
+:jekyll: https://jekyllrb.com/
+:kramdown: https://kramdown.gettalong.org/
+:rouge: https://rouge.jneen.net/
+:jekyll-hook: https://jekyllrb.com/docs/plugins/hooks/
+
+The default Jekyll toolbox ({jekyll}[Jekyll], {kramdown}[kramdown] and
+{rouge}[rouge]) doesn't provide with a configuration option to add anchors to
+headers and code blocks.
-[3]: https://jekyllrb.com/docs/plugins/hooks/
+The best way I found of doing this is by creating a simple Jekyll plugin, more
+specifically, a {jekyll-hook}[Jekyll hook]. These allow you to jump in to the
+Jekyll build and add a processing stage before of after Jekyll performs
+something.
All you have to do is add the code to `_plugins/my-jekyll-plugin-code.rb`, and
Jekyll knows to pick it up and call your code on the appropriate time.
-## Anchor on headers
+== Anchor on headers
+
+:jemoji: https://github.com/jekyll/jemoji
+:jekyll-mentions: https://github.com/jekyll/jekyll-mentions
+:html-regex: https://stackoverflow.com/questions/1732348/regex-match-open-tags-except-xhtml-self-contained-tags/1732454#1732454
Since I wanted to add anchors to headers in all documents, this Jekyll hook
works on `:documents` after they have been transformed into HTML, the
`:post_render` phase:
-```ruby
+[source,ruby]
+----
Jekyll::Hooks.register :documents, :post_render do |doc|
if doc.output_ext == ".html"
doc.output =
@@ -37,30 +39,30 @@ Jekyll::Hooks.register :documents, :post_render do |doc|
)
end
end
-```
-
-I've derived my implementations from two "official"[^official] hooks,
-[jemoji][4] and [jekyll-mentions][5].
+----
-[4]: https://github.com/jekyll/jemoji
-[5]: https://github.com/jekyll/jekyll-mentions
-[^official]: I don't know how official they are, I just assumed it because they
- live in the same organization inside GitHub that Jekyll does.
+I've derived my implementations from two
+"official"{empty}footnote:official[
+ I don't know how official they are, I just assumed it because they live in the
+ same organization inside GitHub that Jekyll does.
+] hooks, {jemoji}[jemoji] and {jekyll-mentions}[jekyll-mentions].
All I did was to wrap the header tag inside an `<a>`, and set the `href` of that
-`<a>` to the existing id of the header. Before the hook the HTML looks like:
+`<a>` to the existing id of the header. Before the hook the HTML looks like:
-```html
+[source,html]
+----
...some unmodified text...
<h2 id="my-header">
My header
</h2>
...more unmodified text...
-```
+----
And after the hook should turn that into:
-```html
+[source,html]
+----
...some unmodified text...
<a href="#my-header">
<h2 id="my-header">
@@ -68,20 +70,18 @@ And after the hook should turn that into:
</h2>
</a>
...more unmodified text...
-```
+----
The used regexp tries to match only h1-h6 tags, and keep the rest of the HTML
-attributes untouched, since this isn't a general HTML parser, but the generated HTML
-is somewhat under your control. Use at your own risk because
-[you shouldn't parse HTML with regexps][6]. Also I used this strategy in my
-environment, where no other plugins are installed. I haven't considered how this
-approach may conflict with other Jekyll plugins.
-
-[6]: https://stackoverflow.com/questions/1732348/regex-match-open-tags-except-xhtml-self-contained-tags/1732454#1732454
+attributes untouched, since this isn't a general HTML parser, but the generated
+HTML is somewhat under your control. Use at your own risk because
+{html-regex}[you shouldn't parse HTML with regexps]. Also I used this strategy
+in my environment, where no other plugins are installed. I haven't considered
+how this approach may conflict with other Jekyll plugins.
In the new anchor tag you can add your custom CSS class to style it as you wish.
-## Anchor on code blocks
+== Anchor on code blocks
Adding anchors to code blocks needs a little bit of extra work, because line
numbers themselves don't have preexisting ids, so we need to generate them
@@ -90,7 +90,8 @@ without duplications between multiple code blocks in the same page.
Similarly, this Jekyll hook also works on `:documents` in the `:post_render`
phase:
-```ruby
+[source,ruby]
+----
PREFIX = '<pre class="lineno">'
POSTFIX = '</pre>'
Jekyll::Hooks.register :documents, :post_render do |doc|
@@ -111,25 +112,27 @@ Jekyll::Hooks.register :documents, :post_render do |doc|
end
end
end
-```
+----
This solution assumes the default Jekyll toolbox with code line numbers turned
on in `_config.yml`:
-```yaml
+[source,yaml]
+----
kramdown:
syntax_highlighter_opts:
span:
line_numbers: false
block:
line_numbers: true
-```
+----
The anchors go from B1-L1 to BN-LN, using the `code_block_counter` to track
-which code block we're in and don't duplicate anchor ids. Before the hook the
+which code block we're in and don't duplicate anchor ids. Before the hook the
HTML looks like:
-```html
+[source,html]
+----
...some unmodified text...
<pre class="lineno">1
2
@@ -138,11 +141,12 @@ HTML looks like:
5
</pre>
...more unmodified text...
-```
+----
And after the hook should turn that into:
-```html
+[source,html]
+----
...some unmodified text...
<pre class="lineno"><a id="B1-L1" href="#B1-L1">1</a>
<a id="B1-L2" href="#B1-L2">2</a>
@@ -150,6 +154,6 @@ And after the hook should turn that into:
<a id="B1-L4" href="#B1-L4">4</a>
<a id="B1-L5" href="#B1-L5">5</a></pre>
...more unmodified text...
-```
+----
Happy writing :)
diff --git a/_tils/2020-08-14-browse-a-git-repository-at-a-specific-commit.md b/src/content/en/til/2020/08/14/browse-git.adoc
index d06f0c1..6b3ff6d 100644
--- a/_tils/2020-08-14-browse-a-git-repository-at-a-specific-commit.md
+++ b/src/content/en/til/2020/08/14/browse-git.adoc
@@ -1,28 +1,16 @@
----
-
-title: Browse a git repository at a specific commit
-
-date: 2020-08-14
-
-layout: post
-
-lang: en
-
-ref: browse-a-git-repository-at-a-specific-commit
-
-eu_categories: git
-
----
+= Browse a git repository at a specific commit
+:categories: git
I commonly use tools like `git log` together with `git show` when inspecting
past changes in a repository:
-```shell
+[source,sh]
+----
git log
# search for a the commit I'm looking for
git show <my-commit>
# see the diff for the commit
-```
+----
But I also wanted to not only be able to look at the diff of a specific commit,
but to browse the whole repository at that specific commit.
@@ -30,55 +18,59 @@ but to browse the whole repository at that specific commit.
I used to accomplish it the "brute force" way: clone the whole repository in
another folder and checkout the commit there:
-```shell
+[source,sh]
+----
git clone <original-repo> /tmp/tmp-repo-clone
cd /tmp-repo-clone
git checkout <my-commit>
-```
+----
But git itself allows we to specific the directory of the checkout by using the
-`--work-tree` global git flag. This is what `man git` says about it:
+`--work-tree` global git flag. This is what `man git` says about it:
-```txt
+[source,text]
+----
--work-tree=<path>
Set the path to the working tree. It can be an absolute path or a path relative to the current working
directory. This can also be controlled by setting the GIT_WORK_TREE environment variable and the
core.worktree configuration variable (see core.worktree in git-config(1) for a more detailed
discussion).
-```
+----
-So it allows us to set the desired path of the working tree. So if we want to
+So it allows us to set the desired path of the working tree. So if we want to
copy the contents of the current working tree into `copy/`:
-```shell
+[source,sh]
+----
mkdir copy
git --work-tree=copy/ checkout .
-```
+----
-After that `copy/` will contain a replica of the code in HEAD. But to checkout a
-specific, we need some extra parameters:
+After that `copy/` will contain a replica of the code in HEAD. But to checkout
+a specific commit, we need some extra parameters:
-```shell
+[source,sh]
+----
git --work-tree=<dir> checkout <my-commit> -- .
-```
+----
There's an extra `-- .` at the end, which initially looks like we're sending
-Morse signals to git, but we're actually saying to `git-checkout` which
-sub directory of `<my-commit>` we want to look at. Which means we can do
-something like:
+Morse signals to git, but we're actually saying to `git-checkout` which sub
+directory of `<my-commit>` we want to look at. Which means we can do something
+like:
-```shell
+[source,sh]
+----
git --work-tree=<dir> checkout <my-commit> -- src/
-```
+----
And with that `<dir>` will only contain what was inside `src/` at `<commit>`.
After any of those checkouts, you have to `git reset .` to reset your current
staging area back to what it was before the checkout.
+== References
-## References
-
-1. [GIT: Checkout to a specific folder][0] (StackOverflow)
+:so-link: https://stackoverflow.com/a/16493707
-[0]: https://stackoverflow.com/a/16493707
+. {so-link}[GIT: Checkout to a specific folder] (StackOverflow)
diff --git a/_tils/2020-08-16-search-in-git.md b/src/content/en/til/2020/08/16/git-search.adoc
index f3ae6f0..4113f3f 100644
--- a/_tils/2020-08-16-search-in-git.md
+++ b/src/content/en/til/2020/08/16/git-search.adoc
@@ -1,59 +1,49 @@
----
-
-title: Search in git
-
-date: 2020-08-16
-
-layout: post
-
-lang: en
-
-ref: search-in-git
-
-eu_categories: git
-
----
+= Search in git
+:categories: git
Here's a useful trio to know about to help you search things in git:
-1. `git show <commit>`
-2. `git log --grep='<regexp>'`
-3. `git grep '<regexp>' [commit]`
+. `git show <commit>`
+. `git log --grep='<regexp>'`
+. `git grep '<regexp>' [commit]`
-## 1. `git show <commit>`
+== 1. `git show <commit>`
Show a specific commit and it's diff:
-```shell
+[source,sh]
+----
git show
# shows the latest commit
git show <commit>
# shows an specific <commit>
git show v1.2
# shows commit tagged with v1.2
-```
+----
-## 2. `git log --grep='<regexp>'`
+== 2. `git log --grep='<regexp>'`
Search through the commit messages:
-```shell
+[source,sh]
+----
git log --grep='refactor'
-```
+----
-## 3. `git grep '<regexp>' [commit]`
+== 3. `git grep '<regexp>' [commit]`
+
+:browse-article: link:../14/browse-git.html
Search content in git history:
-```shell
+[source,sh]
+----
git grep 'TODO'
# search the repository for the "TODO" string
git grep 'TODO' $(git rev-list --all)
# search the whole history for "TODO" string
-```
+----
And if you find an occurrence of the regexp in a specific commit and you want to
-browse the repository in that point in time, you can
-[use git checkout for that][0].
-
-[0]: {% link _tils/2020-08-14-browse-a-git-repository-at-a-specific-commit.md %}
+browse the repository in that point in time, you can {browse-article}[use git
+checkout for that].
diff --git a/_tils/2020-08-28-grep-online-repositories.md b/src/content/en/til/2020/08/28/grep-online.adoc
index 8b3b63f..77363ab 100644
--- a/_tils/2020-08-28-grep-online-repositories.md
+++ b/src/content/en/til/2020/08/28/grep-online.adoc
@@ -1,31 +1,20 @@
----
+= Grep online repositories
+:categories: git
-title: Grep online repositories
-
-date: 2020-08-28
-
-layout: post
-
-lang: en
-
-ref: grep-online-repositories
-
-eu_categories: git
-
----
+:cgit: https://git.zx2c4.com/cgit/
I often find interesting source code repositories online that I want to grep for
some pattern but I can't, because either:
-- the repository is on [cgit][cgit] or a similar code repository that doesn't
- allow search in files, or;
-- the search function is really bad, and doesn't allow me to use regular expressions for searching patterns in the code.
-
-[cgit]: https://git.zx2c4.com/cgit/
+* the repository is on {cgit}[cgit] or a similar code repository that doesn't
+ allow search in files, or;
+* the search function is really bad, and doesn't allow me to use regular
+ expressions for searching patterns in the code.
Here's a simple script that allows you to overcome that problem easily:
-```shell
+[source,sh]
+----
#!/usr/bin/env bash
set -eu
@@ -66,7 +55,7 @@ pushd "/tmp/git-search/${DIRNAME}"
shift 3 || shift 2 # when "--" is missing
git grep "${REGEX_PATTERN}" "${@}"
-```
+----
It is a wrapper around `git grep` that downloads the repository when missing.
Save in a file called `git-search`, make the file executable and add it to your
@@ -74,33 +63,35 @@ path.
Overview:
-- *lines 1~2*:
-
- Bash shebang and the `set -eu` options to exit on error or undefined
- variables.
+* _lines 1~2_:
++
+Bash shebang and the `set -eu` options to exit on error or undefined
+variables.
-- *lines 4~30*:
+* _lines 4~30_:
++
+Usage text to be printed when providing less arguments than expected.
- Usage text to be printed when providing less arguments than expected.
+* _line 33_:
++
+Extract the repository name from the URL, removing trailing slashes.
-- *line 33*:
+* _lines 34~37_:
++
+Download the repository when missing and go to the folder.
- Extract the repository name from the URL, removing trailing slashes.
+* _line 39_:
++
+Make the variable `$@` contain the rest of the unused arguments.
-- *lines 34~37*:
-
- Download the repository when missing and go to the folder.
-
-- *line 39*:
-
- Make the variable `$@` contain the rest of the unused arguments.
-
-- *line 40*:
-
- Perform `git grep`, forwarding the remaining arguments from `$@`.
+* _line 40_:
++
+Perform `git grep`, forwarding the remaining arguments from `$@`.
Example output:
-```shell
+
+[source,sh]
+----
$ git search 'make get-git' https://git.zx2c4.com/cgit/
Clonage dans '/tmp/git-search/cgit'...
remote: Enumerating objects: 542, done.
@@ -116,12 +107,15 @@ README: $ make get-git
$ git search 'make get-git' https://git.zx2c4.com/cgit/
/tmp/git-search/cgit ~/dev/libre/songbooks/docs
README: $ make get-git
-```
+----
-Subsequent greps on the same repository are faster because no download is needed.
+Subsequent greps on the same repository are faster because no download is
+needed.
When no argument is provided, it prints the usage text:
-```shell
+
+[source,sh]
+----
$ git search
Missing argument REGEX_PATTERN.
@@ -136,4 +130,4 @@ Examples:
Searching "make get-git" in cgit repository:
git search 'make get-git' https://git.zx2c4.com/cgit/
git search 'make get-git' https://git.zx2c4.com/cgit/ -- $(git rev-list --all)
-```
+----
diff --git a/_tils/2020-09-04-send-emails-using-the-command-line-for-fun-and-profit.md b/src/content/en/til/2020/09/04/cli-email-fun-profit.adoc
index 320f3ab..1da1154 100644
--- a/_tils/2020-09-04-send-emails-using-the-command-line-for-fun-and-profit.md
+++ b/src/content/en/til/2020/09/04/cli-email-fun-profit.adoc
@@ -1,25 +1,22 @@
----
-title: Send emails using the command line for fun and profit!
-date: 2020-09-04
-layout: post
-lang: en
-ref: send-emails-using-the-command-line-for-fun-and-profit
----
-Here are a few reasons why:
+= Send emails using the command line for fun and profit!
-1. send yourself and other people notification of cronjobs, scripts runs, CI
- jobs, *etc.*
+:ssmtp: https://wiki.archlinux.org/index.php/SSMTP
+:mailutils: https://mailutils.org/
-2. leverage the POSIX pipe `|`, and pipe emails away!
+Here are a few reasons why:
-3. because you can.
+. send yourself and other people notification of cronjobs, scripts runs, CI
+ jobs, _etc._
+. leverage the POSIX pipe `|`, and pipe emails away!
+. because you can.
Reason 3 is the fun part, reasons 1 and 2 are the profit part.
-First [install and configure SSMTP][ssmtp] for using, say, Gmail as the email
+First {ssmtp}[install and configure SSMTP] for using, say, Gmail as the email
server:
-```shell
+[source,sh]
+----
# file /etc/ssmtp/ssmtp.conf
FromLineOverride=YES
MailHub=smtp.gmail.com:587
@@ -29,19 +26,21 @@ rewriteDomain=gmail.com
root=username@gmail.com
AuthUser=username
AuthPass=password
-```
+----
-Now install [GNU Mailutils][gnu-mailutils] (`sudo apt-get install mailutils` or the
+Now install {mailutils}[GNU Mailutils] (`sudo apt-get install mailutils` or the
equivalent on your OS), and send yourself your first email:
-```shell
+[source,sh]
+----
echo body | mail -aFrom:email@example.com email@example.com -s subject
-```
+----
-And that's about it, you've got mail. Here are some more places where it might
+And that's about it, you've got mail. Here are some more places where it might
be applicable:
-```shell
+[source,sh]
+----
# report a backup cronjob, attaching logs
set -e
@@ -61,20 +60,17 @@ finish() {
trap finish EXIT
do-long-backup-cmd-here
-```
+----
-```
+[source,sh]
+----
# share the output of a cmd with someone
some-program | mail someone@example.com -s "The weird logs that I was talking about"
-```
+----
...and so on.
You may consider adding a `alias mail='mail -aFrom:email@example.com'` so you
-don't keep re-entering the "From: " part.
+don't keep re-entering the ``From:'' part.
Send yourself some emails to see it working!
-
-[ssmtp]: https://wiki.archlinux.org/index.php/SSMTP
-[gnu-mailutils]: https://mailutils.org/
-[forwarding-wiki-section]: https://wiki.archlinux.org/index.php/SSMTP#Forward_to_a_Gmail_mail_server
diff --git a/_tils/2020-09-05-pull-requests-with-git-the-old-school-way.md b/src/content/en/til/2020/09/05/oldschool-pr.adoc
index 5b4e445..392ec67 100644
--- a/_tils/2020-09-05-pull-requests-with-git-the-old-school-way.md
+++ b/src/content/en/til/2020/09/05/oldschool-pr.adoc
@@ -1,43 +1,33 @@
----
+= Pull requests with Git, the old school way
+:categories: git
-title: Pull requests with Git, the old school way
+:empty:
+:cgit: https://git.zx2c4.com/cgit/
-date: 2020-09-05
-
-layout: post
-
-lang: en
-
-ref: pull-requests-with-git-the-old-school-way
-
-eu_categories: git
-
----
It might be news to you, as it was to me, that "pull requests" that you can
-create on a Git hosting provider's web UI[^pr-webui] like
-GitLab/Bitbucket/GitHub actually comes from Git itself: `git request-pull`.
-
-[^pr-webui]: And maybe even using the Git hosting provider's API from the
- command line!
+create on a Git hosting provider’s web
+UI{empty}footnote:pr-webui[
+ And maybe even using the Git hosting provider’s API from the command line!
+] like GitLab/Bitbucket/GitHub actually comes from Git itself:
+`git request-pull`.
At the very core, they accomplish the same thing: both the original and the web
UI ones are ways for you to request the project maintainers to pull in your
-changes from your fork. It's like saying: "hi there, I did some changes on my
+changes from your fork. It’s like saying: "hi there, I did some changes on my
clone of the repository, what do you think about bringing those in?".
-The only difference is that you're working with only Git itself, so you're not
+The only difference is that you’re working with only Git itself, so you’re not
tied to any Git hosting provider: you can send pull requests across them
-transparently! You could even use your own [cgit][cgit] installation. No need to
-be locked in by any of them, putting the "D" back in "DVCS": it's a
-**distributed** version control system.
-
-[cgit]: https://git.zx2c4.com/cgit/
+transparently! You could even use your own {cgit}[cgit] installation. No need
+to be locked in by any of them, putting the "D" back in "DVCS": it’s a
+*distributed* version control system.
-## `git request-pull` introduction
+== `git request-pull` introduction
-Here's the raw output of a `git request-pull`:
+Here’s the raw output of a `git request-pull`:
-```shell
+[source,sh]
+----
$ git request-pull HEAD public-origin
The following changes since commit 302c9f2f035c0360acd4e13142428c100a10d43f:
@@ -57,35 +47,39 @@ EuAndreh (1):
_posts/2020-08-31-the-database-i-wish-i-had.md | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
-```
+----
That very first line is saying: "create me a pull request with only a single
commit, defined by `HEAD`, and use the URL defined by `public-origin`".
-Here's a pitfall: you may try using your `origin` remote at first where I put
+Here’s a pitfall: you may try using your `origin` remote at first where I put
`public-origin`, but that is many times pointing to something like
`git@example.com`, or `git.example.com:repo.git` (check that with
-`git remote -v | grep origin`). On both cases those are addresses available for
+`git remote -v | grep origin`). On both cases those are addresses available for
interaction via SSH, and it would be better if your pull requests used an
address ready for public consumption.
A simple solution for that is for you to add the `public-origin` alias as the
HTTPS alternative to the SSH version:
-```shell
+[source,sh]
+----
$ git remote add public-origin https://example.com/user/repo
-```
+----
Every Git hosting provider exposes repositories via HTTPS.
Experiment it yourself, and get acquainted with the CLI.
-## Delivering decentralized pull requests
+== Delivering decentralized pull requests
+
+:cli-email: link:../04/cli-email-fun-profit.html
Now that you can create the content of a pull request, you can just
-[deliver it][cli-email] to the interested parties email:
+{cli-email}[deliver it] to the interested parties email:
-```shell
+[source,sh]
+----
# send a PR with your last commit to the author's email
git request-pull HEAD public-origin | mail author@example.com -s "PR: Add thing to repo"
@@ -97,14 +91,14 @@ git request-pull -p HEAD~5 public-origin | \
# send every commit that is new in "other-branch"
git request-pull master public-origin other-branch | \
mail list@example.com -s 'PR: All commits from my "other-brach"'
-```
+----
-[cli-email]: {% link _tils/2020-09-04-send-emails-using-the-command-line-for-fun-and-profit.md %}
+== Conclusion
-## Conclusion
+:distgit: https://drewdevault.com/2018/07/23/Git-is-already-distributed.html
-In practice, I've never used or seen anyone use pull requests this way:
-everybody is just [sending patches via email][decentralized-git].
+In practice, I’ve never used or seen anyone use pull requests this way:
+everybody is just {distgit}[sending patches via email].
If you stop to think about this model, the problem of "Git hosting providers
becoming too centralized" is a non-issue, and "Git federation" proposals are a
@@ -114,5 +108,3 @@ Using Git this way is not scary or so weird as the first impression may suggest.
It is actually how Git was designed to be used.
Check `git help request-pull` for more info.
-
-[decentralized-git]: https://drewdevault.com/2018/07/23/Git-is-already-distributed.html
diff --git a/src/content/en/til/2020/10/11/search-git-history.adoc b/src/content/en/til/2020/10/11/search-git-history.adoc
new file mode 100644
index 0000000..696368c
--- /dev/null
+++ b/src/content/en/til/2020/10/11/search-git-history.adoc
@@ -0,0 +1,29 @@
+= Search changes to a filename pattern in Git history
+:categories: git
+
+:yet: link:../../08/14/browse-git.html
+:another: link:../../08/16/git-search.html
+:entry: link:../../08/28/grep-online.html
+
+This is {yet}[yet] {another}[another] {entry}["search in Git"] TIL entry. You
+could say that Git has an unintuitive CLI, or that it is very powerful.
+
+I wanted to search for an old file that I knew was in the history of the
+repository, but was deleted some time ago. So I didn't really remember the
+name, only bits of it.
+
+I immediately went to the list of TILs I had written on searching in Git, but it
+wasn't readily obvious how to do it, so here it goes:
+
+[source,sh]
+----
+git log -- *pattern*
+----
+
+You could add globs before the pattern to match things on any directory, and add
+our `-p` friend to promptly see the diffs:
+
+[source,sh]
+----
+git log -p -- **/*pattern*
+----
diff --git a/src/content/en/til/2020/11/08/find-broken-symlink.adoc b/src/content/en/til/2020/11/08/find-broken-symlink.adoc
new file mode 100644
index 0000000..624d24a
--- /dev/null
+++ b/src/content/en/til/2020/11/08/find-broken-symlink.adoc
@@ -0,0 +1,25 @@
+= Find broken symlinks with "find"
+:categories: shell
+
+:annex: https://git-annex.branchable.com/
+:annex-wanted: https://git-annex.branchable.com/git-annex-wanted/
+
+The `find` command knows how to show broken symlinks:
+
+[source,sh]
+----
+find . -xtype l
+----
+
+This was useful to me when combined with {annex}[Git Annex]. Its
+{annex-wanted}[`wanted`] option allows you to have a "sparse" checkout of the
+content, and save space by not having to copy every annexed file locally:
+
+[source,sh]
+----
+git annex wanted . 'exclude=Music/* and exclude=Videos/*'
+----
+
+You can `find` any broken symlinks outside those directories by querying with
+Git Annex itself, but `find . -xtype l` works on other places too, where broken
+symlinks might be a problem.
diff --git a/_tils/2020-11-12-diy-bare-bones-ci-server-with-bash-and-nix.md b/src/content/en/til/2020/11/12/diy-nix-bash-ci.adoc
index 3336482..97ace30 100644
--- a/_tils/2020-11-12-diy-bare-bones-ci-server-with-bash-and-nix.md
+++ b/src/content/en/til/2020/11/12/diy-nix-bash-ci.adoc
@@ -1,27 +1,19 @@
----
+= DIY bare bones CI server with Bash and Nix
+:categories: ci
+:sort: 2
-title: DIY bare bones CI server with Bash and Nix
-
-date: 2020-11-12 3
-
-layout: post
-
-lang: en
-
-ref: diy-bare-bones-ci-server-with-bash-and-nix
-
-eu_categories: ci
-
----
+:post-receive: https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
+:example-project: https://euandreh.xyz/remembering/ci.html
With a server with Nix installed (no need for NixOS), you can leverage its build
-isolation for running CI jobs by adding a [post-receive][post-receive] Git hook
+isolation for running CI jobs by adding a {post-receive}[post-receive] Git hook
to the server.
In most of my project I like to keep a `test` attribute which runs the test with
-`nix-build -A test`. This way, a post-receive hook could look like:
+`nix-build -A test`. This way, a post-receive hook could look like:
-```shell
+[source,sh]
+----
#!/usr/bin/env bash
set -Eeuo pipefail
set -x
@@ -42,33 +34,30 @@ finish() {
trap finish EXIT
nix-build -A test
-```
+----
-We initially (lines #5 to #8) create a log file, named after *when* the run is
-running and for *which* commit it is running for. The `exec` and `tee` combo
-allows the output of the script to go both to `stdout` *and* the log file. This
+We initially (lines #5 to #8) create a log file, named after _when_ the run is
+running and for _which_ commit it is running for. The `exec` and `tee` combo
+allows the output of the script to go both to `stdout` _and_ the log file. This
makes the logs output show up when you do a `git push`.
Lines #10 to #13 create a fresh clone of the repository and line #20 runs the
test command.
After using a similar post-receive hook for a while, I now even generate a
-simple HTML file to make the logs available ([example project][ci-logs])
+simple HTML file to make the logs available ({example-project}[example project])
through the browser.
-[post-receive]: https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
-[ci-logs]: https://euandreh.xyz/remembering/ci.html
-
-## Upsides
+== Upsides
No vendor lock-in, as all you need is a server with Nix installed.
And if you pin the Nixpkgs version you're using, this very simple setup yields
extremely sandboxed runs on a very hermetic environment.
-## Downsides
+== Downsides
Besides the many missing shiny features of this very simplistic CI, `nix-build`
-can be very resource intensive. Specifically, it consumes too much memory. So if
-it has to download too many things, or the build closure gets too big, the
+can be very resource intensive. Specifically, it consumes too much memory. So
+if it has to download too many things, or the build closure gets too big, the
server might very well run out of memory.
diff --git a/_tils/2020-11-12-git-bisect-automation.md b/src/content/en/til/2020/11/12/git-bisect-automation.adoc
index 9c34b2a..dff8737 100644
--- a/_tils/2020-11-12-git-bisect-automation.md
+++ b/src/content/en/til/2020/11/12/git-bisect-automation.adoc
@@ -1,18 +1,6 @@
----
-
-title: Git bisect automation
-
-date: 2020-11-12 2
-
-layout: post
-
-lang: en
-
-ref: git-bisect-automation
-
-eu_categories: git
-
----
+= Git bisect automation
+:categories: git
+:sort: 1
It is good to have an standardized way to run builds and tests on the repository
of a project, so that you can find when a bug was introduced by using
@@ -22,14 +10,16 @@ I've already been in the situation when a bug was introduced and I didn't know
how it even was occurring, and running Git bisect over hundreds of commits to
pinpoint the failing commit was very empowering:
-```
+[source,sh]
+----
$ GOOD_COMMIT_SHA=e1fd0a817d192c5a5df72dd7422e36558fa78e46
$ git bisect start HEAD $GOOD_COMMIT_SHA
$ git bisect run sn -c './build.sh && ./run-failing-case.sh'
-```
+----
Git will than do a binary search between the commits, and run the commands you
provide it with to find the failing commit.
Instead of being afraid of doing a bisect, you should instead leverage it, and
-make Git help you dig through the history of the repository to find the bad code.
+make Git help you dig through the history of the repository to find the bad
+code.
diff --git a/src/content/en/til/2020/11/12/useful-bashvars.adoc b/src/content/en/til/2020/11/12/useful-bashvars.adoc
new file mode 100644
index 0000000..fb148fb
--- /dev/null
+++ b/src/content/en/til/2020/11/12/useful-bashvars.adoc
@@ -0,0 +1,61 @@
+= Useful Bash variables
+:categories: shell
+
+:bash: https://www.gnu.org/software/bash/
+:bash-bang-bang: https://www.gnu.org/software/bash/manual/bash.html#Event-Designators
+:bash-dollar-underscore: https://www.gnu.org/software/bash/manual/bash.html#Special-Parameters
+
+{bash}[GNU Bash] has a few two letter variables that may be useful when typing
+on the terminal.
+
+== `!!`: the text of the last command
+
+The {bash-bang-bang}[`!!` variable] refers to the previous command, and I find
+it useful when following chains of symlinks:
+
+[source,sh]
+----
+$ which git
+/run/current-system/sw/bin/git
+$ readlink $(!!)
+readlink $(which git)
+/nix/store/5bgr1xpm4m0r72h9049jbbhagxdyrnyb-git-2.28.0/bin/git
+----
+
+It is also useful when you forget to prefix `sudo` to a command that requires
+it:
+
+[source,sh]
+----
+$ requires-sudo.sh
+requires-sudo.sh: Permission denied
+$ sudo !!
+sudo ./requires-sudo.sh
+# all good
+----
+
+Bash prints the command expansion before executing it, so it is easier for you
+to follow along with what it is doing.
+
+== `$_`: most recent parameter
+
+The {bash-dollar-underscore}[`$_` variable] will give you the most recent
+parameter you provided to the previous command, which can save you typing
+sometimes:
+
+[source,sh]
+----
+# instead of...
+$ mkdir -p a/b/c/d/
+$ cd a/b/c/d/
+
+# ...you can:
+$ mkdir -p a/b/c/d/
+$ cd $_
+----
+
+== Conclusion
+
+I wouldn't use those in a script, as it would make the script harder to read;
+I find them useful shortcuts that are handy when writing at the interactive
+terminal.
diff --git a/src/content/en/til/2020/11/14/gpodder-media.adoc b/src/content/en/til/2020/11/14/gpodder-media.adoc
new file mode 100644
index 0000000..f722f35
--- /dev/null
+++ b/src/content/en/til/2020/11/14/gpodder-media.adoc
@@ -0,0 +1,21 @@
+= gPodder as a media subscription manager
+
+:re-discover: https://www.charlieharrington.com/unexpected-useless-and-urgent
+:gpodder: https://gpodder.github.io/
+
+As we {re-discover}[re-discover] the value of Atom/RSS feeds, most useful feed
+clients I know of don't support media, specifically audio and video.
+
+{gpodder}[gPodder] does.
+
+It is mostly known as a desktop podcatcher. But the thing about podcasts is that
+the feed is provided through an RSS/Atom feed. So you can just use gPodder as
+your media feed client, where you have control of what you look at.
+
+All audio and video providers I know of offer an RSS/Atom view of their content,
+so you can, say, treat any YouTube channel like a feed on its own.
+
+gPodder will then manage your feeds, watched/unwatched, queue downloads, etc.
+
+It seems obvious now, but it was a big finding for me. If it got you interested, I
+recommend you giving gPodder a try.
diff --git a/_tils/2020-11-30-storing-ci-data-on-git-notes.md b/src/content/en/til/2020/11/30/git-notes-ci.adoc
index f8dd063..48a996b 100644
--- a/_tils/2020-11-30-storing-ci-data-on-git-notes.md
+++ b/src/content/en/til/2020/11/30/git-notes-ci.adoc
@@ -1,28 +1,20 @@
----
+= Storing CI data on Git notes
+:categories: git ci
-title: Storing CI data on Git notes
+:git-notes: https://git-scm.com/docs/git-notes
+:nix-bash-ci: link:../12/diy-nix-bash-ci.html
+:cgit: https://git.zx2c4.com/cgit/
-date: 2020-11-30
+Extending the bare bones CI server I've {nix-bash-ci}[talked about before],
+divoplade on Freenode suggested storing CI artifacts on {git-notes}[Git notes],
+such as tarballs, binaries, logs, _etc_.
-layout: post
+I've written a small script that will put log files and CI job data on Git
+notes, and make it visible on the porcelain log. It is a simple extension of
+the previous article:
-lang: en
-
-ref: storing-ci-data-on-git-notes
-
-eu_categories: git,ci
-
----
-
-Extending the bare bones CI server I've [talked about before][previous-article],
-divoplade on Freenode suggested storing CI artifacts on [Git notes][git-notes],
-such as tarballs, binaries, logs, *etc*.
-
-I've written a small script that will put log files and CI job data on Git notes,
-and make it visible on the porcelain log. It is a simple extension of the
-previous article:
-
-```shell
+[source,sh]
+----
#!/usr/bin/env bash
set -Eeuo pipefail
set -x
@@ -63,16 +55,17 @@ git config --global user.name 'EuAndreh CI'
./container make check site
./container make publish
-```
+----
-The important part is in the `finish()` function:
-- #25 stores the exit status and the generated filename separated by spaces;
-- #26 adds the log file in a note using the `refs/notes/ci-logs` ref;
-- #27 it adds a note to the commit saying how to see the logs.
+The important part is in the `finish()` function: - #25 stores the exit status
+and the generated filename separated by spaces; - #26 adds the log file in a
+note using the `refs/notes/ci-logs` ref; - #27 it adds a note to the commit
+saying how to see the logs.
A commit now has an attached note, and shows it whenever you look at it:
-```diff
+[source,diff]
+----
$ git show 87c57133abd8be5d7cc46afbf107f59b26066575
commit 87c57133abd8be5d7cc46afbf107f59b26066575
Author: EuAndreh <eu@euandre.org>
@@ -100,23 +93,20 @@ index d1830ca..a4ccde7 100644
(service dhcp-client-service-type)
#;
(service opensmtpd-service-type
-```
+----
-Other tools such as [cgit][cgit] will also show notes on the web interface:
-<https://euandre.org/git/servers/commit?id=87c57133abd8be5d7cc46afbf107f59b26066575>.
+Other tools such as {cgit}[cgit] will also show notes on the web interface:
+https://euandre.org/git/servers/commit?id=87c57133abd8be5d7cc46afbf107f59b26066575.
You can go even further: since cgit can serve raw blob directly, you can even
serve such artifacts (log files, release artifacts, binaries) from cgit itself:
-```shell
+[source,sh]
+----
$ SHA="$(git notes --ref=refs/notes/ci-logs list 87c57133abd8be5d7cc46afbf107f59b26066575)"
$ echo "https://euandre.org/git/servers/blob?id=$SHA"
https://euandre.org/git/servers/blob?id=1707a97bae24e3864fe7943f8dda6d01c294fb5c
-```
+----
And like that you'll have cgit serving the artifacts for you:
-<https://euandre.org/git/servers/blob?id=1707a97bae24e3864fe7943f8dda6d01c294fb5c>.
-
-[previous-article]: {% link _tils/2020-11-12-diy-bare-bones-ci-server-with-bash-and-nix.md %}
-[git-notes]: https://git-scm.com/docs/git-notes
-[cgit]: https://git.zx2c4.com/cgit/
+https://euandre.org/git/servers/blob?id=1707a97bae24e3864fe7943f8dda6d01c294fb5c.
diff --git a/_tils/2020-12-15-awk-snippet-shellcheck-all-scripts-in-a-repository.md b/src/content/en/til/2020/12/15/shellcheck-repo.adoc
index 71d10a3..387e793 100644
--- a/_tils/2020-12-15-awk-snippet-shellcheck-all-scripts-in-a-repository.md
+++ b/src/content/en/til/2020/12/15/shellcheck-repo.adoc
@@ -1,52 +1,44 @@
----
+= Awk snippet: ShellCheck all scripts in a repository
+:categories: shell
+:updatedat: 2020-12-16
-title: 'Awk snippet: ShellCheck all scripts in a repository'
+:awk-20-min: https://ferd.ca/awk-in-20-minutes.html
+:shellcheck: https://www.shellcheck.net/
-date: 2020-12-15
-
-updated_at: 2020-12-16
-
-layout: post
-
-lang: en
-
-ref: awk-snippet-shellcheck-all-scripts-in-a-repository
-
-eu_categories: shell
-
----
-
-Inspired by Fred Herbert's "[Awk in 20 Minutes][awk-20min]", here's a problem I
+Inspired by Fred Herbert's "{awk-20-min}[Awk in 20 Minutes]", here's a problem I
just solved with a line of Awk: run ShellCheck in all scripts of a repository.
In my repositories I usually have Bash and POSIX scripts, which I want to keep
-tidy with [ShellCheck][shellcheck]. Here's the first version of
+tidy with {shellcheck}[ShellCheck]. Here's the first version of
`assert-shellcheck.sh`:
-```shell
+[source,sh]
+----
#!/bin/sh -eux
find . -type f -name '*.sh' -print0 | xargs -0 shellcheck
-```
+----
This is the type of script that I copy around to all repositories, and I want it
to be capable of working on any repository, without requiring a list of files to
run ShellCheck on.
-This first version worked fine, as all my scripts had the '.sh' ending. But I
+This first version worked fine, as all my scripts had the `.sh` ending. But I
recently added some scripts without any extension, so `assert-shellcheck.sh`
-called for a second version. The first attempt was to try grepping the shebang
+called for a second version. The first attempt was to try grepping the shebang
line:
-```shell
+[source,sh]
+----
$ grep '^#!/' assert-shellcheck.sh
#!/usr/sh
-```
+----
-Good, we have a grep pattern on the first try. Let's try to find all the
+Good, we have a grep pattern on the first try. Let's try to find all the
matching files:
-```shell
+[source,sh]
+----
$ find . -type f | xargs grep -l '^#!/'
./TODOs.org
./.git/hooks/pre-commit.sample
@@ -76,7 +68,7 @@ $ find . -type f | xargs grep -l '^#!/'
./scripts/songbooks.in
./scripts/with-container.sh
./scripts/assert-shellcheck.sh
-```
+----
This approach has a problem, though: it includes files ignored by Git, such as
`build-aux/install-sh~`, and even goes into the `.git/` directory and finds
@@ -84,7 +76,8 @@ sample hooks in `.git/hooks/*`.
To list the files that Git is tracking we'll try `git ls-files`:
-```shell
+[source,sh]
+----
$ git ls-files | xargs grep -l '^#!/'
TODOs.org
bootstrap
@@ -99,23 +92,25 @@ scripts/compile-readme.sh
scripts/generate-tasks-and-bugs.sh
scripts/songbooks.in
scripts/with-container.sh
-```
+----
It looks to be almost there, but the `TODOs.org` entry shows a flaw in it: grep
-is looking for a `'^#!/'` pattern on any part of the file. In my case,
+is looking for a +'^#!/'+ pattern on any part of the file. In my case,
`TODOs.org` had a snippet in the middle of the file where a line started with
-`#!/bin/sh`.
++#!/bin/sh+.
-So what we actually want is to match the **first** line against the pattern. We
+So what we actually want is to match the *first* line against the pattern. We
could loop through each file, get the first line with `head -n 1` and grep
-against that, but this is starting to look messy. I bet there is another way of
+against that, but this is starting to look messy. I bet there is another way of
doing it concisely...
-Let's try Awk. I need a way to select the line numbers to replace `head -n 1`,
-and to stop processing the file if the pattern matches. A quick search points me
-to using `FNR` for the former, and `{ nextline }` for the latter. Let's try it:
+Let's try Awk. I need a way to select the line numbers to replace `head -n 1`,
+and to stop processing the file if the pattern matches. A quick search points
+me to using `FNR` for the former, and `{ nextline }` for the latter. Let's try
+it:
-```shell
+[source,sh]
+----
$ git ls-files | xargs awk 'FNR>1 { nextfile } /^#!\// { print FILENAME; nextfile }'
bootstrap
build-aux/with-guile-env.in
@@ -129,43 +124,42 @@ scripts/compile-readme.sh
scripts/generate-tasks-and-bugs.sh
scripts/songbooks.in
scripts/with-container.sh
-```
+----
-Great! Only `TODOs.org` is missing, but the script is much better: instead of
+Great! Only `TODOs.org` is missing, but the script is much better: instead of
matching against any part of the file that may have a shebang-like line, we only
-look for the first. Let's put it back into the `assert-shellcheck.sh` file and
+look for the first. Let's put it back into the `assert-shellcheck.sh` file and
use `NULL` for separators to accommodate files with spaces in the name:
-```
+[source,sh]
+----
#!/usr/sh -eux
git ls-files -z | \
xargs -0 awk 'FNR>1 { nextfile } /^#!\// { print FILENAME; nextfile }' | \
xargs shellcheck
-```
+----
This is where I've stopped, but I imagine a likely improvement: match against
-only `#!/bin/sh` and `#!/usr/bin/env bash` shebangs (the ones I use most), to
+only +#!/bin/sh+ and +#!/usr/bin/env bash+ shebangs (the ones I use most), to
avoid running ShellCheck on Perl files, or other shebangs.
Also when reviewing the text of this article, I found that `{ nextfile }` is a
-GNU Awk extension. It would be an improvement if `assert-shellcheck.sh` relied
+GNU Awk extension. It would be an improvement if `assert-shellcheck.sh` relied
on the POSIX subset of Awk for working correctly.
-## *Update*
+== _Update_
After publishing, I could remove `{ nextfile }` and even make the script
simpler:
-```shell
+[source,sh]
+----
#!/usr/sh -eux
git ls-files -z | \
xargs -0 awk 'FNR==1 && /^#!\// { print FILENAME }' | \
xargs shellcheck
-```
+----
Now both the shell and Awk usage are POSIX compatible.
-
-[awk-20min]: https://ferd.ca/awk-in-20-minutes.html
-[shellcheck]: https://www.shellcheck.net/
diff --git a/_tils/2020-12-29-svg-favicon.md b/src/content/en/til/2020/12/29/svg.adoc
index 54cca9a..e0e75a5 100644
--- a/_tils/2020-12-29-svg-favicon.md
+++ b/src/content/en/til/2020/12/29/svg.adoc
@@ -1,32 +1,22 @@
----
+= SVG favicon
+:updatedat: 2021-01-12
-title: SVG favicon
-
-date: 2020-12-29
-
-updated_at: 2021-01-12
-
-layout: post
-
-lang: en
-
-ref: svg-favicon
-
----
+:favicon: link:../../../../../img/favicon.svg
I've wanted to change this website's favicon from a plain `.ico` file to a
-proper SVG. The problem I was trying to solve was to reuse the same image on
+proper SVG. The problem I was trying to solve was to reuse the same image on
other places, such as avatars.
Generating a PNG from the existing 16x16 icon was possible but bad: the final
-image was blurry. Converting the `.ico` to an SVG was possible, but sub-optimal:
-tools try to guess some vector paths, and the final SVG didn't match the
-original.
+image was blurry. Converting the `.ico` to an SVG was possible, but
+sub-optimal: tools try to guess some vector paths, and the final SVG didn't
+match the original.
Instead I used a tool to draw the "vector pixels" as black squares, and after
getting the final result I manually cleaned-up the generated XML:
-```xml
+[source,xml]
+----
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16">
<path d="M 0 8 L 1 8 L 1 9 L 0 9 L 0 8 Z" />
@@ -89,27 +79,28 @@ getting the final result I manually cleaned-up the generated XML:
<path d="M 14 11 L 15 11 L 15 12 L 14 12 L 14 11 Z" />
<path d="M 14 12 L 15 12 L 15 13 L 14 13 L 14 12 Z" />
</svg>
-```
+----
-The good thing about this new favicon
-(at [`/static/lord-favicon.svg`](/static/lord-favicon.svg)) is that
-a) it is simple enough that I feel
-comfortable editing it manually and b) it is an SVG, which means I can generate
-any desired size.
+The good thing about this new favicon (at {favicon}[`/static/lord-favicon.svg`])
+is that a) it is simple enough that I feel comfortable editing it manually and
+b) it is an SVG, which means I can generate any desired size.
With the new favicon file, I now had to add to the templates' `<head>` a
`<link>` to this icon:
-```html
+
+[source,html]
+----
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/static/favicon.svg">
...
-```
+----
-Still missing is a bitmap image for places that can't handle vector images. I
+Still missing is a bitmap image for places that can't handle vector images. I
used a Jekyll generator to create a PNG from the existing SVG:
-```ruby
+[source,ruby]
+----
module Jekyll
class FaviconGenerator < Generator
safe true
@@ -127,7 +118,7 @@ module Jekyll
end
end
end
-```
+----
I had to increase the priority of the generator so that it would run before
other places that would use a `{% link /static/lord-favicon.png %}`, otherwise
diff --git a/_tils/2021-01-12-awk-snippet-send-email-to-multiple-recipients-with-curl.md b/src/content/en/til/2021/01/12/curl-awk-emails.adoc
index 880ddf1..d432da2 100644
--- a/_tils/2021-01-12-awk-snippet-send-email-to-multiple-recipients-with-curl.md
+++ b/src/content/en/til/2021/01/12/curl-awk-emails.adoc
@@ -1,28 +1,25 @@
----
+= Awk snippet: send email to multiple recipients with cURL
-title: 'Awk snippet: send email to multiple recipients with cURL'
+:neomutt: https://neomutt.org/
+:found-out-article: https://blog.edmdesigner.com/send-email-from-linux-command-line/
+:curl: https://curl.se/
-date: 2021-01-12
-
-layout: post
-
-lang: en
-
-ref: awk-snippet-send-email-to-multiple-recipients-with-curl
-
----
-
-As I experiment with [Neomutt][neomutt], I wanted to keep being able to enqueue emails for sending later like my previous setup, so that I didn't rely on having an internet connection.
+As I experiment with {neomutt}[Neomutt], I wanted to keep being able to enqueue
+emails for sending later like my previous setup, so that I didn't rely on having
+an internet connection.
My requirements for the `sendmail` command were:
-1. store the email in a file, and send it later.
-1. send from different addresses, using different SMTP servers;
-I couldn't find an MTA that could accomplish that, but I was able to quickly write a solution.
+. store the email in a file, and send it later;
+. send from different addresses, using different SMTP servers.
+
+I couldn't find an MTA that could accomplish that, but I was able to quickly
+write a solution.
The first part was the easiest: store the email in a file:
-```shell
+[source,sh]
+----
# ~/.config/mutt/muttrc:
set sendmail=~/bin/enqueue-email.sh
@@ -30,20 +27,24 @@ set sendmail=~/bin/enqueue-email.sh
#!/bin/sh -eu
cat - > "$HOME/mbsync/my-queued-emails/$(date -Is)"
-```
+----
-Now that I had the email file store locally, I needed a program to send the email from the file, so that I could create a cronjob like:
+Now that I had the email file stored locally, I needed a program to send the
+email from the file, so that I could create a cronjob like:
-```shell
+[source,sh]
+----
for f in ~/mbsync/my-queued-emails/*; do
~/bin/dispatch-email.sh "$f" && rm "$f"
done
-```
+----
-The `dispatch-email.sh` would have to look at the `From: ` header and decide which SMTP server to use.
-As I [found out][curl-email] that [curl][curl] supports SMTP and is able to send emails, this is what I ended up with:
+The `dispatch-email.sh` would have to look at the `From:` header and decide
+which SMTP server to use. As I {found-out-article}[found out] that {curl}[curl]
+supports SMTP and is able to send emails, this is what I ended up with:
-```shell
+[source,sh]
+----
#!/bin/sh -eu
F="$1"
@@ -79,24 +80,30 @@ else
echo 'Bad "From: " address'
exit 1
fi
-```
+----
Most of curl flags used are self-explanatory, except for `$rcpt`.
-curl connects to the SMTP server, but doesn't set the recipient address by looking at the message.
-My solution was to generate the curl flags, store them in `$rcpt` and use it unquoted to leverage shell word splitting.
+curl connects to the SMTP server, but doesn't set the recipient address by
+looking at the message. My solution was to generate the curl flags, store them
+in `$rcpt` and use it unquoted to leverage shell word splitting.
-To me, the most interesting part was building the `$rcpt` flags.
-My first instinct was to try grep, but it couldn't print only matches in a regex.
-As I started to turn towards sed, I envisioned needing something else to loop over the sed output, and I then moved to Awk.
+To me, the most interesting part was building the `$rcpt` flags. My first
+instinct was to try grep, but it couldn't print only matches in a regex. As I
+started to turn towards sed, I envisioned needing something else to loop over
+the sed output, and I then moved to Awk.
-In the short Awk snippet, 3 things were new to me: the `match(...)`, `split(...)` and `for () {}`.
-The only other function I have ever used was `gsub(...)`, but these new ones felt similar enough that I could almost guess their behaviour and arguments.
-`match(...)` stores the matches of a regex on the given array positionally, and `split(...)` stores the chunks in the given array.
+In the short Awk snippet, 3 things were new to me: the `match(...)`,
+`split(...)` and `for () {}`. The only other function I have ever used was
+`gsub(...)`, but these new ones felt similar enough that I could almost guess
+their behaviour and arguments. `match(...)` stores the matches of a regex on
+the given array positionally, and `split(...)` stores the chunks in the given
+array.
I even did it incrementally:
-```shell
+[source,sh]
+----
$ H='To: to@example.com, to2@example.com\nCc: cc@example.com, cc2@example.com\nBcc: bcc@example.com,bcc2@example.com\n'
$ printf "$H" | awk '/^To: .*$/ { print $0 }'
To: to@example.com, to2@example.com
@@ -115,28 +122,27 @@ to@example.com,
$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { split(m[1], tos, " "); print tos[2] }'
to2@example.com
$ printf "$H" | awk 'match($0, /^To: (.*)$/, m) { split(m[1], tos, " "); print tos[3] }'
+----
-```
+(This isn't the verbatim interactive session, but a cleaned version to make it
+more readable.)
-(This isn't the verbatim interactive session, but a cleaned version to make it more readable.)
+At this point, I realized I needed a for loop over the `tos` array, and I moved
+the Awk snippet into the `~/bin/dispatch-email.sh`. I liked the final thing:
-At this point, I realized I needed a for loop over the `tos` array, and I moved the Awk snippet into the `~/bin/dispatch-email.sh`.
-I liked the final thing:
-
-```awk
+[source,awk]
+----
match($0, /^(To|Cc|Bcc): (.*)$/, m) {
split(m[2], tos, ",")
for (i in tos) {
print "--mail-rcpt " tos[i]
}
}
-```
+----
-As I learn more about Awk, I feel that it is too undervalued, as many people turn to Perl or other programming languages when Awk suffices.
-The advantage is pretty clear: writing programs that run on any POSIX system, without extra dependencies required.
+As I learn more about Awk, I feel that it is too undervalued, as many people
+turn to Perl or other programming languages when Awk suffices. The advantage is
+pretty clear: writing programs that run on any POSIX system, without extra
+dependencies required.
Coding to the standards is underrated.
-
-[neomutt]: https://neomutt.org/
-[curl-email]: https://blog.edmdesigner.com/send-email-from-linux-command-line/
-[curl]: https://curl.se/
diff --git a/src/content/en/til/2021/01/17/posix-shebang.adoc b/src/content/en/til/2021/01/17/posix-shebang.adoc
new file mode 100644
index 0000000..5cf0695
--- /dev/null
+++ b/src/content/en/til/2021/01/17/posix-shebang.adoc
@@ -0,0 +1,58 @@
+= POSIX sh and shebangs
+
+:awk-1: link:../../../2020/12/15/shellcheck-repo.html
+:awk-2: link:../12/curl-awk-emails.html
+
+As I {awk-1}[keep moving] {awk-2}[towards POSIX], I'm on the process of
+migrating all my Bash scripts to POSIX sh.
+
+As I dropped `[[`, arrays and other Bashisms, I was left staring at the first
+line of every script, wondering what to do: what is the POSIX sh equivalent of
+`#!/usr/bin/env bash`? I already knew that POSIX says nothing about shebangs,
+and that the portable way to call a POSIX sh script is `sh script.sh`, but
+I didn't know what to do with that first line.
+
+What I had previously was:
+
+[source,sh]
+----
+#!/usr/bin/env bash
+set -Eeuo pipefail
+cd "$(dirname "${BASH_SOURCE[0]}")"
+----
+
+Obviously, the `$BASH_SOURCE` would be gone, and I would have to adapt some of
+my scripts to not rely on the script location. The `-E` and `-o pipefail`
+options were also gone, and would be replaced by nothing.
+
+I converted all of them to:
+
+[source,sh]
+----
+#!/bin/sh -eu
+----
+
+I moved the `-eu` options to the shebang line itself, striving for conciseness.
+But as I changed callers from `./script.sh` to `sh script.sh`, things started to
+fail. Some tests that should fail reported errors, but didn't return 1.
+
+My first reaction was to revert back to `./script.sh`, but the POSIX bug I
+caught is a strong strain, and when I went back to it, I figured that the
+callers were missing some flags. Specifically, `sh -eu script.sh`.
+
+Then it clicked: when running with `sh script.sh`, the shebang line with the sh
+options is ignored, as it is a comment!
+
+Which means that the shebang most friendly with POSIX is:
+
+[source,sh]
+----
+#!/bin/sh
+set -eu
+----
+
+. when running via `./script.sh`, if the system has an executable at `/bin/sh`,
+ it will be used to run the script;
+. when running via `sh script.sh`, the sh options aren't ignored as previously.
+
+TIL.
diff --git a/_tils/2021-04-24-common-lisp-argument-precedence-order-parameterization-of-a-generic-function.md b/src/content/en/til/2021/04/24/cl-generic-precedence.adoc
index 8051232..541afb0 100644
--- a/_tils/2021-04-24-common-lisp-argument-precedence-order-parameterization-of-a-generic-function.md
+++ b/src/content/en/til/2021/04/24/cl-generic-precedence.adoc
@@ -1,20 +1,10 @@
----
+= Common Lisp argument precedence order parameterization of a generic function
-title: Common Lisp argument precedence order parameterization of a generic function
+When CLOS dispatches a method, it picks the most specific method definition to
+the argument list:
-date: 2021-04-24 2
-
-layout: post
-
-lang: en
-
-ref: common-lisp-argument-precedence-order-parameterization-of-a-generic-function
-
----
-
-When CLOS dispatches a method, it picks the most specific method definition to the argument list:
-
-```lisp
+[source,lisp]
+----
* (defgeneric a-fn (x))
#<STANDARD-GENERIC-FUNCTION A-FN (0) {5815ACB9}>
@@ -39,11 +29,13 @@ When CLOS dispatches a method, it picks the most specific method definition to t
* (a-fn 1)
:NUMBER-1
-```
+----
-CLOS uses a similar logic when choosing the method from parent classes, when multiple ones are available:
+CLOS uses a similar logic when choosing the method from parent classes, when
+multiple ones are available:
-```lisp
+[source,lisp]
+----
* (defclass class-a () ())
#<STANDARD-CLASS CLASS-A {583E0B25}>
@@ -63,11 +55,13 @@ CLOS uses a similar logic when choosing the method from parent classes, when mul
; Compiling Top-Level Form:
#<STANDARD-METHOD ANOTHER-FN (CLASS-B) {584B8895}>
-```
+----
-Given the above definitions, when inheriting from `class-a` and `class-b`, the order of inheritance matters:
+Given the above definitions, when inheriting from `class-a` and `class-b`, the
+order of inheritance matters:
-```lisp
+[source,lisp]
+----
* (defclass class-a-coming-first (class-a class-b) ())
#<STANDARD-CLASS CLASS-A-COMING-FIRST {584BE6AD}>
@@ -79,11 +73,14 @@ Given the above definitions, when inheriting from `class-a` and `class-b`, the o
* (another-fn (make-instance 'class-b-coming-first))
:CLASS-B
-```
+----
-Combining the order of inheritance with generic functions with multiple arguments, CLOS has to make a choice of how to pick a method given two competing definitions, and its default strategy is prioritizing from left to right:
+Combining the order of inheritance with generic functions with multiple
+arguments, CLOS has to make a choice of how to pick a method given two competing
+definitions, and its default strategy is prioritizing from left to right:
-```lisp
+[source,lisp]
+----
* (defgeneric yet-another-fn (obj1 obj2))
#<STANDARD-GENERIC-FUNCTION YET-ANOTHER-FN (0) {584D9EC9}>
@@ -95,43 +92,58 @@ Combining the order of inheritance with generic functions with multiple argument
* (yet-another-fn (make-instance 'class-a) (make-instance 'class-b))
:FIRST-ARG-SPECIALIZED
-```
+----
-CLOS has to make a choice between the first and the second definition of `yet-another-fn`, but its choice is just a heuristic.
-What if we want the choice to be based on the second argument, instead of the first?
+CLOS has to make a choice between the first and the second definition of
+`yet-another-fn`, but its choice is just a heuristic. What if we want the
+choice to be based on the second argument, instead of the first?
-For that, we use the `:argument-precedence-order` option when declaring a generic function:
+For that, we use the `:argument-precedence-order` option when declaring a
+generic function:
-```lisp
+[source,lisp]
+----
* (defgeneric yet-another-fn (obj1 obj2) (:argument-precedence-order obj2 obj1))
#<STANDARD-GENERIC-FUNCTION YET-ANOTHER-FN (2) {584D9EC9}>
* (yet-another-fn (make-instance 'class-a) (make-instance 'class-b))
:SECOND-ARG-SPECIALIZED
-```
+----
-I liked that the `:argument-precedence-order` option exists.
-We shouldn't have to change the arguments from `(obj1 obj2)` to `(obj2 obj1)` just to make CLOS pick the method that we want.
-We can configure its default behaviour if desired, and keep the order of arguments however it best fits the generic function.
+I liked that the `:argument-precedence-order` option exists. We shouldn't have
+to change the arguments from `(obj1 obj2)` to `(obj2 obj1)` just to make CLOS
+pick the method that we want. We can configure its default behaviour if
+desired, and keep the order of arguments however it best fits the generic
+function.
-## Comparison with Clojure
+== Comparison with Clojure
Clojure has an equivalent, when using `defmulti`.
-Since when declaring a multi-method with `defmulti` we must define the dispatch function, Clojure uses it to pick the method definition.
-Since the dispatch function is required, there is no need for a default behaviour, such as left-to-right.
+Since when declaring a multi-method with `defmulti` we must define the dispatch
+function, Clojure uses it to pick the method definition. Since the dispatch
+function is required, there is no need for a default behaviour, such as
+left-to-right.
-## Conclusion
+== Conclusion
-Making the argument precedence order configurable for generic functions but not for class definitions makes a lot of sense.
+Making the argument precedence order configurable for generic functions but not
+for class definitions makes a lot of sense.
-When declaring a class, we can choose the precedence order, and that is about it.
-But when defining a generic function, the order of arguments is more important to the function semantics, and the argument precedence being left-to-right is just the default behaviour.
+When declaring a class, we can choose the precedence order, and that is about
+it. But when defining a generic function, the order of arguments is more
+important to the function semantics, and the argument precedence being
+left-to-right is just the default behaviour.
-One shouldn't change the order of arguments of a generic function for the sake of tailoring it to the CLOS priority ranking algorithm, but doing it for a class definition is just fine.
+One shouldn't change the order of arguments of a generic function for the sake
+of tailoring it to the CLOS priority ranking algorithm, but doing it for a class
+definition is just fine.
TIL.
-## References
+== References
+
+:clos-wiki: https://en.wikipedia.org/wiki/Object-Oriented_Programming_in_Common_Lisp
-1. [Object-Oriented Programming in Common Lisp: A Programmer's Guide to CLOS](https://en.wikipedia.org/wiki/Object-Oriented_Programming_in_Common_Lisp), by Sonja E. Keene
+. {clos-wiki}[Object-Oriented Programming in Common Lisp: A Programmer's Guide
+ to CLOS], by Sonja E. Keene
diff --git a/_tils/2021-04-24-clojure-auto-curry.md b/src/content/en/til/2021/04/24/clojure-autocurry.adoc
index c1e277f..a2c2835 100644
--- a/_tils/2021-04-24-clojure-auto-curry.md
+++ b/src/content/en/til/2021/04/24/clojure-autocurry.adoc
@@ -1,22 +1,15 @@
----
+= Clojure auto curry
+:sort: 1
+:updatedat: 2021-04-27
-title: Clojure auto curry
+:defcurry-orig: https://lorettahe.github.io/clojure/2016/09/22/clojure-auto-curry
-date: 2021-04-24 1
+Here's a simple macro defined by {defcurry-orig}[Loretta He] to create Clojure
+functions that are curried on all arguments, relying on Clojure's multi-arity
+support:
-updated_at: 2021-04-27
-
-layout: post
-
-lang: en
-
-ref: clojure-auto-curry
-
----
-
-Here's a simple macro defined by [Loretta He][lorettahe] to create Clojure functions that are curried on all arguments, relying on Clojure's multi-arity support:
-
-```clojure
+[source,clojure]
+----
(defmacro defcurry
[name args & body]
(let [partials (map (fn [n]
@@ -25,11 +18,12 @@ Here's a simple macro defined by [Loretta He][lorettahe] to create Clojure funct
`(defn ~name
(~args ~@body)
~@partials)))
-```
+----
A naive `add` definition, alongside its usage and macroexpansion:
-```clojure
+[source,clojure]
+----
user=> (defcurry add
[a b c d e]
(+ 1 2 3 4 5))
@@ -64,22 +58,24 @@ user=> (pprint
([a b c] (clojure.core/partial add a b c))
([a b c d] (clojure.core/partial add a b c d))))
nil
-```
+----
-This simplistic `defcurry` definition doesn't support optional parameters, multi-arity, `&` rest arguments, docstrings, etc., but it could certainly evolve to do so.
+This simplistic `defcurry` definition doesn't support optional parameters,
+multi-arity, `&` rest arguments, docstrings, etc., but it could certainly evolve
+to do so.
-I like how `defcurry` is so short, and abdicates the responsability of doing the multi-arity logic to Clojure's built-in multi-arity support.
-Simple and elegant.
+I like how `defcurry` is so short, and abdicates the responsability of doing the
+multi-arity logic to Clojure's built-in multi-arity support. Simple and
+elegant.
Same Clojure as before, now with auto-currying via macros.
-[lorettahe]: http://lorettahe.github.io/clojure/2016/09/22/clojure-auto-curry
-
-## Comparison with Common Lisp
+== Comparison with Common Lisp
My attempt at writing an equivalent for Common Lisp gives me:
-```lisp
+[source,lisp]
+----
(defun partial (fn &rest args)
(lambda (&rest args2)
(apply fn (append args args2))))
@@ -96,15 +92,17 @@ My attempt at writing an equivalent for Common Lisp gives me:
(let ((func (lambda ,args ,@body)))
(curry-n (- ,(length args) (length rest))
(apply #'partial func rest)))))
-```
+----
-Without built-in multi-arity support, we have to do more work, like tracking the number of arguments consumed so far.
-We also have to write `#'partial` ourselves.
-That is, without dependending on any library, sticking to ANSI Common Lisp.
+Without built-in multi-arity support, we have to do more work, like tracking the
+number of arguments consumed so far. We also have to write `#'partial`
+ourselves. That is, without dependending on any library, sticking to ANSI
+Common Lisp.
The usage is pretty similar:
-```lisp
+[source,lisp]
+----
* (defcurry add (a b c d e)
(+ a b c d e))
ADD
@@ -128,8 +126,10 @@ ADD
(LET ((FUNC (LAMBDA (A B C D E) (+ A B C D E))))
(CURRY-N (- 5 (LENGTH REST)) (APPLY #'PARTIAL FUNC REST))))
T
-```
+----
-This also require `funcall`s, since we return a `lambda` that doesn't live in the function namespace.
+This also require `funcall`s, since we return a `lambda` that doesn't live in
+the function namespace.
-Like the Clojure one, it doesn't support optional parameters, `&rest` rest arguments, docstrings, etc., but it also could evolve to do so.
+Like the Clojure one, it doesn't support optional parameters, `&rest` rest
+arguments, docstrings, etc., but it also could evolve to do so.
diff --git a/_tils/2021-04-24-three-way-conditional-for-number-signs-on-lisp.md b/src/content/en/til/2021/04/24/scm-nif.adoc
index f53451b..2ea8a6f 100644
--- a/_tils/2021-04-24-three-way-conditional-for-number-signs-on-lisp.md
+++ b/src/content/en/til/2021/04/24/scm-nif.adoc
@@ -1,35 +1,31 @@
----
+= Three-way conditional for number signs on Lisp
+:categories: lisp scheme common-lisp
+:sort: 2
+:updatedat: 2021-08-14
-title: Three-way conditional for number signs on Lisp
+:on-lisp: https://www.paulgraham.com/onlisptext.html
+:sicp: https://mitpress.mit.edu/sites/default/files/sicp/index.html
-date: 2021-04-24 3
+A useful macro from Paul Graham's {on-lisp}[On Lisp] book:
-updated_at: 2021-08-14
-
-layout: post
-
-lang: en
-
-ref: three-way-conditional-for-number-signs-on-lisp
-
----
-
-A useful macro from Paul Graham's [On Lisp][on-lisp] book:
-
-```lisp
+[source,lisp]
+----
(defmacro nif (expr pos zero neg)
(let ((g (gensym)))
`(let ((,g ,expr))
(cond ((plusp ,g) ,pos)
((zerop ,g) ,zero)
(t ,neg)))))
-```
+----
-After I looked at this macro, I started seeing opportunities to using it in many places, and yet I didn't see anyone else using it.
+After I looked at this macro, I started seeing opportunities to using it in many
+places, and yet I didn't see anyone else using it.
-The latest example I can think of is section 1.3.3 of [Structure and Interpretation of Computer Programs][sicp], which I was reading recently:
+The latest example I can think of is section 1.3.3 of {sicp}[Structure and
+Interpretation of Computer Programs], which I was reading recently:
-```scheme
+[source,scheme]
+----
(define (search f neg-point pos-point)
(let ((midpoint (average neg-point pos-point)))
(if (close-enough? neg-point post-point)
@@ -40,11 +36,14 @@ The latest example I can think of is section 1.3.3 of [Structure and Interpretat
((negative? test-value)
(search f midpoint pos-point))
(else midpoint))))))
-```
+----
-Not that the book should introduce such macro this early, but I couldn't avoid feeling bothered by not using the `nif` macro, which could even remove the need for the intermediate `test-value` variable:
+Not that the book should introduce such macro this early, but I couldn't avoid
+feeling bothered by not using the `nif` macro, which could even remove the need
+for the intermediate `test-value` variable:
-```scheme
+[source,scheme]
+----
(define (search f neg-point pos-point)
(let ((midpoint (average neg-point pos-point)))
(if (close-enough? neg-point post-point)
@@ -53,11 +52,10 @@ Not that the book should introduce such macro this early, but I couldn't avoid f
(search f neg-point midpoint)
(midpoint)
(search f midpoint pos-point)))))
-```
-
-It also avoids `cond`'s extra clunky parentheses for grouping, which is unnecessary but built-in.
+----
-As a macro, I personally feel it tilts the balance towards expressivenes despite its extra cognitive load toll.
+It also avoids `cond`'s extra clunky parentheses for grouping, which is
+unnecessary but built-in.
-[on-lisp]: http://www.paulgraham.com/onlisptext.html
-[sicp]: https://mitpress.mit.edu/sites/default/files/sicp/index.html
+As a macro, I personally feel it tilts the balance towards expressivenes despite
+its extra cognitive load toll.
diff --git a/_tils/2021-07-23-gpg-verification-of-git-repositories-without-tls.md b/src/content/en/til/2021/07/23/git-tls-gpg.adoc
index fd42c1c..f198c2b 100644
--- a/_tils/2021-07-23-gpg-verification-of-git-repositories-without-tls.md
+++ b/src/content/en/til/2021/07/23/git-tls-gpg.adoc
@@ -1,27 +1,21 @@
----
+= GPG verification of Git repositories without TLS
-title: GPG verification of Git repositories without TLS
-
-date: 2021-07-23
-
-layout: post
-
-lang: en
-
-ref: gpg-verification-of-git-repositories-without-tls
-
----
+:empty:
+:git-protocol: https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols#_the_git_protocol
+:remembering: https://euandreh.xyz/remembering/
For online Git repositories that use the [Git Protocol] for serving code, you
can can use GPG to handle authentication, if you have the committer's public
key.
Here's how I'd verify that I've cloned an authentic version of
-[remembering][remembering][^not-available]:
+{remembering}[remembering]footnote:not-available[
+ Funnily enough, not available anymore via the Git Protocol, now only with
+ HTTPS.
+]:
-[^not-available]: Funnily enough, not available anymore via the Git Protocol, now only with HTTPS.
-
-```shell
+[source,sh]
+----
$ wget -qO- https://euandre.org/public.asc | gpg --import -
gpg: clef 81F90EC3CD356060 : « EuAndreh <eu@euandre.org> » n'est pas modifiée
gpg: Quantité totale traitée : 1
@@ -32,7 +26,7 @@ $ git verify-commit HEAD
gpg: Signature faite le dim. 27 juin 2021 16:50:21 -03
gpg: avec la clef RSA 5BDAE9B8B2F6C6BCBB0D6CE581F90EC3CD356060
gpg: Bonne signature de « EuAndreh <eu@euandre.org> » [ultime]
-```
+----
On the first line we import the public key (funnily enough, available via
HTTPS), and after cloning the code via the insecure `git://` protocol, we use
@@ -40,17 +34,12 @@ HTTPS), and after cloning the code via the insecure `git://` protocol, we use
The verification is successful, and we can see that the public key from the
signature matches the fingerprint of the imported one. However
-`git verify-commit` doesn't have an option to check which public key you want
-to verify the commit against. Which means that if a MITM attack happens, the
-attacker could very easily serve a malicious repository with signed commits,
-and you'd have to verify the public key by yourself. That would need to happen
-for subsequent fetches, too.
+`git verify-commit` doesn't have an option to check which public key you want to
+verify the commit against. Which means that if a MITM attack happens, the
+attacker could very easily serve a malicious repository with signed commits, and
+you'd have to verify the public key by yourself. That would need to happen for
+subsequent fetches, too.
Even though this is possible, it is not very convenient, and certainly very
brittle. Despite the fact that the Git Protocol is much faster, it being harder
to make secure is a big downside.
-
-
-
-[Git Protocol]: https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols#_the_git_protocol
-[remembering]: https://euandreh.xyz/remembering/
diff --git a/_tils/2021-08-11-encoding-and-decoding-javascript-bigint-values-with-reviver.md b/src/content/en/til/2021/08/11/js-bigint-reviver.adoc
index d71174d..98ee79b 100644
--- a/_tils/2021-08-11-encoding-and-decoding-javascript-bigint-values-with-reviver.md
+++ b/src/content/en/til/2021/08/11/js-bigint-reviver.adoc
@@ -1,39 +1,27 @@
----
+= Encoding and decoding JavaScript BigInt values with reviver
+:updatedat: 2021-08-13
-title: Encoding and decoding JavaScript BigInt values with reviver
+:reviver-fn: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter
+:bigint: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt
+:json-rfc: https://datatracker.ietf.org/doc/html/rfc8259
-date: 2021-08-11
-
-updated_at: 2021-08-13
-
-layout: post
-
-lang: en
-
-ref: encoding-and-decoding-javascript-bigint-values-with-reviver
-
----
-
-`JSON.parse()` accepts a second parameter: a [`reviver()` function][reviver].
+`JSON.parse()` accepts a second parameter: a {reviver-fn}[`reviver()` function].
It is a function that can be used to transform the `JSON` values as they're
being parsed.
-[reviver]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter
-
-As it turns out, when combined with JavaScript's [`BigInt`] type, you can parse
-and encode JavaScript `BigInt` numbers via JSON:
+As it turns out, when combined with JavaScript's {bigint}[`BigInt`] type, you
+can parse and encode JavaScript `BigInt` numbers via JSON:
-[`BigInt`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt
-
-```javascript
+[source,javascript]
+----
const bigIntReviver = (_, value) =>
- typeof value === "string" && value.match(/^-?[0-9]+n$/)
- ? BigInt(value.slice(0, value.length - 1))
- : value;
-```
+ typeof value === "string" && value.match(/^-?[0-9]+n$/)
+ ? BigInt(value.slice(0, value.length - 1))
+ : value;
+----
-I chose to interpret strings that contains only numbers and an ending `n` suffix
-as `BigInt` values, similar to how JavaScript interprets `123` (a number)
+I chose to interpret strings that contains only numbers and an ending `n`
+suffix as `BigInt` values, similar to how JavaScript interprets `123` (a number)
differently from `123n` (a `bigint`);
We do those checks before constructing the `BigInt` to avoid throwing needless
@@ -42,25 +30,27 @@ become a bottleneck when parsing large JSON values.
In order to do the full roundtrip, we now only need the `toJSON()` counterpart:
-```javascript
+[source,javascript]
+----
BigInt.prototype.toJSON = function() {
- return this.toString() + "n";
+ return this.toString() + "n";
};
-```
+----
With both `bigIntReviver` and `toJSON` defined, we can now successfully parse
and encode JavaScript objects with `BigInt` values transparently:
-```javascript
+[source,javascript]
+----
const s = `[
- null,
- true,
- false,
- -1,
- 3.14,
- "a string",
- { "a-number": "-123" },
- { "a-bigint": "-123n" }
+ null,
+ true,
+ false,
+ -1,
+ 3.14,
+ "a string",
+ { "a-number": "-123" },
+ { "a-bigint": "-123n" }
]`;
const parsed = JSON.parse(s, bigIntReviver);
@@ -71,11 +61,12 @@ console.log(s2);
console.log(typeof parsed[6]["a-number"])
console.log(typeof parsed[7]["a-bigint"])
-```
+----
The output of the above is:
-```
+[source,javascript]
+----
[
null,
true,
@@ -89,12 +80,10 @@ The output of the above is:
[null,true,false,-1,3.14,"a string",{"a-number":"-123"},{"a-bigint":"-123n"}]
string
bigint
-```
+----
If you're on a web browser, you can probably try copying and pasting the above
code on the console right now, as is.
-Even though [`JSON`] doesn't include `BigInt` number, encoding and decoding them
-as strings is quite trivial on JavaScript.
-
-[`JSON`]: https://datatracker.ietf.org/doc/html/rfc8259
+Even though {json-rfc}[`JSON`] doesn't include `BigInt` number, encoding and
+decoding them as strings is quite trivial on JavaScript.
diff --git a/src/content/en/til/categories.adoc b/src/content/en/til/categories.adoc
new file mode 100644
index 0000000..feb64ff
--- /dev/null
+++ b/src/content/en/til/categories.adoc
@@ -0,0 +1,2 @@
+= Articles by category
+:type: categories
diff --git a/src/content/en/til/index.adoc b/src/content/en/til/index.adoc
new file mode 100644
index 0000000..7e85335
--- /dev/null
+++ b/src/content/en/til/index.adoc
@@ -0,0 +1,7 @@
+= Today I Learned
+
+:anna-e-so: https://til.flourishing.stream/
+
+**T**oday **I** **L**earned: small entries of useful knowledge.
+
+Shameless rip-off of {anna-e-so}[Anna e só].
diff --git a/src/content/favicon.ico b/src/content/favicon.ico
new file mode 100644
index 0000000..8f2130a
--- /dev/null
+++ b/src/content/favicon.ico
Binary files differ
diff --git a/src/content/favicon.png b/src/content/favicon.png
new file mode 100644
index 0000000..136d8cb
--- /dev/null
+++ b/src/content/favicon.png
Binary files differ
diff --git a/src/content/img/atom.svg b/src/content/img/atom.svg
new file mode 100644
index 0000000..23ab291
--- /dev/null
+++ b/src/content/img/atom.svg
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path
+ fill="#EA990E"
+ d="M576 1344q0 80-56 136t-136 56-136-56-56-136 56-136 136-56 136 56 56 136zm512 123q2 28-17 48-18 21-47 21h-135q-25 0-43-16.5t-20-41.5q-22-229-184.5-391.5t-391.5-184.5q-25-2-41.5-20t-16.5-43v-135q0-29 21-47 17-17 43-17h5q160 13 306 80.5t259 181.5q114 113 181.5 259t80.5 306zm512 2q2 27-18 47-18 20-46 20h-143q-26 0-44.5-17.5t-19.5-42.5q-12-215-101-408.5t-231.5-336-336-231.5-408.5-102q-25-1-42.5-19.5t-17.5-43.5v-143q0-28 20-46 18-18 44-18h3q262 13 501.5 120t425.5 294q187 186 294 425.5t120 501.5z" />
+</svg>
diff --git a/src/content/img/envelope/dark.svg b/src/content/img/envelope/dark.svg
new file mode 100644
index 0000000..f521a8f
--- /dev/null
+++ b/src/content/img/envelope/dark.svg
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path
+ fill="white"
+ d="M1664 1504v-768q-32 36-69 66-268 206-426 338-51 43-83 67t-86.5 48.5-102.5 24.5h-2q-48 0-102.5-24.5t-86.5-48.5-83-67q-158-132-426-338-37-30-69-66v768q0 13 9.5 22.5t22.5 9.5h1472q13 0 22.5-9.5t9.5-22.5zm0-1051v-24.5l-.5-13-3-12.5-5.5-9-9-7.5-14-2.5h-1472q-13 0-22.5 9.5t-9.5 22.5q0 168 147 284 193 152 401 317 6 5 35 29.5t46 37.5 44.5 31.5 50.5 27.5 43 9h2q20 0 43-9t50.5-27.5 44.5-31.5 46-37.5 35-29.5q208-165 401-317 54-43 100.5-115.5t46.5-131.5zm128-37v1088q0 66-47 113t-113 47h-1472q-66 0-113-47t-47-113v-1088q0-66 47-113t113-47h1472q66 0 113 47t47 113z" />
+</svg>
diff --git a/src/content/img/envelope/light.svg b/src/content/img/envelope/light.svg
new file mode 100644
index 0000000..12aba9f
--- /dev/null
+++ b/src/content/img/envelope/light.svg
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path
+ fill="black"
+ d="M1664 1504v-768q-32 36-69 66-268 206-426 338-51 43-83 67t-86.5 48.5-102.5 24.5h-2q-48 0-102.5-24.5t-86.5-48.5-83-67q-158-132-426-338-37-30-69-66v768q0 13 9.5 22.5t22.5 9.5h1472q13 0 22.5-9.5t9.5-22.5zm0-1051v-24.5l-.5-13-3-12.5-5.5-9-9-7.5-14-2.5h-1472q-13 0-22.5 9.5t-9.5 22.5q0 168 147 284 193 152 401 317 6 5 35 29.5t46 37.5 44.5 31.5 50.5 27.5 43 9h2q20 0 43-9t50.5-27.5 44.5-31.5 46-37.5 35-29.5q208-165 401-317 54-43 100.5-115.5t46.5-131.5zm128-37v1088q0 66-47 113t-113 47h-1472q-66 0-113-47t-47-113v-1088q0-66 47-113t113-47h1472q66 0 113 47t47 113z" />
+</svg>
diff --git a/static/lord-favicon.svg b/src/content/img/favicon.svg
index ce566b2..ce566b2 100644
--- a/static/lord-favicon.svg
+++ b/src/content/img/favicon.svg
diff --git a/src/content/img/link/dark.svg b/src/content/img/link/dark.svg
new file mode 100644
index 0000000..fd6ca35
--- /dev/null
+++ b/src/content/img/link/dark.svg
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg">
+ <path
+ fill="white"
+ fill-rule="evenodd"
+ d="M7.775 3.275a.75.75 0 001.06 1.06l1.25-1.25a2 2 0 112.83 2.83l-2.5 2.5a2 2 0 01-2.83 0 .75.75 0 00-1.06 1.06 3.5 3.5 0 004.95 0l2.5-2.5a3.5 3.5 0 00-4.95-4.95l-1.25 1.25zm-4.69 9.64a2 2 0 010-2.83l2.5-2.5a2 2 0 012.83 0 .75.75 0 001.06-1.06 3.5 3.5 0 00-4.95 0l-2.5 2.5a3.5 3.5 0 004.95 4.95l1.25-1.25a.75.75 0 00-1.06-1.06l-1.25 1.25a2 2 0 01-2.83 0z" />
+</svg>
diff --git a/src/content/img/link/light.svg b/src/content/img/link/light.svg
new file mode 100644
index 0000000..fd9033f
--- /dev/null
+++ b/src/content/img/link/light.svg
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg">
+ <path
+ fill="black"
+ fill-rule="evenodd"
+ d="M7.775 3.275a.75.75 0 001.06 1.06l1.25-1.25a2 2 0 112.83 2.83l-2.5 2.5a2 2 0 01-2.83 0 .75.75 0 00-1.06 1.06 3.5 3.5 0 004.95 0l2.5-2.5a3.5 3.5 0 00-4.95-4.95l-1.25 1.25zm-4.69 9.64a2 2 0 010-2.83l2.5-2.5a2 2 0 012.83 0 .75.75 0 001.06-1.06 3.5 3.5 0 00-4.95 0l-2.5 2.5a3.5 3.5 0 004.95 4.95l1.25-1.25a.75.75 0 00-1.06-1.06l-1.25 1.25a2 2 0 01-2.83 0z" />
+</svg>
diff --git a/src/content/img/lock/dark.svg b/src/content/img/lock/dark.svg
new file mode 100644
index 0000000..98a36bc
--- /dev/null
+++ b/src/content/img/lock/dark.svg
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path
+ fill="white"
+ d="M640 768h512v-192q0-106-75-181t-181-75-181 75-75 181v192zm832 96v576q0 40-28 68t-68 28h-960q-40 0-68-28t-28-68v-576q0-40 28-68t68-28h32v-192q0-184 132-316t316-132 316 132 132 316v192h32q40 0 68 28t28 68z" />
+</svg>
diff --git a/src/content/img/lock/light.svg b/src/content/img/lock/light.svg
new file mode 100644
index 0000000..d449467
--- /dev/null
+++ b/src/content/img/lock/light.svg
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
+ <path
+ fill="black"
+ d="M640 768h512v-192q0-106-75-181t-181-75-181 75-75 181v192zm832 96v576q0 40-28 68t-68 28h-960q-40 0-68-28t-28-68v-576q0-40 28-68t68-28h32v-192q0-184 132-316t316-132 316 132 132 316v192h32q40 0 68 28t28 68z" />
+</svg>
diff --git a/src/content/img/logo/dark.svg b/src/content/img/logo/dark.svg
new file mode 100644
index 0000000..96e0f87
--- /dev/null
+++ b/src/content/img/logo/dark.svg
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16">
+ <path fill="white" d="M 0 8 L 1 8 L 1 9 L 0 9 L 0 8 Z" />
+ <path fill="white" d="M 0 13 L 1 13 L 1 14 L 0 14 L 0 13 Z" />
+ <path fill="white" d="M 1 8 L 2 8 L 2 9 L 1 9 L 1 8 Z" />
+ <path fill="white" d="M 1 13 L 2 13 L 2 14 L 1 14 L 1 13 Z" />
+ <path fill="white" d="M 2 8 L 3 8 L 3 9 L 2 9 L 2 8 Z" />
+ <path fill="white" d="M 2 13 L 3 13 L 3 14 L 2 14 L 2 13 Z" />
+ <path fill="white" d="M 3 8 L 4 8 L 4 9 L 3 9 L 3 8 Z" />
+ <path fill="white" d="M 3 13 L 4 13 L 4 14 L 3 14 L 3 13 Z" />
+ <path fill="white" d="M 4 7 L 5 7 L 5 8 L 4 8 L 4 7 Z" />
+ <path fill="white" d="M 4 8 L 5 8 L 5 9 L 4 9 L 4 8 Z" />
+ <path fill="white" d="M 4 13 L 5 13 L 5 14 L 4 14 L 4 13 Z" />
+ <path fill="white" d="M 5 6 L 6 6 L 6 7 L 5 7 L 5 6 Z" />
+ <path fill="white" d="M 5 7 L 6 7 L 6 8 L 5 8 L 5 7 Z" />
+ <path fill="white" d="M 5 13 L 6 13 L 6 14 L 5 14 L 5 13 Z" />
+ <path fill="white" d="M 6 5 L 7 5 L 7 6 L 6 6 L 6 5 Z" />
+ <path fill="white" d="M 6 6 L 7 6 L 7 7 L 6 7 L 6 6 Z" />
+ <path fill="white" d="M 6 14 L 7 14 L 7 15 L 6 15 L 6 14 Z" />
+ <path fill="white" d="M 7 1 L 8 1 L 8 2 L 7 2 L 7 1 Z" />
+ <path fill="white" d="M 7 14 L 8 14 L 8 15 L 7 15 L 7 14 Z" />
+ <path fill="white" d="M 7 15 L 8 15 L 8 16 L 7 16 L 7 15 Z" />
+ <path fill="white" d="M 7 2 L 8 2 L 8 3 L 7 3 L 7 2 Z" />
+ <path fill="white" d="M 7 3 L 8 3 L 8 4 L 7 4 L 7 3 Z" />
+ <path fill="white" d="M 7 4 L 8 4 L 8 5 L 7 5 L 7 4 Z" />
+ <path fill="white" d="M 7 5 L 8 5 L 8 6 L 7 6 L 7 5 Z" />
+ <path fill="white" d="M 8 1 L 9 1 L 9 2 L 8 2 L 8 1 Z" />
+ <path fill="white" d="M 8 15 L 9 15 L 9 16 L 8 16 L 8 15 Z" />
+ <path fill="white" d="M 9 1 L 10 1 L 10 2 L 9 2 L 9 1 Z" />
+ <path fill="white" d="M 9 2 L 10 2 L 10 3 L 9 3 L 9 2 Z" />
+ <path fill="white" d="M 9 6 L 10 6 L 10 7 L 9 7 L 9 6 Z" />
+ <path fill="white" d="M 9 15 L 10 15 L 10 16 L 9 16 L 9 15 Z" />
+ <path fill="white" d="M 10 2 L 11 2 L 11 3 L 10 3 L 10 2 Z" />
+ <path fill="white" d="M 10 3 L 11 3 L 11 4 L 10 4 L 10 3 Z" />
+ <path fill="white" d="M 10 4 L 11 4 L 11 5 L 10 5 L 10 4 Z" />
+ <path fill="white" d="M 10 5 L 11 5 L 11 6 L 10 6 L 10 5 Z" />
+ <path fill="white" d="M 10 6 L 11 6 L 11 7 L 10 7 L 10 6 Z" />
+ <path fill="white" d="M 11 6 L 12 6 L 12 7 L 11 7 L 11 6 Z" />
+ <path fill="white" d="M 11 8 L 12 8 L 12 9 L 11 9 L 11 8 Z" />
+ <path fill="white" d="M 10 15 L 11 15 L 11 16 L 10 16 L 10 15 Z" />
+ <path fill="white" d="M 11 10 L 12 10 L 12 11 L 11 11 L 11 10 Z" />
+ <path fill="white" d="M 11 12 L 12 12 L 12 13 L 11 13 L 11 12 Z" />
+ <path fill="white" d="M 11 14 L 12 14 L 12 15 L 11 15 L 11 14 Z" />
+ <path fill="white" d="M 11 15 L 12 15 L 12 16 L 11 16 L 11 15 Z" />
+ <path fill="white" d="M 12 6 L 13 6 L 13 7 L 12 7 L 12 6 Z" />
+ <path fill="white" d="M 12 8 L 13 8 L 13 9 L 12 9 L 12 8 Z" />
+ <path fill="white" d="M 12 10 L 13 10 L 13 11 L 12 11 L 12 10 Z" />
+ <path fill="white" d="M 12 12 L 13 12 L 13 13 L 12 13 L 12 12 Z" />
+ <path fill="white" d="M 12 14 L 13 14 L 13 15 L 12 15 L 12 14 Z" />
+ <path fill="white" d="M 13 6 L 14 6 L 14 7 L 13 7 L 13 6 Z" />
+ <path fill="white" d="M 13 8 L 14 8 L 14 9 L 13 9 L 13 8 Z" />
+ <path fill="white" d="M 13 10 L 14 10 L 14 11 L 13 11 L 13 10 Z" />
+ <path fill="white" d="M 13 12 L 14 12 L 14 13 L 13 13 L 13 12 Z" />
+ <path fill="white" d="M 13 13 L 14 13 L 14 14 L 13 14 L 13 13 Z" />
+ <path fill="white" d="M 13 14 L 14 14 L 14 15 L 13 15 L 13 14 Z" />
+ <path fill="white" d="M 14 7 L 15 7 L 15 8 L 14 8 L 14 7 Z" />
+ <path fill="white" d="M 14 8 L 15 8 L 15 9 L 14 9 L 14 8 Z" />
+ <path fill="white" d="M 14 9 L 15 9 L 15 10 L 14 10 L 14 9 Z" />
+ <path fill="white" d="M 14 10 L 15 10 L 15 11 L 14 11 L 14 10 Z" />
+ <path fill="white" d="M 14 11 L 15 11 L 15 12 L 14 12 L 14 11 Z" />
+ <path fill="white" d="M 14 12 L 15 12 L 15 13 L 14 13 L 14 12 Z" />
+</svg>
diff --git a/src/content/img/logo/light.svg b/src/content/img/logo/light.svg
new file mode 100644
index 0000000..ab6000b
--- /dev/null
+++ b/src/content/img/logo/light.svg
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" width="16" height="16">
+ <path fill="black" d="M 0 8 L 1 8 L 1 9 L 0 9 L 0 8 Z" />
+ <path fill="black" d="M 0 13 L 1 13 L 1 14 L 0 14 L 0 13 Z" />
+ <path fill="black" d="M 1 8 L 2 8 L 2 9 L 1 9 L 1 8 Z" />
+ <path fill="black" d="M 1 13 L 2 13 L 2 14 L 1 14 L 1 13 Z" />
+ <path fill="black" d="M 2 8 L 3 8 L 3 9 L 2 9 L 2 8 Z" />
+ <path fill="black" d="M 2 13 L 3 13 L 3 14 L 2 14 L 2 13 Z" />
+ <path fill="black" d="M 3 8 L 4 8 L 4 9 L 3 9 L 3 8 Z" />
+ <path fill="black" d="M 3 13 L 4 13 L 4 14 L 3 14 L 3 13 Z" />
+ <path fill="black" d="M 4 7 L 5 7 L 5 8 L 4 8 L 4 7 Z" />
+ <path fill="black" d="M 4 8 L 5 8 L 5 9 L 4 9 L 4 8 Z" />
+ <path fill="black" d="M 4 13 L 5 13 L 5 14 L 4 14 L 4 13 Z" />
+ <path fill="black" d="M 5 6 L 6 6 L 6 7 L 5 7 L 5 6 Z" />
+ <path fill="black" d="M 5 7 L 6 7 L 6 8 L 5 8 L 5 7 Z" />
+ <path fill="black" d="M 5 13 L 6 13 L 6 14 L 5 14 L 5 13 Z" />
+ <path fill="black" d="M 6 5 L 7 5 L 7 6 L 6 6 L 6 5 Z" />
+ <path fill="black" d="M 6 6 L 7 6 L 7 7 L 6 7 L 6 6 Z" />
+ <path fill="black" d="M 6 14 L 7 14 L 7 15 L 6 15 L 6 14 Z" />
+ <path fill="black" d="M 7 1 L 8 1 L 8 2 L 7 2 L 7 1 Z" />
+ <path fill="black" d="M 7 14 L 8 14 L 8 15 L 7 15 L 7 14 Z" />
+ <path fill="black" d="M 7 15 L 8 15 L 8 16 L 7 16 L 7 15 Z" />
+ <path fill="black" d="M 7 2 L 8 2 L 8 3 L 7 3 L 7 2 Z" />
+ <path fill="black" d="M 7 3 L 8 3 L 8 4 L 7 4 L 7 3 Z" />
+ <path fill="black" d="M 7 4 L 8 4 L 8 5 L 7 5 L 7 4 Z" />
+ <path fill="black" d="M 7 5 L 8 5 L 8 6 L 7 6 L 7 5 Z" />
+ <path fill="black" d="M 8 1 L 9 1 L 9 2 L 8 2 L 8 1 Z" />
+ <path fill="black" d="M 8 15 L 9 15 L 9 16 L 8 16 L 8 15 Z" />
+ <path fill="black" d="M 9 1 L 10 1 L 10 2 L 9 2 L 9 1 Z" />
+ <path fill="black" d="M 9 2 L 10 2 L 10 3 L 9 3 L 9 2 Z" />
+ <path fill="black" d="M 9 6 L 10 6 L 10 7 L 9 7 L 9 6 Z" />
+ <path fill="black" d="M 9 15 L 10 15 L 10 16 L 9 16 L 9 15 Z" />
+ <path fill="black" d="M 10 2 L 11 2 L 11 3 L 10 3 L 10 2 Z" />
+ <path fill="black" d="M 10 3 L 11 3 L 11 4 L 10 4 L 10 3 Z" />
+ <path fill="black" d="M 10 4 L 11 4 L 11 5 L 10 5 L 10 4 Z" />
+ <path fill="black" d="M 10 5 L 11 5 L 11 6 L 10 6 L 10 5 Z" />
+ <path fill="black" d="M 10 6 L 11 6 L 11 7 L 10 7 L 10 6 Z" />
+ <path fill="black" d="M 11 6 L 12 6 L 12 7 L 11 7 L 11 6 Z" />
+ <path fill="black" d="M 11 8 L 12 8 L 12 9 L 11 9 L 11 8 Z" />
+ <path fill="black" d="M 10 15 L 11 15 L 11 16 L 10 16 L 10 15 Z" />
+ <path fill="black" d="M 11 10 L 12 10 L 12 11 L 11 11 L 11 10 Z" />
+ <path fill="black" d="M 11 12 L 12 12 L 12 13 L 11 13 L 11 12 Z" />
+ <path fill="black" d="M 11 14 L 12 14 L 12 15 L 11 15 L 11 14 Z" />
+ <path fill="black" d="M 11 15 L 12 15 L 12 16 L 11 16 L 11 15 Z" />
+ <path fill="black" d="M 12 6 L 13 6 L 13 7 L 12 7 L 12 6 Z" />
+ <path fill="black" d="M 12 8 L 13 8 L 13 9 L 12 9 L 12 8 Z" />
+ <path fill="black" d="M 12 10 L 13 10 L 13 11 L 12 11 L 12 10 Z" />
+ <path fill="black" d="M 12 12 L 13 12 L 13 13 L 12 13 L 12 12 Z" />
+ <path fill="black" d="M 12 14 L 13 14 L 13 15 L 12 15 L 12 14 Z" />
+ <path fill="black" d="M 13 6 L 14 6 L 14 7 L 13 7 L 13 6 Z" />
+ <path fill="black" d="M 13 8 L 14 8 L 14 9 L 13 9 L 13 8 Z" />
+ <path fill="black" d="M 13 10 L 14 10 L 14 11 L 13 11 L 13 10 Z" />
+ <path fill="black" d="M 13 12 L 14 12 L 14 13 L 13 13 L 13 12 Z" />
+ <path fill="black" d="M 13 13 L 14 13 L 14 14 L 13 14 L 13 13 Z" />
+ <path fill="black" d="M 13 14 L 14 14 L 14 15 L 13 15 L 13 14 Z" />
+ <path fill="black" d="M 14 7 L 15 7 L 15 8 L 14 8 L 14 7 Z" />
+ <path fill="black" d="M 14 8 L 15 8 L 15 9 L 14 9 L 14 8 Z" />
+ <path fill="black" d="M 14 9 L 15 9 L 15 10 L 14 10 L 14 9 Z" />
+ <path fill="black" d="M 14 10 L 15 10 L 15 11 L 14 11 L 14 10 Z" />
+ <path fill="black" d="M 14 11 L 15 11 L 15 12 L 14 12 L 14 11 Z" />
+ <path fill="black" d="M 14 12 L 15 12 L 15 13 L 14 13 L 14 12 Z" />
+</svg>
diff --git a/src/content/pt/hea/2020/08/12/arquivo-datado.adoc b/src/content/pt/hea/2020/08/12/arquivo-datado.adoc
new file mode 100644
index 0000000..42842ce
--- /dev/null
+++ b/src/content/pt/hea/2020/08/12/arquivo-datado.adoc
@@ -0,0 +1,29 @@
+= Nome de arquivo com dia e hora de forma simplificada
+:updatedat: 2025-04-30
+:categories: shell
+
+Quando vou escrever um artigo no Jekyll ou criar um arquivo de log com a data no nome, eu normalmente engasgo para achar um jeito direto de fazer isso. Há uma solução simples: `date -I`.
+
+[source, sh]
+----
+./meu-programa.sh > meu-programa.$(date -I).log
+cp template-de-artigo.md _posts/$(date -I)-slug-do-artigo.md
+----
+
+Usar essa ferramenta padrão do GNU/Linux permite que você simplesmente escreva `touch $(date -I).md` para criar um arquivo `2020-08-12.md`.
+
+Eu sempre tinha que para para reler o `man date` ou buscar na internet de novo e de novo como fazer isso, e depois de sempre chegar no mesmo resultado ficou claro para mim que tanto `date -I` quanto `date -Is` (`s` de segundos) são as respostas que eu estou procurando 95% do tempo:
+
+[source, sh]
+----
+# dentro do meu programa.sh
+echo "Programa começou em $(date -Is)"
+# saída é:
+# Programa começou em 2020-08-12T09:15:16-03:00
+----
+
+Ambos os formatos de data são hierárquicos, com intervalos de tempo maior à esquerda. Isso significa que você pode facilmente ordená-los (e até usar TAB para completar) sem esforço ou ferramenta extra.
+
+
+
+// Generated from po4a(1).
diff --git a/src/content/pt/hea/categorias.adoc b/src/content/pt/hea/categorias.adoc
new file mode 100644
index 0000000..19e7cb7
--- /dev/null
+++ b/src/content/pt/hea/categorias.adoc
@@ -0,0 +1,6 @@
+= Artigos por categoria
+:type: categories
+
+
+
+// Generated from po4a(1).
diff --git a/src/content/pt/hea/index.adoc b/src/content/pt/hea/index.adoc
new file mode 100644
index 0000000..77a035c
--- /dev/null
+++ b/src/content/pt/hea/index.adoc
@@ -0,0 +1,11 @@
+= Hoje Eu Aprendi
+
+:anna-e-so: https://til.flourishing.stream/
+
+**H**oje **E**u **A**prendi (do inglês, _**T**oday **I** **L**earned_): pequenas postagens de conhecimentos úteis.
+
+Cópia descarada da {anna-e-so}[Anna e só].
+
+
+
+// Generated from po4a(1).
diff --git a/src/content/pt/sobre.adoc b/src/content/pt/sobre.adoc
new file mode 100644
index 0000000..2d7ad20
--- /dev/null
+++ b/src/content/pt/sobre.adoc
@@ -0,0 +1,12 @@
+= Sobre
+
+:mailto: mailto:~euandreh/public-inbox@lists.sr.ht
+:archive: https://lists.sr.ht/~euandreh/public-inbox
+
+Oi, eu sou EuAndreh. Eu escrevo software e, ocasionalmente, música. Você encontra meu dados para entrar em contato no rodapé desta página, ou pode mandar também uma mensagem para minha {mailto}[caixa de entrada pública] ({archive}[arquivo]).
+
+Esse é o meu site pessoal onde eu escrevo artigos, publico software e outros trabalhos relacionados.
+
+
+
+// Generated from po4a(1).
diff --git a/src/content/public.asc b/src/content/public.asc
deleted file mode 100644
index 533b54c..0000000
--- a/src/content/public.asc
+++ /dev/null
@@ -1,86 +0,0 @@
------BEGIN PGP PUBLIC KEY BLOCK-----
-
-mQINBFjVvh4BEADIlHUiO6IfkhcNm3J7ilXERgimvKuFNyLIUPZlDcESC1ORrv4y
-9slMDA5uojXctuLRC7nNdynLP+eFFfVUQ+hUXcV24AzyOE0CYo5c4PQA5TLe2AUC
-E9YqqfQF4XuNddY+UpcG47MuVDR+6SHkFkF29ATzpmShJj41lc7a9CdRib+62Wpe
-h7WJOFj/YoxMCBBzic4tiFNgoYobu+lLxyA4T2kCmxEaiZzc6eXBDDgJ0STL4+S8
-avpglaQ+mb5gHbH0yOtuwDG3sWyHKf7LSRVtzWvOqaGmRUmmDsSPjb5vQqvT8EMq
-UfqFFZhScLalthF3PhG0SLXPvoCoRm2aLkN+O3sv057RqaN8E39223mmz6EMXmLk
-H/U5qk2SUl3dx86dIQcB+2WUVu5zuFyfR1g6tD+DcqzxGc9XB7Gz/0TTDf3OimHb
-rp1x5i/04198ocRZT3MzXx8H25tLMS/rHmE87YdgPhMTWheSUevyhoGNHfAOcDwX
-P2oGzELXbLqHxtjENMEw2E996KrSmpcz7WOqIl3PHS1J6eRZoYQesXE+SZTeIiYb
-wD0kkZGYhBZbtLC4VWIuU2T3AL/2hF6aUh1tj1B6vcV0i3HpIHNbvPAF/I0NUhhc
-Gxwwi+ggG/MBHBbxkq7LvG5DfDbav0ZoZaov5dyhtX0CBWjVYATvjRfeAwARAQAB
-tBlFdUFuZHJlaCA8ZXVAZXVhbmRyZS5vcmc+iQI5BBMBCAAjBQJY1b4eAhsDBwsJ
-CAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQgfkOw801YGCWzg//QtDpwgbDY9uC
-Y9a/RgUsbqGAYzSInsbyDCXrAAhWGzkDMLPeFp03Sw9QyCDe0wWu8L2H4hV/FN58
-+4G6353ISwkqsf9R+P9lQs/5dwG7lp5/Gez8bZK3y7zFrdtVwcOCb4De+9fhPsgP
-9pRU8dHpLNo8Ui9IzbiYla7aGxXQdkXU2cvOuEoiuFgvcWU1KWNOWrjImATcC8EF
-8VaEaZYGRXz8lML8KgsAUxrjFkk6tqxrMlOLTjY0BuzcYZpt5XLZ2NuSIDYBoSib
-uBQ1H7DLGa+r0hnNjVEBmMOvFA1hbWa33h1AyYjYhoeVlBYpoHuDosEFqkwZ+otz
-zvImaRAOOFX1IehifTGEFie3imuOHdVuRjXb8SGu8Cgeby0T096A/vf+L1S35nc2
-mdRCUE/SIURW6hfH7uT6KqpokU86vozKmNzIcV3zhAXJ9UYwQqZgg2H3DOcTtZyE
-jVBl2glspoclsfR20T+g+qPqNDAgoDbC71fEAbUTACQau162utpHiabog7e7vyhI
-go5xdjxA8xb3Jtn39pYzbg75ArZqPbxHNZ38m00EBtC5EkD4DFh0cpQ2peuZIh1k
-c5bragCt8o6cV9t4jaq+TtVv4PrFEPqEd+w1FqqwabBq3xSsIgKg2X5rXQkktymB
-un+oN41wofuTZIoGNt8nnGb+skFBxgyJAlYEEwEKAEACGwMHCwkIBwMCAQYVCAIJ
-CgsEFgIDAQIeAQIXgBYhBFva6biy9sa8uw1s5YH5DsPNNWBgBQJi00VjBQkNv+5F
-AAoJEIH5DsPNNWBgy9IP/A8ERtFP3B5BDfIb4BUyw9AvWPAMyNfuKiXVcfrn/CGn
-D+x0dx5doGcIXskTWGEow1/6sFSheYk728wO3pp+DUaDp+2rVwO2AsKBEjBptk9i
-b9YJ4fl4rYtltscLHBGflrQ6C8jIwBqt72Ots+F7IEXy1NcskS/jU6DUzLPDmOog
-doM5IHD/2Fekmq8QVvyryH0nT5YxaJ/qRgOr1NTnnmgTcZHO7l21gJNvWo1QJLME
-lz5xNXRN/rFl5xQ3NxqVh9hwDwp/k5lXW0dxJCpmjbNKG2hNsTYrjTFrG6mSaER5
-0rdzGzQVWavyR+PDY5KRRKupYY4P5luLFy9zCdBr+ZBDTHmLfRcwXubLOSmq+gUO
-8LievpDZITHtgtWGIhWWqA80gOoqWRfAO+cpDpCqWIa+KoZyaxd19WXUqHEBr6Y9
-ZcyCCenM/+WsfmySNqAo6HGVoehewMVSRI6GObS9bdDDJTa3QySQGjdRyAn3uavo
-JwjpXfy09Kirji2x9G85OzOdXDNUrMqu0nB4AFxOU0SLhg0YpRJCig/2uuYRhRMe
-gLFM52AGxk1LfK9Pjrr2V029eRclD8SwC/F51YFP6CKGMyYHJWuaBJL1HXr/fzDD
-sLq4K1TZN/8TpYRA6t8B1mY/57KVsv2naWprmVv7q2eNU17nriLQiYYqfybcVGwn
-uQINBFjVvh4BEADzt2iKa1gSksHtTFkPQ5ULqUF2sHDClr3ykbLq/AxgSCON58eP
-A9SKQy2O+qDpojHAN1UULJgHEn34afzMkBzjxcJXMRgaTV2M+1trjwx/VluD9OKX
-wmnhmSdvCIP7Z0qdhU78maLq10UG1vVwej3kVlxsf4Eu2ZA+NeIr7Tj0DERqEDQo
-DRtNPVEy3h1xoYruy/VjNDi1CI3yFkM6HW1CgRA50rI7GDtvOuitZy+9Lpqs0mWq
-vdApWZxoQwslFcziNd+ZVaQjgO6LSnkDttRkAOblFiD710OQy3/Yo97i7bqsKrnZ
-qQMRUk0n12VXY9I94c7ELfViVqGk123ELtTViiIz5BT5iQRkJj1GiizTgGY6cfsj
-kwWwvabpmWYdyQ85sYoVuNAPz3yDaLdtStWRNHWi4+UHC03J2BiBgIrQbuXoNGuc
-j0b1fsntdntaBoZgFygwW6kXUjHLeEfnrGX3C2X49zg0rBTvEzdZwr2K0xgc2z26
-1EEf5ObmOGRt27K1fwrCxKHbKTscReHv78S4v3uN/9LvHfvIEaBoYHqMCcxy7Aii
-dk+02dNDO/jZDnTAJH2NWhyB+PJvrlnK34zHhUMVH0i5nUjaCDL/n07Vd2sbE5qW
-ivE2MWeayVKRGPci80tEGA1i42FJzGiA1uZrxXNImnsyxQyS8cr9iKoTIQARAQAB
-iQIfBBgBCAAJBQJY1b4eAhsMAAoJEIH5DsPNNWBg+bYQALJyD1nyuz8+vl8rqj7K
-Z9aRSW+XeG/wz6xrAqdY3OVvHwXYw33pgOmhNhfMUgP/Uy5OsxZdjIO7NzyKa2H9
-JoVSsAs/eLQDOQCcwXruBND6zuxt99kZh6o/Xp4lII9vuLafKner+fWluFHhOy/w
-E3Q3VwCbC9npbmzweEl9Q83R7IxbEhtFF5HV0wKVRzW/GX7iWADoHpkAAQ2sUnQp
-HhE1wOrdPm0dD9BEbTRQHekUiIQ8cFoORyWbJBwbflY64ioaFjyM+Ji49pNMykie
-LzQFW1UYyhkXJeTvv93ym4XyMi2mhsOzna7mG1bonKvbKj6qaXb7gFHUXHh/ARuu
-6CNARzBh6BTp+7c1brthGjT/L8CxrAeW2oE5wVIRuk8mdKiFoK3BuXc1P+vsnp36
-ioOQ0y+KPcp+PSbw6oDp7hTHztcW/3EoAgyHneWCmtYYi6RmVptTNpeeyHwqRP/O
-elCN1cw9zopofVQhnxDEUgzVPrWWaE7UR6vrHbzlXvWMeGTYtmdmo/9xkYbQzZW7
-y90QLUGyDwQ+KeCG29W3EhygGy3myVQbRaXywgzzO2YvovjATDa7wZQrXNoVE7J9
-uLonNtRlyRlTAfFP6hCLDXwuE6WRHXhdu7aFKbq0LQGFv5hY4wPUp8vnUtGYT/wo
-qqSkuSYhzNvmuKBIHPs6YD8duQINBGC7n68BEADnUv7iWOejQNa3fZ6v4lkHT6qF
-Rp2+NuzIpFJ2Vy7eP58XZoiz6HJPcCU8Hf95JXwaXEwS4S7mXdw1x60hd8JIe058
-Ek6MZSSVQmlLfocGsAYj1wTrLmnQ8+PV0IeQlNj1aytBI1fL+v3IPt+JdLt6b+g3
-vwcEUU9efzxx2E0KZ5GIpb2meiCQ6ha+tcd7XqegB53eQj/h/coE2zLJodpaJ3xb
-j894pE/OJCNC0+4d0Sv7oHhY7QoLYldTQbSgPyhyfl4iZpJf6OEPZxK2cJaB+cbe
-oBB6aGNyU+CIJToM+uAJJ7H7EpvxfcnfJQ1PuY5szTdvFbW820euiUEKEW69mW4u
-aFNPSc6D4Z8tZ5hXQIqBD40irULhF0CYNkIILmyNV/KJIZ5HkbQ1q+UrCFHJyvuH
-/3aCTjj9OSfE7xHPQ3xd3Xw8vvj0Mjie09xFbbcklBTw5WRzH7cw8c+Q0O69kZZ8
-b+ykcdzWTeZeWNdnzptNqnMjfheig90rUIJ7DN0c+53jCUcGpWJxJhcYF9Uk1RNH
-mSE5+VzK1y+20t0grVFX90nApm4Tl35QPrX7Qxp9C81cWiUB8xCAE6jYrmd4x+P/
-3wSQfc1Xg0Eg3QjJB+6JD7cbyDJpzDR3ja+CLZCAr9I0B4rDKD2d6et/z67iXPnZ
-UWMyZ8RVVZPFbBMOTwARAQABiQI8BBgBCAAmFiEEW9rpuLL2xry7DWzlgfkOw801
-YGAFAmC7n68CGyAFCQPCZwAACgkQgfkOw801YGAS7hAAvAEKKdNj8NK8STfehHIH
-QYxdotNHJc3b0rUa/Kzb9ELTvYgheHH6Dq26c/YSoApJxUrgUVDSJwAJV4T9JqPX
-rfCfhyzfdxocXVAWH01dhWWxCOh/S/gLB/r2CvymbFbNGY6y8vyxG8TahGYZQJEE
-ynUtw+S1sfrbqc8EMGmnw67z/hK3JIcfNrNxvt7FXo1HHcNEMRiah2NtwO9sumEK
-041y7v2efGS4z1i5FIarf/2HtIgIGs77B0G54o4IhgzJzUEYWlHumXKMsETNT3zI
-9uukR16RRkwxqOj6fOD9qNvnM1Tzf9T5DClrS5klz448qlpWWiUDABmyBMDqGKWS
-vr6oi24iemJ4LoAUws1tPCE5WukFKr69UQ9Ab4DuSWwPbQ51RUjMJPeqdV53GnjU
-H6gNBKqxlC0ccuwY3V2kDb8lc46pyN7rqLVZ0IENZ0PFHmfvH+rPkybEjRBqFbhf
-nkDPnHuXSPhsCGPk45OQxnqqCf4QFqyOTG3slc6yk/N4Bz0IVNOFq5sewISGeolb
-4uOF951f5gA2cUy5FXu8Hf8vkdJuB70nHtJLNijloPbAQFq9SuVpvAOlSFLB2wiy
-VgSGXzb4jfIEJidZlsveHDkg/LTzrkHu+f1Qj5thHXN7ARPWvZp1eNFSA6iV7Sho
-LsPdAc9FGcUNEy+/AlLpM1Y=
-=2ZCp
------END PGP PUBLIC KEY BLOCK-----
diff --git a/src/content/public.asc.txt b/src/content/public.asc.txt
new file mode 100644
index 0000000..d548547
--- /dev/null
+++ b/src/content/public.asc.txt
@@ -0,0 +1,99 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBFjVvh4BEADIlHUiO6IfkhcNm3J7ilXERgimvKuFNyLIUPZlDcESC1ORrv4y
+9slMDA5uojXctuLRC7nNdynLP+eFFfVUQ+hUXcV24AzyOE0CYo5c4PQA5TLe2AUC
+E9YqqfQF4XuNddY+UpcG47MuVDR+6SHkFkF29ATzpmShJj41lc7a9CdRib+62Wpe
+h7WJOFj/YoxMCBBzic4tiFNgoYobu+lLxyA4T2kCmxEaiZzc6eXBDDgJ0STL4+S8
+avpglaQ+mb5gHbH0yOtuwDG3sWyHKf7LSRVtzWvOqaGmRUmmDsSPjb5vQqvT8EMq
+UfqFFZhScLalthF3PhG0SLXPvoCoRm2aLkN+O3sv057RqaN8E39223mmz6EMXmLk
+H/U5qk2SUl3dx86dIQcB+2WUVu5zuFyfR1g6tD+DcqzxGc9XB7Gz/0TTDf3OimHb
+rp1x5i/04198ocRZT3MzXx8H25tLMS/rHmE87YdgPhMTWheSUevyhoGNHfAOcDwX
+P2oGzELXbLqHxtjENMEw2E996KrSmpcz7WOqIl3PHS1J6eRZoYQesXE+SZTeIiYb
+wD0kkZGYhBZbtLC4VWIuU2T3AL/2hF6aUh1tj1B6vcV0i3HpIHNbvPAF/I0NUhhc
+Gxwwi+ggG/MBHBbxkq7LvG5DfDbav0ZoZaov5dyhtX0CBWjVYATvjRfeAwARAQAB
+tBlFdUFuZHJlaCA8ZXVAZXVhbmRyZS5vcmc+iQI5BBMBCAAjBQJY1b4eAhsDBwsJ
+CAcDAgEGFQgCCQoLBBYCAwECHgECF4AACgkQgfkOw801YGCWzg//QtDpwgbDY9uC
+Y9a/RgUsbqGAYzSInsbyDCXrAAhWGzkDMLPeFp03Sw9QyCDe0wWu8L2H4hV/FN58
++4G6353ISwkqsf9R+P9lQs/5dwG7lp5/Gez8bZK3y7zFrdtVwcOCb4De+9fhPsgP
+9pRU8dHpLNo8Ui9IzbiYla7aGxXQdkXU2cvOuEoiuFgvcWU1KWNOWrjImATcC8EF
+8VaEaZYGRXz8lML8KgsAUxrjFkk6tqxrMlOLTjY0BuzcYZpt5XLZ2NuSIDYBoSib
+uBQ1H7DLGa+r0hnNjVEBmMOvFA1hbWa33h1AyYjYhoeVlBYpoHuDosEFqkwZ+otz
+zvImaRAOOFX1IehifTGEFie3imuOHdVuRjXb8SGu8Cgeby0T096A/vf+L1S35nc2
+mdRCUE/SIURW6hfH7uT6KqpokU86vozKmNzIcV3zhAXJ9UYwQqZgg2H3DOcTtZyE
+jVBl2glspoclsfR20T+g+qPqNDAgoDbC71fEAbUTACQau162utpHiabog7e7vyhI
+go5xdjxA8xb3Jtn39pYzbg75ArZqPbxHNZ38m00EBtC5EkD4DFh0cpQ2peuZIh1k
+c5bragCt8o6cV9t4jaq+TtVv4PrFEPqEd+w1FqqwabBq3xSsIgKg2X5rXQkktymB
+un+oN41wofuTZIoGNt8nnGb+skFBxgyJAlYEEwEKAEACGwMHCwkIBwMCAQYVCAIJ
+CgsEFgIDAQIeAQIXgBYhBFva6biy9sa8uw1s5YH5DsPNNWBgBQJmm7wJBQkRiGTr
+AAoJEIH5DsPNNWBgkxoQAJToyFUvioGD/91ztneUBOD2LOkQaIH4ZUTyIEjxpiep
+ry7QIC33wc2OLJo8QKfPZNaZlvy0EcuzSNZz2Za0NUg5y/lzDOCyjrRw7/szHcPX
+saUVHcjwYjQV/yiB1GJcuHefrEwXmgjQbkGmNZw8t7DuEm7qMrAdbAg3b+S4yX8X
+wgAHXhCx33E7sLCwGDzWzXKhWyn3RNoDtTrsSyoXEUi2tUAJ6oEVIQd2MMH3gMxt
+f31sg+cWTMdx7s2ZYatQ9hYOu86HR0O5fGD4T9Ae1TEhV8t3ZiUvozB/BfMUYwGx
+UajxAN+QJZJrsuc98MlqTDk3vViqCyptFJaIjZHaLrB3rhtw9VISnEnXY1WsiL4R
++0R99xKt6D/yZXljXS1HR3w9VLpzE0NikYVkYBbsFFTZ8SrMwVCzY+wWjP0miQ2X
+gAvX3+Lr1PeAmigKkCnt9gKuZz5b2hD0WWb7P9HXLR04XtOq2QOvA5kObl51C+Af
+BcdEM9Q9FzQHqa5ofq0ZeS4WL72nkJFuAiGhi9kcpV70YHBMk0BuSIEuctGempuJ
+4CeH6X2pKMHNJ5ijctZJiMPB8V2AzlWPber44IXD9f8B6HAQsNpZJ4+rWw2zCeKf
+Jj/n0UIqwEU9vG1KMU3OaAqrlaP0ePd3+BgjgIemMSwzNtSamtVc8TbqodXWKQF/
+tCFwYXBvLmltIFN1cHBvcnQgPHN1cHBvcnRAcGFwby5pbT6JAlcEEwEIAEEWIQRb
+2um4svbGvLsNbOWB+Q7DzTVgYAUCaBSRgAIbAwUJEYhk6wULCQgHAgIiAgYVCgkI
+CwIEFgIDAQIeBwIXgAAKCRCB+Q7DzTVgYJ7dEACtW+VEVBIj2v390ph+6bB9hnvq
+b78oC0EGryuKcKh797lld02jOW1WsIa2l7RzsS+XWDoY6c1DhCha75avLdr4uXgz
+oLjScXinRNa5y9ECYdhiAScTVE3PbqURFbd9txPAY6rCP0Lo05tfXrX7zs/5UK8W
+98FN4mSNge7YZEmOSSFFEQLaLRs/JH89GSlAXjfdke5cEiYqvabXXle/Zv/umm7B
+J2ndd1bUgRbcL4SznMMo+JAhHzr1KpuvhFj/Dv4IUeMsWwv5x0KOXcCSUNpEnN0v
+vw1aIFgI7JuQiYyMK0g3ubUfser5jOX7faz4Y5PJEcx6mzTrrdjC8vqCDaN+iA7O
+pIS951yaYs6oI09ev1xQKCO+tJVFgPYMSh4lTMHMndZnMvTGUZy1Mt1tE+u8/Ps+
+Ps6UbApinrL1saeg1ODsels16mANGsQ62LXQfqTaQomKD/FfGHokOIogdj/hwhGC
+XqhAoUtPoO5gQRqToenWybMNyu+fKo1gERU0WjXdqkledgY3Hi8JvjAaBgGyUmoi
+DdRzGt8CAYZ72Tpwj4/yziVPw+myP6JmOvaRatZbL6SvFUEGnmip+ZWhA3yw0oyC
+dSQ2TL6q2G/qjA/0cpxTGEyiObSLqlZgamV8i+bpqXn6yt3CoUk4xm0jsChHK9zF
+w+7WzwhBSyDeeAIm8rkCDQRY1b4eARAA87doimtYEpLB7UxZD0OVC6lBdrBwwpa9
+8pGy6vwMYEgjjefHjwPUikMtjvqg6aIxwDdVFCyYBxJ9+Gn8zJAc48XCVzEYGk1d
+jPtba48Mf1Zbg/Til8Jp4ZknbwiD+2dKnYVO/Jmi6tdFBtb1cHo95FZcbH+BLtmQ
+PjXiK+049AxEahA0KA0bTT1RMt4dcaGK7sv1YzQ4tQiN8hZDOh1tQoEQOdKyOxg7
+bzrorWcvvS6arNJlqr3QKVmcaEMLJRXM4jXfmVWkI4Dui0p5A7bUZADm5RYg+9dD
+kMt/2KPe4u26rCq52akDEVJNJ9dlV2PSPeHOxC31YlahpNdtxC7U1YoiM+QU+YkE
+ZCY9Roos04BmOnH7I5MFsL2m6ZlmHckPObGKFbjQD898g2i3bUrVkTR1ouPlBwtN
+ydgYgYCK0G7l6DRrnI9G9X7J7XZ7WgaGYBcoMFupF1Ixy3hH56xl9wtl+Pc4NKwU
+7xM3WcK9itMYHNs9utRBH+Tm5jhkbduytX8KwsSh2yk7HEXh7+/EuL97jf/S7x37
+yBGgaGB6jAnMcuwIonZPtNnTQzv42Q50wCR9jVocgfjyb65Zyt+Mx4VDFR9IuZ1I
+2ggy/59O1XdrGxOalorxNjFnmslSkRj3IvNLRBgNYuNhScxogNbma8VzSJp7MsUM
+kvHK/YiqEyEAEQEAAYkCHwQYAQgACQUCWNW+HgIbDAAKCRCB+Q7DzTVgYPm2EACy
+cg9Z8rs/Pr5fK6o+ymfWkUlvl3hv8M+sawKnWNzlbx8F2MN96YDpoTYXzFID/1Mu
+TrMWXYyDuzc8imth/SaFUrALP3i0AzkAnMF67gTQ+s7sbffZGYeqP16eJSCPb7i2
+nyp3q/n1pbhR4Tsv8BN0N1cAmwvZ6W5s8HhJfUPN0eyMWxIbRReR1dMClUc1vxl+
+4lgA6B6ZAAENrFJ0KR4RNcDq3T5tHQ/QRG00UB3pFIiEPHBaDkclmyQcG35WOuIq
+GhY8jPiYuPaTTMpIni80BVtVGMoZFyXk77/d8puF8jItpobDs52u5htW6Jyr2yo+
+qml2+4BR1Fx4fwEbrugjQEcwYegU6fu3NW67YRo0/y/AsawHltqBOcFSEbpPJnSo
+haCtwbl3NT/r7J6d+oqDkNMvij3Kfj0m8OqA6e4Ux87XFv9xKAIMh53lgprWGIuk
+ZlabUzaXnsh8KkT/znpQjdXMPc6KaH1UIZ8QxFIM1T61lmhO1Eer6x285V71jHhk
+2LZnZqP/cZGG0M2Vu8vdEC1Bsg8EPinghtvVtxIcoBst5slUG0Wl8sIM8ztmL6L4
+wEw2u8GUK1zaFROyfbi6JzbUZckZUwHxT+oQiw18LhOlkR14Xbu2hSm6tC0Bhb+Y
+WOMD1KfL51LRmE/8KKqkpLkmIczb5rigSBz7OmA/HbkCDQRgu5+vARAA51L+4ljn
+o0DWt32er+JZB0+qhUadvjbsyKRSdlcu3j+fF2aIs+hyT3AlPB3/eSV8GlxMEuEu
+5l3cNcetIXfCSHtOfBJOjGUklUJpS36HBrAGI9cE6y5p0PPj1dCHkJTY9WsrQSNX
+y/r9yD7fiXS7em/oN78HBFFPXn88cdhNCmeRiKW9pnogkOoWvrXHe16noAed3kI/
+4f3KBNsyyaHaWid8W4/PeKRPziQjQtPuHdEr+6B4WO0KC2JXU0G0oD8ocn5eImaS
+X+jhD2cStnCWgfnG3qAQemhjclPgiCU6DPrgCSex+xKb8X3J3yUNT7mObM03bxW1
+vNtHrolBChFuvZluLmhTT0nOg+GfLWeYV0CKgQ+NIq1C4RdAmDZCCC5sjVfyiSGe
+R5G0NavlKwhRycr7h/92gk44/TknxO8Rz0N8Xd18PL749DI4ntPcRW23JJQU8OVk
+cx+3MPHPkNDuvZGWfG/spHHc1k3mXljXZ86bTapzI34XooPdK1CCewzdHPud4wlH
+BqVicSYXGBfVJNUTR5khOflcytcvttLdIK1RV/dJwKZuE5d+UD61+0MafQvNXFol
+AfMQgBOo2K5neMfj/98EkH3NV4NBIN0IyQfuiQ+3G8gyacw0d42vgi2QgK/SNAeK
+wyg9nenrf8+u4lz52VFjMmfEVVWTxWwTDk8AEQEAAYkCPAQYAQgAJgIbIBYhBFva
+6biy9sa8uw1s5YH5DsPNNWBgBQJk/eCDBQkIBKfUAAoJEIH5DsPNNWBgXdUP/27B
+0vAdg9zwhOvRXTgYRtAZjsPPGegc9o8ACxt74aKOYGYICo0Xnru/M4fXVS4LLRf1
+c+iuf9NxonWTJUnORts7i9oM2CDWs3HT3yc06GPwfvHTPUrCFwiOWroD06op1SgT
+I2stjSPO4qK1RrNAnFFZgh9d/Z7NvDiqwlt1XbJB3ZcZJHOSZnhxlOfedPlgqxLw
+z7qztTFm9l4Y816I80CG3/dVDTlqtKQfJDk7RfyTavxLSPR4TcNaYWoqjGnzb48C
+QXyreSrL6OGdReoBuNw8eQSxohBWI3PKBn/G+4HvzlvDFa745T9B4yTnvVJh2SF0
+Cqbg5FcRsHpTdL2qo9e1MleeBkGZnxWFKWZWcJoczD1pxjZGIFNI1aMxoSMB0yA6
+T0PAbuproAk2TXn8NDzsFJP6ERM+3c1ARNiwZ8wCjiNChxX5cEh4ciNltkQ54ZF/
+7SIQWBxcJ8IwHuckIwr30Om0lhOiuPbZWrpLX0/dBCU4qdwOVaQdaRopJU7E2XIW
+rN/8i+o7WAE9xdIdVHZg0H+WPWa/lOs5yfsSXES3Y7wxOR8chW038/y82TECk7LP
++dZyFKJ26VSvqPHxMqmji/UJjOB+K5xaaCflnzsKzsaEf5fRCIrCp6ldIXh0/x0s
+pqsEJPXPOb40oBnNzLOSv9X++wKq+oXWEX8OSqhr
+=bNun
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/src/content/s b/src/content/s
new file mode 120000
index 0000000..dc1dc0c
--- /dev/null
+++ b/src/content/s
@@ -0,0 +1 @@
+/dev/null \ No newline at end of file
diff --git a/src/content/security.txt b/src/content/security.txt
deleted file mode 120000
index abdf74b..0000000
--- a/src/content/security.txt
+++ /dev/null
@@ -1 +0,0 @@
-.well-known/security.txt \ No newline at end of file
diff --git a/src/content/style.css b/src/content/style.css
index 653092f..dbd7967 100644
--- a/src/content/style.css
+++ b/src/content/style.css
@@ -1,3 +1,35 @@
+:root {
+ --color-fg: black;
+ --color-bg: white;
+ --color-2nd-fg: #555555;
+ --color-2nd-bg: #f5f5f5;
+ --color-pre-border: hsla(0, 100%, 0%, 30%);
+
+ --link-url: url(img/link/light.svg);
+
+ color: var(--color-fg);
+ background-color: var(--color-bg);
+}
+
+@media (prefers-color-scheme: dark) {
+ :root {
+ --color-fg: white;
+ --color-bg: black;
+ --color-2nd-fg: #aaaaaa;
+ --color-2nd-bg: #222222;
+ --color-pre-border: hsla(50, 100%, 70%, 10%);
+
+ --link-url: url(img/link/dark.svg);
+ }
+
+ a {
+ color: hsl(211, 100%, 60%);
+ &:visited {
+ color: hsl(242, 100%, 80%);
+ }
+ }
+}
+
/* General declarations */
body {
@@ -6,7 +38,7 @@ body {
max-width: 750px;
}
-.simple-icon {
+.icon {
width: 22px;
height: 22px;
vertical-align: middle;
@@ -16,7 +48,6 @@ body {
/* Navigation header */
header {
- border-bottom: 2px solid black;
margin-bottom: 30px;
padding: 12px 0px 12px 0px;
}
@@ -29,7 +60,6 @@ nav .nav-row {
}
nav a {
- color: maroon;
font-size: 18px;
margin: 12px;
text-decoration: none;
@@ -40,7 +70,6 @@ nav ul, nav li {
}
nav ul li a {
- color: black;
font-size: 14px;
margin: 6px;
}
@@ -48,51 +77,50 @@ nav ul li a {
/* Article bodies */
-div.header {
- color: #555;
+#published-at, #updated-at {
+ color: var(--color-2nd-fg);
font-size: 14px;
font-style: italic;
}
blockquote {
font-style: italic;
- color: dimgrey;
+ color: var(--color-2nd-fg);
padding-left: 10px;
- border-left: 3px solid #ccc;
+ border-left: 3px solid var(--color-2nd-fg);
}
-ul.no-style {
+.collection-list {
list-style-type: none;
}
-ul.no-style li {
+.collection-list li {
margin: 20px 0px;
}
/* Footer */
-footer {
- border-top: solid 2px black;
+body > footer {
font-size: 14px;
- margin-top: 30px;
padding: 12px 0px 12px 0px;
}
-footer li {
+body > footer li {
list-style-type: none;
margin-top: 10px;
}
-footer li a {
+body > footer li a {
margin-left: 5px;
}
-div.post-footer, div.footnotes {
- border-top: 0.5px solid #555;
+.content-footer, .page-footer, #footnotes {
+ border-top: 0.5px solid var(--color-2nd-fg);
}
+
/* Code blocks */
/* The "lineno" class is the default generated by Rouge for table-row in code blocks, see:
@@ -101,14 +129,16 @@ pre.lineno {
margin-right: 3px;
padding-right: 3px;
border-right: 1px solid;
- border-color: hsla(0, 0%, 0%, 0.3);
+ border-color: var(--color-pre-border);
text-align: right;
user-select: none;
}
-pre.highlight {
- border: 1px solid #ccc;
+.highlight, .listingblock .content {
+ border: 1px solid;
+ border-color: var(--color-pre-border);
border-radius: 10px;
+ background-color: var(--color-2nd-bg);
}
pre {
@@ -119,7 +149,8 @@ pre {
/* Code block anchors */
a.code-line-anchor {
- color: black;
+ color: var(--color-fg);
+ user-select: none;
text-decoration: none;
}
@@ -131,7 +162,7 @@ a.code-line-anchor:hover {
/* Header anchor */
div.header-anchor {
- color: black;
+ color: var(--color-fg);
text-decoration: none;
display: block;
margin-bottom: 15px;
@@ -158,8 +189,26 @@ div.header-anchor:hover img {
/* Plaintext code block links */
-div.plaintext-link {
+.plaintext {
margin: auto auto 0 auto;
text-align: right;
font-family: monospace;
}
+
+a.anchor {
+ background: transparent var(--link-url) center right no-repeat;
+ padding-right: 22px;
+ margin-left: 10px;
+ visibility: hidden;
+}
+
+h2:hover a.anchor {
+ visibility: visible;
+}
+
+
+/* Fix asciidoc shortcoming */
+
+.line-through {
+ text-decoration: line-through;
+}
diff --git a/src/headers/de.txt b/src/headers/de.txt
new file mode 100644
index 0000000..ca848a3
--- /dev/null
+++ b/src/headers/de.txt
@@ -0,0 +1,13 @@
+en/blog/ Blog
+
+en/til/ TIL
+
+en/podcast/ Podcasts
+
+en/screencast/ Screencasts
+
+en/pastebin/ Pastebins
+
+en/slide/ Slides
+
+en/about.html About
diff --git a/src/headers/en.txt b/src/headers/en.txt
new file mode 100644
index 0000000..ca848a3
--- /dev/null
+++ b/src/headers/en.txt
@@ -0,0 +1,13 @@
+en/blog/ Blog
+
+en/til/ TIL
+
+en/podcast/ Podcasts
+
+en/screencast/ Screencasts
+
+en/pastebin/ Pastebins
+
+en/slide/ Slides
+
+en/about.html About
diff --git a/src/headers/eo.txt b/src/headers/eo.txt
new file mode 100644
index 0000000..ca848a3
--- /dev/null
+++ b/src/headers/eo.txt
@@ -0,0 +1,13 @@
+en/blog/ Blog
+
+en/til/ TIL
+
+en/podcast/ Podcasts
+
+en/screencast/ Screencasts
+
+en/pastebin/ Pastebins
+
+en/slide/ Slides
+
+en/about.html About
diff --git a/src/headers/es.txt b/src/headers/es.txt
new file mode 100644
index 0000000..ca848a3
--- /dev/null
+++ b/src/headers/es.txt
@@ -0,0 +1,13 @@
+en/blog/ Blog
+
+en/til/ TIL
+
+en/podcast/ Podcasts
+
+en/screencast/ Screencasts
+
+en/pastebin/ Pastebins
+
+en/slide/ Slides
+
+en/about.html About
diff --git a/src/headers/fr.txt b/src/headers/fr.txt
new file mode 100644
index 0000000..ca848a3
--- /dev/null
+++ b/src/headers/fr.txt
@@ -0,0 +1,13 @@
+en/blog/ Blog
+
+en/til/ TIL
+
+en/podcast/ Podcasts
+
+en/screencast/ Screencasts
+
+en/pastebin/ Pastebins
+
+en/slide/ Slides
+
+en/about.html About
diff --git a/src/headers/pt.txt b/src/headers/pt.txt
new file mode 100644
index 0000000..48f2523
--- /dev/null
+++ b/src/headers/pt.txt
@@ -0,0 +1,13 @@
+#
+
+pt/hea/ HEA
+
+#
+
+#
+
+#
+
+#
+
+pt/sobre.html Sobre
diff --git a/src/headers/ref.txt b/src/headers/ref.txt
new file mode 100644
index 0000000..ca848a3
--- /dev/null
+++ b/src/headers/ref.txt
@@ -0,0 +1,13 @@
+en/blog/ Blog
+
+en/til/ TIL
+
+en/podcast/ Podcasts
+
+en/screencast/ Screencasts
+
+en/pastebin/ Pastebins
+
+en/slide/ Slides
+
+en/about.html About
diff --git a/src/linkonly-dirs.txt b/src/linkonly-dirs.txt
new file mode 100644
index 0000000..3a038f0
--- /dev/null
+++ b/src/linkonly-dirs.txt
@@ -0,0 +1,5 @@
+src/content/css/
+src/content/favicon/
+src/content/images/
+src/content/resources/
+src/content/static/
diff --git a/src/names/categories/de.txt b/src/names/categories/de.txt
new file mode 100644
index 0000000..36e8f75
--- /dev/null
+++ b/src/names/categories/de.txt
@@ -0,0 +1 @@
+categories
diff --git a/src/names/categories/en.txt b/src/names/categories/en.txt
new file mode 100644
index 0000000..36e8f75
--- /dev/null
+++ b/src/names/categories/en.txt
@@ -0,0 +1 @@
+categories
diff --git a/src/names/categories/eo.txt b/src/names/categories/eo.txt
new file mode 100644
index 0000000..36e8f75
--- /dev/null
+++ b/src/names/categories/eo.txt
@@ -0,0 +1 @@
+categories
diff --git a/src/names/categories/es.txt b/src/names/categories/es.txt
new file mode 100644
index 0000000..36e8f75
--- /dev/null
+++ b/src/names/categories/es.txt
@@ -0,0 +1 @@
+categories
diff --git a/src/names/categories/fr.txt b/src/names/categories/fr.txt
new file mode 100644
index 0000000..36e8f75
--- /dev/null
+++ b/src/names/categories/fr.txt
@@ -0,0 +1 @@
+categories
diff --git a/src/names/categories/pt.txt b/src/names/categories/pt.txt
new file mode 100644
index 0000000..7216072
--- /dev/null
+++ b/src/names/categories/pt.txt
@@ -0,0 +1 @@
+categorias
diff --git a/src/names/categories/ref.txt b/src/names/categories/ref.txt
new file mode 100644
index 0000000..36e8f75
--- /dev/null
+++ b/src/names/categories/ref.txt
@@ -0,0 +1 @@
+categories
diff --git a/src/names/category/de.txt b/src/names/category/de.txt
new file mode 100644
index 0000000..46e72f3
--- /dev/null
+++ b/src/names/category/de.txt
@@ -0,0 +1 @@
+category
diff --git a/src/names/category/en.txt b/src/names/category/en.txt
new file mode 100644
index 0000000..46e72f3
--- /dev/null
+++ b/src/names/category/en.txt
@@ -0,0 +1 @@
+category
diff --git a/src/names/category/eo.txt b/src/names/category/eo.txt
new file mode 100644
index 0000000..46e72f3
--- /dev/null
+++ b/src/names/category/eo.txt
@@ -0,0 +1 @@
+category
diff --git a/src/names/category/es.txt b/src/names/category/es.txt
new file mode 100644
index 0000000..46e72f3
--- /dev/null
+++ b/src/names/category/es.txt
@@ -0,0 +1 @@
+category
diff --git a/src/names/category/fr.txt b/src/names/category/fr.txt
new file mode 100644
index 0000000..46e72f3
--- /dev/null
+++ b/src/names/category/fr.txt
@@ -0,0 +1 @@
+category
diff --git a/src/names/category/pt.txt b/src/names/category/pt.txt
new file mode 100644
index 0000000..ae2c934
--- /dev/null
+++ b/src/names/category/pt.txt
@@ -0,0 +1 @@
+categoria
diff --git a/src/names/category/ref.txt b/src/names/category/ref.txt
new file mode 100644
index 0000000..46e72f3
--- /dev/null
+++ b/src/names/category/ref.txt
@@ -0,0 +1 @@
+category
diff --git a/src/pages/en b/src/pages/en
new file mode 120000
index 0000000..2d811cf
--- /dev/null
+++ b/src/pages/en
@@ -0,0 +1 @@
+../content/en \ No newline at end of file
diff --git a/src/pages/pt b/src/pages/pt
new file mode 120000
index 0000000..12d2087
--- /dev/null
+++ b/src/pages/pt
@@ -0,0 +1 @@
+../content/pt \ No newline at end of file
diff --git a/src/slides/en b/src/slides/en
new file mode 120000
index 0000000..5cbb4c2
--- /dev/null
+++ b/src/slides/en
@@ -0,0 +1 @@
+../content/en/slide \ No newline at end of file
diff --git a/src/static.conf b/src/static.conf
new file mode 100644
index 0000000..45f5028
--- /dev/null
+++ b/src/static.conf
@@ -0,0 +1,11 @@
+export headers_dir=src/headers
+export names_dir=src/names
+export root_dir=src/content
+export list_addr='~euandreh/public-inbox@lists.sr.ht'
+export discussions_url_prefix='https://lists.sr.ht/~euandreh/public-inbox?search='
+export sourcecode_url_prefix="$sourcecode_url/tree"
+export author='EuAndreh'
+export site_name="EuAndreh's website"
+export feed_title="EuAndreh's blog"
+export feed_url='feed.articles.en.xml'
+export feed_alternate_url='./'
diff --git a/src/symlinks.txt b/src/symlinks.txt
new file mode 100644
index 0000000..ebd28aa
--- /dev/null
+++ b/src/symlinks.txt
@@ -0,0 +1,74 @@
+# Paths that existed as I changed my mind on where to put them
+en/favicon.ico favicon.ico
+favicon/favicon.ico favicon.ico
+static/favicon.svg img/favicon.svg
+css/styles.css style.css
+static/style.css style.css
+public-key.txt public.asc.txt
+atom.xml en/blog/feed.xml
+feed.xml en/blog/feed.xml
+rss.xml en/blog/feed.xml
+feed.atom en/blog/feed.xml
+feed.en.atom en/blog/feed.xml
+feed.til.en.atom en/til/feed.xml
+
+# Some I think existed, and where the logs show a few hundred missing hits
+images/atom.svg img/atom.svg
+images/link.svg img/link/light.svg
+images/lock.svg img/lock/light.svg
+images/envelope.svg img/envelope/light.svg
+
+# "Official" redirections
+index.html en/index.html
+security.txt .well-known/security.txt
+
+# Current published pages
+static/attachments/autoqemu.tar.gz en/screencast/2021/02/07/autoqemu.tar.gz
+static/attachments/cargo2nix-demo.tar.gz en/blog/2020/10/05/cargo2nix-demo.tar.gz
+static/attachments/cargo2nix.tar.gz en/blog/2020/10/05/cargo2nix.tar.gz
+static/attachments/fallible.tar.gz en/blog/2021/02/17/fallible.tar.gz
+static/attachments/swift2nix-demo.tar.gz en/blog/2020/10/05/swift2nix-demo.tar.gz
+static/attachments/swift2nix.tar.gz en/blog/2020/10/05/swift2nix.tar.gz
+static/atom.svg img/atom.svg
+static/envelope.svg img/envelope/light.svg
+static/link.svg img/link/light.svg
+static/lock.svg img/lock/light.svg
+static/lord-favicon.ico favicon.ico
+static/lord-favicon.png favicon.png
+static/lord-favicon.svg img/favicon.svg
+
+resources/podcasts/2020-12-19-a-test-entry.flac en/podcast/2020/12/19/test-entry.flac
+resources/podcasts/2020-12-19-a-test-entry.flac.torrent en/podcast/2020/12/19/test-entry.flac.torrent
+resources/podcasts/2020-12-19-a-test-entry.ogg en/podcast/2020/12/19/test-entry.ogg
+resources/screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.webm en/screencast/2021/02/07/autoqemu.webm
+resources/screencasts/2021-02-07-autoqemu-automate-installation-and-ssh-setup-of-iso-os-images.webm.torrent en/screencast/2021/02/07/autoqemu.webm.torrent
+
+
+til.html en/til/index.html
+podcast.en.html en/podcast/index.html
+screencasts.en.html en/screencast/index.html
+pastebins.en.html en/pastebin/index.html
+about.html en/about.html
+articles-by-category.html en/blog/categories.html
+til-by-category.html en/til/categories.html
+podcast-episodes-by-category.html en/podcast/categories.html
+screencasts-by-category.html en/screencast/categories.html
+pastebins-by-category.html en/pastebin/categories.html
+
+feed.articles.en.atom en/blog/feed.xml
+feed.blog.en.atom en/blog/feed.xml
+feed.posts.en.atom en/blog/feed.xml
+feed.tils.en.atom en/til/feed.xml
+feed.podcasts.en.atom en/podcast/feed.xml
+feed.screencasts.en.atom en/screencast/feed.xml
+feed.pastebins.en.atom en/pastebin/feed.xml
+
+feed.articles-by-category.en.article-review.atom en/blog/feed.article-review.xml
+feed.articles-by-category.en.mediator.atom en/blog/feed.mediator.xml
+feed.articles-by-category.en.presentation.atom en/blog/feed.presentation.xml
+feed.articles-by-category.en.video-review.atom en/blog/feed.video-review.xml
+feed.pastebins-by-category.en.guix.atom en/pastebin/feed.guix.xml
+feed.pastebins-by-category.en.nix.atom en/pastebin/feed.nix.xml
+feed.tils-by-category.en.ci.atom en/til/feed.ci.xml
+feed.tils-by-category.en.git.atom en/til/feed.git.xml
+feed.tils-by-category.en.shell.atom en/til/feed.shell.xml
diff --git a/static/atom.svg b/static/atom.svg
deleted file mode 100644
index 37bace2..0000000
--- a/static/atom.svg
+++ /dev/null
@@ -1,5 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
- <path d="M576 1344q0 80-56 136t-136 56-136-56-56-136 56-136 136-56 136 56 56 136zm512 123q2 28-17 48-18 21-47 21h-135q-25 0-43-16.5t-20-41.5q-22-229-184.5-391.5t-391.5-184.5q-25-2-41.5-20t-16.5-43v-135q0-29 21-47 17-17 43-17h5q160 13 306 80.5t259 181.5q114 113 181.5 259t80.5 306zm512 2q2 27-18 47-18 20-46 20h-143q-26 0-44.5-17.5t-19.5-42.5q-12-215-101-408.5t-231.5-336-336-231.5-408.5-102q-25-1-42.5-19.5t-17.5-43.5v-143q0-28 20-46 18-18 44-18h3q262 13 501.5 120t425.5 294q187 186 294 425.5t120 501.5z"
- fill="#EA990E" />
-</svg>
diff --git a/static/attachments/apollo-server-demo.tar.gz b/static/attachments/apollo-server-demo.tar.gz
deleted file mode 100644
index 72bb6d5..0000000
--- a/static/attachments/apollo-server-demo.tar.gz
+++ /dev/null
Binary files differ
diff --git a/static/attachments/autoqemu.tar.gz b/static/attachments/autoqemu.tar.gz
deleted file mode 100644
index 3022f14..0000000
--- a/static/attachments/autoqemu.tar.gz
+++ /dev/null
Binary files differ
diff --git a/static/attachments/cargo2nix-demo.tar.gz b/static/attachments/cargo2nix-demo.tar.gz
deleted file mode 100644
index 281a91c..0000000
--- a/static/attachments/cargo2nix-demo.tar.gz
+++ /dev/null
Binary files differ
diff --git a/static/attachments/cargo2nix.tar.gz b/static/attachments/cargo2nix.tar.gz
deleted file mode 100644
index 8a9985a..0000000
--- a/static/attachments/cargo2nix.tar.gz
+++ /dev/null
Binary files differ
diff --git a/static/attachments/fallible.tar.gz b/static/attachments/fallible.tar.gz
deleted file mode 100644
index 7bf2a58..0000000
--- a/static/attachments/fallible.tar.gz
+++ /dev/null
Binary files differ
diff --git a/static/attachments/swift2nix-demo.tar.gz b/static/attachments/swift2nix-demo.tar.gz
deleted file mode 100644
index f688572..0000000
--- a/static/attachments/swift2nix-demo.tar.gz
+++ /dev/null
Binary files differ
diff --git a/static/attachments/swift2nix.tar.gz b/static/attachments/swift2nix.tar.gz
deleted file mode 100644
index bfab3f1..0000000
--- a/static/attachments/swift2nix.tar.gz
+++ /dev/null
Binary files differ
diff --git a/static/envelope.svg b/static/envelope.svg
deleted file mode 100644
index c2251f4..0000000
--- a/static/envelope.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
- <path d="M1664 1504v-768q-32 36-69 66-268 206-426 338-51 43-83 67t-86.5 48.5-102.5 24.5h-2q-48 0-102.5-24.5t-86.5-48.5-83-67q-158-132-426-338-37-30-69-66v768q0 13 9.5 22.5t22.5 9.5h1472q13 0 22.5-9.5t9.5-22.5zm0-1051v-24.5l-.5-13-3-12.5-5.5-9-9-7.5-14-2.5h-1472q-13 0-22.5 9.5t-9.5 22.5q0 168 147 284 193 152 401 317 6 5 35 29.5t46 37.5 44.5 31.5 50.5 27.5 43 9h2q20 0 43-9t50.5-27.5 44.5-31.5 46-37.5 35-29.5q208-165 401-317 54-43 100.5-115.5t46.5-131.5zm128-37v1088q0 66-47 113t-113 47h-1472q-66 0-113-47t-47-113v-1088q0-66 47-113t113-47h1472q66 0 113 47t47 113z" />
-</svg>
diff --git a/static/link.svg b/static/link.svg
deleted file mode 100644
index e5c7050..0000000
--- a/static/link.svg
+++ /dev/null
@@ -1,5 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<svg width="22" height="22" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg">
- <path fill-rule="evenodd"
- d="M7.775 3.275a.75.75 0 001.06 1.06l1.25-1.25a2 2 0 112.83 2.83l-2.5 2.5a2 2 0 01-2.83 0 .75.75 0 00-1.06 1.06 3.5 3.5 0 004.95 0l2.5-2.5a3.5 3.5 0 00-4.95-4.95l-1.25 1.25zm-4.69 9.64a2 2 0 010-2.83l2.5-2.5a2 2 0 012.83 0 .75.75 0 001.06-1.06 3.5 3.5 0 00-4.95 0l-2.5 2.5a3.5 3.5 0 004.95 4.95l1.25-1.25a.75.75 0 00-1.06-1.06l-1.25 1.25a2 2 0 01-2.83 0z" />
-</svg>
diff --git a/static/lock.svg b/static/lock.svg
deleted file mode 100644
index 1a4a18e..0000000
--- a/static/lock.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<svg width="22" height="22" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
- <path d="M640 768h512v-192q0-106-75-181t-181-75-181 75-75 181v192zm832 96v576q0 40-28 68t-68 28h-960q-40 0-68-28t-28-68v-576q0-40 28-68t68-28h32v-192q0-184 132-316t316-132 316 132 132 316v192h32q40 0 68 28t28 68z" />
-</svg>