aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorEuAndreh <eu@euandre.org>2020-02-06 03:58:52 -0300
committerEuAndreh <eu@euandre.org>2020-02-06 03:58:52 -0300
commitccadc0bdf0436cefe76318567d358f27a46bf3e5 (patch)
tree58d52309b1ea2b6b7c3a08feed04deb5d64b83d2
parentAdd code metadata annotation (diff)
parentSetup publish environment variables (diff)
downloadeuandre.org-ccadc0bdf0436cefe76318567d358f27a46bf3e5.tar.gz
euandre.org-ccadc0bdf0436cefe76318567d358f27a46bf3e5.tar.xz
Merge branch 'jekyll'
-rw-r--r--.build.yml2
-rw-r--r--.gitignore7
-rw-r--r--.gitmodules3
-rw-r--r--.ignore1
-rw-r--r--Gemfile3
-rw-r--r--Gemfile.lock61
-rw-r--r--TODOs.org4
-rw-r--r--_config.yml68
-rw-r--r--_includes/feed.atom34
-rw-r--r--_includes/public-inbox.html15
-rw-r--r--_layouts/default.html80
-rw-r--r--_layouts/index.html13
-rw-r--r--_layouts/page.html6
-rw-r--r--_layouts/pastebin.html11
-rw-r--r--_layouts/pastebins-listing.html11
-rw-r--r--_layouts/pastebins.html10
-rw-r--r--_layouts/post.html16
-rw-r--r--_pastebins/failure-on-guix-tex-live-importer.md (renamed from site/pastebin/failure-on-guix-tex-live-importer.org)7
-rw-r--r--_pastebins/inconsistent-hash-of-buildgomodule.md (renamed from site/pastebin/inconsistent-hash-of-buildgomodule.org)69
-rw-r--r--_pastebins/nix-exps.md (renamed from site/pastebin/nix-exps.org)7
-rw-r--r--_pastebins/nix-show-derivation.md (renamed from site/pastebin/nix-show-derivation.org)7
-rw-r--r--_pastebins/raku-tuple-type-annotation.md (renamed from site/pastebin/raku-tuple-type-annotation.org)11
-rw-r--r--_posts/2018-07-17-running-guix-on-nixos.md (renamed from site/posts/2018-07-17-running-guix-on-nixos.org)102
-rw-r--r--_posts/2018-08-01-verifying-npm-ci-reproducibility.md146
-rw-r--r--_posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md273
-rw-r--r--_posts/2019-06-02-stateless-os.md145
-rw-r--r--about.md29
l---------atom.xml1
-rwxr-xr-xcss.sh11
-rw-r--r--default.nix71
-rw-r--r--favicon.ico (renamed from site/favicon.ico)bin1150 -> 1150 bytes
-rw-r--r--feed.all.atom4
l---------feed.atom1
-rw-r--r--feed.en.atom4
-rw-r--r--feed.pt.atom4
l---------feed.xml1
-rw-r--r--gemset.nix266
-rw-r--r--hakyll/Main.hs125
-rw-r--r--images/atom.svg (renamed from site/images/atom.svg)0
-rw-r--r--images/envelope.svg (renamed from site/images/envelope.svg)0
-rw-r--r--images/lock.svg (renamed from site/images/lock.svg)0
-rw-r--r--index.md5
-rw-r--r--pastebin/skeleton.org7
-rwxr-xr-xpastebin/website-pastebin121
-rw-r--r--pastebins.md5
-rw-r--r--pt/index.md5
-rw-r--r--public-key.txt (renamed from site/root/public-key.txt)0
l---------rss.xml1
-rwxr-xr-xscripts/assert-content.sh99
-rwxr-xr-xscripts/bundix-gen.sh5
-rw-r--r--scripts/publish-env.sh (renamed from docs/env.sh)0
-rw-r--r--site.json39
-rw-r--r--site/index.html6
-rw-r--r--site/pages/about.org14
-rw-r--r--site/pastebins.html5
-rw-r--r--site/posts/2018-08-01-verifying-npm-ci-reproducibility.org85
-rw-r--r--site/posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.org145
-rw-r--r--site/posts/2019-06-02-stateless-os.org60
-rw-r--r--site/templates/default.html48
-rw-r--r--site/templates/pastebin-list.html7
-rw-r--r--site/templates/pastebin.html8
-rw-r--r--site/templates/post-list.html7
-rw-r--r--site/templates/post.html16
-rw-r--r--sitemap.xml29
-rw-r--r--slides/base.org3
m---------slides/reveal.js0
-rwxr-xr-xslides/website-slides75
-rw-r--r--sobre.md30
-rwxr-xr-xspelling/check-spelling.sh29
-rw-r--r--spelling/international.dic.txt2
-rw-r--r--spelling/pt_BR.dic.txt2
-rw-r--r--styles.css (renamed from site/css/styles.css)17
-rwxr-xr-xt/website.bats70
-rwxr-xr-xwebsite98
74 files changed, 1608 insertions, 1064 deletions
diff --git a/.build.yml b/.build.yml
index 29f67a6..5e3c5bf 100644
--- a/.build.yml
+++ b/.build.yml
@@ -16,5 +16,5 @@ tasks:
- docs: |
cd website/
nix-build -A publishScript
- source ./docs/env.sh
+ source ./scripts/publish-env.sh
./result/bin/publish.sh
diff --git a/.gitignore b/.gitignore
index e6894f9..d2dab7d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,8 @@
# Nix
/result
-# Hakyll
-/_cache/ \ No newline at end of file
+# Jekyll
+/.bundle/
+/vendor/
+/_site/
+/.sass-cache/ \ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 8d60864..0000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "slides/reveal.js"]
- path = slides/reveal.js
- url = https://github.com/hakimel/reveal.js.git
diff --git a/.ignore b/.ignore
deleted file mode 100644
index 6b5747f..0000000
--- a/.ignore
+++ /dev/null
@@ -1 +0,0 @@
-reveal.js \ No newline at end of file
diff --git a/Gemfile b/Gemfile
new file mode 100644
index 0000000..c99d6dd
--- /dev/null
+++ b/Gemfile
@@ -0,0 +1,3 @@
+source "https://rubygems.org"
+
+gem "jekyll"
diff --git a/Gemfile.lock b/Gemfile.lock
new file mode 100644
index 0000000..58faf53
--- /dev/null
+++ b/Gemfile.lock
@@ -0,0 +1,61 @@
+GEM
+ remote: https://rubygems.org/
+ specs:
+ addressable (2.7.0)
+ public_suffix (>= 2.0.2, < 5.0)
+ colorator (1.1.0)
+ concurrent-ruby (1.1.5)
+ em-websocket (0.5.1)
+ eventmachine (>= 0.12.9)
+ http_parser.rb (~> 0.6.0)
+ eventmachine (1.2.7)
+ ffi (1.12.2)
+ forwardable-extended (2.6.0)
+ http_parser.rb (0.6.0)
+ i18n (0.9.5)
+ concurrent-ruby (~> 1.0)
+ jekyll (3.8.5)
+ addressable (~> 2.4)
+ colorator (~> 1.0)
+ em-websocket (~> 0.5)
+ i18n (~> 0.7)
+ jekyll-sass-converter (~> 1.0)
+ jekyll-watch (~> 2.0)
+ kramdown (~> 1.14)
+ liquid (~> 4.0)
+ mercenary (~> 0.3.3)
+ pathutil (~> 0.9)
+ rouge (>= 1.7, < 4)
+ safe_yaml (~> 1.0)
+ jekyll-sass-converter (1.5.2)
+ sass (~> 3.4)
+ jekyll-watch (2.2.1)
+ listen (~> 3.0)
+ kramdown (1.17.0)
+ liquid (4.0.3)
+ listen (3.2.1)
+ rb-fsevent (~> 0.10, >= 0.10.3)
+ rb-inotify (~> 0.9, >= 0.9.10)
+ mercenary (0.3.6)
+ pathutil (0.16.2)
+ forwardable-extended (~> 2.6)
+ public_suffix (3.1.1)
+ rb-fsevent (0.10.3)
+ rb-inotify (0.10.1)
+ ffi (~> 1.0)
+ rouge (3.13.0)
+ safe_yaml (1.0.5)
+ sass (3.7.4)
+ sass-listen (~> 4.0.0)
+ sass-listen (4.0.0)
+ rb-fsevent (~> 0.9, >= 0.9.4)
+ rb-inotify (~> 0.9, >= 0.9.7)
+
+PLATFORMS
+ ruby
+
+DEPENDENCIES
+ jekyll
+
+BUNDLED WITH
+ 1.17.3
diff --git a/TODOs.org b/TODOs.org
index 1009539..8ad5c9c 100644
--- a/TODOs.org
+++ b/TODOs.org
@@ -1,3 +1,3 @@
* Tasks
-** TODO Translate articles
-https://groups.google.com/forum/#!topic/hakyll/KAhCO1GVELA
+** pt_BR hunspell dictionary
+** Line number
diff --git a/_config.yml b/_config.yml
new file mode 100644
index 0000000..8eabae1
--- /dev/null
+++ b/_config.yml
@@ -0,0 +1,68 @@
+---
+title: EuAndreh::blog
+url: "https://euandre.org"
+author:
+ name: EuAndreh
+ email: eu@euandre.org
+
+exclude:
+ - Gemfile
+ - Gemfile.lock
+ - gemset.nix
+ - README
+ - TODOs.org
+ - LICENSE
+ - default.nix
+ - utils.nix
+ - shell.nix
+ - vendor/
+ - spelling/
+ - scripts/
+
+collections:
+ pastebins:
+ output: true
+
+t:
+ about:
+ en: "About"
+ pt: "Sobre"
+ about_url:
+ en: '/about.html'
+ pt: '/sobre.html'
+ home:
+ en: "EuAndreh's blog"
+ pt: "Blog do EuAndreh"
+ home_url:
+ en: '/'
+ pt: '/pt/'
+ recent_posts:
+ en: 'Recent posts'
+ pt: 'Postagens recentes'
+ feed_title:
+ en: "EuAndreh's Feed"
+ pt: "Feed do EuAndreh"
+ feed_url:
+ en: '/feed.en.xml'
+ pt: '/feed.pt.xml'
+ date_format:
+ en: '%B %-d, %Y'
+ pt: '%-d de %B de %Y'
+ posted_on:
+ en: 'Posted on'
+ pt: 'Postado em'
+ alt:
+ blog_feed:
+ en: 'Blog feed'
+ pt: 'Feed do blog'
+ envelope_icon:
+ en: 'Envelope icon'
+ pt: 'Ícone de envelope'
+ lock_icon:
+ en: 'Lock icon'
+ pt: 'Ícone de fechadura'
+ footer:
+ en: >-
+ The content for this site is licensed under <a rel="license" href="https://creativecommons.org/licenses/by-sa/4.0/">CC-BY-SA</a>. The <a href="https://git.sr.ht/~euandreh/website">code</a> is <a href="https://git.sr.ht/~euandreh/website/tree/master/LICENSE">GPLv3 or later</a>.
+ pt: >-
+ O conteúdo desse site é licensiado sob a licença <a rel="license" href="https://creativecommons.org/licenses/by-sa/4.0/">CC-BY-SA</a>. O <a href="https://git.sr.ht/~euandreh/website">código</a> é <a href="https://git.sr.ht/~euandreh/website/tree/master/LICENSE">GPLv3 ou versões maiores</a>.
diff --git a/_includes/feed.atom b/_includes/feed.atom
new file mode 100644
index 0000000..bf0f4d4
--- /dev/null
+++ b/_includes/feed.atom
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="{{ include.lang }}">
+ <link href="{{ page.url | absolute_url }}" rel="self" type="application/atom+xml" />
+ <link href="{{ '/' | absolute_url }}" rel="alternate" type="text/html" hreflang="{{ include.lang }}" />
+ <updated>{{ site.time | date_to_xmlschema }}</updated>
+ <id>{{ page.url | absolute_url | xml_escape }}</id>
+
+ <title type="html">{{ site.title | smartify | xml_escape }}</title>
+ <subtitle>{{ site.t.feed_title[include.lang] | smartify | xml_escape }}</subtitle>
+
+ <author>
+ <name>{{ site.author.name | xml_escape }}</name>
+ <email>{{ site.author.email | xml_escape }}</email>
+ </author>
+
+ {% for post in include.posts %}
+ <entry xml:lang="{{ post.lang }}">
+ <title type="html">{{ post.title | smartify | strip_html | normalize_whitespace | xml_escape }}</title>
+ <link href="{{ post.url | absolute_url }}" rel="alternate" type="text/html" title="{{ post.title | xml_escape }}" />
+ <published>{{ post.date | date_to_xmlschema }}</published>
+ <updated>{{ post.updated_at | default: post.date | date_to_xmlschema }}</updated>
+ <id>{{ post.url | absolute_url | xml_escape }}</id>
+
+ <content type="html" xml:base="{{ post.url | absolute_url | xml_escape }}">{{ post.content | strip | xml_escape }}</content>
+
+ <author>
+ <name>{{ site.author.name | xml_escape }}</name>
+ <email>{{ site.author.email | xml_escape }}</email>
+ </author>
+
+ <summary type="html">{{ post.excerpt | strip_html | normalize_whitespace | xml_escape }}</summary>
+ </entry>
+ {% endfor %}
+</feed>
diff --git a/_includes/public-inbox.html b/_includes/public-inbox.html
new file mode 100644
index 0000000..478eac6
--- /dev/null
+++ b/_includes/public-inbox.html
@@ -0,0 +1,15 @@
+{% if page.lang == 'en' %}
+ Have a comment on this post? Start a discussion in my
+ <a href="https://lists.sr.ht/~euandreh/public-inbox">public inbox</a>
+ by sending an email to
+ <a href="mailto:~euandreh/public-inbox@lists.sr.ht?Subject=Re%3A%20{{ page.title | escape }}">~euandreh/public-inbox@lists.sr.ht</a>
+ [<a href="https://man.sr.ht/lists.sr.ht/etiquette.md">mailing list etiquette</a>],
+ or see <a href="https://lists.sr.ht/~euandreh/public-inbox?search={{ page.title | escape }}">existing discussions</a>.
+{% elsif page.lang == 'pt' %}
+ Tem um comentário sobre essa postagem? Comece uma discussão na minha
+ <a href="https://lists.sr.ht/~euandreh/public-inbox">caixa de entrada pública</a>
+ mandando um email para
+ <a href="mailto:~euandreh/public-inbox@lists.sr.ht?Subject=Re%3A%20{{ page.title | escape }}">~euandreh/public-inbox@lists.sr.ht</a>
+ [<a href="https://man.sr.ht/lists.sr.ht/etiquette.md">etiqueta da lista de email</a> (em inglês)],
+ ou procure por <a href="https://lists.sr.ht/~euandreh/public-inbox?search={{ page.title | escape }}">discussões existentes</a>.
+{% endif %}
diff --git a/_layouts/default.html b/_layouts/default.html
new file mode 100644
index 0000000..109d408
--- /dev/null
+++ b/_layouts/default.html
@@ -0,0 +1,80 @@
+<!DOCTYPE html>
+<html lang="{{ page.lang }}">
+ <head>
+ <meta charset="UTF-8">
+ <meta http-equiv="X-UA-Compatible" content="IE=edge">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <link rel="stylesheet" href="/styles.css">
+ <link rel="alternate" type="application/atom+xml" href="{{ site.t.feed_url[page.lang] }}" title="{{ site.t.feed_title[page.lang] }}" />
+
+ <title>{{ page.title }}</title>
+ <meta property="og:site_name" content="{{ site.title }}" />
+ <meta name="author" content="{{ site.author.name }}" />
+ <meta property="og:locale" content="{{ page.lang }}" />
+ <meta property="og:title" content="{{ page.title }}" />
+
+ <link rel="canonical" href="{{ page.url | absolute_url }}" />
+ <meta property="og:url" content="{{ page.url | absolute_url }}" />
+
+ {% if page.date %}
+ <meta property="og:type" content="article" />
+ <meta property="article:published_time" content="{{ page.date | date_to_xmlschema }}" />
+ {% endif %}
+ </head>
+ <body>
+ <header>
+ <nav>
+ <div id="nav-left">
+ <a href="{{ site.t.home_url[page.lang] }}">{{ site.t.home[page.lang] }}</a>
+ </div>
+ <div id="nav-right">
+ <ul>
+ {% if page.ref != nil %}
+ {% assign lposts=site.posts | where:"ref", page.ref | sort: 'lang' %}
+ {% for lpost in lposts %}
+ <li>
+ <a href="{{ lpost.url }}" class="{{ lpost.lang }}">{{ lpost.lang }}</a>
+ </li>
+ {% endfor %}
+ {% endif %}
+
+ {% if page.ref != nil %}
+ {% assign lpages=site.pages | where:"ref", page.ref | sort: 'lang' %}
+ {% for lpage in lpages %}
+ <li>
+ <a href="{{ lpage.url }}" class="{{ lpage.lang }}">{{ lpage.lang }}</a>
+ </li>
+ {% endfor %}
+ {% endif %}
+ </ul>
+
+ <a href="{{ site.t.about_url[page.lang] }}">{{ site.t.about[page.lang] }}</a>
+ <a href="{{ site.t.feed_url[page.lang] }}">
+ <img class="simple-icon" src="/images/atom.svg" alt="{{ site.t.alt.blog_feed[page.lang] }}" />
+ </a>
+ </div>
+ </nav>
+ </header>
+
+ <main role="main">
+ <h1>{{ page.title }}</h1>
+ {{ content }}
+ </main>
+
+ <footer>
+ <ul>
+ <li>
+ <img class="simple-icon" src="/images/envelope.svg" alt="{{ site.t.alt.envelope_icon[page.lang] }}" />
+ <a href="mailto:eu@euandre.org">eu@euandre.org</a>
+ </li>
+ <li>
+ <img class="simple-icon" src="/images/lock.svg" alt="{{ site.t.alt.lock_icon[page.lang] }}" />
+ <a href="/public-key.txt">81F90EC3CD356060</a>
+ </li>
+ </ul>
+ <p>
+ {{ site.t.footer[page.lang] }}
+ </p>
+ </footer>
+ </body>
+</html>
diff --git a/_layouts/index.html b/_layouts/index.html
new file mode 100644
index 0000000..5b45898
--- /dev/null
+++ b/_layouts/index.html
@@ -0,0 +1,13 @@
+---
+layout: default
+---
+<h1>{{ site.t.recent_posts[page.lang] }}</h1>
+<ul>
+ {%- for post in site.posts -%}
+ {% if post.lang == page.lang %}
+ <li>
+ <a href="{{ post.url }}">{{ post.title | escape }}</a> - {{ post.date | date: "%b %-d, %Y" }}
+ </li>
+ {% endif %}
+ {%- endfor -%}
+</ul>
diff --git a/_layouts/page.html b/_layouts/page.html
new file mode 100644
index 0000000..57116c1
--- /dev/null
+++ b/_layouts/page.html
@@ -0,0 +1,6 @@
+---
+layout: default
+---
+<article class="post">
+ {{ content }}
+</article>
diff --git a/_layouts/pastebin.html b/_layouts/pastebin.html
new file mode 100644
index 0000000..51194d7
--- /dev/null
+++ b/_layouts/pastebin.html
@@ -0,0 +1,11 @@
+---
+layout: default
+---
+<article>
+ <section class="header">
+ Posted on {{ page.date | date: "%b %-d, %Y" }}
+ </section>
+ <section>
+ {{ content }}
+ </section>
+</article>
diff --git a/_layouts/pastebins-listing.html b/_layouts/pastebins-listing.html
new file mode 100644
index 0000000..2829411
--- /dev/null
+++ b/_layouts/pastebins-listing.html
@@ -0,0 +1,11 @@
+---
+layout: default
+---
+<h1>Pastebins listing</h1>
+<ul>
+ {%- for pastebin in site.pastebins -%}
+ <li>
+ <a href="{{ pastebin.url | relative_url }}">{{ pastebin.title | escape }}</a> - {{ pastebin.date | date: "%b %-d, %Y" }}
+ </li>
+ {%- endfor -%}
+</ul>
diff --git a/_layouts/pastebins.html b/_layouts/pastebins.html
new file mode 100644
index 0000000..6b7830c
--- /dev/null
+++ b/_layouts/pastebins.html
@@ -0,0 +1,10 @@
+---
+layout: default
+---
+<ul>
+ {%- for pastebin in site.pastebins -%}
+ <li>
+ <a href="{{ pastebin.url | relative_url }}">{{ pastebin.title | escape }}</a> - {{ pastebin.date | date: "%b %-d, %Y" }}
+ </li>
+ {%- endfor -%}
+</ul>
diff --git a/_layouts/post.html b/_layouts/post.html
new file mode 100644
index 0000000..629e02f
--- /dev/null
+++ b/_layouts/post.html
@@ -0,0 +1,16 @@
+---
+layout: default
+---
+<article>
+ <section class="header">
+ {{ site.t.posted_on[page.lang] }} {{ page.date | date: site.t.date_format[page.lang] }}
+ </section>
+ <section>
+ {{ content }}
+ </section>
+ <section>
+ <p>
+ {% include public-inbox.html %}
+ </p>
+ </section>
+</article>
diff --git a/site/pastebin/failure-on-guix-tex-live-importer.org b/_pastebins/failure-on-guix-tex-live-importer.md
index cb86621..5b74724 100644
--- a/site/pastebin/failure-on-guix-tex-live-importer.org
+++ b/_pastebins/failure-on-guix-tex-live-importer.md
@@ -1,8 +1,11 @@
---
title: Failure on Guix TeX Live importer
date: 2020-01-04
+layout: pastebin
+lang: en
---
-#+BEGIN_SRC shell -n
+
+```shell
$ guix import texlive fontspec
redirection vers « https://ctan.org/xml/1.2/pkg/fontspec »...
Backtrace:
@@ -32,4 +35,4 @@ In guix/build/utils.scm:
guix/build/utils.scm:652:6: In procedure invoke:
Throw to key `srfi-34' with args `(#<condition &invoke-error [program: "svn" arguments: ("export" "--non-interactive" "--trust-server-cert" "-r" "49435" "svn://www.tug.org/texlive/tags/texlive-2018.2/Master/texmf-dist/source/latex/fontspec" "/tmp/guix-directory.WtLohP") exit-status: 1 term-signal: #f stop-signal: #f] 7fe80d229c80>)'.
-#+END_SRC
+```
diff --git a/site/pastebin/inconsistent-hash-of-buildgomodule.org b/_pastebins/inconsistent-hash-of-buildgomodule.md
index 2aa7d96..c82438c 100644
--- a/site/pastebin/inconsistent-hash-of-buildgomodule.org
+++ b/_pastebins/inconsistent-hash-of-buildgomodule.md
@@ -1,29 +1,40 @@
---
title: Inconsistent hash of buildGoModule
date: 2019-06-08
+layout: pastebin
+lang: en
---
-[[https://discourse.nixos.org/t/inconsistent-hash-of-buildgomodule/3127/2][FIXED]].
+[FIXED](https://discourse.nixos.org/t/inconsistent-hash-of-buildgomodule/3127/2).
-The [[https://git.sr.ht/~euandreh/vps/commit/6ba76140238b5e3c7009c201f9f80ac86063f438][commit that made this visible]].
-* Offending derivation:
-[[https://git.sr.ht/~euandreh/vps/tree/6ba76140238b5e3c7009c201f9f80ac86063f438/default.nix#L3-15][Full source code on sr.ht]]:
-#+BEGIN_SRC nix -n
- terraform-godaddy = pkgs.buildGoModule rec {
- name = "terraform-godaddy-${version}";
- version = "1.6.4";
- src = pkgs.fetchFromGitHub {
- owner = "n3integration";
- repo = "terraform-godaddy";
- rev = "v${version}";
- sha256 = "00blqsan74s53dk9ab4hxi1kzxi46k57dr65dmbiradfa3yz3852";
- };
- modSha256 = "0p81wqw2n8vraxk20xwg717582ijwq2k7v5j3n13y4cd5bxd8hhz";
- postInstall =
- "mv $out/bin/terraform-godaddy $out/bin/terraform-provider-godaddy";
+The [commit that made this visible][0].
+
+[0]: https://git.sr.ht/~euandreh/vps/commit/6ba76140238b5e3c7009c201f9f80ac86063f438
+
+# Offending derivation:
+
+[Full source code on sr.ht][1]:
+
+[1]: https://git.sr.ht/~euandreh/vps/tree/6ba76140238b5e3c7009c201f9f80ac86063f438/default.nix#L3-15
+
+```nix
+terraform-godaddy = pkgs.buildGoModule rec {
+ name = "terraform-godaddy-${version}";
+ version = "1.6.4";
+ src = pkgs.fetchFromGitHub {
+ owner = "n3integration";
+ repo = "terraform-godaddy";
+ rev = "v${version}";
+ sha256 = "00blqsan74s53dk9ab4hxi1kzxi46k57dr65dmbiradfa3yz3852";
};
-#+END_SRC
-* Local build:
-#+BEGIN_SRC shell -n
+ modSha256 = "0p81wqw2n8vraxk20xwg717582ijwq2k7v5j3n13y4cd5bxd8hhz";
+ postInstall =
+ "mv $out/bin/terraform-godaddy $out/bin/terraform-provider-godaddy";
+};
+```
+
+# Local build:
+
+```shell
$ nix-build -A terraform-godaddy
these derivations will be built:
/nix/store/3hs274i9qdsg3hsgp05j7i5cqxsvpcqx-terraform-godaddy-1.6.4-go-modules.drv
@@ -169,12 +180,16 @@ hash mismatch in fixed-output derivation '/nix/store/jgbfkhlsz6bmq724p5cqqcgfyc7
got: sha256:10n2dy7q9kk1ly58sw965n6qa8l0nffh8vyd1vslx0gdlyj25xxs
cannot build derivation '/nix/store/y5961vv6y9c0ps2sbd8xfnpqvk0q7qhq-terraform-godaddy-1.6.4.drv': 1 dependencies couldn't be built
error: build of '/nix/store/y5961vv6y9c0ps2sbd8xfnpqvk0q7qhq-terraform-godaddy-1.6.4.drv' failed
-#+END_SRC
-* Build [[https://builds.sr.ht/~euandreh/job/67836#task-setup-0][on CI]]:
-The =setup.sh= script contains a call to =nix-shell= which in turns build the same =terraform-godaddy= derivation:
-#+BEGIN_SRC shell -n
-+ cd vps/
-+ ./scripts/ci/setup.sh
+```
+
+# Build [on CI](https://builds.sr.ht/~euandreh/job/67836#task-setup-0):
+
+The `setup.sh` script contains a call to `nix-shell` which in turns
+build the same `terraform-godaddy` derivation:
+
+```shell
+$ cd vps/
+$ ./scripts/ci/setup.sh
warning: Nix search path entry '/nix/var/nix/profiles/per-user/root/channels' does not exist, ignoring
these derivations will be built:
/nix/store/as9r3n55czsdiq82iacs0hq12alxb2m0-remove-references-to.drv
@@ -1030,4 +1045,4 @@ hash mismatch in fixed-output derivation '/nix/store/q8y0mzjl78hfhazjgq2sc84i7dp
cannot build derivation '/nix/store/w4ghinrmpq524k3617ikfc8i42aa0dbb-terraform-godaddy-1.6.4.drv': 1 dependencies couldn't be built
copying path '/nix/store/63gjp25l4cmdkl63zy0rcgmsvd2p2p34-terraform-0.11.14' from 'https://cache.nixos.org'...
error: build of '/nix/store/9drkn1qxkkcrz5g3413lpmbc2xysa582-terraform-0.11.14.drv', '/nix/store/w4ghinrmpq524k3617ikfc8i42aa0dbb-terraform-godaddy-1.6.4.drv' failed
-#+END_SRC
+```
diff --git a/site/pastebin/nix-exps.org b/_pastebins/nix-exps.md
index 997d6ab..d0ee26b 100644
--- a/site/pastebin/nix-exps.org
+++ b/_pastebins/nix-exps.md
@@ -1,8 +1,11 @@
---
title: Nix Stuff
date: 2018-07-25
+layout: pastebin
+lang: en
---
-#+BEGIN_SRC nix -n
+
+```nix
let
pkgsOriginal = import <nixpkgs> {};
pkgsSrc = pkgsOriginal.fetchzip {
@@ -43,4 +46,4 @@ in rec {
'';
};
}
-#+END_SRC
+```
diff --git a/site/pastebin/nix-show-derivation.org b/_pastebins/nix-show-derivation.md
index 96d2c66..90ec80e 100644
--- a/site/pastebin/nix-show-derivation.org
+++ b/_pastebins/nix-show-derivation.md
@@ -1,8 +1,11 @@
---
title: nix show-derivation sample output
date: 2018-07-25
+layout: pastebin
+lang: en
---
-#+BEGIN_SRC nix -n
+
+```nix
$ nix show-derivation /nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.drv
{
"/nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.drv": {
@@ -71,4 +74,4 @@ $ nix show-derivation /nix/store/zzz9cl2ly0mb2njr7vwa5528fxmn29m8-combofont-0.2.
}
}
}
-#+END_SRC
+```
diff --git a/site/pastebin/raku-tuple-type-annotation.org b/_pastebins/raku-tuple-type-annotation.md
index 8d0e55f..c2de915 100644
--- a/site/pastebin/raku-tuple-type-annotation.org
+++ b/_pastebins/raku-tuple-type-annotation.md
@@ -1,8 +1,11 @@
---
title: Raku tuple type annotation
date: 2019-12-29
+layout: pastebin
+lang: en
---
-#+BEGIN_SRC raku -n
+
+```raku
# Single Str return value: this works
sub f1(Str $in --> Str) {
$in;
@@ -18,11 +21,11 @@ sub f2(Str $in --> (Str, Str)) {
($in, $in);
}
-#+END_SRC
+```
Error log is:
-#+BEGIN_SRC text -n
+```
===SORRY!=== Error while compiling /path/to/my/file
Malformed return value
-#+END_SRC
+```
diff --git a/site/posts/2018-07-17-running-guix-on-nixos.org b/_posts/2018-07-17-running-guix-on-nixos.md
index d28b8d2..e409f3c 100644
--- a/site/posts/2018-07-17-running-guix-on-nixos.org
+++ b/_posts/2018-07-17-running-guix-on-nixos.md
@@ -1,15 +1,30 @@
---
title: Running Guix on NixOS
date: 2018-07-17
+layout: post
+lang: en
+ref: running-guix-on-nixos
---
-I wanted to run Guix on a NixOS machine. Even though the Guix manual explains how to do it [[https://www.gnu.org/software/guix/manual/en/html_node/Binary-Installation.html#Binary-Installation][step by step]], I needed a few extra ones to make it work properly.
+I wanted to run
+Guix on a NixOS machine. Even though the Guix manual explains how to do it
+[step by step][0], I needed a few extra ones to make it work properly.
-I couldn't just install GuixSD because my wireless network card doesn't have any free/libre drivers (yet).
-** Creating =guixbuilder= users
-Guix requires you to create non-root users that will be used to perform the builds in the isolated environments.
+[0]: https://www.gnu.org/software/guix/manual/en/html_node/Binary-Installation.html#Binary-Installation
-The [[https://www.gnu.org/software/guix/manual/en/html_node/Build-Environment-Setup.html#Build-Environment-Setup][manual]] already provides you with a ready to run (as root) command for creating the build users:
-#+BEGIN_SRC bash -n
+I couldn't just install GuixSD because my wireless network card
+doesn't have any free/libre drivers (yet).
+
+## Creating `guixbuilder` users
+
+Guix requires you to create non-root users that will be used to perform
+the builds in the isolated environments.
+
+The [manual][1] already provides you with a ready to run (as root) command for
+creating the build users:
+
+[1]: https://www.gnu.org/software/guix/manual/en/html_node/Build-Environment-Setup.html#Build-Environment-Setup
+
+```bash
groupadd --system guixbuild
for i in `seq -w 1 10`;
do
@@ -18,9 +33,15 @@ do
-c "Guix build user $i" --system \
guixbuilder$i;
done
-#+END_SRC
-However, In my personal NixOS I have disabled [[https://nixos.org/nixos/manual/index.html#sec-user-management][=users.mutableUsers=]], which means that even if I run the above command it means that they'll be removed once I rebuild my OS:
-#+BEGIN_SRC shell -n
+```
+
+However, In my personal NixOS I have disabled [`users.mutableUsers`][2], which
+means that even if I run the above command it means that they'll be removed once
+I rebuild my OS:
+
+[2]: https://nixos.org/nixos/manual/index.html#sec-user-management
+
+```shell
$ sudo nixos-rebuild switch
(...)
removing user ‘guixbuilder7’
@@ -34,9 +55,12 @@ removing user ‘guixbuilder2’
removing user ‘guixbuilder8’
removing user ‘guixbuilder5’
(...)
-#+END_SRC
-Instead of enabling =users.mutableUsers= I could add the Guix users by adding them to my system configuration:
-#+BEGIN_SRC nix -n
+```
+
+Instead of enabling `users.mutableUsers` I could add the Guix users by
+adding them to my system configuration:
+
+```nix
{ config, pkgs, ...}:
{
@@ -77,15 +101,26 @@ Instead of enabling =users.mutableUsers= I could add the Guix users by adding th
};
};
}
-#+END_SRC
-Here I used =fold= and the =//= operator to merge all of the configuration sets into a single =extraUsers= value.
-** Creating the =systemd= service
-One other thing missing was the =systemd= service.
+```
+
+Here I used `fold` and the `//` operator to merge all of the
+configuration sets into a single `extraUsers` value.
-First I couldn't just copy the =.service= file to =/etc= since in NixOS that folder isn't writable. But also I wanted the service to be better integrated with the OS.
+## Creating the `systemd` service
-That was a little easier than creating the users, all I had to do was translate the provided [[https://git.savannah.gnu.org/cgit/guix.git/tree/etc/guix-daemon.service.in?id=00c86a888488b16ce30634d3a3a9d871ed6734a2][=guix-daemon.service.in=]] configuration to an equivalent Nix expression
-#+BEGIN_SRC ini -n
+One other thing missing was the `systemd` service.
+
+First I couldn't just copy the `.service` file to `/etc` since in NixOS
+that folder isn't writable. But also I wanted the service to be better
+integrated with the OS.
+
+That was a little easier than creating the users, all I had to do was translate
+the provided [`guix-daemon.service.in`][3] configuration to an equivalent Nix
+expression
+
+[3]: https://git.savannah.gnu.org/cgit/guix.git/tree/etc/guix-daemon.service.in?id=00c86a888488b16ce30634d3a3a9d871ed6734a2
+
+```ini
# This is a "service unit file" for the systemd init system to launch
# 'guix-daemon'. Drop it in /etc/systemd/system or similar to have
# 'guix-daemon' automatically started.
@@ -107,9 +142,11 @@ TasksMax=8192
[Install]
WantedBy=multi-user.target
-#+END_SRC
-This sample =systemd= configuration file became:
-#+BEGIN_SRC nix -n
+```
+
+This sample `systemd` configuration file became:
+
+```nix
guix-daemon = {
enable = true;
description = "Build daemon for GNU Guix";
@@ -123,9 +160,12 @@ guix-daemon = {
};
wantedBy = [ "multi-user.target" ];
};
-#+END_SRC
-There you go! After running =sudo nixos-rebuild switch= I could get Guix up and running:
-#+BEGIN_SRC bash -n
+```
+
+There you go! After running `sudo nixos-rebuild switch` I could get Guix
+up and running:
+
+```bash
$ guix package -i hello
The following package will be installed:
hello 2.10 /gnu/store/bihfrh609gkxb9dp7n96wlpigiv3krfy-hello-2.10
@@ -142,9 +182,15 @@ Creating manual page database...
2 packages in profile
$ hello
Hello, world!
-#+END_SRC
+```
+
Some improvements to this approach are:
-1. looking into [[https://nixos.org/nixos/manual/index.html#sec-writing-modules][NixOS modules]] and trying to bundle everything together into a single logical unit;
-2. [[https://www.gnu.org/software/guix/manual/en/html_node/Requirements.html#Requirements][build Guix from source]] and share the Nix store and daemon with Guix.
+
+1. looking into [NixOS modules][4] and trying to bundle everything together
+ into a single logical unit;
+2. [build Guix from source][5] and share the Nix store and daemon with Guix.
Happy Guix/Nix hacking!
+
+[4]: https://nixos.org/nixos/manual/index.html#sec-writing-modules
+[5]: https://www.gnu.org/software/guix/manual/en/html_node/Requirements.html#Requirements
diff --git a/_posts/2018-08-01-verifying-npm-ci-reproducibility.md b/_posts/2018-08-01-verifying-npm-ci-reproducibility.md
new file mode 100644
index 0000000..f1fd1dd
--- /dev/null
+++ b/_posts/2018-08-01-verifying-npm-ci-reproducibility.md
@@ -0,0 +1,146 @@
+---
+title: Verifying "npm ci" reproducibility
+date: 2018-08-01
+layout: post
+lang: en
+ref: veryfing-npm-ci-reproducibility
+updated_at: 2019-05-22
+---
+When [npm@5](https://blog.npmjs.org/post/161081169345/v500) came bringing
+[package-locks](https://docs.npmjs.com/files/package-locks) with it, I
+was confused about the benefits it provided, since running `npm install`
+more than once could resolve all the dependencies again and yield yet
+another fresh `package-lock.json` file. The message saying "you should
+add this file to version control" left me hesitant on what to do[^1].
+
+However the [addition of `npm ci`](https://blog.npmjs.org/post/171556855892/introducing-npm-ci-for-faster-more-reliable)
+filled this gap: it's a stricter variation of `npm install` which
+guarantees that "[subsequent installs are able to generate identical trees](https://docs.npmjs.com/files/package-lock.json)". But are they
+really identical? I could see that I didn't have the same problems of
+different installation outputs, but I didn't know for **sure** if it
+was really identical.
+
+## Computing the hash of a directory's content
+
+I quickly searched for a way to check for the hash signature of an
+entire directory tree, but I couldn't find one. I've made a poor
+man's [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree)
+implementation using `sha256sum` and a few piped commands at the
+terminal:
+
+```bash
+merkle-tree () {
+ dirname="${1-.}"
+ pushd "$dirname"
+ find . -type f | \
+ sort | \
+ xargs -I{} sha256sum "{}" | \
+ sha256sum | \
+ awk '{print $1}'
+ popd
+}
+```
+
+Going through it line by line:
+
+- #1 we define a Bash function called `merkle-tree`;
+- #2 it accepts a single argument: the directory to compute the
+ merkle tree from. If nothing is given, it runs on the current
+ directory (`.`);
+- #3 we go to the directory, so we don't get different prefixes in
+ `find`'s output (like `../a/b`);
+- #4 we get all files from the directory tree. Since we're using
+ `sha256sum` to compute the hash of the file contents, we need to
+ filter out folders from it;
+- #5 we need to sort the output, since different file systems and
+ `find` implementations may return files in different orders;
+- #6 we use `xargs` to compute the hash of each file individually
+ through `sha256sum`. Since a file may contain spaces we need to
+ escape it with quotes;
+- #7 we compute the hash of the combined hashes. Since `sha256sum`
+ output is formatted like `<hash> <filename>`, it produces a
+ different final hash if a file ever changes name without changing
+  its content;
+- #8 we get the final hash output, excluding the `<filename>` (which
+ is `-` in this case, aka `stdin`).
+
+### Positive points:
+
+1. ignore timestamp: running more than once on different installations
+ yields the same hash;
+2. the name of the file is included in the final hash computation.
+
+### Limitations:
+
+1. it ignores empty folders from the hash computation;
+2. the implementation's only goal is to represent using a digest
+ whether the content of a given directory is the same or not. Leaf
+ presence checking is obviously missing from it.
+
+### Testing locally with sample data
+
+```bash
+mkdir /tmp/merkle-tree-test/
+cd /tmp/merkle-tree-test/
+mkdir -p a/b/ a/c/ d/
+echo "one" > a/b/one.txt
+echo "two" > a/c/two.txt
+echo "three" > d/three.txt
+merkle-tree . # output is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+merkle-tree . # output still is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+echo "four" > d/four.txt
+merkle-tree . # output is now b5464b958969ed81815641ace96b33f7fd52c20db71a7fccc45a36b3a2ae4d4c
+rm d/four.txt
+merkle-tree . # output back to be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
+echo "hidden-five" > a/b/one.txt
+merkle-tree . # output changed 471fae0d074947e4955e9ac53e95b56e4bc08d263d89d82003fb58a0ffba66f5
+```
+
+It seems to work for this simple test case.
+
+You can try copying and pasting it to verify the hash signatures.
+
+## Using `merkle-tree` to check the output of `npm ci`
+
+*I've done all of the following using Node.js v8.11.3 and npm@6.1.0.*
+
+In this test case I'll take the main repo of
+[Lerna](https://lernajs.io/)[^2]:
+
+```bash
+cd /tmp/
+git clone https://github.com/lerna/lerna.git
+cd lerna/
+git checkout 57ff865c0839df75dbe1974971d7310f235e1109
+npm ci
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+rm -rf node_modules/
+npm ci
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+npm ci # test if it also works with an existing node_modules/ folder
+merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
+```
+
+Good job `npm ci` :)
+
+#6 and #9 take some time to run (21 seconds on my machine), but this
+specific use case isn't performance sensitive. The slowest step is
+computing the hash of each individual file.
+
+## Conclusion
+
+`npm ci` really "generates identical trees".
+
+I'm not aware of any other existing solution for verifying the hash
+signature of a directory. If you know any I'd [like to know](mailto:eu@euandre.org).
+
+## *Edit*
+
+2019/05/22: Fix spelling.
+
+[^1]: The [documentation](https://docs.npmjs.com/cli/install#description)
+ claims `npm install` is driven by the existing `package-lock.json`,
+ but that's actually [a little bit tricky](https://github.com/npm/npm/issues/17979#issuecomment-332701215).
+
+[^2]: Finding a big known repo that actually committed the
+ `package-lock.json` file was harder than I expected.
diff --git a/_posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md b/_posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md
new file mode 100644
index 0000000..a66d957
--- /dev/null
+++ b/_posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.md
@@ -0,0 +1,273 @@
+---
+title: Using "youtube-dl" to manage YouTube subscriptions
+date: 2018-12-21
+layout: post
+lang: en
+ref: using-youtube-dl-to-manage-youtube-subscriptions
+---
+I've recently read the
+[announcement](https://www.reddit.com/r/DataHoarder/comments/9sg8q5/i_built_a_selfhosted_youtube_subscription_manager/)
+of a very nice [self-hosted YouTube subscription
+manager](https://github.com/chibicitiberiu/ytsm). I haven't used
+YouTube's built-in subscriptions for a while now, and haven't missed
+it at all. When I saw the announcement, I considered writing about the
+solution I've built on top of [youtube-dl](https://youtube-dl.org/).
+
+## Background: the problem with YouTube
+
+In many ways, I agree with [André Staltz's view on data ownership and
+privacy](https://staltz.com/what-happens-when-you-block-internet-giants.html):
+
+> I started with the basic premise that "I want to be in control of my
+> data". Sometimes that meant choosing when to interact with an internet
+> giant and how much I feel like revealing to them. Most of times it
+> meant not interacting with them at all. I don't want to let them be in
+> full control of how much they can know about me. I don't want to be in
+> autopilot mode. (...) Which leads us to YouTube. While I was able to
+> find alternatives to Gmail (Fastmail), Calendar (Fastmail), Translate
+> (Yandex Translate), etc, YouTube remains as the most indispensable
+> Google-owned web service. It is really really hard to avoid consuming
+> YouTube content. It was probably the smartest startup acquisition
+> ever. My privacy-oriented alternative is to watch YouTube videos
+> through Tor, which is technically feasible but not polite to use the
+> Tor bandwidth for these purposes. I'm still scratching my head with
+> this issue.
+
+Even though I don't use most alternative services he mentions, I do
+watch videos from YouTube. But I also feel uncomfortable logging in to
+YouTube with a Google account, watching videos, creating playlists and
+similar things.
+
+Using the mobile app is worse: you can't even block ads in there.
+You're in less control on what you share with YouTube and Google.
+
+## youtube-dl
+
+youtube-dl is a command-line tool for downloading videos, from YouTube
+and [many other sites](https://rg3.github.io/youtube-dl/supportedsites.html):
+
+```shell
+$ youtube-dl https://www.youtube.com/watch?v=rnMYZnY3uLA
+[youtube] rnMYZnY3uLA: Downloading webpage
+[youtube] rnMYZnY3uLA: Downloading video info webpage
+[download] Destination: A Origem da Vida _ Nerdologia-rnMYZnY3uLA.mp4
+[download] 100% of 32.11MiB in 00:12
+```
+
+It can be used to download individual videos as shown above, but it
+also has some interesting flags that we can use:
+
+- `--output`: use a custom template to create the name of the
+ downloaded file;
+- `--download-archive`: use a text file for recording and remembering
+ which videos were already downloaded;
+- `--prefer-free-formats`: prefer free video formats, like `webm`,
+ `ogv` and Matroska `mkv`;
+- `--playlist-end`: how many videos to download from a "playlist" (a
+ channel, a user or an actual playlist);
+- `--write-description`: write the video description to a
+ `.description` file, useful for accessing links and extra content.
+
+Putting it all together:
+
+```shell
+$ youtube-dl "https://www.youtube.com/channel/UClu474HMt895mVxZdlIHXEA" \
+ --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
+ --prefer-free-formats \
+ --playlist-end 20 \
+ --write-description \
+ --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
+```
+
+This will download the latest 20 videos from the selected channel, and
+write down the video IDs in the `youtube-dl-seen.conf` file. Running it
+again immediately afterwards won't have any effect.
+
+If the channel posts one more video, running the same command again will
+download only the last video, since the other 19 were already
+downloaded.
+
+With this basic setup you have a minimal subscription system at work,
+and you can create some functions to help you manage that:
+
+```shell
+#!/bin/sh
+
+export DEFAULT_PLAYLIST_END=15
+
+download() {
+ youtube-dl "$1" \
+ --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
+ --prefer-free-formats \
+ --playlist-end $2 \
+ --write-description \
+ --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
+}
+export -f download
+
+
+download_user() {
+ download "https://www.youtube.com/user/$1" ${2-$DEFAULT_PLAYLIST_END}
+}
+export -f download_user
+
+
+download_channel() {
+ download "https://www.youtube.com/channel/$1" ${2-$DEFAULT_PLAYLIST_END}
+}
+export -f download_channel
+
+
+download_playlist() {
+ download "https://www.youtube.com/playlist?list=$1" ${2-$DEFAULT_PLAYLIST_END}
+}
+export -f download_playlist
+```
+
+With these functions, you now can have a subscription fetching script to
+download the latest videos from your favorite channels:
+
+```shell
+#!/bin/sh
+
+download_user ClojureTV 15
+download_channel "UCmEClzCBDx-vrt0GuSKBd9g" 100
+download_playlist "PLqG7fA3EaMRPzL5jzd83tWcjCUH9ZUsbX" 15
+```
+
+Now, whenever you want to watch the latest videos, just run the above
+script and you'll get all of them in your local machine.
+
+## Tradeoffs
+
+### I've made it for myself, with my use case in mind
+
+1. Offline
+
+    My internet speed is somewhat reasonable[^1], but it is really
+    unstable. Either at work or at home, it's not uncommon to lose
+ internet access for 2 minutes 3~5 times every day, and stay
+ completely offline for a couple of hours once every week.
+
+    Working through the hassle of keeping a playlist on disk has paid
+    off many, many times. Sometimes I don't even notice when the
+ connection drops for some minutes, because I'm watching a video and
+ working on some document, all on my local computer.
+
+ There's also no quality adjustment for YouTube's web player, I
+ always pick the higher quality and it doesn't change during the
+ video. For some types of content, like a podcast with some tiny
+ visual resources, this doesn't change much. For other types of
+ content, like a keynote presentation with text written on the
+ slides, watching on 144p isn't really an option.
+
+ If the internet connection drops during the video download,
+ youtube-dl will resume from where it stopped.
+
+ This is an offline first benefit that I really like, and works well
+ for me.
+
+2. Sync the "seen" file
+
+ I already have a running instance of Nextcloud, so just dumping the
+ `youtube-dl-seen.conf` file inside Nextcloud was a no-brainer.
+
+ You could try putting it in a dedicated git repository, and wrap the
+ script with an autocommit after every run. If you ever had a merge
+ conflict, you'd simply accept all changes and then run:
+
+ ```shell
+ $ uniq youtube-dl-seen.conf > youtube-dl-seen.conf
+ ```
+
+ to tidy up the file.
+
+3. Doesn't work on mobile
+
+ My primary device that I use everyday is my laptop, not my phone. It
+ works well for me this way.
+
+ Also, it's harder to add ad-blockers to mobile phones, and most
+ mobile software still depends on Google's and Apple's blessing.
+
+ If you wish, you can sync the videos to the SD card periodically,
+ but that's a bit of extra manual work.
+
+### The Good
+
+1. Better privacy
+
+ We don't even have to configure the ad-blocker to keep ads and
+ trackers away!
+
+ YouTube still has your IP address, so using a VPN is always a good
+ idea. However, a timing analysis would be able to identify you
+ (considering the current implementation).
+
+2. No need to self-host
+
+ There's no host that needs maintenance. Everything runs locally.
+
+ As long as you keep youtube-dl itself up to date and sync your
+ "seen" file, there's little extra work to do.
+
+3. Track your subscriptions with git
+
+ After creating a `subscriptions.sh` executable that downloads all
+ the videos, you can add it to git and use it to track metadata about
+ your subscriptions.
+
+### The Bad
+
+1. Maximum playlist size is your disk size
+
+ This is a good thing for getting a realistic view on your actual
+ "watch later" list. However I've run out of disk space many
+ times, and now I need to be more aware of how much is left.
+
+### The Ugly
+
+We can only avoid all the bad parts of YouTube with youtube-dl as long
+as YouTube keeps the videos public and programmatically accessible. If
+YouTube ever blocks that we'd lose the ability to consume content this
+way, but also lose confidence in considering YouTube a healthy
+repository of videos on the internet.
+
+## Going beyond
+
+Since you're running everything locally, here are some possibilities to
+be explored:
+
+### A playlist that is too long for being downloaded all at once
+
+You can wrap the `download_playlist` function (let's call the wrapper
+`inc_download`) and instead of passing it a fixed number to the
+`--playlist-end` parameter, you can store the `$n` in a folder
+(something like `$HOME/.yt-db/$PLAYLIST_ID`) and increment it by `$step`
+every time you run `inc_download`.
+
+This way you can incrementally download videos from a huge playlist
+without filling your disk with gigabytes of content all at once.
+
+### Multiple computer scenario
+
+The `download_playlist` function could be aware of the specific machine
+that it is running on and apply specific policies depending on the
+machine: always download everything; only download videos that aren't
+present anywhere else; etc.
+
+## Conclusion
+
+youtube-dl is a great tool to keep at hand. It covers a really large
+range of video websites and works robustly.
+
+Feel free to copy and modify this code, and [send me](mailto:eu@euandre.org) suggestions of improvements or related
+content.
+
+## *Edit*
+
+2019/05/22: Fix spelling.
+
+[^1]: Considering how expensive it is and the many ways it could be
+ better, but also how much it has improved over the last years, I say
+ it's reasonable.
diff --git a/_posts/2019-06-02-stateless-os.md b/_posts/2019-06-02-stateless-os.md
new file mode 100644
index 0000000..2f15c17
--- /dev/null
+++ b/_posts/2019-06-02-stateless-os.md
@@ -0,0 +1,145 @@
+---
+title: Using NixOS as a stateless workstation
+date: 2019-06-02
+layout: post
+lang: en
+ref: stateless-os
+---
+Last week[^1] I changed back to an old[^2] Samsung laptop, and installed
+[NixOS](https://nixos.org/) on it.
+
+After using NixOS on another laptop for around two years, I wanted to
+verify how reproducible my desktop environment was, and how far NixOS
+can actually go in recreating my whole OS from my configuration
+files and personal data. I gravitated towards NixOS after trying (and
+failing) to create an `install.sh` script that would imperatively
+install and configure my whole OS using apt-get. When I found a
+GNU/Linux distribution that was built on top of the idea of
+declaratively specifying the whole OS I was automatically convinced[^3].
+
+I was impressed. Even though I've been experiencing the benefits of Nix
+isolation daily, I always felt skeptical that something would be
+missing, because the devil is always on the details. But the result was
+much better than expected!
+
+There were only 2 missing configurations:
+
+1. tap-to-click on the touchpad wasn't enabled by default;
+2. the default theme from the gnome-terminal is "Black on white"
+ instead of "White on black".
+
+That's all.
+
+I haven't checked if I can configure those in NixOS GNOME module, but I
+guess both are scriptable and could be set in a fictional `setup.sh`
+run.
+
+This makes me really happy, actually. More happy than I anticipated.
+
+Having such a powerful declarative OS makes me feel like my data is the
+really important stuff (as it should be), and I can interact with it on
+any workstation. All I need is an internet connection and a few hours to
+download everything. It feels like my physical workstation and the
+installed OS are serving me and my data, instead of me feeling like a
+hostage to the specific OS configuration at the moment. Having a few
+backup copies of everything important extends such peacefulness.
+
+After this positive experience with recreating my OS from simple Nix
+expressions, I started to wonder how far I could go with this, and
+started considering other areas of improvements:
+
+### First run on a fresh NixOS installation
+
+Right now the initial setup relies on non-declarative manual tasks, like
+decrypting some credentials, or manually downloading **this** git
+repository with specific configurations before **that** one.
+
+I wonder what some areas of improvements are on this topic, and if
+investing on it is worth it (both time-wise and happiness-wise).
+
+### Emacs
+
+Right now I'm using the [Spacemacs](http://spacemacs.org/), which is a
+community package curation and configuration on top of
+[Emacs](https://www.gnu.org/software/emacs/).
+
+Spacemacs does support the notion of
+[layers](http://spacemacs.org/doc/LAYERS.html), which you can
+declaratively specify and let Spacemacs do the rest.
+
+However this solution isn't nearly as robust as Nix: being purely
+functional, Nix does describe everything required to build a derivation,
+and knows how to do so. Spacemacs is closer to more traditional package
+managers: even though the layers list is declarative, the installation
+is still very much imperative. I've had trouble with Spacemacs not
+behaving the same on different computers, both with identical
+configurations, only brought to convergence back again after a
+`git clean -fdx` inside `~/.emacs.d/`.
+
+The ideal solution would be managing Emacs packages with Nix itself.
+After a quick search I did find that [there is support for Emacs
+packages in
+Nix](https://nixos.org/nixos/manual/index.html#module-services-emacs-adding-packages).
+So far I was only aware of [Guix support for Emacs packages](https://www.gnu.org/software/guix/manual/en/html_node/Application-Setup.html#Emacs-Packages).
+
+This isn't a trivial change because Spacemacs does include extra
+curation and configuration on top of Emacs packages. I'm not sure the
+best way to improve this right now.
+
+### myrepos
+
+I'm using [myrepos](https://myrepos.branchable.com/) to manage all my
+git repositories, and the general rule I apply is to add any repository
+specific configuration in myrepos' `checkout` phase:
+
+```shell
+# sample ~/.mrconfig file snippet
+[dev/guix/guix]
+checkout =
+ git clone https://git.savannah.gnu.org/git/guix.git guix
+ cd guix/
+ git config sendemail.to guix-patches@gnu.org
+```
+
+This way when I clone this repo again the email sending is already
+pre-configured.
+
+This works well enough, but the solution is too imperative, and my
+`checkout` phases tend to become brittle over time if not enough care is
+taken.
+
+### GNU Stow
+
+For my home profile and personal configuration I already have a few
+dozens of symlinks that I manage manually. This has worked so far, but
+the solution is sometimes fragile and [not declarative at
+all](https://git.sr.ht/~euandreh/dotfiles/tree/316939aa215181b1d22b69e94241eef757add98d/bash/symlinks.sh#L14-75).
+I wonder if something like [GNU
+Stow](https://www.gnu.org/software/stow/) can help me simplify this.
+
+## Conclusion
+
+I'm really satisfied with NixOS, and I intend to keep using it. If what
+I've said interests you, maybe try tinkering with the [Nix package
+manager](https://nixos.org/nix/) (not the whole NixOS) on your current
+distribution (it can live alongside any other package manager).
+
+If you have experience with declarative Emacs package management, GNU
+Stow or any similar tool, etc., [I'd like some
+tips](mailto:eu@euandre.org). If you don't have any experience at all,
+[I'd still love to hear from you](mailto:eu@euandre.org).
+
+[^1]: "Last week" as of the start of this writing, so around the end
+ of May 2019.
+
+[^2]: I was using a 32GB RAM, i7 and 250GB SSD Samsung laptop. The
+ switch was back to a 8GB RAM, i5 and 500GB HDD Dell laptop. The
+ biggest difference I noticed was on faster memory, both RAM
+ availability and the disk speed, but I had 250GB less local storage
+ space.
+
+[^3]: The declarative configuration aspect is something that I now
+ completely take for granted, and wouldn't consider using something
+ which isn't declarative. A good metric to show this is me realising
+ that I can't pinpoint the moment when I decided to switch to NixOS.
+ It's like I had a distant past when this wasn't true.
diff --git a/about.md b/about.md
new file mode 100644
index 0000000..452c1c7
--- /dev/null
+++ b/about.md
@@ -0,0 +1,29 @@
+---
+layout: page
+title: About
+lang: en
+ref: about
+---
+Hi, I'm EuAndreh. I write software and occasionally music. You can find my
+contact information in the footer of this page, or mail my [public inbox][0].
+
+[0]: mailto:~euandreh/public-inbox@lists.sr.ht
+
+This is my personal website where I write articles, publish software and
+more related work.
+
+Below you can find some interesting projects of mine.
+
+## Software projects
+
+### [cool-read-macros](https://euandre.org/cool-read-macros/)
+
+### [cl-BSON](https://euandre.org/cl-bson/)
+
+[cl-intbytes](https://euandre.org/cl-intbytes) as a side-effect.
+
+## Other things
+
+[Pastebin listing](./pastebins.html).
+
+[Atom feed with all languages](./feed.all.atom).
diff --git a/atom.xml b/atom.xml
new file mode 120000
index 0000000..5291931
--- /dev/null
+++ b/atom.xml
@@ -0,0 +1 @@
+feed.en.atom \ No newline at end of file
diff --git a/css.sh b/css.sh
deleted file mode 100755
index 420878b..0000000
--- a/css.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-set -Eeuo pipefail
-cd "$(dirname "${BASH_SOURCE[@]}")"
-
-nix-build -A subtasks.docs
-rm -rf tmp/
-mkdir tmp/
-
-cp -R result/* tmp/
-chmod -R +w tmp/
-cp site/css/styles.css tmp/css/
diff --git a/default.nix b/default.nix
index 414e233..5f6663d 100644
--- a/default.nix
+++ b/default.nix
@@ -1,57 +1,17 @@
-let
- pkgsUnstable = import <nixpkgs> { };
- pkgsPinned = import (pkgsUnstable.fetchzip {
- url = "https://github.com/NixOS/nixpkgs/archive/18.03.zip";
- sha256 = "0hk4y2vkgm1qadpsm4b0q1vxq889jhxzjx3ragybrlwwg54mzp4f";
- }) { };
- pkgs = pkgsUnstable;
-
- # Taken from:
- # http://www.cs.yale.edu/homes/lucas.paul/posts/2017-04-10-hakyll-on-nix.html
- websiteBuilder = pkgs.stdenv.mkDerivation {
- name = "website-builder";
- src = ./hakyll;
- phases = "unpackPhase buildPhase";
- buildInputs =
- [ (pkgsPinned.haskellPackages.ghcWithPackages (p: [ p.hakyll ])) ];
- buildPhase = ''
- mkdir -p $out/bin
- ghc -O2 -dynamic --make Main.hs -o $out/bin/build-site
- '';
- };
+let pkgs = import <nixpkgs> { };
in rec {
utils = import ./utils.nix {
pkgs = pkgs;
src = pkgs.nix-gitignore.gitignoreSource [ ] ./.;
baseName = "website";
};
+ jekyllEnv = pkgs.bundlerEnv {
+ name = "jekyll-env";
+ gemfile = ./Gemfile;
+ lockfile = ./Gemfile.lock;
+ gemset = ./gemset.nix;
+ };
subtasks = rec {
- perlPodCheck = utils.baseTask.overrideAttrs (baseAttrs: {
- name = "${baseAttrs.name}-perl-podcheck";
- buildInputs = baseAttrs.buildInputs ++ [ pkgs.perl ];
- buildPhase = ''
- podchecker website pastebin/website-pastebin slides/website-slides
- touch $out
- '';
- });
- batsTest = utils.baseTask.overrideAttrs (baseAttrs: {
- name = "${baseAttrs.name}-bats-test";
- buildInputs = baseAttrs.buildInputs ++ [ pkgs.bats pkgs.perl ];
- buildPhase = ''
- patchShebangs .
- ./t/website.bats
- touch $out
- '';
- });
- perlInternalTest = utils.baseTask.overrideAttrs (baseAttrs: {
- name = "${baseAttrs.name}-perl-test";
- buildInputs = baseAttrs.buildInputs ++ [ pkgs.perl ];
- buildPhase = ''
- patchShebangs .
- ./website test
- touch $out
- '';
- });
hunspellCheck = utils.baseTask.overrideAttrs (baseAttrs: {
name = "${baseAttrs.name}-hunspell";
buildInputs = baseAttrs.buildInputs
@@ -64,31 +24,22 @@ in rec {
});
docs = utils.baseTask.overrideAttrs (baseAttrs: {
name = "${baseAttrs.name}-docs";
- src = ./site;
- buildInputs = [ websiteBuilder ];
+ buildInputs = [ jekyllEnv ];
buildPhase = ''
- export LOCALE_ARCHIVE="${pkgs.glibcLocales}/lib/locale/locale-archive";
- export LANG=en_US.UTF-8
- build-site build
- mkdir $out
- cp -r _site/* $out
+ jekyll build -d $out
'';
});
};
test = utils.test [
utils.formatNix
(utils.shellcheck null)
- (utils.fixme [ "pastebin/skeleton.org" "utils.nix" ])
- subtasks.perlPodCheck
- subtasks.batsTest
- subtasks.perlInternalTest
+ (utils.fixme null)
subtasks.hunspellCheck
];
shell = pkgs.mkShell rec {
name = "website-shell";
buildInputs = [
- websiteBuilder
- (pkgs.haskellPackages.ghcWithPackages (p: with p; [ hakyll ]))
+ jekyllEnv
(pkgs.hunspellWithDicts (with pkgs.hunspellDicts; [ en-us ]))
];
};
diff --git a/site/favicon.ico b/favicon.ico
index 8ebd54f..8ebd54f 100644
--- a/site/favicon.ico
+++ b/favicon.ico
Binary files differ
diff --git a/feed.all.atom b/feed.all.atom
new file mode 100644
index 0000000..381dd63
--- /dev/null
+++ b/feed.all.atom
@@ -0,0 +1,4 @@
+---
+---
+{% assign lposts = site.posts | sort: "date" | reverse %}
+{% include feed.atom posts=lposts lang="en" %} \ No newline at end of file
diff --git a/feed.atom b/feed.atom
new file mode 120000
index 0000000..5291931
--- /dev/null
+++ b/feed.atom
@@ -0,0 +1 @@
+feed.en.atom \ No newline at end of file
diff --git a/feed.en.atom b/feed.en.atom
new file mode 100644
index 0000000..a048a54
--- /dev/null
+++ b/feed.en.atom
@@ -0,0 +1,4 @@
+---
+---
+{% assign lposts = site.posts | where:"lang", "en" | sort: "date" | reverse %}
+{% include feed.atom posts=lposts lang="en" %} \ No newline at end of file
diff --git a/feed.pt.atom b/feed.pt.atom
new file mode 100644
index 0000000..9862e38
--- /dev/null
+++ b/feed.pt.atom
@@ -0,0 +1,4 @@
+---
+---
+{% assign lposts = site.posts | where:"lang", "pt" | sort: "date" | reverse %}
+{% include feed.atom lang="pt" %}
diff --git a/feed.xml b/feed.xml
new file mode 120000
index 0000000..5291931
--- /dev/null
+++ b/feed.xml
@@ -0,0 +1 @@
+feed.en.atom \ No newline at end of file
diff --git a/gemset.nix b/gemset.nix
new file mode 100644
index 0000000..6e00820
--- /dev/null
+++ b/gemset.nix
@@ -0,0 +1,266 @@
+{
+ addressable = {
+ dependencies = [ "public_suffix" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1fvchp2rhp2rmigx7qglf69xvjqvzq7x0g49naliw29r2bz656sy";
+ type = "gem";
+ };
+ version = "2.7.0";
+ };
+ colorator = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "0f7wvpam948cglrciyqd798gdc6z3cfijciavd0dfixgaypmvy72";
+ type = "gem";
+ };
+ version = "1.1.0";
+ };
+ concurrent-ruby = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1x07r23s7836cpp5z9yrlbpljcxpax14yw4fy4bnp6crhr6x24an";
+ type = "gem";
+ };
+ version = "1.1.5";
+ };
+ em-websocket = {
+ dependencies = [ "eventmachine" "http_parser.rb" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1bsw8vjz0z267j40nhbmrvfz7dvacq4p0pagvyp17jif6mj6v7n3";
+ type = "gem";
+ };
+ version = "0.5.1";
+ };
+ eventmachine = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "0wh9aqb0skz80fhfn66lbpr4f86ya2z5rx6gm5xlfhd05bj1ch4r";
+ type = "gem";
+ };
+ version = "1.2.7";
+ };
+ ffi = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "10lfhahnnc91v63xpvk65apn61pib086zha3z5sp1xk9acfx12h4";
+ type = "gem";
+ };
+ version = "1.12.2";
+ };
+ forwardable-extended = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "15zcqfxfvsnprwm8agia85x64vjzr2w0xn9vxfnxzgcv8s699v0v";
+ type = "gem";
+ };
+ version = "2.6.0";
+ };
+ "http_parser.rb" = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "15nidriy0v5yqfjsgsra51wmknxci2n2grliz78sf9pga3n0l7gi";
+ type = "gem";
+ };
+ version = "0.6.0";
+ };
+ i18n = {
+ dependencies = [ "concurrent-ruby" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "038qvz7kd3cfxk8bvagqhakx68pfbnmghpdkx7573wbf0maqp9a3";
+ type = "gem";
+ };
+ version = "0.9.5";
+ };
+ jekyll = {
+ dependencies = [
+ "addressable"
+ "colorator"
+ "em-websocket"
+ "i18n"
+ "jekyll-sass-converter"
+ "jekyll-watch"
+ "kramdown"
+ "liquid"
+ "mercenary"
+ "pathutil"
+ "rouge"
+ "safe_yaml"
+ ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1nn2sc308l2mz0yiall4r90l6vy67qp4sy9zapi73a948nd4a5k3";
+ type = "gem";
+ };
+ version = "3.8.5";
+ };
+ jekyll-sass-converter = {
+ dependencies = [ "sass" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "008ikh5fk0n6ri54mylcl8jn0mq8p2nfyfqif2q3pp0lwilkcxsk";
+ type = "gem";
+ };
+ version = "1.5.2";
+ };
+ jekyll-watch = {
+ dependencies = [ "listen" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1qd7hy1kl87fl7l0frw5qbn22x7ayfzlv9a5ca1m59g0ym1ysi5w";
+ type = "gem";
+ };
+ version = "2.2.1";
+ };
+ kramdown = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1n1c4jmrh5ig8iv1rw81s4mw4xsp4v97hvf8zkigv4hn5h542qjq";
+ type = "gem";
+ };
+ version = "1.17.0";
+ };
+ liquid = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "0zhg5ha8zy8zw9qr3fl4wgk4r5940n4128xm2pn4shpbzdbsj5by";
+ type = "gem";
+ };
+ version = "4.0.3";
+ };
+ listen = {
+ dependencies = [ "rb-fsevent" "rb-inotify" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1w923wmdi3gyiky0asqdw5dnh3gcjs2xyn82ajvjfjwh6sn0clgi";
+ type = "gem";
+ };
+ version = "3.2.1";
+ };
+ mercenary = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "10la0xw82dh5mqab8bl0dk21zld63cqxb1g16fk8cb39ylc4n21a";
+ type = "gem";
+ };
+ version = "0.3.6";
+ };
+ pathutil = {
+ dependencies = [ "forwardable-extended" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "12fm93ljw9fbxmv2krki5k5wkvr7560qy8p4spvb9jiiaqv78fz4";
+ type = "gem";
+ };
+ version = "0.16.2";
+ };
+ public_suffix = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "0g9ds2ffzljl6jjmkjffwxc1z6lh5nkqqmhhkxjk71q5ggv0rkpm";
+ type = "gem";
+ };
+ version = "3.1.1";
+ };
+ rb-fsevent = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1lm1k7wpz69jx7jrc92w3ggczkjyjbfziq5mg62vjnxmzs383xx8";
+ type = "gem";
+ };
+ version = "0.10.3";
+ };
+ rb-inotify = {
+ dependencies = [ "ffi" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1jm76h8f8hji38z3ggf4bzi8vps6p7sagxn3ab57qc0xyga64005";
+ type = "gem";
+ };
+ version = "0.10.1";
+ };
+ rouge = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "1y90nx9ph9adnrpcsvs2adca2l3dyz8am2d2kzxkwd3a086ji7aw";
+ type = "gem";
+ };
+ version = "3.13.0";
+ };
+ safe_yaml = {
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "0j7qv63p0vqcd838i2iy2f76c3dgwzkiz1d1xkg7n0pbnxj2vb56";
+ type = "gem";
+ };
+ version = "1.0.5";
+ };
+ sass = {
+ dependencies = [ "sass-listen" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "0p95lhs0jza5l7hqci1isflxakz83xkj97lkvxl919is0lwhv2w0";
+ type = "gem";
+ };
+ version = "3.7.4";
+ };
+ sass-listen = {
+ dependencies = [ "rb-fsevent" "rb-inotify" ];
+ groups = [ "default" ];
+ platforms = [ ];
+ source = {
+ remotes = [ "https://rubygems.org" ];
+ sha256 = "0xw3q46cmahkgyldid5hwyiwacp590zj2vmswlll68ryvmvcp7df";
+ type = "gem";
+ };
+ version = "4.0.0";
+ };
+}
diff --git a/hakyll/Main.hs b/hakyll/Main.hs
deleted file mode 100644
index f2ed24e..0000000
--- a/hakyll/Main.hs
+++ /dev/null
@@ -1,125 +0,0 @@
-{-# LANGUAGE OverloadedStrings #-}
-import System.FilePath.Posix
-import Hakyll
-
-
-main :: IO ()
-main = hakyll $ do
- match "favicon.ico" $ do
- route idRoute
- compile copyFileCompiler
- match "css/*" $ do
- route idRoute
- compile compressCssCompiler
- match "images/*" $ do
- route idRoute
- compile copyFileCompiler
- match "static/**/*" $ do
- route idRoute
- compile copyFileCompiler
- match "fonts/*" $ do
- route idRoute
- compile copyFileCompiler
- match "root/*" $ do
- route $ dropPrefix "root/"
- compile copyFileCompiler
-
- match "pastebin/*" $ do
- route $ setExtension "html"
- compile $ pandocCompiler
- >>= loadAndApplyTemplate "templates/pastebin.html" pastebinCtx
- >>= saveSnapshot "content"
- >>= loadAndApplyTemplate "templates/default.html" pastebinCtx
- >>= relativizeUrls
-
- match "posts/*" $ do
- route $ setExtension "html"
- `composeRoutes` dropPrefix "posts/"
- `composeRoutes` dateFolders
- compile $ pandocCompiler
- >>= loadAndApplyTemplate "templates/post.html" postCtx
- >>= saveSnapshot "content"
- >>= loadAndApplyTemplate "templates/default.html" postCtx
- >>= relativizeUrls
-
- match "pages/*" $ do
- route $ setExtension "html"
- `composeRoutes` dropPrefix "pages/"
- compile $ pandocCompiler
- >>= loadAndApplyTemplate "templates/default.html" defaultContext
- >>= relativizeUrls
-
- match "pastebins.html" $ do
- route idRoute
- compile $ do
- pastebins <- recentFirst =<< loadAll "pastebin/*"
- let pastebinCtx =
- listField "pastebins" pastebinCtx (return pastebins) `mappend`
- defaultContext
-
- getResourceBody
- >>= applyAsTemplate pastebinCtx
- >>= loadAndApplyTemplate "templates/default.html" pastebinCtx
- >>= relativizeUrls
-
- match "index.html" $ do
- route idRoute
- compile $ do
- posts <- recentFirst =<< loadAll "posts/*"
- let indexCtx =
- listField "posts" postCtx (return posts) `mappend`
- constField "title" "Home" `mappend`
- defaultContext
-
- getResourceBody
- >>= applyAsTemplate indexCtx
- >>= loadAndApplyTemplate "templates/default.html" indexCtx
- >>= relativizeUrls
-
- create ["atom.xml", "feed.xml", "feed.atom"] $ do
- route idRoute
- compile $ do
- loadAllSnapshots "posts/*" "content"
- >>= recentFirst
- >>= renderAtom feedConfiguration feedCtx
-
- create ["rss.xml"] $ do
- route idRoute
- compile $ do
- let feedCtx = postCtx `mappend`
- constField "description" "This is the post description"
-
- posts <- recentFirst =<< loadAll "posts/*"
- renderRss feedConfiguration feedCtx posts
-
- match "templates/*" $ compile templateBodyCompiler
-
-
-postCtx :: Context String
-postCtx =
- dateField "date" "%B %e, %Y" `mappend`
- defaultContext
-
-pastebinCtx :: Context String
-pastebinCtx = postCtx
-
-feedCtx :: Context String
-feedCtx =
- bodyField "description" `mappend`
- defaultContext
-
-feedConfiguration :: FeedConfiguration
-feedConfiguration = FeedConfiguration
- { feedTitle = "EuAndreh"
- , feedDescription = "EuAndreh's blog"
- , feedAuthorName = "EuAndreh"
- , feedAuthorEmail = "eu@euandre.org"
- , feedRoot = "https://euandre.org"
- }
-
-dropPrefix :: String -> Routes
-dropPrefix prefix = gsubRoute prefix $ const ""
-
-dateFolders :: Routes
-dateFolders =
- gsubRoute "[0-9]{4}-[0-9]{2}-[0-9]{2}-" $ replaceAll "-" (const "/")
diff --git a/site/images/atom.svg b/images/atom.svg
index 8d9a80b..8d9a80b 100644
--- a/site/images/atom.svg
+++ b/images/atom.svg
diff --git a/site/images/envelope.svg b/images/envelope.svg
index aa58507..aa58507 100644
--- a/site/images/envelope.svg
+++ b/images/envelope.svg
diff --git a/site/images/lock.svg b/images/lock.svg
index e8b33a2..e8b33a2 100644
--- a/site/images/lock.svg
+++ b/images/lock.svg
diff --git a/index.md b/index.md
new file mode 100644
index 0000000..28cca27
--- /dev/null
+++ b/index.md
@@ -0,0 +1,5 @@
+---
+layout: index
+lang: en
+ref: index
+---
diff --git a/pastebin/skeleton.org b/pastebin/skeleton.org
deleted file mode 100644
index d116991..0000000
--- a/pastebin/skeleton.org
+++ /dev/null
@@ -1,7 +0,0 @@
----
-title: $title
-date: $date
----
-#+BEGIN_SRC $lang -n
-FIXME
-#+END_SRC
diff --git a/pastebin/website-pastebin b/pastebin/website-pastebin
deleted file mode 100755
index 9aa4956..0000000
--- a/pastebin/website-pastebin
+++ /dev/null
@@ -1,121 +0,0 @@
-#!/usr/bin/env perl
-
-=head1 NAME
-
-website pastebin - Create new pastebins from the org-mode template.
-
-=head1 SYNOPSIS
-
-website pastebin [options]
-
- Options:
- --help Show the manpage.
- --title Title of the pastebin.
-
-=head1 OPTIONS
-
-=over 4
-
-=item B<-h, --help>
-
-Prints the manual page and exits.
-
-=item B<-t, --title>
-
-The title of the pastebin. This string will be slugified and the output is used to create the pastebin file name. Special characters are simplified or discarded.
-
-=back
-
-=head1 DESCRIPTION
-
-B<website pastebin> creates pastebin org-mode text files that are later processed to produce HTML to be deployed statically.
-
-=cut
-
-use strict;
-use warnings;
-use Getopt::Long qw(:config no_ignore_case bundling);
-use Pod::Usage qw(pod2usage);
-use Unicode::Normalize qw(NFKD);
-use File::Basename qw(dirname);
-use Term::ANSIColor;
-
-my $help = 0;
-my $title = '';
-my $lang = '';
-my $test = 0;
-GetOptions(
- 'help|h|?' => \$help,
- 'title|t=s' => \$title,
- 'lang|l=s' => \$lang,
- 'test|?' => \$test
-) or pod2usage(-verbose => 1, -exitval => 2);
-pod2usage(
- -verbose => 2,
- -exitval => 0
-) if $help;
-pod2usage(
- -verbose => 1,
- -exitval => 2,
- -message => colored("Missing required --title argument.", "red")
-) if !$title && !$test;
-pod2usage(
- -verbose =>1,
- -exitval => 2,
- -message => colored("Missing required --lang argument.", "red")
-) if !$lang && !$test;
-
-# Taken from:
-# https://stackoverflow.com/a/4009519
-sub slugify {
- my $input = shift;
- $input = NFKD($input); # Normalize (decompose) the Unicode string
- $input =~ tr/\000-\177//cd; # Strip non-ASCII characters (>127)
- $input =~ s/[^\w\s-]//g; # Remove all characters that are not word characters (includes _), spaces, or hyphens
- $input =~ s/^\s+|\s+$//g; # Trim whitespace from both ends
- $input = lc($input);
- $input =~ s/[-\s]+/-/g; # Replace all occurrences of spaces and hyphens with a single hyphen
- return $input;
-}
-
-if ($test) {
- eval "use Test::More tests => 4"; die $@ if $@;
- is(slugify("My Custom Title String"), "my-custom-title-string");
- is(slugify("String with áccents and sym?bol-s."), "string-with-accents-and-symbol-s");
- is(slugify("unicode-↓æđ-chars"), "unicode-aaa-chars");
- is(slugify(" spaces and line
-break"), "spaces-and-line-break");
- done_testing();
- exit;
-}
-
-our $dirname = dirname(__FILE__);
-our $in = "$dirname/skeleton.org";
-our $out;
-my $date = `date +"%Y-%m-%d"`;
-chomp $date;
-my %ENV = (title => $title, date => $date, lang => $lang);
-
-# Derived from both:
-# https://unix.stackexchange.com/a/294836
-# https://stackoverflow.com/a/47664214
-sub envsubst {
- open(IN, '<'.$in) or die $!;
- open(OUT, '>'.$out) or die $!;
- while(<IN>) {
- $_ =~ s/\$([_a-zA-Z]+)/$ENV{$1}/g;
- print OUT $_;
- }
- close(IN);
- close(OUT);
-}
-
-my $slug = slugify($title);
-$out = `realpath $dirname/../site/pastebin/$slug.org`;
-chomp $out;
-
-envsubst();
-
-`cat $out | vipe | sponge $out`;
-
-print "$out\n";
diff --git a/pastebins.md b/pastebins.md
new file mode 100644
index 0000000..24083f9
--- /dev/null
+++ b/pastebins.md
@@ -0,0 +1,5 @@
+---
+layout: pastebins-listing
+lang: en
+ref: pastebins
+---
diff --git a/pt/index.md b/pt/index.md
new file mode 100644
index 0000000..2285152
--- /dev/null
+++ b/pt/index.md
@@ -0,0 +1,5 @@
+---
+layout: index
+lang: pt
+ref: index
+---
diff --git a/site/root/public-key.txt b/public-key.txt
index a21283d..a21283d 100644
--- a/site/root/public-key.txt
+++ b/public-key.txt
diff --git a/rss.xml b/rss.xml
new file mode 120000
index 0000000..5291931
--- /dev/null
+++ b/rss.xml
@@ -0,0 +1 @@
+feed.en.atom \ No newline at end of file
diff --git a/scripts/assert-content.sh b/scripts/assert-content.sh
new file mode 100755
index 0000000..f9ea345
--- /dev/null
+++ b/scripts/assert-content.sh
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+set -Eeuo pipefail
+
+JSON="${1:-}"
+[[ -z "${JSON}" ]] && {
+ echo 'Missing input JSON file.'
+ exit 2
+}
+
+contains-element() {
+ local e match="$1"
+ shift
+ for e; do [[ "$e" == "$match" ]] && return 0; done
+ return 1
+}
+
+fail-attr() {
+ ATTRIBUTE="${1}"
+ URL="${2}"
+ red "Undefined '${ATTRIBUTE}' for ${URL}." >&2
+ exit 1
+}
+
+get-lang() {
+ echo "${1}" | base64 --decode | jq -r .lang
+}
+
+get-ref() {
+ echo "${1}" | base64 --decode | jq -r .ref
+}
+
+get-url() {
+ echo "${1}" | base64 --decode | jq -r .url
+}
+
+get-date() {
+ echo "${1}" | base64 --decode | jq -r .date
+}
+
+LANGS=(en pt)
+assert-frontmatter() {
+ F="${1}"
+ LANG="$(get-lang "$F")"
+ REF="$(get-ref "$F")"
+ URL="$(get-url "$F")"
+ [[ -z "${LANG}" ]] && fail-attr 'lang' "${URL}"
+ [[ -z "${REF}" ]] && fail-attr 'ref' "${URL}"
+ if ! contains-element "${LANG}" "${LANGS[@]}"; then
+ red "Invalid lang '${LANG}' in ${URL}." >&2
+ exit 1
+ fi
+}
+
+echo Linting posts... >&2
+for post in $(jq -r '.posts[] | @base64' "${JSON}"); do
+ assert-frontmatter "$post"
+ DATE="$(get-date "$post" | awk '{print $1}')"
+ URL="$(basename "$(get-url "$post")")"
+ FILE="_posts/${DATE}-${URL%.html}.md"
+
+ [[ -f "${FILE}" ]] || {
+ red "date/filename mismatch: '${FILE}' does not exist."
+ exit 1
+ }
+done
+
+echo Linting pages... >&2
+IGNORED_PAGES=(/site.json /sitemap.xml /rss.xml /atom.xml /feed.atom /feed.xml /feed.all.atom /feed.en.atom /feed.pt.atom)
+for page in $(jq -r '.pages[] | @base64' "${JSON}"); do
+ URL="$(get-url "$page")"
+ if ! contains-element "${URL}" "${IGNORED_PAGES[@]}"; then
+ assert-frontmatter "${page}"
+ fi
+done
+
+echo Linting pastebins... >&2
+for pastebin in $(jq -r '.pastebins[] | @base64' "${JSON}"); do
+ assert-frontmatter "$pastebin"
+done
+
+echo Asserting unique refs... >&2
+KNOWN_IDS=()
+for page in $(jq -r '.pages[] | @base64' "${JSON}"); do
+ URL="$(get-url "$page")"
+ if ! contains-element "${URL}" "${IGNORED_PAGES[@]}"; then
+ LANG="$(get-lang "$page")"
+ REF="$(get-ref "$page")"
+ ID="${LANG}:${REF}"
+
+ if contains-element "${ID}" "${KNOWN_IDS[@]}"; then
+ red "Duplicated lang:ref match: '${ID}'." >&2
+ exit 1
+ fi
+
+ KNOWN_IDS+=("${ID}") # printf '%s\n' "${KNOWN_IDS[@]}"
+ fi
+done
+
+echo Done. >&2
diff --git a/scripts/bundix-gen.sh b/scripts/bundix-gen.sh
new file mode 100755
index 0000000..1dd303e
--- /dev/null
+++ b/scripts/bundix-gen.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+set -Eeuo pipefail
+
+bundler package --no-install --path vendor
+bundix
diff --git a/docs/env.sh b/scripts/publish-env.sh
index d5e8be2..d5e8be2 100644
--- a/docs/env.sh
+++ b/scripts/publish-env.sh
diff --git a/site.json b/site.json
new file mode 100644
index 0000000..411a02b
--- /dev/null
+++ b/site.json
@@ -0,0 +1,39 @@
+---
+---
+{
+ "posts": [
+ {% for post in site.posts %}
+ {
+ "title": "{{ post.title | smartify }}",
+ "date": "{{ post.date }}",
+ "url": "{{ post.url }}",
+ "lang": "{{ post.lang }}",
+ "ref": "{{ post.ref }}",
+ "content": {{ post.content | strip_html | jsonify }}
+ }{% unless forloop.last %},{% endunless %}
+ {% endfor %}
+ ],
+ "pages": [
+ {% for page in site.pages %}
+ {
+ "title": "{{ page.title | smartify }}",
+ "url": "{{ page.url }}",
+ "lang": "{{ page.lang }}",
+ "ref": "{{ page.ref }}",
+ "content": {{ page.content | strip_html | jsonify }}
+ }{% unless forloop.last %},{% endunless %}
+ {% endfor %}
+ ],
+ "pastebins": [
+ {% for pastebin in site.pastebins %}
+ {
+ "title": "{{ pastebin.title | smartify }}",
+ "date": "{{ post.date }}",
+ "url": "{{ pastebin.url }}",
+ "lang": "{{ pastebin.lang }}",
+ "ref": "placeholder-ref",
+ "content": {{ pastebin.content | strip_html | jsonify }}
+ }{% unless forloop.last %},{% endunless %}
+ {% endfor %}
+ ]
+}
diff --git a/site/index.html b/site/index.html
deleted file mode 100644
index 8c3db94..0000000
--- a/site/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
----
-title: Home
----
-
-<h2>Recent posts</h2>
-$partial("templates/post-list.html")$
diff --git a/site/pages/about.org b/site/pages/about.org
deleted file mode 100644
index b7dd87a..0000000
--- a/site/pages/about.org
+++ /dev/null
@@ -1,14 +0,0 @@
----
-title: About
----
-Hi, I'm EuAndreh. I write software and occasionally music. You can find my contact information in the footer of this page, or mail my [[mailto:~euandreh/public-inbox@lists.sr.ht][public inbox]].
-
-This is my personal website where I write articles, publish software and more related work.
-
-Below you can find some interesting projects of mine.
-** Software projects
-*** [[https://euandre.org/cool-read-macros/][cool-read-macros]]
-*** [[https://euandre.org/cl-bson/][cl-BSON]]
-[[https://euandre.org/cl-intbytes][cl-intbytes]] as a side-effect.
-** Other things
-[[./pastebins.html][Pastebin listing]].
diff --git a/site/pastebins.html b/site/pastebins.html
deleted file mode 100644
index 0afa928..0000000
--- a/site/pastebins.html
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Pastebins listing
----
-
-$partial("templates/pastebin-list.html")$
diff --git a/site/posts/2018-08-01-verifying-npm-ci-reproducibility.org b/site/posts/2018-08-01-verifying-npm-ci-reproducibility.org
deleted file mode 100644
index 7d19632..0000000
--- a/site/posts/2018-08-01-verifying-npm-ci-reproducibility.org
+++ /dev/null
@@ -1,85 +0,0 @@
----
-title: Verifying <code>npm ci</code> reproducibility
-date: 2018-08-01
----
-When [[https://blog.npmjs.org/post/161081169345/v500][npm@5]] came bringing [[https://docs.npmjs.com/files/package-locks][package-locks]] with it, I was confused about the benefits it provided, since running =npm install= more than once could resolve all the dependencies again and yield yet another fresh =package-lock.json= file. The message saying "you should add this file to version control" left me hesitant on what to do[fn:npm-install].
-
-However the [[https://blog.npmjs.org/post/171556855892/introducing-npm-ci-for-faster-more-reliable][addition of =npm ci=]] filled this gap: it's a stricter variation of =npm install= which guarantees that "[[https://docs.npmjs.com/files/package-lock.json][subsequent installs are able to generate identical trees]]". But are they really identical? I could see that I didn't have the same problems of different installation outputs, but I didn't know for *sure* if it was really identical.
-** Computing the hash of a directory's content
-I quickly searched for a way to check for the hash signature of an entire directory tree, but I couldn't find one. I've made a poor man's [[https://en.wikipedia.org/wiki/Merkle_tree][Merkle tree]] implementation using =sha256sum= and a few piped commands at the terminal:
-#+BEGIN_SRC bash -n
- merkle-tree () {
- dirname="${1-.}"
- pushd "$dirname"
- find . -type f | \
- sort | \
- xargs -I{} sha256sum "{}" | \
- sha256sum | \
- awk '{print $1}'
- popd
- }
-#+END_SRC
-Going through it line by line:
-- #1 we define a Bash function called =merkle-tree=;
-- #2 it accepts a single argument: the directory to compute the merkle tree from. If nothing is given, it runs on the current directory (=.=);
-- #3 we go to the directory, so we don't get different prefixes in =find='s output (like =../a/b=);
-- #4 we get all files from the directory tree. Since we're using =sha256sum= to compute the hash of the file contents, we need to filter out folders from it;
-- #5 we need to sort the output, since different file systems and =find= implementations may return files in different orders;
-- #6 we use =xargs= to compute the hash of each file individually through =sha256sum=. Since a file may contain spaces we need to escape it with quotes;
-- #7 we compute the hash of the combined hashes. Since =sha256sum= output is formatted like =<hash> <filename>=, it produces a different final hash if a file ever changes name without changing its content;
-- #8 we get the final hash output, excluding the =<filename>= (which is =-= in this case, aka =stdin=).
-*** Positive points:
-1. ignore timestamp: running more than once on different installations yields the same hash;
-2. the name of the file is included in the final hash computation.
-*** Limitations:
-1. it ignores empty folders from the hash computation;
-2. the implementation's only goal is to represent using a digest whether the content of a given directory is the same or not. Leaf presence checking is obviously missing from it.
-*** Testing locally with sample data
-#+BEGIN_SRC bash -n
- mkdir /tmp/merkle-tree-test/
- cd /tmp/merkle-tree-test/
- mkdir -p a/b/ a/c/ d/
- echo "one" > a/b/one.txt
- echo "two" > a/c/two.txt
- echo "three" > d/three.txt
- merkle-tree . # output is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
- merkle-tree . # output still is be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
- echo "four" > d/four.txt
- merkle-tree . # output is now b5464b958969ed81815641ace96b33f7fd52c20db71a7fccc45a36b3a2ae4d4c
- rm d/four.txt
- merkle-tree . # output back to be343bb01fe00aeb8fef14a3e16b1c3d1dccbf86d7e41b4753e6ccb7dc3a57c3
- echo "hidden-five" > a/b/one.txt
- merkle-tree . # output changed 471fae0d074947e4955e9ac53e95b56e4bc08d263d89d82003fb58a0ffba66f5
-#+END_SRC
-It seems to work for this simple test case.
-
-You can try copying and pasting it to verify the hash signatures.
-** Using =merkle-tree= to check the output of =npm ci=
-/I've done all of the following using Node.js v8.11.3 and npm@6.1.0./
-
-In this test case I'll take the main repo of [[https://lernajs.io/][Lerna]][fn:js-repos]:
-#+BEGIN_SRC bash -n
- cd /tmp/
- git clone https://github.com/lerna/lerna.git
- cd lerna/
- git checkout 57ff865c0839df75dbe1974971d7310f235e1109
- npm ci
- merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
- rm -rf node_modules/
- npm ci
- merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
- npm ci # test if it also works with an existing node_modules/ folder
- merkle-tree node_modules/ # outputs 11e218c4ac32fac8a9607a8da644fe870a25c99821167d21b607af45699afafa
-#+END_SRC
-Good job =npm ci= :)
-
-#6 and #9 take some time to run (21 seconds on my machine), but this specific use case isn't performance sensitive. The slowest step is computing the hash of each individual file.
-** Conclusion
-=npm ci= really "generates identical trees".
-
-I'm not aware of any other existing solution for verifying the hash signature of a directory. If you know any I'd [[mailto:eu@euandre.org][like to know]].
-** /Edit/
-2019/05/22: Fix spelling.
-
-[fn:npm-install] The [[https://docs.npmjs.com/cli/install#description][documentation]] claims =npm install= is driven by the existing =package-lock.json=, but that's actually [[https://github.com/npm/npm/issues/17979#issuecomment-332701215][a little bit tricky]].
-[fn:js-repos] Finding a big known repo that actually committed the =package-lock.json= file was harder than I expected.
diff --git a/site/posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.org b/site/posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.org
deleted file mode 100644
index b4f4e81..0000000
--- a/site/posts/2018-12-21-using-youtube-dl-to-manage-youtube-subscriptions.org
+++ /dev/null
@@ -1,145 +0,0 @@
----
-title: Using <code>youtube-dl</code> to manage YouTube subscriptions
-date: 2018-12-21
----
-I've recently read the [[https://www.reddit.com/r/DataHoarder/comments/9sg8q5/i_built_a_selfhosted_youtube_subscription_manager/][announcement]] of a very nice [[https://github.com/chibicitiberiu/ytsm][self-hosted YouTube subscription manager]]. I haven't used YouTube's built-in subscriptions for a while now, and haven't missed it at all. When I saw the announcement, I considered writing about the solution I've built on top of [[https://youtube-dl.org/][youtube-dl]].
-** Background: the problem with YouTube
-In many ways, I agree with [[https://staltz.com/what-happens-when-you-block-internet-giants.html][André Staltz's view on data ownership and privacy]]:
-#+BEGIN_QUOTE
-I started with the basic premise that “I want to be in control of my data”. Sometimes that meant choosing when to interact with an internet giant and how much I feel like revealing to them. Most of times it meant not interacting with them at all. I don’t want to let them be in full control of how much they can know about me. I don’t want to be in autopilot mode.
-(...)
-Which leads us to YouTube. While I was able to find alternatives to Gmail (Fastmail), Calendar (Fastmail), Translate (Yandex Translate), etc, YouTube remains as the most indispensable Google-owned web service. It is really really hard to avoid consuming YouTube content. It was probably the smartest startup acquisition ever. My privacy-oriented alternative is to watch YouTube videos through Tor, which is technically feasible but not polite to use the Tor bandwidth for these purposes. I’m still scratching my head with this issue.
-#+END_QUOTE
-Even though I don't use most alternative services he mentions, I do watch videos from YouTube. But I also feel uncomfortable logging in to YouTube with a Google account, watching videos, creating playlists and similar things.
-
-Using the mobile app is worse: you can't even block ads in there. You're in less control on what you share with YouTube and Google.
-** youtube-dl
-youtube-dl is a command-line tool for downloading videos, from YouTube and [[https://rg3.github.io/youtube-dl/supportedsites.html][many other sites]]:
-#+BEGIN_SRC shell
-$ youtube-dl https://www.youtube.com/watch?v=rnMYZnY3uLA
-[youtube] rnMYZnY3uLA: Downloading webpage
-[youtube] rnMYZnY3uLA: Downloading video info webpage
-[download] Destination: A Origem da Vida _ Nerdologia-rnMYZnY3uLA.mp4
-[download] 100% of 32.11MiB in 00:12
-#+END_SRC
-It can be used to download individual videos as showed above, but it also has some interesting flags that we can use:
-- =--output=: use a custom template to create the name of the downloaded file;
-- =--download-archive=: use a text file for recording and remembering which videos were already downloaded;
-- =--prefer-free-formats=: prefer free video formats, like =webm=, =ogv= and Matroska =mkv=;
-- =--playlist-end=: how many videos to download from a "playlist" (a channel, a user or an actual playlist);
-- =--write-description=: write the video description to a =.description= file, useful for accessing links and extra content.
-
-Putting it all together:
-#+BEGIN_SRC shell
-$ youtube-dl "https://www.youtube.com/channel/UClu474HMt895mVxZdlIHXEA" \
- --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
- --prefer-free-formats \
- --playlist-end 20 \
- --write-description \
- --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
-#+END_SRC
-This will download the latest 20 videos from the selected channel, and write down the video IDs in the =youtube-dl-seen.conf= file. Running it immediately after one more time won't have any effect.
-
-If the channel posts one more video, running the same command again will download only the last video, since the other 19 were already downloaded.
-
-With this basic setup you have a minimal subscription system at work, and you can create some functions to help you manage that:
-#+BEGIN_SRC shell
-#!/bin/sh
-
-export DEFAULT_PLAYLIST_END=15
-
-download() {
- youtube-dl "$1" \
- --download-archive ~/Nextcloud/cache/youtube-dl-seen.conf \
- --prefer-free-formats \
- --playlist-end $2 \
- --write-description \
- --output "~/Downloads/yt-dl/%(uploader)s/%(upload_date)s - %(title)s.%(ext)s"
-}
-export -f download
-
-
-download_user() {
- download "https://www.youtube.com/user/$1" ${2-$DEFAULT_PLAYLIST_END}
-}
-export -f download_user
-
-
-download_channel() {
- download "https://www.youtube.com/channel/$1" ${2-$DEFAULT_PLAYLIST_END}
-}
-export -f download_channel
-
-
-download_playlist() {
- download "https://www.youtube.com/playlist?list=$1" ${2-$DEFAULT_PLAYLIST_END}
-}
-export -f download_playlist
-#+END_SRC
-With these functions, you now can have a subscription fetching script to download the latest videos from your favorite channels:
-#+BEGIN_SRC shell
-#!/bin/sh
-
-download_user ClojureTV 15
-download_channel "UCmEClzCBDx-vrt0GuSKBd9g" 100
-download_playlist "PLqG7fA3EaMRPzL5jzd83tWcjCUH9ZUsbX" 15
-#+END_SRC
-Now, whenever you want to watch the latest videos, just run the above script and you'll get all of them in your local machine.
-** Tradeoffs
-*** I've made it for myself, with my use case in mind
-**** Offline
-My internet speed is somewhat reasonable[fn:reasonable-internet], but it is really unstable. Either at work or at home, it's not uncommon to lose internet access for 2 minutes 3~5 times every day, and stay completely offline for a couple of hours once every week.
-
-Working through the hassle of keeping a playlist on disk has paid off many, many times. Sometimes I don't even notice when the connection drops for some minutes, because I'm watching a video and working on some document, all on my local computer.
-
-There's also no quality adjustment for YouTube's web player, I always pick the higher quality and it doesn't change during the video. For some types of content, like a podcast with some tiny visual resources, this doesn't change much. For other types of content, like a keynote presentation with text written on the slides, watching on 144p isn't really an option.
-
-If the internet connection drops during the video download, youtube-dl will resume from where it stopped.
-
-This is an offline first benefit that I really like, and works well for me.
-**** Sync the "seen" file
-I already have a running instance of Nextcloud, so just dumping the =youtube-dl-seen.conf= file inside Nextcloud was a no-brainer.
-
-You could try putting it in a dedicated git repository, and wrap the script with an autocommit after every run. If you ever had a merge conflict, you'd simply accept all changes and then run:
-#+BEGIN_SRC shell
-$ uniq youtube-dl-seen.conf > youtube-dl-seen.conf
-#+END_SRC
-to tidy up the file.
-**** Doesn't work on mobile
-My primary device that I use everyday is my laptop, not my phone. It works well for me this way.
-
-Also, it's harder to add ad-blockers to mobile phones, and most mobile software still depends on Google's and Apple's blessing.
-
-If you wish, you can sync the videos to the SD card periodically, but that's a bit of extra manual work.
-*** The Good
-**** Better privacy
-We don't even have to configure the ad-blocker to keep ads and trackers away!
-
-YouTube still has your IP address, so using a VPN is always a good idea. However, a timing analysis would be able to identify you (considering the current implementation).
-**** No need to self-host
-There's no host that needs maintenance. Everything runs locally.
-
-As long as you keep youtube-dl itself up to date and sync your "seen" file, there's little extra work to do.
-**** Track your subscriptions with git
-After creating a =subscriptions.sh= executable that downloads all the videos, you can add it to git and use it to track metadata about your subscriptions.
-*** The Bad
-**** Maximum playlist size is your disk size
-This is a good thing for getting a realistic view on your actual "watch later" list. However I've run out of disk space many times, and now I need to be more aware of how much is left.
-*** The Ugly
-We can only avoid all the bad parts of YouTube with youtube-dl as long as YouTube keeps the videos public and programmatically accessible. If YouTube ever blocks that we'd lose the ability to consume content this way, but also lose confidence in considering YouTube a healthy repository of videos on the internet.
-** Going beyond
-Since you're running everything locally, here are some possibilities to be explored:
-*** A playlist that is too long for being downloaded all at once
-You can wrap the =download_playlist= function (let's call the wrapper =inc_download=) and instead of passing it a fixed number to the =--playlist-end= parameter, you can store the =$n= in a folder (something like =$HOME/.yt-db/$PLAYLIST_ID=) and increment it by =$step= every time you run =inc_download=.
-
-This way you can incrementally download videos from a huge playlist without filling your disk with gigabytes of content all at once.
-*** Multiple computer scenario
-The =download_playlist= function could be aware of the specific machine that it is running on and apply specific policies depending on the machine: always download everything; only download videos that aren't present anywhere else; etc.
-** Conclusion
-youtube-dl is a great tool to keep at hand. It covers a really large range of video websites and works robustly.
-
-Feel free to copy and modify this code, and [[mailto:eu@euandre.org][send me]] suggestions of improvements or related content.
-** /Edit/
-2019/05/22: Fix spelling.
-
-[fn:reasonable-internet] Considering how expensive it is and the many ways it could be better, but also how much it has improved over the last years, I say it's reasonable.
diff --git a/site/posts/2019-06-02-stateless-os.org b/site/posts/2019-06-02-stateless-os.org
deleted file mode 100644
index 84659d8..0000000
--- a/site/posts/2019-06-02-stateless-os.org
+++ /dev/null
@@ -1,60 +0,0 @@
----
-title: Using NixOS as a stateless workstation
-date: 2019-06-02
----
-Last week[fn:last-week] I changed back to an old[fn:old-laptop] Samsung laptop, and installed [[https://nixos.org/][NixOS]] on it.
-
-After using NixOS on another laptop for around two years, I wanted to verify how reproducible my desktop environment was, and how far NixOS can actually go in recreating my whole OS from my configuration files and personal data. I gravitated towards NixOS after trying (and failing) to create an =install.sh= script that would imperatively install and configure my whole OS using apt-get. When I found a GNU/Linux distribution that was built on top of the idea of declaratively specifying the whole OS I was automatically convinced[fn:convinced].
-
-I was impressed. Even though I've been experiencing the benefits of Nix isolation daily, I always felt skeptical that something would be missing, because the devil is always in the details. But the result was much better than expected!
-
-There were only 2 missing configurations:
-1. tap-to-click on the touchpad wasn't enabled by default;
-2. the default theme from the gnome-terminal is "Black on white" instead of "White on black".
-
-That's all.
-
-I haven't checked if I can configure those in NixOS GNOME module, but I guess both are scriptable and could be set in a fictional =setup.sh= run.
-
-This makes me really happy, actually. More happy than I anticipated.
-
-Having such a powerful declarative OS makes me feel like my data is the really important stuff (as it should be), and I can interact with it on any workstation. All I need is an internet connection and a few hours to download everything. It feels like my physical workstation and the installed OS are serving me and my data, instead of me feeling as hostage to the specific OS configuration at the moment. Having a few backup copies of everything important extends such peacefulness.
-
-After this positive experience with recreating my OS from simple Nix expressions, I started to wonder how far I could go with this, and started considering other areas of improvements:
-*** First run on a fresh NixOS installation
-Right now the initial setup relies on non-declarative manual tasks, like decrypting some credentials, or manually downloading *this* git repository with specific configurations before *that* one.
-
-I wonder what some areas of improvements are on this topic, and if investing on it is worth it (both time-wise and happiness-wise).
-*** Emacs
-Right now I'm using the [[http://spacemacs.org/][Spacemacs]], which is a community package curation and configuration on top of [[https://www.gnu.org/software/emacs/][Emacs]].
-
-Spacemacs does support the notion of [[http://spacemacs.org/doc/LAYERS.html][layers]], which you can declaratively specify and let Spacemacs do the rest.
-
-However this solution isn't nearly as robust as Nix: being purely functional, Nix does describe everything required to build a derivation, and knows how to do so. Spacemacs is closer to more traditional package managers: even though the layers list is declarative, the installation is still very much imperative. I've had trouble with Spacemacs not behaving the same on different computers, both with identical configurations, only brought back to convergence again after a =git clean -fdx= inside =~/.emacs.d/=.
-
-The ideal solution would be managing Emacs packages with Nix itself. After a quick search I found that [[https://nixos.org/nixos/manual/index.html#module-services-emacs-adding-packages][there is support for Emacs packages in Nix]]. So far I was only aware of [[https://www.gnu.org/software/guix/manual/en/html_node/Application-Setup.html#Emacs-Packages][Guix support for Emacs packages]].
-
-This isn't a trivial change because Spacemacs does include extra curation and configuration on top of Emacs packages. I'm not sure the best way to improve this right now.
-*** myrepos
-I'm using [[https://myrepos.branchable.com/][myrepos]] to manage all my git repositories, and the general rule I apply is to add any repository specific configuration in myrepos' =checkout= phase:
-#+BEGIN_SRC shell
-# sample ~/.mrconfig file snippet
-[dev/guix/guix]
-checkout =
- git clone https://git.savannah.gnu.org/git/guix.git guix
- cd guix/
- git config sendemail.to guix-patches@gnu.org
-#+END_SRC
-This way when I clone this repo again the email sending is already pre-configured.
-
-This works well enough, but the solution is too imperative, and my =checkout= phases tend to become brittle over time if not enough care is taken.
-*** GNU Stow
-For my home profile and personal configuration I already have a few dozens of symlinks that I manage manually. This has worked so far, but the solution is sometimes fragile and [[https://git.sr.ht/~euandreh/dotfiles/tree/316939aa215181b1d22b69e94241eef757add98d/bash/symlinks.sh#L14-75][not declarative at all]]. I wonder if something like [[https://www.gnu.org/software/stow/][GNU Stow]] can help me simplify this.
-** Conclusion
-I'm really satisfied with NixOS, and I intend to keep using it. If what I've said interests you, maybe try tinkering with the [[https://nixos.org/nix/][Nix package manager]] (not the whole NixOS) on your current distribution (it can live alongside any other package manager).
-
-If you have experience with declarative Emacs package managements, GNU Stow or any similar tool, etc., [[mailto:eu@euandre.org][I'd like some tips]]. If you don't have any experience at all, [[mailto:eu@euandre.org][I'd still love to hear from you]].
-
-[fn:last-week] "Last week" as of the start of this writing, so around the end of May 2019.
-[fn:old-laptop] I was using a 32GB RAM, i7 and 250GB SSD Samsung laptop. The switch was back to a 8GB RAM, i5 and 500GB HDD Dell laptop. The biggest difference I noticed was on faster memory, both RAM availability and the disk speed, but I had 250GB less local storage space.
-[fn:convinced] The declarative configuration aspect is something that I now completely take for granted, and wouldn't consider using something which isn't declarative. A good metric to show this is me realising that I can't pinpoint the moment when I decided to switch to NixOS. It's like I had a distant past when this wasn't true.
diff --git a/site/templates/default.html b/site/templates/default.html
deleted file mode 100644
index 2fdf64b..0000000
--- a/site/templates/default.html
+++ /dev/null
@@ -1,48 +0,0 @@
-<!doctype html>
-<html lang="en">
- <head>
- <meta charset="utf-8">
- <meta http-equiv="x-ua-compatible" content="ie=edge">
- <meta name="viewport" content="width=device-width, initial-scale=1">
- <title>$title$ - EuAndreh's blog</title>
- <link rel="stylesheet" href="/css/styles.css" />
- <link rel="alternate" type="application/atom+xml" href="/feed.atom" title="EuAndreh's Feed" />
- </head>
- <body>
- <header>
- <nav>
- <div id="nav-left">
- <a href="/">EuAndreh's blog</a>
- </div>
- <div id="nav-right">
- <a href="/about.html">About</a>
- <a href="/feed.atom">
- <img class="simple-icon" src="/images/atom.svg" alt="Blog feed" />
- </a>
- </div>
- </nav>
- </header>
-
- <main role="main">
- <h1>$title$</h1>
- $body$
- </main>
-
- <footer>
- <ul>
- <li>
- <img class="simple-icon" src="/images/envelope.svg" alt="Envelope icon" />
- <a href="mailto:eu@euandre.org">eu@euandre.org</a>
- </li>
- <li>
- <img class="simple-icon" src="/images/lock.svg" alt="Lock icon" />
- <a href="/public-key.txt">81F90EC3CD356060</a>
- </li>
- </ul>
-
- <p>
- The content for this site is licensed under <a rel="license" href="https://creativecommons.org/licenses/by-sa/4.0/">CC-BY-SA</a>. The <a href="https://git.sr.ht/~euandreh/website">code</a> is <a href="https://git.sr.ht/~euandreh/website/tree/master/LICENSE">GPLv3 or later</a>.
- </p>
- </footer>
- </body>
-</html>
diff --git a/site/templates/pastebin-list.html b/site/templates/pastebin-list.html
deleted file mode 100644
index 4c98c80..0000000
--- a/site/templates/pastebin-list.html
+++ /dev/null
@@ -1,7 +0,0 @@
-<ul>
- $for(pastebins)$
- <li>
- <a href="$url$">$title$</a> - $date$
- </li>
- $endfor$
-</ul>
diff --git a/site/templates/pastebin.html b/site/templates/pastebin.html
deleted file mode 100644
index 7aa2775..0000000
--- a/site/templates/pastebin.html
+++ /dev/null
@@ -1,8 +0,0 @@
-<article>
- <section class="header">
- Posted on $date$
- </section>
- <section>
- $body$
- </section>
-</article>
diff --git a/site/templates/post-list.html b/site/templates/post-list.html
deleted file mode 100644
index c64cdbd..0000000
--- a/site/templates/post-list.html
+++ /dev/null
@@ -1,7 +0,0 @@
-<ul>
- $for(posts)$
- <li>
- <a href="$url$">$title$</a> - $date$
- </li>
- $endfor$
-</ul>
diff --git a/site/templates/post.html b/site/templates/post.html
deleted file mode 100644
index 0ec1efe..0000000
--- a/site/templates/post.html
+++ /dev/null
@@ -1,16 +0,0 @@
-<article>
- <section class="header">
- Posted on $date$
- </section>
- <section>
- $body$
- </section>
- <section>
- <p>Have a comment on this post? Start a discussion
- in my <a href="https://lists.sr.ht/~euandreh/public-inbox">public inbox</a>
- by sending an email to <a href="mailto:~euandreh/public-inbox@lists.sr.ht?Subject=Re%3A%20$title$">~euandreh/public-inbox@lists.sr.ht</a>
- [<a href="https://man.sr.ht/lists.sr.ht/etiquette.md">mailing list etiquette</a>],
- or see <a href="https://lists.sr.ht/~euandreh/public-inbox?search=$title$">existing discussions</a>.
- </p>
- </section>
-</article>
diff --git a/sitemap.xml b/sitemap.xml
new file mode 100644
index 0000000..1c17a14
--- /dev/null
+++ b/sitemap.xml
@@ -0,0 +1,29 @@
+---
+---
+<?xml version="1.0" encoding="UTF-8"?>
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
+ {% for post in site.posts %}
+ <url>
+ <loc>{{ site.url }}{{ post.url }}</loc>
+ {% assign versions=site.posts | where:"ref", post.ref %}
+ {% for version in versions %}
+ <xhtml:link rel="alternate" hreflang="{{ version.lang }}" href="{{ site.url }}{{ version.url }}" />
+ {% endfor %}
+ <lastmod>{{ post.date | date_to_xmlschema }}</lastmod>
+ <changefreq>weekly</changefreq>
+ </url>
+ {% endfor %}
+
+ {% for page in site.pages %}
+ <url>
+ <loc>{{ site.base }}{{ page.url }}</loc>
+ {% if page.ref != nil %}
+ {% assign versions=site.pages | where:"ref", page.ref %}
+ {% for version in versions %}
+ <xhtml:link rel="alternate" hreflang="{{ version.lang }}" href="{{ site.url }}{{ version.url }}" />
+ {% endfor %}
+ {% endif %}
+ <changefreq>weekly</changefreq>
+ </url>
+ {% endfor %}
+</urlset>
diff --git a/slides/base.org b/slides/base.org
deleted file mode 100644
index 3a2995b..0000000
--- a/slides/base.org
+++ /dev/null
@@ -1,3 +0,0 @@
-#+REVEAL_ROOT: reveal.js/
-#+OPTIONS: num:nil toc:nil reveal_single_file:t
-* sample
diff --git a/slides/reveal.js b/slides/reveal.js
deleted file mode 160000
-Subproject a82c4333ed8c192e26f83f1815593c3db50ab0f
diff --git a/slides/website-slides b/slides/website-slides
deleted file mode 100755
index 77f1f63..0000000
--- a/slides/website-slides
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env perl
-
-=head1 NAME
-
-website slides - Create new HTML slide presentations from org-mode template.
-
-=head1 SYNOPSIS
-
-website slides [options]
-
- Options:
- --help Show the manpage.
- --name The name of the folder containing the slideshow.
-
-=head1 OPTIONS
-
-=over 4
-
-=item B<-h, --help>
-
-Prints the manual page and exits.
-
-=item B<-n, --name>
-
-The name of the folder containing the slideshow.
-
-=back
-
-=head1 DESCRIPTION
-
-B<website slides> creates slideshow org-mode text files, which are later processed to produce HTML to be deployed statically.
-
-=cut
-
-use strict;
-use warnings;
-use Getopt::Long qw(:config no_ignore_case bundling);
-use Pod::Usage qw(pod2usage);
-use File::Basename qw(dirname);
-use File::Path qw(make_path);
-use File::Copy qw(copy);
-use Term::ANSIColor;
-
-my $help = 0;
-my $name = '';
-my $test = 0;
-GetOptions(
- 'help|h|?' => \$help,
- 'name|n=s' => \$name,
- 'test|?' => \$test
-) or pod2usage(-verbose => 1, -exitval => 2);
-pod2usage(-verbose => 2, -exitval => 0) if $help;
-pod2usage(
- -verbose => 1,
- -exitval => 2,
- -message => colored("Missing required --name argument.", "red")
-) if !$name && !$test;
-
-if ($test) {
- exit;
-}
-
-my $dirname = dirname(__FILE__);
-chdir $dirname ;
-make_path "$name/reveal.js/";
-
-chdir "reveal.js/";
-`git --work-tree="../$name/reveal.js" checkout HEAD -- .`;
-`git checkout \$(cat "../$name/reveal.js/VERSION" &> /dev/null || printf ".")`;
-`git rev-parse HEAD > "../$name/VERSION"`;
-chdir "../";
-
-copy("base.org", "$name/index.org") or die "Failed to copy base.org file: $!";
-
-print `realpath $name/index.org`;
diff --git a/sobre.md b/sobre.md
new file mode 100644
index 0000000..fa2809b
--- /dev/null
+++ b/sobre.md
@@ -0,0 +1,30 @@
+---
+layout: page
+title: Sobre
+lang: pt
+ref: about
+---
+Oi, eu sou EuAndreh. Eu escrevo software, e música ocasionalmente. Você encontra
+dados para me contatar no rodapé dessa página, ou pode mandar também uma
+mensagem para minha [caixa de entrada pública][0].
+
+[0]: mailto:~euandreh/public-inbox@lists.sr.ht
+
+Esse é o meu site pessoal onde eu escrevo artigos, publico software e outros
+trabalhos relacionados.
+
+Abaixo você encontra alguns projetos interessantes meus.
+
+## Projetos de software
+
+### [cool-read-macros](https://euandre.org/cool-read-macros/)
+
+### [cl-BSON](https://euandre.org/cl-bson/)
+
+[cl-intbytes](https://euandre.org/cl-intbytes) como consequência.
+
+## Outros
+
+[Lista de pastebins](./pastebins.html).
+
+[Feed Atom com todos os idiomas](./feed.all.atom).
diff --git a/spelling/check-spelling.sh b/spelling/check-spelling.sh
index c9f03d2..653a5f9 100755
--- a/spelling/check-spelling.sh
+++ b/spelling/check-spelling.sh
@@ -3,7 +3,7 @@ set -Eeuo pipefail
HTML_DIR="${1:-}"
[[ -z "${HTML_DIR}" ]] && {
- echo 'Undefined input HTML_DIR.'
+ echo 'Undefined input HTML_DIR.' >&2
exit 2
}
@@ -12,26 +12,27 @@ export LANG=C.UTF-8
for DICT in spelling/*.txt; do
diff <(sort "$DICT") "$DICT" || {
- echo "The $DICT dictionary is unsorted. To fix it, run:"
- echo " LANG=C.UTF-8 sort $DICT | sponge $DICT"
+ echo "The $DICT dictionary is unsorted. To fix it, run:" >&2
+ echo " LANG=C.UTF-8 sort $DICT | sponge $DICT" >&2
exit 1
}
done
-cat spelling/*.txt > dicts.txt
-check() {
- html="$1"
- echo "$1"
- hunspell -l -p dicts.txt -d fr_FR -d en_US -i utf-8 "$html" | tee -a spelling.txt
+finish() {
+ rm -f spelling.txt
+ rm -f dicts.txt
}
-export -f check
-find "${HTML_DIR}" -type f -name '*.html' | grep -v pastebin | xargs -I{} bash -c "check {}" \;
+trap finish EXIT
+
+cat spelling/*.txt > dicts.txt
+
+find "${HTML_DIR}" -type f -name '*.html' | grep -v pastebin | hunspell -l -p dicts.txt -d fr_FR -d en_US -i utf-8 >> spelling.txt
if [[ -s spelling.txt ]]; then
- printf "\nvvv Mispelled words detected by hunspell.\n\n"
- sort < spelling.txt | uniq
- printf "\n^^^\n"
+ printf "\nvvv Misspelled words detected by hunspell.\n\n" >&2
+ sort < spelling.txt | uniq >&2
+ printf "\n^^^\n" >&2
exit 1
else
- echo "No words mispelled"
+ echo "No words misspelled" >&2
fi
diff --git a/spelling/international.dic.txt b/spelling/international.dic.txt
index c3c3470..f3637be 100644
--- a/spelling/international.dic.txt
+++ b/spelling/international.dic.txt
@@ -34,6 +34,7 @@ buildGoModule
ci
dl
guix
+html
i5
i7
intbytes
@@ -41,6 +42,7 @@ js
libre
merkle
myrepos
+nixos
no-brainer
npm
touchpad
diff --git a/spelling/pt_BR.dic.txt b/spelling/pt_BR.dic.txt
new file mode 100644
index 0000000..7ed8a57
--- /dev/null
+++ b/spelling/pt_BR.dic.txt
@@ -0,0 +1,2 @@
+os
+sobre
diff --git a/site/css/styles.css b/styles.css
index eb4bb78..6b8d514 100644
--- a/site/css/styles.css
+++ b/styles.css
@@ -25,6 +25,16 @@ nav a {
text-decoration: none;
}
+nav ul, nav li {
+ display: inline;
+}
+
+nav ul li a {
+ color: black;
+ font-size: 14px;
+ margin: 6px;
+}
+
footer {
border-top: solid 2px black;
font-size: 14px;
@@ -41,13 +51,10 @@ footer li a {
margin-left: 5px;
}
-div.sourceCode {
- padding: 3px;
+pre {
+ padding: 5px;
border: 1px solid;
border-radius: 10px;
-}
-
-pre {
white-space: pre-wrap;
counter-reset: line;
}
diff --git a/t/website.bats b/t/website.bats
deleted file mode 100755
index 5bbfb59..0000000
--- a/t/website.bats
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env bats
-
-# Go to the directory where ./website is.
-cd "$BATS_TEST_DIRNAME/../"
-
-# exit code 1: error running command
-# exit code 2: couldn't parse the command line argument
-
-@test "Help: show short usage when no subcommand is given, exit code is 2" {
- run ./website
- [[ "$status" -eq 2 ]]
- [[ "${lines[0]}" =~ "Missing subcommand." ]]
- [[ "${lines[1]}" = "Usage:" ]]
-}
-
-@test "Help: show short usage for unknown subcommand, exit code is 2" {
- run ./website bad-subcommand
- [[ "$status" -eq 2 ]]
- [[ "${lines[0]}" =~ "Unknown subcommand: bad-subcommand." ]]
- [[ "${lines[1]}" = "Usage:" ]]
-}
-
-@test "Help: show full toplevel help" {
- run ./website --help
- [[ "$status" -eq 0 ]]
- [[ "${lines[0]}" = "NAME" ]]
- run ./website -h
- [[ "$status" -eq 0 ]]
- [[ "${lines[0]}" = "NAME" ]]
-}
-
-@test "Help: show short subcommand usage when subcommand isn't invoked properly, exit code is 2" {
- run ./website pastebin
- [[ "$status" -eq 2 ]]
- [[ "${lines[0]}" =~ "Missing required --title argument." ]]
- [[ "${lines[1]}" = "Usage:" ]]
- run ./website slides
- [[ "$status" -eq 2 ]]
- [[ "${lines[0]}" =~ "Missing required --name argument." ]]
- [[ "${lines[1]}" = "Usage:" ]]
-}
-
-@test "Help: show subcommand manpage" {
- run ./website pastebin -h
- [[ "$status" -eq 0 ]]
- [[ "${lines[0]}" = "NAME" ]]
- run ./website pastebin --help
- [[ "$status" -eq 0 ]]
- [[ "${lines[0]}" = "NAME" ]]
- run ./website slides -h
- [[ "$status" -eq 0 ]]
- [[ "${lines[0]}" = "NAME" ]]
- run ./website slides --help
- [[ "$status" -eq 0 ]]
- [[ "${lines[0]}" = "NAME" ]]
-}
-
-@test "Pastebin: required input for --title" {
- run ./website pastebin --title
- [[ "$status" = 2 ]]
- [[ "${lines[0]}" = "Option title requires an argument" ]]
- [[ "${lines[1]}" = "Usage:" ]]
-}
-
-@test "Slides: required input for --name" {
- run ./website slides --name
- [[ "$status" = 2 ]]
- [[ "${lines[0]}" = "Option name requires an argument" ]]
- [[ "${lines[1]}" = "Usage:" ]]
-}
diff --git a/website b/website
deleted file mode 100755
index 561cccf..0000000
--- a/website
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env perl
-
-=head1 NAME
-
-website - Website repository CLI manager.
-
-=head1 SYNOPSIS
-
-website <subcommand> [options]
-
- Subcommands:
- pastebin Create a new pastebin from the org-mode template.
- slides Create a new HTML slideshow from the existing templates.
- test Run internal CLI tests.
-
- Options:
- --help Show the manpage.
-
-=head1 OPTIONS
-
-=over 4
-
-=item B<-h, --help>
-
-Prints the manual page and exits.
-
-=back
-
-=head1 DESCRIPTION
-
-B<website> is the top-level coordinator of subtasks inside the website repo.
-
-=cut
-
-use strict;
-use warnings;
-use Getopt::Long qw(:config no_ignore_case bundling pass_through);
-use Pod::Usage qw(pod2usage);
-use File::Basename qw(dirname);
-use Term::ANSIColor;
-
-my $help = 0;
-my $title = '';
-sub getopts {
- GetOptions(
- 'help|h|?' => \$help
- );
-}
-
-sub escaped_cmd {
- my ($cmd, @args) = @_;
- my $dirname = dirname(__FILE__);
- $cmd = "$dirname/$cmd";
- $cmd = $cmd." \"$_\"", for @args;
- system($cmd);
- exit $? >> 8;
-}
-
-sub dispatch {
- my $action = shift;
- my @args = @_;
- if (!defined $action && $help) {
- pod2usage(
- -verbose => 2,
- -exitval => 0
- );
- } elsif (!defined $action) {
- pod2usage(
- -verbose => 1,
- -exitval => 2,
- -message => colored("Missing subcommand.", "red")
- );
- } elsif ($action eq 'pastebin') {
- my @sub_args = grep { $_ ne $action } @args;
- escaped_cmd("pastebin/website-pastebin", @sub_args);
- } elsif ($action eq 'slides') {
- my @sub_args = grep { $_ ne $action } @args;
- escaped_cmd("slides/website-slides", @sub_args);
- } elsif ($action eq 'test') {
- escaped_cmd("pastebin/website-pastebin", "--test");
- escaped_cmd("slides/website-slides", "--test");
- } else {
- pod2usage(
- -verbose => 1,
- -exitval => 2,
- -message => colored("Unknown subcommand: $action.", "red")
- );
- }
-}
-
-sub main {
- my @orig_args=@ARGV;
- getopts();
- my $action=shift @ARGV;
- dispatch($action, @orig_args);
-}
-
-main();