Initial commit
continuous-integration/drone/push Build is passing Details

master
Simon Bruder 2020-11-04 22:35:10 +01:00
commit 7cc20db4db
No known key found for this signature in database
GPG Key ID: 6F03E0000CC5B62F
18 changed files with 746 additions and 0 deletions

11
.drone.yml Normal file
View File

@ -0,0 +1,11 @@
# Drone CI pipeline: run `nix-build` (default.nix) on every push.
# The `exec` type runs directly on the host, so the runner must have nix
# installed — selected via the `nix: 1` node label.
kind: pipeline
name: default
type: exec
steps:
- name: build
commands:
- nix-build
node:
nix: 1

1
.envrc Normal file
View File

@ -0,0 +1 @@
# direnv: load the nix shell (shell.nix → default.nix) when entering this directory
use nix

2
.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
# mkdocs build output
/site
# nix-build result symlink(s)
/result*

144
default.nix Normal file
View File

@ -0,0 +1,144 @@
# default.nix — builds the wiki as a static site with mkdocs (material theme)
# and bundles the Iosevka web font. Entry point for `nix-build` (see
# .drone.yml) and, via shell.nix, for the development shell.
let
sources = import ./nix/sources.nix;
pkgs = import sources.nixpkgs {};
inherit (import sources.gitignore { inherit (pkgs) lib; }) gitignoreSource;
stdenv = pkgs.stdenv;
python3Packages = pkgs.python3Packages;
buildPythonPackage = python3Packages.buildPythonPackage;
fetchPypi = python3Packages.fetchPypi;
# lunr: search library required by the mkdocs built below; packaged here
# because it is not used from the pinned nixpkgs.
lunr = buildPythonPackage rec {
pname = "lunr";
version = "0.5.8";
src = fetchPypi {
inherit pname version;
sha256 = "0vn2j5iyy40sv0f4v8nin5n5xs4a020g6gcbcgfpbxzgk0xhdyy4";
};
propagatedBuildInputs = with python3Packages; [
future
six
];
checkInputs = with python3Packages; [
mock
pytest
];
};
# FIXME: use nixpkgs' mkdocs once it is ≥1.1
mkdocs = buildPythonPackage rec {
pname = "mkdocs";
version = "1.1.2";
src = fetchPypi {
inherit pname version;
sha256 = "0fgv5zawpyyv0vd4j5y8m4h058lh9jkwfcm0xy4pg7dr09a1xdph";
};
propagatedBuildInputs = (with python3Packages; [
backports_tempfile
click
jinja2
livereload
markdown
nltk
pyyaml
tornado
]) ++ [
lunr
];
doCheck = false; # circular dependency
};
# Markdown extension collection pulled in by mkdocs-material below.
pymdown-extensions = buildPythonPackage rec {
pname = "pymdown-extensions";
version = "8.0.1";
src = fetchPypi {
inherit pname version;
sha256 = "07p5xks6qf73nbv0qb7jrspkpmxdyr7dpxv3rnkh9p2b5l2h99wv";
};
propagatedBuildInputs = with python3Packages; [
markdown
];
doCheck = false; # upstream tests fail here — reason not investigated
};
mkdocs-material-extensions = buildPythonPackage rec {
pname = "mkdocs-material-extensions";
version = "1.0.1";
src = fetchPypi {
inherit pname version;
sha256 = "13y617sql4hqs376c3dfcb6v7q41km9x7fh52k3f74a2brzzniv9";
};
# circular dependency (this package requires mkdocs-material, which in turn
# depends on this package)
doCheck = false;
# strip the circular requirement so the package builds stand-alone
postPatch = ''
sed -i 's/mkdocs-material>=5.0.0//' requirements/project.txt
'';
};
# The theme referenced by mkdocs.yml (theme.name = "material").
mkdocs-material = buildPythonPackage rec {
pname = "mkdocs-material";
version = "6.1.2";
src = fetchPypi {
inherit pname version;
sha256 = "0yifc1df94rsaxy72hqnhjkv4c50c30j5kg0yxybkxy07p7kafic";
};
propagatedBuildInputs = (with python3Packages; [
markdown
pygments
]) ++ [
mkdocs
mkdocs-material-extensions
pymdown-extensions
];
};
# Pre-built Iosevka web font release: nothing is compiled, the zip contents
# (woff/woff2/ttf + css) are only copied into the store.
iosevka-web = stdenv.mkDerivation rec {
pname = "iosevka-web";
version = "3.7.1";
src = pkgs.fetchzip {
url = "https://github.com/be5invis/Iosevka/releases/download/v${version}/pkg-iosevka-${version}.zip";
sha256 = "0d012aqfnyqlpl4smfa2x2qz2qmbbfxb6jr4awv8gp6r359rc9k1";
stripRoot = false;
};
dontBuild = true;
installPhase = ''
mkdir -p $out
cp -r woff woff2 ttf *.css $out
'';
};
in
# The wiki itself: run mkdocs over the gitignore-filtered source tree and
# link the Iosevka font into the generated site (consumed by theme/main.html).
stdenv.mkDerivation {
name = "wiki";
src = gitignoreSource ./.;
buildPhase = "mkdocs build";
installPhase = ''
cp -r site $out
mkdir -p $out/assets/fonts
ln -s ${iosevka-web} $out/assets/fonts/iosevka
'';
nativeBuildInputs = [
mkdocs
mkdocs-material
];
}

8
docs/data.md Normal file
View File

@ -0,0 +1,8 @@
# Data
## Aria2
### Get list of all connected peers
:::shell
curl -s -X POST -d '{"jsonrpc": "2.0", "id": "", "method": "aria2.tellActive"}' http://localhost:6800/jsonrpc | jq -r '.result[].gid' | while read gid;do echo $gid; curl -s -X POST -d '{"jsonrpc": "2.0", "id": "", "method": "aria2.getPeers", "params": ["'"$gid"'"]}' http://localhost:6800/jsonrpc | jq -r '.result[].ip'; done

24
docs/hostnames.md Normal file
View File

@ -0,0 +1,24 @@
# Hostname Candidates
| name | relation | special field |
| ---- | -------- | ------------- |
| ayu | <https://anidb.net/character/7909> | |
| shibazaki | <https://anidb.net/character/67927> | |
| maaa | <https://madeinabyss.fandom.com/wiki/Maaa> | |
| vueko | <https://madeinabyss.fandom.com/wiki/Vueko> | |
## In use
These once were candidates and are now in use. Might be recycled once the
system is no longer in use.
| name | relation | special field |
| ---- | -------- | ------------- |
| sayuri | <https://anidb.net/ch7914> | |
| renge | <https://anidb.net/character/57473> | |
## Host Group names
| name | relation | special field |
| ---- | -------- | ------------- |
| shinonome | Shinonome Labs from [Nichijou](https://anidb.net/anime/8168) | lab |

6
docs/index.md Normal file
View File

@ -0,0 +1,6 @@
# Main Page
Mostly uncommented ugly code snippets (most are shell for linux commandline and
some are javascript for browser console). It is possible (almost certain) that
some won't work because I last used them three years ago. So, use at your own
risk.

43
docs/kobo.md Normal file
View File

@ -0,0 +1,43 @@
# Kobo
## Links
* Installation without registration: <https://yingtongli.me/blog/2018/07/30/kobo-rego.html>
* Access root telnet shell: <https://yingtongli.me/blog/2018/07/30/kobo-telnet.html>
* Install dropbear (sshd): <https://yingtongli.me/blog/2018/07/30/kobo-ssh.html>
## Enable telnet without modifying the rootfs
Edit `.kobo/Kobo/Kobo eReader.conf` on the filesystem available via USB:
[DeveloperSettings]
EnableDebugServices=true
## Enable dropbear sshd
Copy `dropbearmulti` to `/opt/dropbear/dropbearmulti`.
In `/opt/dropbear/`:
./dropbearmulti dropbearkey -t dss -f dss_key
./dropbearmulti dropbearkey -t rsa -f rsa_key
./dropbearmulti dropbearkey -t ecdsa -f ecdsa_key
Add authorized keys to `/.ssh/authorized_keys`
Link scp: `ln -s /opt/dropbear/dropbearmulti /usr/bin/scp`
Add to `/etc/inittab`:
::IGNORE THIS LINE, it is just here to not confuse mkdocs
::respawn:/opt/dropbear/dropbearmulti dropbear -s -F -E -r /opt/dropbear/dss_key -r /opt/dropbear/rsa_key -r /opt/dropbear/ecdsa_key
!!! note
After a system update, it might be necessary to repeat this step.
## Re-scan the library
Useful after adding a book via ssh.
:::shell
ssh root@kobo-address 'echo -e "usb plug add\nusb plug remove" >> /tmp/nickel-hardware-status'

143
docs/media/index.md Normal file
View File

@ -0,0 +1,143 @@
# Media (General)
## Audio
### Split file with cue sheet and use filename from cue sheet
:::shell
shnsplit -D -f file.cue -t "%n %t" -o "flac flac -8 -o %f -" file.flac
### Remove all tags except MusicBrainz tags from flac files
:::shell
for i in *.flac; do tags=$(metaflac --export-tags-to=- $i | grep -E '^MUSICBRAINZ_'); metaflac --remove-all-tags $i; metaflac --import-tags-from=- $i <<< $tags; done
### Downmix 5.1 to 2.0
:::shell
sox in.wav out.wav remix -m 1v0.3694,3v0.2612,5v0.3694 2v0.3694,3v0.2612,6v0.3694 norm
### Record pulseaudio device to flac
:::shell
parec [ -d DEVICE ] | flac --endian=little --channels=2 --bps=16 --sample-rate=48000 --sign=signed -o foo.flac -
## Video
### Copy DVD stream to file [with dvd already copied to local directory]
:::shell
mpv --stream-dump=1.mkv dvd://1 [--dvd-device path/to/dvd]
## MKV
### Fix mimetype of font attachments
Some Matroska files have the MIME type of their font attachments set to
`application/octet-stream` instead of a proper font type.
:::shell
mkvpropedit --attachment-mime-type font/sfnt --update-attachment mime-type:application/octet-stream file.mkv
## FFmpeg
### Create color timeline image from video
:::shell
(infile=in.mkv; outfile=out.png; rows=320; width=1920; height=1080; ffmpeg -i $infile -vf tblend=all_mode=average,fps=${rows}/$(ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 $infile),scale=1:1,scale=${width}/${rows}:${height},setsar=1,tile=${rows}x1 -frames:v 1 $outfile)
### Show duration of file in seconds
:::shell
ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 file.mkv
### Remove EIA-608 subtitles from video bitstream
([source](https://stackoverflow.com/a/51439554))
:::shell
ffmpeg -i infile.mkv -c copy -bsf:v "filter_units=remove_types=6" outfile.mkv
## QP file
replace 24/1.001 with framerate
:::shell
ffprobe -i infile.mkv -print_format json -show_chapters -loglevel error | jq -r '.chapters[].start / 1000000000 * 24/1.001 | round | tostring + " I"' >> foo.qp
## Manga
### Convert greyscale images to actual greyscale
For some reasons, many releases encode greyscale manga pages as yuv420. Sadly,
the chroma layers are not completely empty but include some (almost invisible)
noise. This fixes that for lower battery consumption, a really small file size
gain and just because it seems right.
This only works for pages with the same size (image2/ffmpeg limitation), but
releases suffering from this mostly fulfill this requirement.
**WARNING**: This uses some heuristics (SSIM > 98) to determine if a page is
greyscale. This may not work all the time (it did for me though). Please verify
if all converted images actually are greyscale.
:::shell
ffmpeg -loglevel error -f lavfi -i "movie=%03d.jpg:f=image2,split=2[orig][in2];[in2]extractplanes=y[grey];[orig][grey]ssim=-" -f null - >> ssim
while read frame;do (( $(cut -d' ' -f5 <<< $frame | cut -c 7-8) < 98 )) || {file=$(printf "%03d.jpg\n" $(cut -d' ' -f1 <<< $frame|cut -d: -f2)); echo $file; jpegtran -copy none -optimize -grayscale -outfile $file $file}; done < ssim
jpegoptim -s *.jpg
exiftool -overwrite_original -all= *.jpg
# print all converted images for verification
grep -E 'All:0.(9[0-8]|[0-8][0-9])' ssim
### Merge page spreads to single page
Use the function `merge_pages right-page left-page` (without `.jpg`). The
result will be written to `left-page-right-page.jpg`.
:::shell
function merge_pages() {
convert ${2}.jpg ${1}.jpg +append ${1}-${2}.jpg
exiftool -overwrite_original -all= ${1}-${2}.jpg
}
# remove single pages
mkdir single_pages
for i in ???-???.jpg;do mv $(cut -d- -f1 <<< $i).jpg $(cut -d- -f2 <<< $i) single_pages;done
## mpv
### View thumbnails generated by [mpv-gallery-view](https://github.com/occivink/mpv-gallery-view)
:::shell
mpv --pause --demuxer=rawvideo --demuxer-rawvideo-mp-format=bgra --demuxer-rawvideo-w=288 --demuxer-rawvideo-h=162 FILE
Convert to tiles
:::shell
ffmpeg -codec:v rawvideo -pixel_format bgra -video_size 288:162 -f image2 -pattern_type glob -i '*' -vf tile=layout=10x10 tile-%04d.png
## Download
### Bilibili live recording
:::shell
curl 'https://api.live.bilibili.com/xlive/web-room/v1/record/getLiveRecordUrl?rid=R1sx411c7Xn&platform=html5'|jq -r '.data.list | map(.url) | to_entries[] | .value + "\n out=" + (.key|tostring) + ".flv"' | aria2c --auto-file-renaming=false -x 16 -j 10 -i -
mkvmerge '[' $(find . -name '*.flv'|sort -V) ']' -o merge.mkv
## PDF
### Downsample bitmap PDF
Useful for sending large 300/600 dpi scans as e-mail. Change `pdfimage32` to
`pdfimage8` for greyscale, `300` to the input DPI and `DownScaleFactor` to the
desired downscaling. For some reason fails when setting compression to JPEG.
:::shell
gs -sDEVICE=pdfimage24 -r300 -dDownScaleFactor=2 -o downscaled.pdf document.pdf
Imagemagick supports JPEG. Set your desired output density and JPEG quality.
:::shell
convert -density 300 -compress jpeg -quality 80 document.pdf downscaled.pdf

22
docs/media/vapoursynth.md Normal file
View File

@ -0,0 +1,22 @@
# Vapoursynth
## Limit memory usage
:::python3
core.max_cache_size = 1000
## Basic encoding options for 10 bit
### x265
x265 only outputs to its own bitstream format.
:::shell
vapoursynth sh -c 'vspipe --y4m script.vpy - | x265 --y4m -D 10 --preset slow --crf 18 -o output.265 -'
### x264
Unlike x265, x264 supports mkv containers as output.
:::shell
vapoursynth sh -c 'vspipe --y4m script.vpy - | x264 --demuxer y4m --profile high10 --input-depth 10 --output-depth 10 --preset slow --crf 18 -o output.mkv -'

31
docs/nix.md Normal file
View File

@ -0,0 +1,31 @@
# Nix(OS)
## List dependencies of package
nix-store -q --references `which bash`
Instead of <code>\`which bash\`</code> you can also specify a file in a
nix store path (or just the store path) or a link to contents of the store.
### Reverse dependencies
nix-store -q --referrers `which bash`
Analogue to `apt-cache rdepends`. Like with `--references`, any nix store
reference can be passed.
### Recursive dependencies
nix-store -qR `which man`
In tree view:
nix-store -q --tree `which man`
## List all system generations
sudo nix-env --list-generations --profile /nix/var/nix/profiles/system
## Build package outside of nixpkgs tree
nix-build -E 'with import <nixpkgs> { }; callPackage ./mypackage.nix { }'

77
docs/sysop.md Normal file
View File

@ -0,0 +1,77 @@
# Sysop
## Prometheus
### Reload config
:::shell
curl -X POST -u simon:$(pass sbruder.de/prometheus|head -n1) https://prometheus.sbruder.de/-/reload
### Remove certain time range from Prometheus
Requires [TSDB Admin APIs to be
enabled](https://prometheus.io/docs/prometheus/latest/querying/api/#tsdb-admin-apis)
(`--web.enable-admin-api`)
:::shell
curl -u user:pass -X POST -g 'https://prometheus-endpoint/api/v1/admin/tsdb/delete_series?match[]=metric{label="foo"}&start=TIMESTAMP&end=TIMESTAMP'
## OpenSSL
### Get certificate expiry date
:::shell
openssl s_client -connect hostname:443 2> /dev/null <<< '' | openssl x509 -noout -dates
# starttls
openssl s_client -connect hostname:587 -starttls smtp 2> /dev/null <<< '' | openssl x509 -noout -dates
## Docker
### List images by size
:::shell
docker image ls --format "table {{.Size}}\t{{.Repository}}:{{.Tag}}\t{{.ID}}"|sort -h
### Enable IPv6 NAT
Makes no sense on first and second thought, but after a while it seems like the
right thing.
`/etc/docker/daemon.json`:
:::json
{
"ipv6": true,
"fixed-cidr-v6": "fd00:d0ce:d0ce:d0ce::/64"
}
<!--
This is the right way, but since I did not get `netfilter-persistent` to work,
I have to use iptables.
:::shell
nft add table ip6 nat
nft add chain ip6 nat postrouting \{ type nat hook postrouting priority 100 \; \}
nft add rule ip6 nat postrouting ip6 saddr fd00:d0ce:d0ce:d0ce::/64 masquerade
-->
:::shell
ip6tables -t nat -A POSTROUTING -s fd00:d0ce:d0ce:d0ce::/64 -j MASQUERADE
ip6tables-save > /etc/iptables/rules.v6
Publishing a port will still use the userland proxy. If you do not want this,
have a look at <https://github.com/robbertkl/docker-ipv6nat>.
:::shell
docker run -d --restart=always -v /var/run/docker.sock:/var/run/docker.sock:ro --cap-drop=ALL --cap-add=NET_RAW --cap-add=NET_ADMIN --cap-add=SYS_MODULE --net=host robbertkl/ipv6nat
## Misc
### Add swap file
:::shell
fallocate -l 1G /swapfile
chmod 600 /swapfile
mkswap /swapfile
swapon /swapfile

35
docs/web-services.md Normal file
View File

@ -0,0 +1,35 @@
# Web Services
## General
### Remove query string from downloaded files
:::shell
for i in *\?*; do echo mv "$i" "$(echo $i | cut -d'?' -f 1)"; done
## Mora
### Get title listing (for MusicBrainz)
:::js
[...document.querySelectorAll('.package_table tr')].map(el => {try {return `${el.querySelector('.package_td1').innerText} ${el.querySelector('.package_td2').innerText} ${el.querySelector('.package_td3').innerText} ${el.querySelector('.package_td4').innerText.split('\n')[0]}`} catch(e) {}}).slice(1).join('\n')
## Instagram
### Download picture in highest quality available
https://instagram.com/p/SHORTCODE/media/?size=l
## Bandcamp
### Get title listing (for MusicBrainz)
:::js
[...document.querySelectorAll('#track_table .track_row_view')].map(el => `${el.querySelector('.track_number').innerText} ${el.querySelector('.track-title').innerText} (${el.querySelector('.time').innerText})`).join("\n")
## Ototoy
### Get title listing (for MusicBrainz)
:::js
Array.from(document.querySelectorAll('#tracklist tr:not(:nth-child(1))')).map(el => el.querySelector('span[id^="title-"]').innerText + " " + el.querySelector('td:nth-child(3)').innerText).join("\n")

11
mkdocs.yml Normal file
View File

@ -0,0 +1,11 @@
# MkDocs configuration for the wiki.
site_name: Wiki
theme:
name: material
custom_dir: theme  # template overrides, see theme/main.html (font setup)
markdown_extensions:
- codehilite  # the docs mark code blocks with ":::lang" markers
- admonition
- toc:
permalink: true

26
nix/sources.json Normal file
View File

@ -0,0 +1,26 @@
{
"gitignore": {
"branch": "master",
"description": "Nix function for filtering local git sources",
"homepage": "",
"owner": "hercules-ci",
"repo": "gitignore",
"rev": "c4662e662462e7bf3c2a968483478a665d00e717",
"sha256": "1npnx0h6bd0d7ql93ka7azhj40zgjp815fw2r6smg8ch9p7mzdlx",
"type": "tarball",
"url": "https://github.com/hercules-ci/gitignore/archive/c4662e662462e7bf3c2a968483478a665d00e717.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "nixos-unstable",
"description": "Nix Packages collection",
"homepage": "",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "34ad166a830d3ac1541dcce571c52231f2f0865a",
"sha256": "1jvi1562x3kq65w642vfimpszv65zbc7c2nv8gakhzcx4n3f47xq",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/34ad166a830d3ac1541dcce571c52231f2f0865a.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

148
nix/sources.nix Normal file
View File

@ -0,0 +1,148 @@
# This file has been generated by Niv.
# NOTE(review): generated file — manual edits will be overwritten by `niv update`.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: spec:
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; }
else
pkgs.fetchurl { inherit (spec) url sha256; };
fetch_tarball = pkgs: name: spec:
let
# NOTE(review): "+-." inside the bracket expression is a character range
# ('+' through '.'), not three literal characters — upstream niv behaviour,
# kept as-is since this file is generated.
ok = str: ! builtins.isNull (builtins.match "[a-zA-Z0-9+-._?=]" str);
# sanitize the name, though nix will still fail if name starts with period
name' = stringAsChars (x: if ! ok x then "-" else x) "${name}-src";
in
if spec.builtin or true then
builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
else
pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
fetch_git = spec:
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
fetch_local = spec: spec.path;
fetch_builtin-tarball = name: throw
''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=tarball -a builtin=true'';
fetch_builtin-url = name: throw
''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=file -a builtin=true'';
#
# Various helpers
#
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs spec
else if spec.type == "tarball" then fetch_tarball pkgs name spec
else if spec.type == "git" then fetch_git spec
else if spec.type == "local" then fetch_local spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
else if spec.type == "builtin-url" then fetch_builtin-url name
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# If the environment variable NIV_OVERRIDE_${name} is set, then use
# the path directly as opposed to the fetched source.
replace = name: drv:
let
saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
in
if ersatz == "" then drv else ersatz;
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
concatStrings = builtins.concatStringsSep "";
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, name, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball { inherit name url; }
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl { inherit url; }
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = replace name (fetch config.pkgs name spec); }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
, sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
, pkgs ? mkPkgs sources
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }

1
shell.nix Normal file
View File

@ -0,0 +1 @@
# nix-shell entry point: reuse the build derivation from default.nix so the
# shell provides the same environment (mkdocs etc.) as the build.
import ./default.nix

13
theme/main.html Normal file
View File

@ -0,0 +1,13 @@
{% extends "base.html" %}
{# Override the theme's fonts block: load the locally bundled Iosevka web
   font (linked into assets/fonts by default.nix) and use it for monospace
   text, keeping Roboto/system fonts for body text. #}
{% block fonts %}
<link rel="stylesheet" href="{{ 'assets/fonts/iosevka/iosevka.css' | url }}">
<style>
body, input {
font-family: "Roboto", -apple-system, BlinkMacSystemFont, Helvetica, Arial, sans-serif;
}
code, kbd, pre {
font-family: "Iosevka Web", monospace;
}
</style>
{% endblock %}