Simple workadventure on void.hacc.space
tl;dr: Let's add a workadventure instance to our infra! This commit contains
configs for a workadventure instance and a turn server used by it on domains
"void.hacc.space" and "turn.hacc.space".
In theory, everything should work as it is now (though I haven't test-deployed
this exact config, a similar one runs on space.stuebinm.eu).
Things to note:
- this is not the latest version of workadventure, but an old one that was
packaged as part of the fediventure project
- by default, it pulls the hacc assembly map from rc3, which is relatively
large (both as map and in terms of filesize)
- arbitrary maps from elsewhere are allowed, just put the url after a
"_/global/" in the workadventure url; potentially we could allow cors on
our gitlab for easy hosting of other maps?
Things to potentially discuss:
- this also adds a turn and stun server to our infra; the stun server does
not have any authorisation enabled, and the turn server just has a static
token that is semi-public (it gets compiled into the workadventure frontend
code). Are we okay with that?
- workadventure needs a jitsi server for larger chats. For now, it's set to
meet.ffmuc.net
- the config uses an nginx virtualhost just to get an ACME cert — is there a
better way to do that?
- the container has IP fd00::42:16 --- apparently our container IPs have no
schema whatsoever, so I just made one up
- by default, coturn opens a wide range of ports (see `coturn.min-port` and
`coturn.max-port`). Are we okay with that, or do we want to reduce them?
Other weird stuff:
- if the fetchgits that fetch the workadventure and map packages are moved
into the container config (where, reasonably, they should be), then Nix
fails to build this. The nixos-containers module appears to be somewhat
broken
- apparently the IP address of my local hedgedoc container somehow made it
into the config of the lantifa wiki, but not the actual codimd container? 😂
- workadventure has a prometheus-compatible metrics endpoint. I haven't
configured a scraper for it, though, since it appears to be broken for now
(see space.stuebinm.eu/metrics as an example)
This commit is contained in:
parent
e42376687f
commit
37d9ac34ae
|
@ -18,6 +18,7 @@
|
|||
./services/lantifa.nix
|
||||
./services/syncthing.nix
|
||||
./services/monitoring.nix
|
||||
./services/workadventure.nix
|
||||
];
|
||||
boot.loader.grub.enable = true;
|
||||
boot.loader.grub.version = 2;
|
||||
|
|
109
hosts/hainich/services/workadventure.nix
Normal file
109
hosts/hainich/services/workadventure.nix
Normal file
|
@ -0,0 +1,109 @@
|
|||
{pkgs, lib, config, ...}:

let
  # IPv6 of the workadventure container (made up; our container IPs have no schema)
  wa-container-ip = "fd00::42:16";
  # this is a static "secret" that is also compiled into workadventure,
  # so it seems ok to put it into the nix store
  coturn-auth-secret = "990bc6fc68c720a9159f9c7613b2dcc3cc9ffb4f";
  # NOTE(review): despite the name, workadventure is publicly served on
  # void.hacc.space (see the nginx proxy below); this is used as the
  # virtualhost name *inside* the container, which works because the
  # instance is marked `default = true` — confirm this is intentional
  domain = "turn.hacc.space";


  # FUNFACT:
  # the nixos-container module is sufficiently broken that if you move these
  # fetchgits into the container config below, Nix will run into infinite recursion!

  # contains the workadventure module
  haccpkgssrc = pkgs.fetchgit {
    url = "https://gitlab.infra4future.de/stuebinm/workadventure-nix-hacc";
    rev = "23a085b0386595f9e769ef3c182749cecc342ead";
    sha256 = "199np37dkhk52lsjw0f9x2h9vfi86xs18gk5pfijs6pc1hr11scd";
  };
  # contains the hacc assembly map
  fediventure = pkgs.fetchgit {
    url = "https://gitlab.infra4future.de/stuebinm/fediventure-simple";
    rev = "791fe2dce2374e1ff8b1cf4dc54bf9aac2b5c8a8";
    sha256 = "0jzkwqvzpj6vrgrilm5ijmlbk2dvkmar3dmar5nhfply4m1za1xy";
  };
  haccpkgs = (import "${haccpkgssrc}/default.nix") {inherit pkgs lib;};

in
{
  # not the most intuitive of container names, but "workadventure" is too long
  containers.wa-void = {

    config = {config, pkgs, ...}: {
      imports = [ "${fediventure}/workadventure.nix" ];
      # port 80 only: TLS is terminated by the host nginx, which proxies here
      networking.firewall.allowedTCPPorts = [ 80 ];

      services.workadventure.instances."workadventure" = {
        nginx = {
          default = true;
          inherit domain;
        };
        # trailing "/" is required: the module concatenates this with map paths
        maps.path = haccpkgs.workadventure-hacc-rc3-map.outPath + "/";
        frontend.defaultMap = "/main.json";
        frontend.settings = {
          stunServer = "stun:turn.hacc.space:3478";
          # NOTE(review): hard-coded host IP; should probably be the
          # turn.hacc.space name — confirm before changing
          turnServer = "turn:95.217.159.23";
          turnUser = "turn";
          turnPassword = coturn-auth-secret;
          # external jitsi instance used for larger chats
          jitsiUrl = "meet.ffmuc.net";
          defaultMapUrl = "/main.json";
        };
      };
    };

    privateNetwork = true;
    hostAddress6 = "fd00::42:14";
    localAddress6 = wa-container-ip;

    autoStart = true;
  };

  # turn/stun server used by workadventure for webrtc; shared-secret auth only
  services.coturn = {
    enable = true;
    realm = "turn.hacc.space";
    static-auth-secret = coturn-auth-secret;
    use-auth-secret = true;
    no-cli = true;
    no-tcp-relay = true;

    # BUGFIX: a "/" separator was missing here, so coturn was pointed at
    # e.g. ".../acme/turn.hacc.spacefull.pem" and could not load its TLS files
    cert = config.security.acme.certs."turn.hacc.space".directory + "/full.pem";
    pkey = config.security.acme.certs."turn.hacc.space".directory + "/key.pem";
  };


  services.nginx = {
    # public entry point for workadventure; proxies into the container
    virtualHosts."void.hacc.space" = {
      forceSSL = true;
      enableACME = true;
      locations."/" = {
        proxyPass = "http://[${wa-container-ip}]";
        # workadventure's pusher/back use websockets
        proxyWebsockets = true;
      };
    };
    # this isn't actually needed, but acme requires a webserver to serve
    # challenges, so I guess it's easier to just define a virtualHost here
    virtualHosts."turn.hacc.space" = {
      root = "/var/turn-www"; # do we need this, or can acme do with a 404?
      enableACME = true;
      forceSSL = true;
    };
  };


  # open coturn's listening ports plus its (wide) UDP relay port range
  networking.firewall = with config.services.coturn;
    let
      ports = [ listening-port tls-listening-port ];
    in {
      allowedTCPPorts = [ 80 ] ++ ports;
      allowedUDPPorts = ports;
      allowedUDPPortRanges = [
        { from = min-port; to = max-port; }
      ];
    };

}
|
||||
|
Loading…
Reference in a new issue