-rw-r--r--  hosts/by-name/server2/configuration.nix         |  5
-rw-r--r--  modules/by-name/ta/taskchampion-sync/module.nix | 43
-rw-r--r--  pkgs/by-name/sh/sharkey/unstable_package.nix    |  6
-rw-r--r--  tests/by-name/sh/sharkey-cpu/test.nix           |  7
-rw-r--r--  tests/by-name/sh/sharkey/test.nix               |  1
-rw-r--r--  tests/by-name/ta/taskchampion-sync/test.nix     | 88
6 files changed, 120 insertions, 30 deletions
diff --git a/hosts/by-name/server2/configuration.nix b/hosts/by-name/server2/configuration.nix
index 24513f4..f7a5d2d 100644
--- a/hosts/by-name/server2/configuration.nix
+++ b/hosts/by-name/server2/configuration.nix
@@ -103,7 +103,10 @@
         CheckActivityPubGetSigned = false;
       };
     };
-    taskchampion-sync.enable = true;
+    taskchampion-sync = {
+      enable = true;
+      fqdn = "taskchampion.vhack.eu";
+    };
     users.enable = true;
   };
diff --git a/modules/by-name/ta/taskchampion-sync/module.nix b/modules/by-name/ta/taskchampion-sync/module.nix
index 1870186..a722883 100644
--- a/modules/by-name/ta/taskchampion-sync/module.nix
+++ b/modules/by-name/ta/taskchampion-sync/module.nix
@@ -8,6 +8,12 @@ in {
   options.vhack.taskchampion-sync = {
     enable = lib.mkEnableOption "taskchampion-sync";
+
+    fqdn = lib.mkOption {
+      description = "The fully qualified domain name of this instance.";
+      type = lib.types.str;
+      example = "task-sync.tw.online";
+    };
   };
 
   config = lib.mkIf cfg.enable {
@@ -16,19 +22,32 @@ in {
       groups.taskchampion.gid = config.vhack.constants.ids.uids.taskchampion;
     };
 
-    vhack.persist.directories = [
-      {
-        directory = dataDirectory;
-        user = "taskchampion";
-        group = "taskchampion";
-        mode = "0700";
-      }
-    ];
+    vhack = {
+      persist.directories = [
+        {
+          directory = dataDirectory;
+          user = "taskchampion";
+          group = "taskchampion";
+          mode = "0700";
+        }
+      ];
+      nginx.enable = true;
+    };
+
+    services = {
+      taskchampion-sync-server = {
+        enable = true;
+        dataDir = dataDirectory;
+      };
 
-    services.taskchampion-sync-server = {
-      enable = true;
-      openFirewall = true;
-      dataDir = dataDirectory;
+      nginx.virtualHosts."${cfg.fqdn}" = {
+        locations."/" = {
+          proxyPass = "http://127.0.0.1:${toString config.services.taskchampion-sync-server.port}";
+          recommendedProxySettings = true;
+        };
+        enableACME = true;
+        forceSSL = true;
+      };
     };
   };
 }
diff --git a/pkgs/by-name/sh/sharkey/unstable_package.nix b/pkgs/by-name/sh/sharkey/unstable_package.nix
index 3922ca0..a88b7df 100644
--- a/pkgs/by-name/sh/sharkey/unstable_package.nix
+++ b/pkgs/by-name/sh/sharkey/unstable_package.nix
@@ -25,14 +25,14 @@
 }:
 stdenv.mkDerivation (finalAttrs: {
   pname = "sharkey";
-  version = "2025.2.2";
+  version = "2025.2.3";
 
   src = fetchFromGitLab {
     owner = "TransFem-org";
     repo = "Sharkey";
     domain = "activitypub.software";
     rev = finalAttrs.version;
-    hash = "sha256-KVr4KLtJ22LEk94GuxeTk8/GcFs7oU/gkoVTvrgbYBg=";
+    hash = "sha256-VBfkJuoQzQ93sUmJNnr1JUjA2GQNgOIuX+j8nAz3bb4=";
     fetchSubmodules = true;
   };
 
@@ -66,7 +66,7 @@ stdenv.mkDerivation (finalAttrs: {
     dontBuild = true;
 
     outputHashMode = "recursive";
-    outputHash = "sha256-XWcDchvrYSJr0s/DMb8FIEK7MdE6aC2bAbrW88Ig4ug=";
+    outputHash = "sha256-ALstAaN8dr5qSnc/ly0hv+oaeKrYFQ3GhObYXOv4E6I=";
   };
 
   nativeBuildInputs = [
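Note on the two hash changes above: `hash` pins the fetched source and `outputHash` pins the fixed-output derivation for the bundled dependencies, so both have to be refreshed on every version bump. A minimal sketch of the usual workflow, assuming `lib` from nixpkgs is in scope (this snippet is not part of the commit):

    # Sketch only: temporarily use a placeholder hash, build once, then copy
    # the "got: sha256-..." value from the hash-mismatch error into the file.
    src = fetchFromGitLab {
      owner = "TransFem-org";
      repo = "Sharkey";
      domain = "activitypub.software";
      rev = finalAttrs.version;
      hash = lib.fakeHash; # replace with the reported hash afterwards
      fetchSubmodules = true;
    };

The same placeholder trick works for the `outputHash` of the dependency derivation further down in the file.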
diff --git a/tests/by-name/sh/sharkey-cpu/test.nix b/tests/by-name/sh/sharkey-cpu/test.nix
index d4f9332..438cfb3 100644
--- a/tests/by-name/sh/sharkey-cpu/test.nix
+++ b/tests/by-name/sh/sharkey-cpu/test.nix
@@ -11,7 +11,7 @@ nixos-lib.runTest {
   hostPkgs = pkgs; # the Nixpkgs package set used outside the VMs
 
-  name = "sharkey-images";
+  name = "sharkey-cpu";
 
   node = {
     specialArgs = {inherit pkgsUnstable extraModules vhackPackages nixpkgs-unstable nixLib;};
@@ -40,7 +40,7 @@
       # Avoid an error from this service.
       "acme-sharkey.server".serviceConfig.ExecStart = pkgs.lib.mkForce "${pkgs.lib.getExe' pkgs.coreutils "true"}";
 
-      # Test, that sharkey's hardening still allows access to the CPUs.
+      # Test that sharkey's hardening still allows access to the CPUs.
       sharkey.serviceConfig.ExecStart = let
         nodejs = pkgs.lib.getExe pkgsUnstable.nodejs;
         script = pkgs.writeTextFile {
@@ -66,9 +66,8 @@
       from time import sleep
 
       start_all()
 
-      server.wait_for_unit("sharkey.service")
-      # Give the service time to start.
+      # Give the service time to run.
       sleep(3)
 
       with subtest("All services running"):
diff --git a/tests/by-name/sh/sharkey/test.nix b/tests/by-name/sh/sharkey/test.nix
index 40efe17..b2ad461 100644
--- a/tests/by-name/sh/sharkey/test.nix
+++ b/tests/by-name/sh/sharkey/test.nix
@@ -97,7 +97,6 @@
       # Start the actual testing machines
       start_all()
-
       with subtest("Add pebble ca key to all services"):
         for node in [name_server, server, client]:
           node.wait_for_unit("network-online.target")
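The renamed sharkey-cpu test swaps sharkey's ExecStart for a small Node.js script; the script body itself lies outside the hunk above. A hypothetical sketch of such a probe, reusing the `nodejs` and `script` names from the context lines — the script body and the final command are assumptions, not code from the repository:

    # Hypothetical: make the unit fail when systemd hardening hides the CPUs,
    # so the test's "All services running" subtest catches the regression.
    sharkey.serviceConfig.ExecStart = let
      nodejs = pkgs.lib.getExe pkgsUnstable.nodejs;
      script = pkgs.writeTextFile {
        name = "cpu-probe.js";
        text = ''
          const os = require("os");
          if (os.cpus().length === 0) {
            throw new Error("no CPUs visible inside the service sandbox");
          }
        '';
      };
    in
      pkgs.lib.mkForce "${nodejs} ${script}";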
diff --git a/tests/by-name/ta/taskchampion-sync/test.nix b/tests/by-name/ta/taskchampion-sync/test.nix
index 4dd273b..99134bc 100644
--- a/tests/by-name/ta/taskchampion-sync/test.nix
+++ b/tests/by-name/ta/taskchampion-sync/test.nix
@@ -19,35 +19,87 @@ nixos-lib.runTest {
     pkgs = null;
   };
 
-  nodes = {
+  nodes = let
+    # The feature flag is only in version 3.2 and upwards. Stable is still on 3.1
+    taskwarriorPackage = pkgsUnstable.taskwarrior3.overrideAttrs (final: prev: {
+      cmakeFlags = (prev.cmakeFlags or []) ++ ["-DENABLE_TLS_NATIVE_ROOTS=true"];
+    });
+  in {
+    acme = {
+      imports = [
+        ../../../common/acme/server.nix
+        ../../../common/dns/client.nix
+      ];
+    };
+    name_server = {nodes, ...}: {
+      imports =
+        extraModules
+        ++ [
+          ../../../common/acme/client.nix
+          ../../../common/dns/server.nix
+        ];
+
+      vhack.dns.zones = {
+        "taskchampion.server" = {
+          SOA = {
+            nameServer = "ns";
+            adminEmail = "admin@server.com";
+            serial = 2025012301;
+          };
+          useOrigin = false;
+
+          A = [
+            nodes.server.networking.primaryIPAddress
+          ];
+          AAAA = [
+            nodes.server.networking.primaryIPv6Address
+          ];
+        };
+      };
+    };
     server = {config, ...}: {
       imports =
         extraModules
         ++ [
           ../../../../modules
+          ../../../common/acme/client.nix
+          ../../../common/dns/client.nix
         ];
 
       vhack = {
-        taskchampion-sync.enable = true;
+        persist.enable = true;
+        nginx.enable = true;
+        taskchampion-sync = {
+          enable = true;
+          fqdn = "taskchampion.server";
+        };
       };
     };
 
     task_client1 = {config, ...}: {
+      imports = [
+        ../../../common/acme/client.nix
+        ../../../common/dns/client.nix
+      ];
+
       environment.systemPackages = [
-        pkgs.taskwarrior3
+        taskwarriorPackage
       ];
     };
 
     task_client2 = {config, ...}: {
+      imports = [
+        ../../../common/acme/client.nix
+        ../../../common/dns/client.nix
+      ];
+
       environment.systemPackages = [
-        pkgs.taskwarrior3
+        taskwarriorPackage
       ];
     };
   };
 
   testScript = {nodes, ...}: let
-    cfg = nodes.server.services.taskchampion-sync-server;
-    port = builtins.toString cfg.port;
     # Generated with uuidgen
     uuid = "bf01376e-04a4-435a-9263-608567531af3";
     password = "nixos-test";
@@ -57,19 +109,33 @@
       set -xe
 
       mkdir --parents "$(dirname "${path}")"
-      echo 'sync.server.origin=http://server:${port}' >> "${path}"
+      echo 'sync.server.url=https://taskchampion.server' >> "${path}"
       echo 'sync.server.client_id=${uuid}' >> "${path}"
      echo 'sync.encryption_secret=${password}' >> "${path}"
     '';
+
+    acme_scripts = import ../../../common/acme/scripts.nix {inherit pkgs;};
   in
     /*
    python
     */
    ''
+      # Start dependencies for the other services
+      acme.start()
+      acme.wait_for_unit("pebble.service")
+      name_server.start()
+      name_server.wait_for_unit("nsd.service")
+
+      # Start actual test
       start_all()
 
+      with subtest("Add pebble ca key to all services"):
+        for node in [name_server, server, task_client1, task_client2]:
+          node.wait_for_unit("network-online.target")
+          node.succeed("${acme_scripts.add_pebble_acme_ca}")
+
       server.wait_for_unit("taskchampion-sync-server.service")
-      server.wait_for_open_port(${port})
+      server.wait_for_open_port(443)
 
       with subtest("Setup task syncing"):
         for task in [task_client1, task_client2]:
@@ -81,11 +147,15 @@
       task_client1.succeed("task add 'First task -- task_client1'")
       task_client2.succeed("task add 'First task -- task_client2'")
 
+      # Wait for the server to acquire the acme certificate
+      task_client1.wait_until_succeed("curl https://taskchampion.server")
+
       with subtest("Can sync tasks"):
         for task in [task_client1, task_client2]:
           task.succeed("task sync")
         task_client1.succeed("task sync")
+
       with subtest("Have correct tasks"):
         count1 = task_client1.succeed("task count")
         count2 = task_client2.succeed("task count")
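Two notes on the test changes above. The `-DENABLE_TLS_NATIVE_ROOTS=true` override presumably makes the Taskwarrior 3.2 sync client trust the system certificate store, which is where the `add_pebble_acme_ca` helper appears to install the pebble test CA; the diff itself only states that the flag requires 3.2. And the three `echo` calls write a per-client configuration equivalent to the following generated file — a sketch using `pkgs.writeText` with the test's own literal values, not code from the repository:

    # Roughly the taskrc each client ends up with, expressed as a generated file.
    taskrc = pkgs.writeText "taskrc" ''
      sync.server.url=https://taskchampion.server
      sync.server.client_id=bf01376e-04a4-435a-9263-608567531af3
      sync.encryption_secret=nixos-test
    '';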