# NixOS VM test for the ntfy-alerts module.
#
# Boots a single machine that runs:
#   * a mock ntfy HTTP server (inline Python) which appends every POST it
#     receives to /tmp/ntfy-requests.json,
#   * a deliberately failing oneshot unit (test-fail),
#   * the ntfy-alerts module pointed at the mock server.
# The test script then asserts that a failure notification with the expected
# path, Title, Priority and Tags reaches the mock, and that a second failure
# produces a second notification.
{
  config,
  lib,
  pkgs,
  ...
}:
let
  # Apply the project's overlays so the test evaluates against the same
  # package set a real deployment would see.
  testPkgs = pkgs.appendOverlays [ (import ../modules/overlays.nix) ];
in
testPkgs.testers.runNixOSTest {
  name = "ntfy-alerts";

  nodes.machine =
    { pkgs, ... }:
    {
      imports = [
        ../modules/ntfy-alerts.nix
      ];

      # Reuse the evaluating configuration's stateVersion for the VM.
      system.stateVersion = config.system.stateVersion;

      virtualisation.memorySize = 2048;

      environment.systemPackages = with pkgs; [
        curl
        jq
      ];

      # Create the file the module reads the ntfy topic from.
      systemd.tmpfiles.rules = [
        "f /run/ntfy-test-topic 0644 root root - test-alerts"
      ];

      # Mock ntfy server that records POST requests.
      systemd.services.mock-ntfy =
        let
          mockNtfyScript = pkgs.writeScript "mock-ntfy.py" ''
            import json
            import os
            from http.server import HTTPServer, BaseHTTPRequestHandler
            from datetime import datetime

            REQUESTS_FILE = "/tmp/ntfy-requests.json"

            class MockNtfy(BaseHTTPRequestHandler):
                def _respond(self, code=200, body=b"Ok"):
                    self.send_response(code)
                    self.send_header("Content-Type", "application/json")
                    self.end_headers()
                    self.wfile.write(body if isinstance(body, bytes) else body.encode())

                def do_GET(self):
                    # Health-check endpoint polled by the test script.
                    self._respond()

                def do_POST(self):
                    content_length = int(self.headers.get("Content-Length", 0))
                    body = self.rfile.read(content_length).decode() if content_length > 0 else ""

                    request_data = {
                        "timestamp": datetime.now().isoformat(),
                        "path": self.path,
                        "headers": dict(self.headers),
                        "body": body,
                    }

                    # Load existing requests or start a new list.  A corrupt
                    # or unreadable log resets to empty instead of crashing;
                    # json.JSONDecodeError is a ValueError subclass.
                    requests = []
                    if os.path.exists(REQUESTS_FILE):
                        try:
                            with open(REQUESTS_FILE, "r") as f:
                                requests = json.load(f)
                        except (OSError, ValueError):
                            requests = []

                    requests.append(request_data)

                    with open(REQUESTS_FILE, "w") as f:
                        json.dump(requests, f, indent=2)

                    self._respond()

                def log_message(self, format, *args):
                    # Silence per-request stderr logging.
                    pass

            HTTPServer(("0.0.0.0", 8080), MockNtfy).serve_forever()
          '';
        in
        {
          description = "Mock ntfy server";
          wantedBy = [ "multi-user.target" ];
          # Ensure the mock is ordered before the alert unit it will serve.
          before = [ "ntfy-alert@test-fail.service" ];
          serviceConfig = {
            ExecStart = "${pkgs.python3}/bin/python3 ${mockNtfyScript}";
            Type = "simple";
          };
        };

      # Test service that will fail, triggering the OnFailure hook.
      systemd.services.test-fail = {
        description = "Test service that fails";
        serviceConfig = {
          Type = "oneshot";
          ExecStart = "${pkgs.coreutils}/bin/false";
        };
      };

      # Configure ntfy-alerts to use the mock server.
      services.ntfyAlerts = {
        enable = true;
        serverUrl = "http://localhost:8080";
        topicFile = "/run/ntfy-test-topic";
      };
    };

  testScript = ''
    import json
    import time

    start_all()

    # Wait for mock ntfy server to be ready
    machine.wait_for_unit("mock-ntfy.service")
    machine.wait_until_succeeds("curl -sf http://localhost:8080/", timeout=30)

    # Verify the ntfy-alert@ template service exists
    machine.succeed("systemctl list-unit-files | grep ntfy-alert@")

    # Verify the global OnFailure drop-in is configured
    machine.succeed("cat /etc/systemd/system/service.d/onfailure.conf | grep -q 'OnFailure=ntfy-alert@%p.service'")

    # Trigger the test-fail service
    machine.succeed("systemctl start test-fail.service || true")

    # Wait a moment for the failure notification to be sent
    time.sleep(2)

    # Verify the ntfy-alert@test-fail service ran
    machine.succeed("systemctl is-active ntfy-alert@test-fail.service || systemctl is-failed ntfy-alert@test-fail.service || true")

    # Check that the mock server received a POST request
    machine.wait_until_succeeds("test -f /tmp/ntfy-requests.json", timeout=30)

    # Verify the request content
    result = machine.succeed("cat /tmp/ntfy-requests.json")
    requests = json.loads(result)

    assert len(requests) >= 1, f"Expected at least 1 request, got {len(requests)}"

    # Check the first request
    req = requests[0]
    assert "/test-alerts" in req["path"], f"Expected path to contain /test-alerts, got {req['path']}"
    assert "Title" in req["headers"], "Expected Title header"
    assert "test-fail" in req["headers"]["Title"], f"Expected Title to contain 'test-fail', got {req['headers']['Title']}"
    assert req["headers"]["Priority"] == "high", f"Expected Priority 'high', got {req['headers'].get('Priority')}"
    assert req["headers"]["Tags"] == "warning", f"Expected Tags 'warning', got {req['headers'].get('Tags')}"

    print(f"Received notification: Title={req['headers']['Title']}, Body={req['body'][:100]}...")

    # Idempotency test: trigger failure again
    machine.succeed("rm /tmp/ntfy-requests.json")
    machine.succeed("systemctl reset-failed test-fail.service || true")
    machine.succeed("systemctl start test-fail.service || true")
    time.sleep(2)

    # Verify another notification was sent
    machine.wait_until_succeeds("test -f /tmp/ntfy-requests.json", timeout=30)
    result = machine.succeed("cat /tmp/ntfy-requests.json")
    requests = json.loads(result)
    assert len(requests) >= 1, f"Expected at least 1 request after second failure, got {len(requests)}"

    print("All tests passed!")
  '';
}