alloy: Actually implement the functionality

SebastianStork 2025-09-20 15:37:48 +02:00
parent 58e7ffce5c
commit 7fbb1de671


@@ -13,6 +13,18 @@ in
       type = lib.types.port;
       default = 12345;
     };
+    metricsEndpoint = lib.mkOption {
+      type = lib.types.nonEmptyStr;
+      default = "https://metrics.${config.custom.services.tailscale.domain}/prometheus/api/v1/write";
+    };
+    logsEndpoint = lib.mkOption {
+      type = lib.types.nonEmptyStr;
+      default = "https://logs.${config.custom.services.tailscale.domain}/insert/loki/api/v1/push";
+    };
+    collect = {
+      hostMetrics = lib.mkEnableOption "";
+      sshdLogs = lib.mkEnableOption "";
+    };
   };

   config = lib.mkIf cfg.enable {
@@ -28,5 +40,44 @@ in
         "--disable-reporting"
       ];
     };
+
+    environment.etc = {
+      "alloy/endpoints.alloy".text = ''
+        prometheus.remote_write "default" {
+          endpoint {
+            url = "${cfg.metricsEndpoint}"
+          }
+        }
+
+        loki.write "default" {
+          endpoint {
+            url = "${cfg.logsEndpoint}"
+          }
+        }
+      '';
+
+      "alloy/node-exporter.alloy" = lib.mkIf cfg.collect.hostMetrics {
+        text = ''
+          prometheus.exporter.unix "default" {
+            enable_collectors = [ "systemd" ]
+          }
+
+          prometheus.scrape "node_exporter" {
+            targets = prometheus.exporter.unix.default.targets
+            forward_to = [prometheus.remote_write.default.receiver]
+            scrape_interval = "15s"
+          }
+        '';
+      };
+
+      "alloy/sshd-logs.alloy" = lib.mkIf cfg.collect.sshdLogs {
+        text = ''
+          loki.source.journal "sshd" {
+            matches = "_SYSTEMD_UNIT=sshd.service"
+            forward_to = [loki.write.default.receiver]
+          }
+        '';
+      };
+    };
   };
 }
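
For reference, a minimal sketch of how a host configuration might consume the new options. The option prefix custom.services.alloy is an assumption inferred from the cfg references in the diff, not something the diff itself shows:

{
  # Hypothetical host module; the option path below is assumed.
  custom.services.alloy = {
    enable = true;
    collect.hostMetrics = true; # writes /etc/alloy/node-exporter.alloy
    collect.sshdLogs = true;    # writes /etc/alloy/sshd-logs.alloy
    # metricsEndpoint / logsEndpoint keep their Tailscale-domain defaults
    # unless overridden here.
  };
}

Only endpoints.alloy is written unconditionally; the node-exporter and sshd journal pipelines are generated only when the corresponding collect.* toggle is enabled.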