This commit is contained in:
parent
ebc446116c
commit
7f52ad42cd
14 changed files with 290 additions and 2 deletions
|
@ -16,6 +16,7 @@
|
|||
"syncv3.matrix.aciceri.dev"
|
||||
"jellyfin.aciceri.dev"
|
||||
"photos.aciceri.dev"
|
||||
"status.aciceri.dev"
|
||||
];
|
||||
apiTokenFile = config.age.secrets.cloudflare-dyndns-api-token.path;
|
||||
};
|
||||
|
|
30
modules/grafana/default.nix
Normal file
30
modules/grafana/default.nix
Normal file
|
@ -0,0 +1,30 @@
|
|||
# Grafana behind nginx at status.aciceri.dev, with the admin password
# supplied via an agenix secret.
{config, ...}: let
  cfg = config.services.grafana;
in {
  services.grafana = {
    enable = true;
    settings = {
      server = {
        domain = "status.aciceri.dev";
        # Bind to localhost only; nginx (below) terminates TLS and proxies.
        http_addr = "127.0.0.1";
        http_port = 2342;
        # Use the `cfg` alias consistently with the rest of this module
        # (previously spelled out as config.services.grafana...).
        root_url = "https://${cfg.settings.server.domain}:443/";
      };
      security = {
        admin_user = "andrea";
        # $__file{...} makes Grafana read the password from disk at startup,
        # keeping the secret out of the world-readable Nix store.
        admin_password = "$__file{${config.age.secrets.grafana-password.path}}";
      };
    };
  };

  # Keep Grafana's state (sqlite DB, dashboards) across reboots on the
  # impermanence setup.
  environment.persistence."/persist".directories = [
    cfg.dataDir
  ];

  services.nginx.virtualHosts = {
    "status.aciceri.dev" = {
      enableACME = true;
      forceSSL = true;
      locations."/".proxyPass = "http://127.0.0.1:${builtins.toString cfg.settings.server.http_port}";
    };
  };
}
|
|
@ -94,6 +94,7 @@ in {
|
|||
"media_player"
|
||||
"wyoming"
|
||||
"wake_on_lan"
|
||||
"prometheus"
|
||||
];
|
||||
extraPackages = python3Packages:
|
||||
with python3Packages; [
|
||||
|
@ -148,6 +149,9 @@ in {
|
|||
];
|
||||
shell_command.turn_off_picard = ''${pkgs.openssh}/bin/ssh -i /var/lib/hass/.ssh/id_ed25519 -o StrictHostKeyChecking=no hass@picard.fleet "exec sudo \$(readlink \$(which systemctl)) poweroff"'';
|
||||
# shell_command.turn_off_picard = ''whoami'';
|
||||
prometheus = {
|
||||
namespace = "hass";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
|
|
67
modules/loki/default.nix
Normal file
67
modules/loki/default.nix
Normal file
|
@ -0,0 +1,67 @@
|
|||
# Single-node Loki with filesystem storage and ~5 days of retention.
{config, ...}: let
  cfg = config.services.loki;
in {
  services.loki = {
    enable = true;
    configuration = {
      # Single-tenant setup; no multi-tenancy auth.
      auth_enabled = false;
      server = {
        http_listen_port = 3100;
        log_level = "warn";
      };
      common = {
        # Use the `cfg` alias consistently (was config.services.loki.dataDir).
        path_prefix = cfg.dataDir;
        storage.filesystem = {
          chunks_directory = "${cfg.dataDir}/chunks";
          rules_directory = "${cfg.dataDir}/rules";
        };
        # Single-instance deployment: no replication, in-memory ring.
        replication_factor = 1;
        ring.kvstore.store = "inmemory";
        ring.instance_addr = "127.0.0.1";
      };

      ingester.chunk_encoding = "snappy";

      # All limits in one place. split_queries_by_interval was previously set
      # a second time at the bottom of this attrset (relying on Nix's implicit
      # attrset merging); it has been folded in here.
      limits_config = {
        retention_period = "120h";
        ingestion_burst_size_mb = 16;
        reject_old_samples = true;
        reject_old_samples_max_age = "12h";
        split_queries_by_interval = "24h";
      };

      # NOTE(review): table_manager is deprecated in recent Loki releases —
      # retention is handled by the compactor below. Confirm the packaged
      # Loki version still honors (or at least tolerates) this section.
      table_manager = {
        retention_deletes_enabled = true;
        retention_period = "120h";
      };

      compactor = {
        retention_enabled = true;
        compaction_interval = "10m";
        working_directory = "${cfg.dataDir}/compactor";
        delete_request_cancel_period = "10m"; # don't wait 24h before processing the delete_request
        retention_delete_delay = "2h";
        retention_delete_worker_count = 150;
        delete_request_store = "filesystem";
      };

      schema_config.configs = [
        {
          from = "2020-11-08";
          store = "tsdb";
          object_store = "filesystem";
          schema = "v13";
          index.prefix = "index_";
          index.period = "24h";
        }
      ];

      query_range.cache_results = true;
    };
  };

  # Persist chunks and index across reboots (impermanence).
  environment.persistence."/persist".directories = [
    cfg.dataDir
  ];
}
|
32
modules/prometheus-exporters/default.nix
Normal file
32
modules/prometheus-exporters/default.nix
Normal file
|
@ -0,0 +1,32 @@
|
|||
# prometheus-node-exporter with a broad set of host metrics collectors.
# The module previously bound `config` and `pkgs` without using either;
# the unused arguments have been dropped.
{...}: {
  services.prometheus.exporters.node = {
    enable = true;
    enabledCollectors = [
      "cpu"
      "conntrack"
      "diskstats"
      "entropy"
      "filefd"
      "filesystem"
      "loadavg"
      "mdadm"
      "meminfo"
      "netdev"
      "netstat"
      "stat"
      "time"
      "vmstat"
      "systemd"
      "logind"
      "interrupts"
      "ksmd"
      "textfile"
      "pressure"
    ];
    # Collectors passed as raw flags rather than via enabledCollectors.
    extraFlags = ["--collector.ethtool" "--collector.softirqs" "--collector.tcpstat" "--collector.wifi"];
  };
}
|
33
modules/prometheus/default.nix
Normal file
33
modules/prometheus/default.nix
Normal file
|
@ -0,0 +1,33 @@
|
|||
# Prometheus server scraping Home Assistant and the fleet's node exporters;
# served externally through Grafana/nginx at status.aciceri.dev.
{config, ...}: let
  cfg = config.services.prometheus;
in {
  services.prometheus = {
    enable = true;
    checkConfig = false; # Otherwise it will fail because it cannot access bearer_token_file
    webExternalUrl = "https://status.aciceri.dev";
    globalConfig.scrape_interval = "10s";
    scrapeConfigs = [
      {
        # Home Assistant's built-in prometheus integration endpoint.
        job_name = "hass";
        metrics_path = "/api/prometheus";
        # Long-lived HA access token provided by agenix.
        # NOTE(review): bearer_token_file is deprecated upstream in favor of
        # authorization.credentials_file — confirm the packaged Prometheus
        # still accepts it.
        bearer_token_file = config.age.secrets.home-assistant-token.path;
        static_configs = [
          {
            # Port is derived from the HA config so the two stay in sync.
            targets = ["sisko.fleet:${builtins.toString config.services.home-assistant.config.http.server_port}"];
          }
        ];
      }
      {
        # node_exporter on every fleet host (default port 9100).
        job_name = "node";
        static_configs = [
          {
            targets = builtins.map (host: "${host}.fleet:9100") ["sisko" "picard"];
          }
        ];
      }
    ];
  };
  # cfg.stateDir is a directory name relative to /var/lib, hence the interpolation.
  environment.persistence."/persist".directories = [
    "/var/lib/${cfg.stateDir}"
  ];
}
|
54
modules/promtail/default.nix
Normal file
54
modules/promtail/default.nix
Normal file
|
@ -0,0 +1,54 @@
|
|||
# Promtail shipping the systemd journal to the Loki instance on sisko.
# Runs as a hand-rolled systemd unit with a generated YAML config.
{
  pkgs,
  lib,
  config,
  ...
}: let
  conf = {
    server = {
      http_listen_port = 28183;
      grpc_listen_port = 0;
    };
    clients = [
      {
        # Push endpoint of the Loki server; the port is read from the Loki
        # module so the two stay in sync.
        # NOTE(review): this assumes services.loki.configuration is populated
        # on every host importing this module, not just the one running Loki —
        # confirm it evaluates on the other fleet hosts.
        url = "http://sisko.fleet:${builtins.toString config.services.loki.configuration.server.http_listen_port}/loki/api/v1/push";
      }
    ];
    positions = {
      # Previously /tmp/positions.yaml: /tmp is volatile, so every reboot
      # lost the read positions and re-shipped up to max_age of journal.
      # Keep the file in the service's state directory instead (created by
      # StateDirectory= below).
      filename = "/var/lib/promtail/positions.yaml";
    };
    scrape_configs = [
      {
        job_name = "journal";
        journal = {
          max_age = "12h";
          labels = {
            job = "systemd-journal";
            host = config.networking.hostName;
          };
        };
        # Expose the originating unit as a queryable label.
        relabel_configs = [
          {
            source_labels = ["__journal__systemd_unit"];
            target_label = "unit";
          }
        ];
      }
    ];
  };
  # lib.generators.toYAML serializes the attrset (JSON subset of YAML).
  configFile = pkgs.writeTextFile {
    name = "promtail.yaml";
    text = lib.generators.toYAML {} conf;
  };
in {
  systemd.services.promtail = {
    description = "Promtail service for Loki";
    wantedBy = ["multi-user.target"];

    serviceConfig = {
      # Owns /var/lib/promtail for the positions file.
      StateDirectory = "promtail";
      # Loki on sisko may not be reachable yet at boot; retry instead of
      # staying dead.
      Restart = "on-failure";
      ExecStart = ''
        ${pkgs.grafana-loki}/bin/promtail --config.file ${configFile}
      '';
    };
  };
}
|
0
modules/promtail/protmail.yaml
Normal file
0
modules/promtail/protmail.yaml
Normal file
Loading…
Add table
Add a link
Reference in a new issue