promtail: Deploy Loki Promtail Agent

[Promtail][0] is the log collection agent for Grafana Loki.  It reads
logs from various locations, including local files and the _systemd_
journal, and sends them to Loki via HTTP.

Promtail's configuration is a highly-structured YAML document.  Thus, instead
of using Tera template syntax for loops, conditionals, etc., we can use
the full power of CUE to construct the configuration.  Using the
`Marshal` function from the built-in `encoding/yaml` package, we
serialize the final configuration structure as a string and write it
verbatim to the configuration file.

I have modeled most of the Promtail configuration schema in the
`du5t1n.me/cfg/app/promtail/schema` package.  Having the schema modeled
will ensure the generated configuration is valid during development
(i.e. `cue export` will fail if it is not), which will save the time
otherwise wasted pushing changes to machines only to have Promtail complain.

The `#promtail` "function" in `du5t1n.me/cfg/env/prod` makes it easy to
build our desired configuration.  It accepts an optional `#scrape`
field, which can be used to provide specific log scraping definitions.
If it is unspecified, the default configuration is to scrape the systemd
journal.  Hosts with additional needs can supply their own list,
probably including the `promtail.scrape.journal` object in it to get the
default journal scrape job.

[0]: https://grafana.com/docs/loki/latest/send-data/promtail/
master
Dustin 2024-02-13 20:13:21 -06:00
parent 4608f19724
commit 45c35c065a
10 changed files with 354 additions and 0 deletions

View File

@ -0,0 +1,38 @@
package schema
// #BaseMetric holds the fields common to every metric produced by the
// pipeline `metrics` stage; the concrete metric definitions embed it
// and pin `type`.
#BaseMetric: {
// Metric type discriminator; fixed to "Counter", "Gauge", or
// "Histogram" by the embedding definition.
type: string
// Help text exported alongside the metric.
description?: string
// Prefix prepended to the metric name -- presumably Promtail's
// default ("promtail_custom_") applies when unset; confirm in docs.
prefix?: string
// Extracted-data field the metric reads; defaults to the metric name
// when unset per the Promtail metrics-stage convention.
source?: string
// How long a series may receive no updates before it is removed.
max_idle_duration?: string
}
// #CounterMetric configures a metrics-stage Counter: a monotonically
// increasing value derived from log entries.
#CounterMetric: {
#BaseMetric
type: "Counter"
config: {
// Count every entry, regardless of whether `source` is present.
match_all?: bool
// Count the bytes of each entry instead of the number of entries.
count_entry_bytes?: bool
// Only act when the source field equals this exact value.
value?: string
// "inc" adds 1 per match; "add" adds the numeric value of the
// source field.
action: "inc" | "add"
}
}
// #GaugeMetric configures a metrics-stage Gauge: a value that can move
// up and down based on log entries.
#GaugeMetric: {
#BaseMetric
type: "Gauge"
config: {
// Only act when the source field equals this exact value.
value?: string
// "set" uses the source field's numeric value directly; the others
// adjust the gauge relative to its current value.
action: "set" | "inc" | "dec" | "add" | "sub"
}
}
// #HistogramMetric configures a metrics-stage Histogram: a bucketed
// distribution of a numeric value extracted from log entries.
#HistogramMetric: {
#BaseMetric
type: "Histogram"
config: {
// Only act when the source field equals this exact value.
value?: string
// Upper bounds of the histogram buckets.  Promtail accepts any
// number of buckets and the bounds may be fractional, so this is an
// open list of numbers; the original `[int]` was a CUE closed list
// of *exactly one* int element, which would reject any realistic
// bucket layout.
buckets: [...number]
}
}

View File

@ -0,0 +1,106 @@
package schema
// #PipelineStage models one entry of a scrape job's `pipeline_stages`
// list.  Each entry normally sets exactly one of these stage fields;
// the schema leaves them all optional rather than enforcing that
// exclusivity.
#PipelineStage: {
// Parse container-runtime (CRI) formatted log lines.
cri?: {
max_partial_lines?: int
max_partial_line_size?: int
max_partial_line_size_truncate?: bool
}
// Strip ANSI color codes; the stage takes no configuration.
decolorize?: null
// Drop entries matching the given criteria.
drop?: {
source?: [...string] | string
separator?: string
expression?: string
value?: string
older_than?: string
longer_than?: string | int
drop_counter_reason?: string
}
// Extract fields from JSON-formatted log lines.
json?: {
expressions?: [string]: string
source?: string
drop_malformed?: bool
}
// Allow only / remove the listed labels.
labelallow?: [...string]
labeldrop?: [...string]
// Promote extracted fields to labels (label name -> source field).
labels?: [string]: string
// Rate-limit log entries.
limit?: {
rate?: int
burst?: int
by_label_name?: string
max_distinct_labels?: int
drop?: bool
}
// Extract fields from logfmt-formatted lines.
logfmt?: {
mapping?: [string]: string
source?: string
}
// Conditionally run nested stages on entries matching a selector.
match?: {
selector: string
pipeline_name?: string
action?: "keep" | "drop"
drop_counter_reason?: string
// Fixed: `[#PipelineStage]` is a CUE closed list of exactly one
// element; a match stage may nest any number of stages.
stages?: [...#PipelineStage]
}
// Derive Prometheus metrics from log entries.
metrics?: [string]: #CounterMetric | #GaugeMetric | #HistogramMetric
// Merge continuation lines into a single multi-line entry.
multiline?: {
firstline: string
max_wait_time?: string
max_lines?: int
}
// Replace the log line with the named extracted field.
output?: {
source: string
}
// Embed the listed labels into the log line as JSON.
pack?: {
// Fixed: open list -- any number of labels may be packed (the
// original `[string]` required exactly one).
labels: [...string]
ingest_timestamp?: bool
}
// Extract fields using a regular expression with named groups.
regex?: {
expression: string
source?: string
}
// Rewrite matched text in the log line or an extracted field.
replace?: {
expression: string
source?: string
replace?: string
}
// Probabilistically sample entries.
sampling?: {
rate?: float
}
// Add fixed labels to every entry.
static_labels?: [string]: string
// Rewrite an extracted field using a Go template.
template?: {
source: string
template: string
}
// Route entries to a Loki tenant.
tenant?: {
label?: string
source?: string
value?: string
}
// Set the entry's timestamp from an extracted field.
timestamp?: {
source: string
format: string
fallback_formats?: [...string]
location?: string
}
}

View File

@ -0,0 +1,62 @@
package schema
// #Client describes one Loki endpoint Promtail pushes logs to.
#Client: {
// Loki push API URL, e.g. https://host:3100/loki/api/v1/push.
url: string
// TLS settings for HTTPS endpoints.
tls_config?: {
// CA bundle used to verify the server certificate.
ca_file?: string
// Client certificate and key for mutual TLS.
cert_file?: string
key_file?: string
// Expected server name, when it differs from the URL host.
server_name?: string
}
}
// #CommonScrapeConfig holds the fields shared by all scrape target
// definitions: a set of static labels applied to every entry.
#CommonScrapeConfig: {
labels?: [string]: string
}
// #JournalScrapeConfig configures reading log entries from the systemd
// journal.
#JournalScrapeConfig: {
#CommonScrapeConfig
// When true, forward the full journal entry as JSON instead of just
// the MESSAGE field.  Defaults to false.
json: bool | *false
// Oldest relative entry age to read on first run (e.g. "12h");
// Promtail defaults to 7h when unset.
max_age?: string
// Journal directory to read; defaults to the system journal.
path?: string
// journalctl-style match expressions to filter entries.
matches?: string
}
// #KubernetesScrapeConfig configures Kubernetes service discovery
// (kubernetes_sd_configs) for a scrape job.
#KubernetesScrapeConfig: {
#CommonScrapeConfig
// API server address; when omitted, in-cluster discovery is
// presumably used -- confirm against the Promtail documentation.
api_server?: string
// Which kind of Kubernetes object to discover targets from.
role: "node" | "service" | "pod" | "endpoints" | "ingress"
}
// #RelabelConfig models one Prometheus-style relabeling rule, used by
// Promtail to map discovery metadata (e.g. journald fields) onto Loki
// labels.
#RelabelConfig: {
// Labels whose values are joined with `separator` to form the rule's
// input.
source_labels?: [...string]
separator?: string
// Label written by the "replace"/"hashmod"/"lowercase"/... actions.
target_label?: string
// Regular expression matched against the joined source labels.
regex?: string
// Modulus for the "hashmod" action.
modulus?: int
// Replacement value; may reference regex capture groups.
replacement?: string
// Extended with the newer Prometheus relabel actions ("lowercase",
// "uppercase", "keepequal", "dropequal"); the original set is
// unchanged, so existing configurations remain valid.
action?: "replace" | "keep" | "drop" | "hashmod" | "labelmap" |
"labeldrop" | "labelkeep" | "lowercase" | "uppercase" |
"keepequal" | "dropequal"
}
// #ScrapeConfig models one entry of Promtail's `scrape_configs` list:
// where to read logs from and how to process them.
#ScrapeConfig: {
// Unique name identifying this scrape job.
job_name: string
// Read entries from the systemd journal.
journal?: #JournalScrapeConfig
// Statically-defined targets.
static_configs?: [...#CommonScrapeConfig]
// Discover targets via the Kubernetes API.
kubernetes_sd_configs?: [...#KubernetesScrapeConfig]
// Processing pipeline applied to each log entry.
pipeline_stages?: [...#PipelineStage]
// Relabeling rules applied to discovered targets/metadata.
relabel_configs?: [...#RelabelConfig]
}
// #PromtailConfig is the top-level Promtail configuration document; it
// is serialized to YAML and written verbatim to the config file.
#PromtailConfig: {
server: {
// Port for Promtail's HTTP API and metrics endpoint.
http_listen_port: int | *9080
// 0 lets the kernel pick an ephemeral gRPC port.
grpc_listen_port: int | *0
// Allow reloading the configuration at runtime via the HTTP API.
enable_runtime_reload: bool | *true
}
// Loki endpoints to push logs to.
clients: [...#Client]
positions: {
// File recording per-source read offsets so a restart resumes
// where the previous run left off.
filename: string | *"/var/lib/promtail/positions"
}
// Log sources and their processing pipelines.
scrape_configs: [...#ScrapeConfig]
}

View File

@ -0,0 +1,33 @@
package promtail
import "du5t1n.me/cfg/base/schema/instructions"
// Files rendered on hosts that run the Promtail agent.  Both
// configuration files restart the service when they change; the
// Quadlet unit additionally requires an immediate daemon-reload so
// systemd sees the regenerated service before restarting it.
let TryRestartPromtail = {
changed: [{run: "systemctl try-restart promtail"}]
}
templates: [...instructions.#RenderInstruction] & [
{
template: "promtail/ca.crt"
dest: "/etc/promtail/ca.crt"
hooks: TryRestartPromtail
},
{
template: "promtail/config.yml"
dest: "/etc/promtail/config.yml"
hooks: TryRestartPromtail
},
{
template: "promtail/promtail.container"
dest: "/etc/containers/systemd/promtail.container"
hooks: changed: [
{
run: "systemctl daemon-reload"
immediate: true
},
{run: "systemctl restart promtail"},
]
},
]

86
env/prod/promtail.cue vendored Normal file
View File

@ -0,0 +1,86 @@
package prod
import "encoding/yaml"
import "du5t1n.me/cfg/app/promtail/schema"
// Production Promtail building blocks: a reusable journal scrape job,
// the CA certificate for the Loki endpoint, and the shared base
// configuration.  `#promtail` combines these into a per-host config.
promtail: {
scrape: {
// Default scrape job: follow the systemd journal and lift useful
// journald metadata into Loki labels.
journal: schema.#ScrapeConfig & {
job_name: "journal"
journal: {
labels: {
job: "systemd-journal"
}
}
// Promtail exposes journald fields with a __journal_ prefix;
// fields that themselves begin with "_" therefore carry a double
// underscore.  Each pair below maps one such field to a label.
relabel_configs: [
for m in [
{field: "__journal__hostname", label: "hostname"},
{field: "__journal__systemd_unit", label: "unit"},
{field: "__journal_syslog_identifier", label: "syslog_identifier"},
{field: "__journal_priority", label: "priority"},
{field: "__journal_message_id", label: "message_id"},
{field: "__journal__comm", label: "command"},
{field: "__journal__transport", label: "transport"},
] {
source_labels: [m.field]
target_label: m.label
},
]
}
}
// CA certificate used to verify the Loki server's TLS certificate;
// rendered verbatim to /etc/promtail/ca.crt.
ca: """
-----BEGIN CERTIFICATE-----
MIIBgTCCATOgAwIBAgIUTf/ZBSJEi8IQb8Ndoxp4/tHB/lcwBQYDK2VwMEAxCzAJ
BgNVBAYTAlVTMRgwFgYDVQQKDA9EdXN0aW4gQy4gSGF0Y2gxFzAVBgNVBAMMDkRD
SCBSb290IENBIFIzMB4XDTI0MDIxNzIwMjkzNloXDTM0MDIxNzIwMjkzNlowQDEL
MAkGA1UEBhMCVVMxGDAWBgNVBAoMD0R1c3RpbiBDLiBIYXRjaDEXMBUGA1UEAwwO
RENIIFJvb3QgQ0EgUjMwKjAFBgMrZXADIQDORylVcWcxwGDJvsJIc2NctfNfDaIU
T6mLebahKdshaKM/MD0wHQYDVR0OBBYEFLZoxAHBvWqbLWMga/DAAlG9ido5MA8G
A1UdEwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMAUGAytlcANBANLV79joVd9s9bmL
0a91HqvOotOnN/416Ek4UTl95jIqy/TvTfRjXX56wSALXqP1iYQM5i3zk3gVEhh4
DaY+6wQ=
-----END CERTIFICATE-----
"""
// Base configuration shared by every host: push to the production
// Loki server over TLS, verified with the CA above.
config: schema.#PromtailConfig & {
clients: [
{
url: "https://loki.pyrocufflink.blue:3100/loki/api/v1/push"
tls_config: {
ca_file: "/etc/promtail/ca.crt"
}
},
]
}
}
// Alias yaml.Marshal at file scope: inside #promtail the field named
// `yaml` would shadow the `encoding/yaml` package, so referencing
// `yaml.Marshal` there would resolve to the field itself and create a
// reference cycle.
let Marshal = yaml.Marshal
// #promtail is the "function" hosts use to build their Promtail
// configuration.  Hosts may override #scrape with their own list of
// scrape jobs; otherwise only the journal is scraped.
#promtail: {
// Scrape jobs for this host; defaults to just the journal job.
#scrape: [...schema.#ScrapeConfig] | *[promtail.scrape.journal]
// CA certificate rendered to /etc/promtail/ca.crt.
ca: string | *promtail.ca
// The shared base configuration plus this host's scrape jobs.
config: promtail.config & {
scrape_configs: #scrape
}
// The configuration serialized as a YAML document, written verbatim
// by the promtail/config.yml template.
yaml: Marshal(config)
}

View File

@ -5,6 +5,8 @@ import (
ssh: prod.ssh
sudo: prod.sudo
promtail: prod.#promtail
fetchcert: prod.fetchcert.loki & {
token: """
-----BEGIN AGE ENCRYPTED FILE-----

View File

@ -3,6 +3,7 @@ import (
"du5t1n.me/cfg/app/collectd"
"du5t1n.me/cfg/app/fetchcert"
"du5t1n.me/cfg/app/promtail"
"du5t1n.me/cfg/app/loki"
"du5t1n.me/cfg/env/prod"
)
@ -12,4 +13,5 @@ render: list.Concat([
collectd.templates,
fetchcert.templates,
loki.templates,
promtail.templates,
])

View File

@ -0,0 +1 @@
{{ promtail.ca }}

View File

@ -0,0 +1 @@
{{ promtail.yaml -}}

View File

@ -0,0 +1,23 @@
# vim: set ft=systemd :
# Podman Quadlet unit that runs the Grafana Loki Promtail agent in a
# container; systemd generates a promtail.service from this file.
[Unit]
Description=Grafana Loki Promtail Agent
# Wait for the network so the first push to Loki can succeed.
After=network-online.target
Wants=network-online.target
[Service]
# Creates /var/lib/promtail (%P expands to the unit name prefix),
# which holds the positions file.
StateDirectory=%P
[Container]
Image=docker.io/grafana/promtail:2.9.4
# The leading "-" belongs to Promtail's flag syntax (-config.file);
# it is the container command line, not a systemd ignore-errors prefix.
Exec=-config.file=/etc/promtail/config.yml
# %S/%P = the state directory above; Z relabels for SELinux, U chowns
# to the container user.
Volume=%S/%P:/var/lib/promtail:rw,Z,U
# machine-id is needed to read this machine's own journal.
Volume=/etc/machine-id:/etc/machine-id:ro
# Rendered config.yml and ca.crt.
Volume=/etc/promtail:/etc/promtail:ro
# Journal and log file locations, read-only.
Volume=/run/log:/run/log:ro
Volume=/var/log:/var/log:ro
# Promtail HTTP API/metrics (matches server.http_listen_port).
PublishPort=9080:9080
# container_t is not allowed to read var_log_t / syslogd_var_run_t
SecurityLabelDisable=true
[Install]
WantedBy=multi-user.target