// metrics/prometheus.v — exporter that serializes collected metrics
// into the Prometheus text exposition format.
module metrics
import strings
import io
import arrays
// PrometheusExporter renders metrics from a MetricsCollector in the
// Prometheus text exposition format.
pub struct PrometheusExporter {
	// Ascending upper bounds used to bucket histogram observations.
	buckets []f64
mut:
	// Prefix prepended to every metric name (set via `load`).
	prefix string
	// Source of metric values; nil until `load` is called.
	collector &MetricsCollector = unsafe { nil }
}
// new_prometheus_exporter creates an exporter whose histograms are
// bucketed by the given ascending upper bounds. Call `load` before
// exporting.
pub fn new_prometheus_exporter(buckets []f64) PrometheusExporter {
	exporter := PrometheusExporter{
		buckets: buckets
	}
	return exporter
}
// load configures the exporter: `prefix` is prepended to every exported
// metric name, and `collector` supplies the metric values.
pub fn (mut e PrometheusExporter) load(prefix string, collector &MetricsCollector) {
	// Assigning to a reference field initialized to nil requires unsafe.
	unsafe {
		e.collector = collector
	}
	e.prefix = prefix
}
// join_two_array renders a (key, value) pair as a Prometheus label,
// e.g. ['method', 'GET'] becomes 'method="GET"'.
[inline]
fn join_two_array(arr [2]string) string {
	key := arr[0]
	value := arr[1]
	return key + '="' + value + '"'
}
// serialize_metric renders a metric's identifier in Prometheus format:
// `<prefix><name>` when it has no labels, otherwise
// `<prefix><name>{key="value",...}`.
pub fn (e &PrometheusExporter) serialize_metric(metric Metric) string {
	if metric.labels.len == 0 {
		return '$e.prefix$metric.name'
	}
	return '$e.prefix$metric.name{${metric.labels.map(join_two_array(it)).join(',')}}'
}
// export_to_string renders every metric from the collector as a single
// Prometheus text-format string.
pub fn (mut e PrometheusExporter) export_to_string() !string {
	mut sb := strings.new_builder(64)
	e.export_to_writer(mut sb)!
	return sb.str()
}
// export_to_writer streams all metrics from the collector to `writer`
// in the Prometheus text exposition format. Counters and gauges become
// one sample each; every histogram is expanded into the standard
// `_count`, `_sum` and cumulative `_bucket` series, always including
// the `+Inf` bucket.
pub fn (mut e PrometheusExporter) export_to_writer(mut writer io.Writer) ! {
	for counter in e.collector.counters() {
		// The name was just returned by the collector, so the lookup
		// cannot legitimately fail.
		val := e.collector.counter_get(counter) or { return error("This can't happen.") }
		line := '${e.serialize_metric(counter)} $val\n'
		writer.write(line.bytes())!
	}
	for gauge in e.collector.gauges() {
		val := e.collector.gauge_get(gauge) or { return error("This can't happen.") }
		line := '${e.serialize_metric(gauge)} $val\n'
		writer.write(line.bytes())!
	}
	for hist in e.collector.histograms() {
		data := e.collector.histogram_get(hist) or { return error("This can't happen.") }
		// arrays.sum errors on an empty array; an empty histogram sums to 0.
		sum := arrays.sum(data) or { 0.0 }
		total_count := data.len
		mut bucket_counts := []u64{len: e.buckets.len}
		mut i := bucket_counts.len - 1
		// For each data point, increment all buckets that the value is
		// contained in (Prometheus buckets are cumulative). Because the
		// buckets are sorted ascending, scanning from the largest bound
		// downwards lets us stop at the first bucket the value exceeds.
		for val in data {
			for i >= 0 && val <= e.buckets[i] {
				bucket_counts[i]++
				i -= 1
			}
			i = bucket_counts.len - 1
		}
		// <name>_count: total number of observations.
		mut m := Metric{
			...hist
			name: '${hist.name}_count'
		}
		writer.write('${e.serialize_metric(m)} $total_count\n'.bytes())!
		// <name>_sum: sum of all observed values.
		m = Metric{
			...hist
			name: '${hist.name}_sum'
		}
		writer.write('${e.serialize_metric(m)} $sum\n'.bytes())!
		// Bucket series carry the histogram's own labels plus a trailing
		// `le` label holding the bucket's upper bound; only the bound is
		// rewritten between iterations.
		mut le_labels := [][2]string{}
		le_labels.prepend(hist.labels)
		le_labels << ['le', '']!
		for j, bucket in e.buckets {
			le_labels[le_labels.len - 1][1] = bucket.str()
			m = Metric{
				name: '${hist.name}_bucket'
				labels: le_labels
			}
			writer.write('${e.serialize_metric(m)} ${bucket_counts[j]}\n'.bytes())!
		}
		// Always output the +Inf bucket, which contains every observation.
		le_labels[le_labels.len - 1][1] = '+Inf'
		m = Metric{
			name: '${hist.name}_bucket'
			labels: le_labels
		}
		writer.write('${e.serialize_metric(m)} $total_count\n'.bytes())!
	}
}