diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ed5d2605f..835d4dcde 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,8 +7,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - with: - fetch-depth: 0 - name: Set up JDK uses: actions/setup-java@v4 with: diff --git a/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/BenchmarkRunner.java b/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/BenchmarkRunner.java index 6503c0d60..9d5d242ae 100644 --- a/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/BenchmarkRunner.java +++ b/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/BenchmarkRunner.java @@ -1,7 +1,7 @@ package io.prometheus.metrics.benchmarks; public class BenchmarkRunner { - public static void main(String[] args) throws Exception { - org.openjdk.jmh.Main.main(args); - } + public static void main(String[] args) throws Exception { + org.openjdk.jmh.Main.main(args); + } } diff --git a/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/CounterBenchmark.java b/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/CounterBenchmark.java index 31678ad10..6730e1bab 100644 --- a/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/CounterBenchmark.java +++ b/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/CounterBenchmark.java @@ -19,6 +19,7 @@ /** * Results on a machine with dedicated 8 vCPU cores: + * *
  * Benchmark                                                                  Mode  Cnt      Score     Error  Units
  * i.p.metrics.benchmarks.CounterBenchmark.codahaleIncNoLabels               thrpt   25  30978.055 ± 424.088  ops/s
@@ -32,182 +33,174 @@
  * i.p.metrics.benchmarks.CounterBenchmark.simpleclientInc                   thrpt   25   9057.637 ±  67.761  ops/s
  * i.p.metrics.benchmarks.CounterBenchmark.simpleclientNoLabelsInc           thrpt   25   8993.471 ±  49.581  ops/s
 * </pre>
- * Prometheus counters are faster than counters of other libraries. For example, incrementing a single counter - * without labels is more than 2 times faster (34752 ops / second) than doing the same with an OpenTelemetry - * counter (16634 ops / sec). + * + * Prometheus counters are faster than counters of other libraries. For example, incrementing a + * single counter without labels is more than 2 times faster (34752 ops / second) than doing the + * same with an OpenTelemetry counter (16634 ops / sec). */ public class CounterBenchmark { - @State(Scope.Benchmark) - public static class PrometheusCounter { - - final Counter noLabels; - final CounterDataPoint dataPoint; - - public PrometheusCounter() { - noLabels = Counter.builder() - .name("test") - .help("help") - .build(); - - Counter labels = Counter.builder() - .name("test") - .help("help") - .labelNames("path", "status") - .build(); - this.dataPoint = labels.labelValues("/", "200"); - } - } + @State(Scope.Benchmark) + public static class PrometheusCounter { - @State(Scope.Benchmark) - public static class SimpleclientCounter { + final Counter noLabels; + final CounterDataPoint dataPoint; - final io.prometheus.client.Counter noLabels; - final io.prometheus.client.Counter.Child dataPoint; + public PrometheusCounter() { + noLabels = Counter.builder().name("test").help("help").build(); - public SimpleclientCounter() { - noLabels = io.prometheus.client.Counter.build() - .name("name") - .help("help") - .create(); + Counter labels = + Counter.builder().name("test").help("help").labelNames("path", "status").build(); + this.dataPoint = labels.labelValues("/", "200"); + } + } - io.prometheus.client.Counter counter = io.prometheus.client.Counter.build() - .name("name") - .help("help") - .labelNames("path", "status") - .create(); + @State(Scope.Benchmark) + public static class SimpleclientCounter { - this.dataPoint = counter.labels("/", "200"); - } - } + final io.prometheus.client.Counter noLabels; + final io.prometheus.client.Counter.Child dataPoint; - @State(Scope.Benchmark) - public static class CodahaleCounterNoLabels { - final com.codahale.metrics.Counter counter = new com.codahale.metrics.MetricRegistry().counter("test"); - } + public SimpleclientCounter() { + noLabels = io.prometheus.client.Counter.build().name("name").help("help").create(); - @State(Scope.Benchmark) - public static class OpenTelemetryCounter { - - final LongCounter longCounter; - final DoubleCounter doubleCounter; - final Attributes attributes; - - public OpenTelemetryCounter() { - - SdkMeterProvider sdkMeterProvider = SdkMeterProvider.builder() - .registerMetricReader(InMemoryMetricReader.create()) - .setResource(Resource.getDefault()) - .build(); - OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() - .setMeterProvider(sdkMeterProvider) - .build(); - Meter meter = openTelemetry - .meterBuilder("instrumentation-library-name") - .setInstrumentationVersion("1.0.0") - .build(); - this.longCounter = meter - .counterBuilder("test1") - .setDescription("test") - .build(); - this.doubleCounter = meter - .counterBuilder("test2") - .ofDoubles() - .setDescription("test") - .build(); - this.attributes = Attributes.of( - AttributeKey.stringKey("path"), "/", - AttributeKey.stringKey("status"), "200"); - } - } + io.prometheus.client.Counter counter = + io.prometheus.client.Counter.build() + .name("name") + .help("help") + .labelNames("path", "status") + .create(); - @Benchmark - @Threads(4) - public CounterDataPoint prometheusAdd(RandomNumbers randomNumbers, PrometheusCounter 
counter) { - for (int i=0; i * Benchmark Mode Cnt Score Error Units * i.p.metrics.benchmarks.HistogramBenchmark.openTelemetryClassic thrpt 25 1908.715 ± 114.050 ops/s @@ -27,164 +27,159 @@ * i.p.metrics.benchmarks.HistogramBenchmark.prometheusNative thrpt 25 3372.789 ± 339.328 ops/s * i.p.metrics.benchmarks.HistogramBenchmark.simpleclient thrpt 25 6488.252 ± 96.737 ops/s * + * * The simpleclient (i.e. client_java version 0.16.0 and older) histograms perform about the same as * the classic histogram of the current 1.0.0 version. - *
- * Compared to OpenTelemetry histograms the Prometheus Java client histograms perform more than 3 times better
- * (OpenTelemetry has 1908 ops / sec for classic histograms, while Prometheus has 6451 ops / sec).
+ *
+ *
Compared to OpenTelemetry histograms the Prometheus Java client histograms perform more than 3 + * times better (OpenTelemetry has 1908 ops / sec for classic histograms, while Prometheus has 6451 + * ops / sec). */ - public class HistogramBenchmark { - @State(Scope.Benchmark) - public static class PrometheusClassicHistogram { + @State(Scope.Benchmark) + public static class PrometheusClassicHistogram { - final Histogram noLabels; + final Histogram noLabels; - public PrometheusClassicHistogram() { - noLabels = Histogram.builder() - .name("test") - .help("help") - .classicOnly() - .build(); - } + public PrometheusClassicHistogram() { + noLabels = Histogram.builder().name("test").help("help").classicOnly().build(); } - - @State(Scope.Benchmark) - public static class PrometheusNativeHistogram { - - final Histogram noLabels; - - public PrometheusNativeHistogram() { - noLabels = Histogram.builder() - .name("test") - .help("help") - .nativeOnly() - .nativeInitialSchema(5) - .nativeMaxNumberOfBuckets(0) - .build(); - } + } + + @State(Scope.Benchmark) + public static class PrometheusNativeHistogram { + + final Histogram noLabels; + + public PrometheusNativeHistogram() { + noLabels = + Histogram.builder() + .name("test") + .help("help") + .nativeOnly() + .nativeInitialSchema(5) + .nativeMaxNumberOfBuckets(0) + .build(); } + } - @State(Scope.Benchmark) - public static class SimpleclientHistogram { + @State(Scope.Benchmark) + public static class SimpleclientHistogram { - final io.prometheus.client.Histogram noLabels; + final io.prometheus.client.Histogram noLabels; - public SimpleclientHistogram() { - noLabels = io.prometheus.client.Histogram.build() - .name("name") - .help("help") - .create(); - } + public SimpleclientHistogram() { + noLabels = io.prometheus.client.Histogram.build().name("name").help("help").create(); } - - @State(Scope.Benchmark) - public static class OpenTelemetryClassicHistogram { - - final io.opentelemetry.api.metrics.DoubleHistogram histogram; - - public OpenTelemetryClassicHistogram() { - - SdkMeterProvider sdkMeterProvider = SdkMeterProvider.builder() - .registerMetricReader(InMemoryMetricReader.create()) - .setResource(Resource.getDefault()) - .registerView(InstrumentSelector.builder() - .setName("test") - .build(), - View.builder() - .setAggregation(Aggregation.explicitBucketHistogram(Arrays.asList(.005, .01, .025, .05, .1, .25, .5, 1.0, 2.5, 5.0, 10.0))) - .build() - ) - .build(); - OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() - .setMeterProvider(sdkMeterProvider) - .build(); - Meter meter = openTelemetry - .meterBuilder("instrumentation-library-name") - .setInstrumentationVersion("1.0.0") - .build(); - this.histogram = meter - .histogramBuilder("test") - .setDescription("test") - .build(); - } + } + + @State(Scope.Benchmark) + public static class OpenTelemetryClassicHistogram { + + final io.opentelemetry.api.metrics.DoubleHistogram histogram; + + public OpenTelemetryClassicHistogram() { + + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .registerMetricReader(InMemoryMetricReader.create()) + .setResource(Resource.getDefault()) + .registerView( + InstrumentSelector.builder().setName("test").build(), + View.builder() + .setAggregation( + Aggregation.explicitBucketHistogram( + Arrays.asList( + .005, .01, .025, .05, .1, .25, .5, 1.0, 2.5, 5.0, 10.0))) + .build()) + .build(); + OpenTelemetry openTelemetry = + OpenTelemetrySdk.builder().setMeterProvider(sdkMeterProvider).build(); + Meter meter = + openTelemetry + 
.meterBuilder("instrumentation-library-name") + .setInstrumentationVersion("1.0.0") + .build(); + this.histogram = meter.histogramBuilder("test").setDescription("test").build(); } - - @State(Scope.Benchmark) - public static class OpenTelemetryExponentialHistogram { - - final io.opentelemetry.api.metrics.DoubleHistogram histogram; - - public OpenTelemetryExponentialHistogram() { - - SdkMeterProvider sdkMeterProvider = SdkMeterProvider.builder() - .registerMetricReader(InMemoryMetricReader.create()) - .setResource(Resource.getDefault()) - .registerView(InstrumentSelector.builder() - .setName("test") - .build(), - View.builder() - .setAggregation(Aggregation.base2ExponentialBucketHistogram(10_000, 5)) - .build() - ) - .build(); - OpenTelemetry openTelemetry = OpenTelemetrySdk.builder() - .setMeterProvider(sdkMeterProvider) - .build(); - Meter meter = openTelemetry - .meterBuilder("instrumentation-library-name") - .setInstrumentationVersion("1.0.0") - .build(); - this.histogram = meter - .histogramBuilder("test") - .setDescription("test") - .build(); - } + } + + @State(Scope.Benchmark) + public static class OpenTelemetryExponentialHistogram { + + final io.opentelemetry.api.metrics.DoubleHistogram histogram; + + public OpenTelemetryExponentialHistogram() { + + SdkMeterProvider sdkMeterProvider = + SdkMeterProvider.builder() + .registerMetricReader(InMemoryMetricReader.create()) + .setResource(Resource.getDefault()) + .registerView( + InstrumentSelector.builder().setName("test").build(), + View.builder() + .setAggregation(Aggregation.base2ExponentialBucketHistogram(10_000, 5)) + .build()) + .build(); + OpenTelemetry openTelemetry = + OpenTelemetrySdk.builder().setMeterProvider(sdkMeterProvider).build(); + Meter meter = + openTelemetry + .meterBuilder("instrumentation-library-name") + .setInstrumentationVersion("1.0.0") + .build(); + this.histogram = meter.histogramBuilder("test").setDescription("test").build(); } - - @Benchmark - @Threads(4) - public Histogram prometheusClassic(RandomNumbers randomNumbers, PrometheusClassicHistogram histogram) { - for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { - histogram.noLabels.observe(randomNumbers.randomNumbers[i]); - } - return histogram.noLabels; + } + + @Benchmark + @Threads(4) + public Histogram prometheusClassic( + RandomNumbers randomNumbers, PrometheusClassicHistogram histogram) { + for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { + histogram.noLabels.observe(randomNumbers.randomNumbers[i]); } - - @Benchmark - @Threads(4) - public Histogram prometheusNative(RandomNumbers randomNumbers, PrometheusNativeHistogram histogram) { - for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { - histogram.noLabels.observe(randomNumbers.randomNumbers[i]); - } - return histogram.noLabels; + return histogram.noLabels; + } + + @Benchmark + @Threads(4) + public Histogram prometheusNative( + RandomNumbers randomNumbers, PrometheusNativeHistogram histogram) { + for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { + histogram.noLabels.observe(randomNumbers.randomNumbers[i]); } - - @Benchmark - @Threads(4) - public io.prometheus.client.Histogram simpleclient(RandomNumbers randomNumbers, SimpleclientHistogram histogram) { - for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { - histogram.noLabels.observe(randomNumbers.randomNumbers[i]); - } - return histogram.noLabels; + return histogram.noLabels; + } + + @Benchmark + @Threads(4) + public io.prometheus.client.Histogram simpleclient( + RandomNumbers randomNumbers, 
SimpleclientHistogram histogram) { + for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { + histogram.noLabels.observe(randomNumbers.randomNumbers[i]); } - - @Benchmark - @Threads(4) - public io.opentelemetry.api.metrics.DoubleHistogram openTelemetryClassic(RandomNumbers randomNumbers, OpenTelemetryClassicHistogram histogram) { - for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { - histogram.histogram.record(randomNumbers.randomNumbers[i]); - } - return histogram.histogram; + return histogram.noLabels; + } + + @Benchmark + @Threads(4) + public io.opentelemetry.api.metrics.DoubleHistogram openTelemetryClassic( + RandomNumbers randomNumbers, OpenTelemetryClassicHistogram histogram) { + for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { + histogram.histogram.record(randomNumbers.randomNumbers[i]); } - - @Benchmark - @Threads(4) - public io.opentelemetry.api.metrics.DoubleHistogram openTelemetryExponential(RandomNumbers randomNumbers, OpenTelemetryExponentialHistogram histogram) { - for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { - histogram.histogram.record(randomNumbers.randomNumbers[i]); - } - return histogram.histogram; + return histogram.histogram; + } + + @Benchmark + @Threads(4) + public io.opentelemetry.api.metrics.DoubleHistogram openTelemetryExponential( + RandomNumbers randomNumbers, OpenTelemetryExponentialHistogram histogram) { + for (int i = 0; i < randomNumbers.randomNumbers.length; i++) { + histogram.histogram.record(randomNumbers.randomNumbers[i]); } + return histogram.histogram; + } } diff --git a/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/RandomNumbers.java b/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/RandomNumbers.java index d7002d909..6778c4ea1 100644 --- a/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/RandomNumbers.java +++ b/benchmarks/src/main/java/io/prometheus/metrics/benchmarks/RandomNumbers.java @@ -1,19 +1,18 @@ package io.prometheus.metrics.benchmarks; +import java.util.Random; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; -import java.util.Random; - @State(Scope.Thread) public class RandomNumbers { - final double[] randomNumbers = new double[10*1024]; + final double[] randomNumbers = new double[10 * 1024]; - public RandomNumbers() { - Random rand = new Random(0); - for (int i = 0; i < randomNumbers.length; i++) { - randomNumbers[i] = Math.abs(rand.nextGaussian()); - } + public RandomNumbers() { + Random rand = new Random(0); + for (int i = 0; i < randomNumbers.length; i++) { + randomNumbers[i] = Math.abs(rand.nextGaussian()); } + } } diff --git a/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/GreetingServlet.java b/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/GreetingServlet.java index 7a0870bf3..af1652f76 100644 --- a/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/GreetingServlet.java +++ b/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/GreetingServlet.java @@ -1,47 +1,45 @@ package io.prometheus.metrics.examples.otel_exemplars.greeting; +import static io.prometheus.metrics.model.snapshots.Unit.nanosToSeconds; + import io.prometheus.metrics.core.metrics.Histogram; import 
io.prometheus.metrics.model.snapshots.Unit; import jakarta.servlet.http.HttpServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; - import java.io.IOException; import java.util.Random; -import static io.prometheus.metrics.model.snapshots.Unit.nanosToSeconds; - -/** - * Hello World REST servlet, with an example counter and an example histogram. - */ +/** Hello World REST servlet, with an example counter and an example histogram. */ public class GreetingServlet extends HttpServlet { - private final Random random = new Random(0); + private final Random random = new Random(0); - private final Histogram histogram; + private final Histogram histogram; - public GreetingServlet() { - histogram = Histogram.builder() + public GreetingServlet() { + histogram = + Histogram.builder() .name("request_duration_seconds") .help("request duration in seconds") .unit(Unit.SECONDS) .labelNames("http_status") .register(); - histogram.initLabelValues("200"); - } - - @Override - protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { - long start = System.nanoTime(); - try { - Thread.sleep((long) (Math.abs((random.nextGaussian() + 1.0) * 100.0))); - resp.setStatus(200); - resp.setContentType("text/plain"); - resp.getWriter().println("Hello, World!"); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } finally { - histogram.labelValues("200").observe(nanosToSeconds(System.nanoTime() - start)); - } + histogram.initLabelValues("200"); + } + + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { + long start = System.nanoTime(); + try { + Thread.sleep((long) (Math.abs((random.nextGaussian() + 1.0) * 100.0))); + resp.setStatus(200); + resp.setContentType("text/plain"); + resp.getWriter().println("Hello, World!"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } finally { + histogram.labelValues("200").observe(nanosToSeconds(System.nanoTime() - start)); } + } } diff --git a/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/Main.java b/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/Main.java index 731e05cf9..c0cf09830 100644 --- a/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/Main.java +++ b/examples/example-exemplars-tail-sampling/example-greeting-service/src/main/java/io/prometheus/metrics/examples/otel_exemplars/greeting/Main.java @@ -2,34 +2,31 @@ import io.prometheus.metrics.exporter.servlet.jakarta.PrometheusMetricsServlet; import io.prometheus.metrics.instrumentation.jvm.JvmMetrics; +import java.io.File; import org.apache.catalina.Context; import org.apache.catalina.LifecycleException; import org.apache.catalina.startup.Tomcat; -import java.io.File; - -/** - * Simple example using embedded Tomcat and the {@link PrometheusMetricsServlet}. - */ +/** Simple example using embedded Tomcat and the {@link PrometheusMetricsServlet}. 
*/ public class Main { - public static void main(String[] args) throws LifecycleException { + public static void main(String[] args) throws LifecycleException { - JvmMetrics.builder().register(); + JvmMetrics.builder().register(); - Tomcat tomcat = new Tomcat(); - tomcat.setPort(8081); + Tomcat tomcat = new Tomcat(); + tomcat.setPort(8081); - Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); + Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); - Tomcat.addServlet(ctx, "hello", new GreetingServlet()); - ctx.addServletMappingDecoded("/*", "hello"); + Tomcat.addServlet(ctx, "hello", new GreetingServlet()); + ctx.addServletMappingDecoded("/*", "hello"); - Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); - ctx.addServletMappingDecoded("/metrics", "metrics"); + Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); + ctx.addServletMappingDecoded("/metrics", "metrics"); - tomcat.getConnector(); - tomcat.start(); - tomcat.getServer().await(); - } + tomcat.getConnector(); + tomcat.start(); + tomcat.getServer().await(); + } } diff --git a/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/HelloWorldServlet.java b/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/HelloWorldServlet.java index 11c048d68..fbe293d50 100644 --- a/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/HelloWorldServlet.java +++ b/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/HelloWorldServlet.java @@ -1,12 +1,14 @@ package io.prometheus.metrics.examples.otel_exemplars.app; +import static io.prometheus.metrics.model.snapshots.Unit.nanosToSeconds; +import static java.net.http.HttpResponse.BodyHandlers.ofString; + import io.prometheus.metrics.core.metrics.Histogram; import io.prometheus.metrics.model.snapshots.Unit; import jakarta.servlet.ServletException; import jakarta.servlet.http.HttpServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; - import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; @@ -15,51 +17,46 @@ import java.net.http.HttpResponse; import java.util.Random; -import static io.prometheus.metrics.model.snapshots.Unit.nanosToSeconds; -import static java.net.http.HttpResponse.BodyHandlers.ofString; - -/** - * Hello World REST servlet, with an example counter and an example histogram. - */ +/** Hello World REST servlet, with an example counter and an example histogram. 
*/ public class HelloWorldServlet extends HttpServlet { - private final Random random = new Random(0); - - private final Histogram histogram; - - public HelloWorldServlet() { - histogram = Histogram.builder() - .name("request_duration_seconds") - .help("request duration in seconds") - .unit(Unit.SECONDS) - .labelNames("http_status") - .register(); - histogram.initLabelValues("200"); - } - - @Override - protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException { - long start = System.nanoTime(); - try { - Thread.sleep((long) (Math.abs((random.nextGaussian() + 1.0) * 100.0))); - String greeting = executeGreetingServiceRequest(); - resp.setStatus(200); - resp.setContentType("text/plain"); - resp.getWriter().print(greeting); - } catch (Exception e) { - throw new ServletException(e); - } finally { - histogram.labelValues("200").observe(nanosToSeconds(System.nanoTime() - start)); - } - } - - private String executeGreetingServiceRequest() throws URISyntaxException, IOException, InterruptedException { - HttpRequest request = HttpRequest.newBuilder() - .GET() - .uri(new URI("http://localhost:8081/")) - .build(); - HttpClient httpClient = HttpClient.newHttpClient(); - HttpResponse response = httpClient.send(request, ofString()); - return response.body(); + private final Random random = new Random(0); + + private final Histogram histogram; + + public HelloWorldServlet() { + histogram = + Histogram.builder() + .name("request_duration_seconds") + .help("request duration in seconds") + .unit(Unit.SECONDS) + .labelNames("http_status") + .register(); + histogram.initLabelValues("200"); + } + + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException { + long start = System.nanoTime(); + try { + Thread.sleep((long) (Math.abs((random.nextGaussian() + 1.0) * 100.0))); + String greeting = executeGreetingServiceRequest(); + resp.setStatus(200); + resp.setContentType("text/plain"); + resp.getWriter().print(greeting); + } catch (Exception e) { + throw new ServletException(e); + } finally { + histogram.labelValues("200").observe(nanosToSeconds(System.nanoTime() - start)); } + } + + private String executeGreetingServiceRequest() + throws URISyntaxException, IOException, InterruptedException { + HttpRequest request = + HttpRequest.newBuilder().GET().uri(new URI("http://localhost:8081/")).build(); + HttpClient httpClient = HttpClient.newHttpClient(); + HttpResponse response = httpClient.send(request, ofString()); + return response.body(); + } } diff --git a/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/Main.java b/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/Main.java index 5fb5114bb..dc58256cb 100644 --- a/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/Main.java +++ b/examples/example-exemplars-tail-sampling/example-hello-world-app/src/main/java/io/prometheus/metrics/examples/otel_exemplars/app/Main.java @@ -2,34 +2,31 @@ import io.prometheus.metrics.exporter.servlet.jakarta.PrometheusMetricsServlet; import io.prometheus.metrics.instrumentation.jvm.JvmMetrics; +import java.io.File; import org.apache.catalina.Context; import org.apache.catalina.LifecycleException; import org.apache.catalina.startup.Tomcat; -import java.io.File; - -/** - * Simple example using embedded Tomcat and the 
{@link PrometheusMetricsServlet}. - */ +/** Simple example using embedded Tomcat and the {@link PrometheusMetricsServlet}. */ public class Main { - public static void main(String[] args) throws LifecycleException { + public static void main(String[] args) throws LifecycleException { - JvmMetrics.builder().register(); + JvmMetrics.builder().register(); - Tomcat tomcat = new Tomcat(); - tomcat.setPort(8080); + Tomcat tomcat = new Tomcat(); + tomcat.setPort(8080); - Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); + Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); - Tomcat.addServlet(ctx, "hello", new HelloWorldServlet()); - ctx.addServletMappingDecoded("/*", "hello"); + Tomcat.addServlet(ctx, "hello", new HelloWorldServlet()); + ctx.addServletMappingDecoded("/*", "hello"); - Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); - ctx.addServletMappingDecoded("/metrics", "metrics"); + Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); + ctx.addServletMappingDecoded("/metrics", "metrics"); - tomcat.getConnector(); - tomcat.start(); - tomcat.getServer().await(); - } + tomcat.getConnector(); + tomcat.start(); + tomcat.getServer().await(); + } } diff --git a/examples/example-exporter-httpserver/src/main/java/io/prometheus/metrics/examples/httpserver/Main.java b/examples/example-exporter-httpserver/src/main/java/io/prometheus/metrics/examples/httpserver/Main.java index f01874ef8..3b0976778 100644 --- a/examples/example-exporter-httpserver/src/main/java/io/prometheus/metrics/examples/httpserver/Main.java +++ b/examples/example-exporter-httpserver/src/main/java/io/prometheus/metrics/examples/httpserver/Main.java @@ -4,39 +4,36 @@ import io.prometheus.metrics.exporter.httpserver.HTTPServer; import io.prometheus.metrics.instrumentation.jvm.JvmMetrics; import io.prometheus.metrics.model.snapshots.Unit; - import java.io.IOException; -/** - * Simple example of an application exposing metrics via Prometheus' built-in HTTPServer. - */ +/** Simple example of an application exposing metrics via Prometheus' built-in HTTPServer. */ public class Main { - public static void main(String[] args) throws IOException, InterruptedException { + public static void main(String[] args) throws IOException, InterruptedException { - JvmMetrics.builder().register(); + JvmMetrics.builder().register(); - // Note: uptime_seconds_total is not a great example: - // The built-in JvmMetrics have an out-of-the-box metric named process_start_time_seconds - // with the start timestamp in seconds, so if you want to know the uptime you can simply - // run the Prometheus query - // time() - process_start_time_seconds - // rather than creating a custom uptime metric. - Counter counter = Counter.builder() - .name("uptime_seconds_total") - .help("total number of seconds since this application was started") - .unit(Unit.SECONDS) - .register(); + // Note: uptime_seconds_total is not a great example: + // The built-in JvmMetrics have an out-of-the-box metric named process_start_time_seconds + // with the start timestamp in seconds, so if you want to know the uptime you can simply + // run the Prometheus query + // time() - process_start_time_seconds + // rather than creating a custom uptime metric. 
+ Counter counter = + Counter.builder() + .name("uptime_seconds_total") + .help("total number of seconds since this application was started") + .unit(Unit.SECONDS) + .register(); - HTTPServer server = HTTPServer.builder() - .port(9400) - .buildAndStart(); + HTTPServer server = HTTPServer.builder().port(9400).buildAndStart(); - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + System.out.println( + "HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - while (true) { - Thread.sleep(1000); - counter.inc(); - } + while (true) { + Thread.sleep(1000); + counter.inc(); } + } } diff --git a/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/Main.java b/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/Main.java index da36346b9..0fac9a0d3 100644 --- a/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/Main.java +++ b/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/Main.java @@ -1,23 +1,19 @@ package io.prometheus.metrics.examples.multitarget; -import java.io.IOException; - import io.prometheus.metrics.exporter.httpserver.HTTPServer; import io.prometheus.metrics.model.registry.PrometheusRegistry; +import java.io.IOException; -/** - * Simple example of an application exposing metrics via Prometheus' built-in HTTPServer. - */ +/** Simple example of an application exposing metrics via Prometheus' built-in HTTPServer. */ public class Main { - public static void main(String[] args) throws IOException, InterruptedException { + public static void main(String[] args) throws IOException, InterruptedException { - SampleMultiCollector xmc = new SampleMultiCollector(); - PrometheusRegistry.defaultRegistry.register(xmc); - HTTPServer server = HTTPServer.builder() - .port(9401) - .buildAndStart(); + SampleMultiCollector xmc = new SampleMultiCollector(); + PrometheusRegistry.defaultRegistry.register(xmc); + HTTPServer server = HTTPServer.builder().port(9401).buildAndStart(); - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - } + System.out.println( + "HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + } } diff --git a/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/SampleMultiCollector.java b/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/SampleMultiCollector.java index 819bb3028..7a05f0a8b 100644 --- a/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/SampleMultiCollector.java +++ b/examples/example-exporter-multi-target/src/main/java/io/prometheus/metrics/examples/multitarget/SampleMultiCollector.java @@ -1,9 +1,5 @@ package io.prometheus.metrics.examples.multitarget; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - import io.prometheus.metrics.model.registry.MultiCollector; import io.prometheus.metrics.model.registry.PrometheusScrapeRequest; import io.prometheus.metrics.model.snapshots.CounterSnapshot; @@ -13,76 +9,79 @@ import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.MetricSnapshots; import io.prometheus.metrics.model.snapshots.PrometheusNaming; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; 
public class SampleMultiCollector implements MultiCollector { - public SampleMultiCollector() { - super(); - } - - @Override - public MetricSnapshots collect() { - return new MetricSnapshots(); - } + public SampleMultiCollector() { + super(); + } - @Override - public MetricSnapshots collect(PrometheusScrapeRequest scrapeRequest) { - return collectMetricSnapshots(scrapeRequest); - } + @Override + public MetricSnapshots collect() { + return new MetricSnapshots(); + } - protected MetricSnapshots collectMetricSnapshots(PrometheusScrapeRequest scrapeRequest) { + @Override + public MetricSnapshots collect(PrometheusScrapeRequest scrapeRequest) { + return collectMetricSnapshots(scrapeRequest); + } - GaugeSnapshot.Builder gaugeBuilder = GaugeSnapshot.builder(); - gaugeBuilder.name("x_load").help("process load"); + protected MetricSnapshots collectMetricSnapshots(PrometheusScrapeRequest scrapeRequest) { - CounterSnapshot.Builder counterBuilder = CounterSnapshot.builder(); - counterBuilder.name(PrometheusNaming.sanitizeMetricName("x_calls_total")).help("invocations"); + GaugeSnapshot.Builder gaugeBuilder = GaugeSnapshot.builder(); + gaugeBuilder.name("x_load").help("process load"); - String[] targetNames = scrapeRequest.getParameterValues("target"); - String targetName; - String[] procs = scrapeRequest.getParameterValues("proc"); - if (targetNames == null || targetNames.length == 0) { - targetName = "defaultTarget"; - procs = null; //ignore procs param - } else { - targetName = targetNames[0]; - } - Builder counterDataPointBuilder = CounterSnapshot.CounterDataPointSnapshot.builder(); - io.prometheus.metrics.model.snapshots.GaugeSnapshot.GaugeDataPointSnapshot.Builder gaugeDataPointBuilder = GaugeSnapshot.GaugeDataPointSnapshot.builder(); - Labels lbls = Labels.of("target", targetName); + CounterSnapshot.Builder counterBuilder = CounterSnapshot.builder(); + counterBuilder.name(PrometheusNaming.sanitizeMetricName("x_calls_total")).help("invocations"); - if (procs == null || procs.length == 0) { - counterDataPointBuilder.labels(lbls.merge(Labels.of("proc", "defaultProc"))); - gaugeDataPointBuilder.labels(lbls.merge(Labels.of("proc", "defaultProc"))); - counterDataPointBuilder.value(70); - gaugeDataPointBuilder.value(Math.random()); + String[] targetNames = scrapeRequest.getParameterValues("target"); + String targetName; + String[] procs = scrapeRequest.getParameterValues("proc"); + if (targetNames == null || targetNames.length == 0) { + targetName = "defaultTarget"; + procs = null; // ignore procs param + } else { + targetName = targetNames[0]; + } + Builder counterDataPointBuilder = CounterSnapshot.CounterDataPointSnapshot.builder(); + io.prometheus.metrics.model.snapshots.GaugeSnapshot.GaugeDataPointSnapshot.Builder + gaugeDataPointBuilder = GaugeSnapshot.GaugeDataPointSnapshot.builder(); + Labels lbls = Labels.of("target", targetName); - counterBuilder.dataPoint(counterDataPointBuilder.build()); - gaugeBuilder.dataPoint(gaugeDataPointBuilder.build()); + if (procs == null || procs.length == 0) { + counterDataPointBuilder.labels(lbls.merge(Labels.of("proc", "defaultProc"))); + gaugeDataPointBuilder.labels(lbls.merge(Labels.of("proc", "defaultProc"))); + counterDataPointBuilder.value(70); + gaugeDataPointBuilder.value(Math.random()); - } else { - for (int i = 0; i < procs.length; i++) { - counterDataPointBuilder.labels(lbls.merge(Labels.of("proc", procs[i]))); - gaugeDataPointBuilder.labels(lbls.merge(Labels.of("proc", procs[i]))); - counterDataPointBuilder.value(Math.random()); - 
gaugeDataPointBuilder.value(Math.random()); + counterBuilder.dataPoint(counterDataPointBuilder.build()); + gaugeBuilder.dataPoint(gaugeDataPointBuilder.build()); - counterBuilder.dataPoint(counterDataPointBuilder.build()); - gaugeBuilder.dataPoint(gaugeDataPointBuilder.build()); - } - } - Collection snaps = new ArrayList(); - snaps.add(counterBuilder.build()); - snaps.add(gaugeBuilder.build()); - MetricSnapshots msnaps = new MetricSnapshots(snaps); - return msnaps; - } + } else { + for (int i = 0; i < procs.length; i++) { + counterDataPointBuilder.labels(lbls.merge(Labels.of("proc", procs[i]))); + gaugeDataPointBuilder.labels(lbls.merge(Labels.of("proc", procs[i]))); + counterDataPointBuilder.value(Math.random()); + gaugeDataPointBuilder.value(Math.random()); - public List getPrometheusNames() { - List names = new ArrayList(); - names.add("x_calls_total"); - names.add("x_load"); - return names; - } + counterBuilder.dataPoint(counterDataPointBuilder.build()); + gaugeBuilder.dataPoint(gaugeDataPointBuilder.build()); + } + } + Collection snaps = new ArrayList(); + snaps.add(counterBuilder.build()); + snaps.add(gaugeBuilder.build()); + MetricSnapshots msnaps = new MetricSnapshots(snaps); + return msnaps; + } + public List getPrometheusNames() { + List names = new ArrayList(); + names.add("x_calls_total"); + names.add("x_load"); + return names; + } } diff --git a/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/Main.java b/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/Main.java index 966b16d92..defe85074 100644 --- a/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/Main.java +++ b/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/Main.java @@ -5,37 +5,36 @@ import io.prometheus.metrics.instrumentation.jvm.JvmMetrics; import io.prometheus.metrics.model.snapshots.Unit; -/** - * Simple example of an application exposing metrics pushing metrics via OTLP. - */ +/** Simple example of an application exposing metrics pushing metrics via OTLP. */ public class Main { - public static void main(String[] args) throws Exception { + public static void main(String[] args) throws Exception { - // Note: Some JVM metrics are also defined as OpenTelemetry's semantic conventions. - // We have plans to implement a configuration option for JvmMetrics to use OpenTelemetry - // naming conventions rather than the Prometheus names. - JvmMetrics.builder().register(); + // Note: Some JVM metrics are also defined as OpenTelemetry's semantic conventions. + // We have plans to implement a configuration option for JvmMetrics to use OpenTelemetry + // naming conventions rather than the Prometheus names. + JvmMetrics.builder().register(); - // Note: uptime_seconds_total is not a great example: - // The built-in JvmMetrics have an out-of-the-box metric named process_start_time_seconds - // with the start timestamp in seconds, so if you want to know the uptime you can simply - // run the Prometheus query - // time() - process_start_time_seconds - // rather than creating a custom uptime metric. 
- Counter counter = Counter.builder() - .name("uptime_seconds_total") - .help("total number of seconds since this application was started") - .unit(Unit.SECONDS) - .register(); + // Note: uptime_seconds_total is not a great example: + // The built-in JvmMetrics have an out-of-the-box metric named process_start_time_seconds + // with the start timestamp in seconds, so if you want to know the uptime you can simply + // run the Prometheus query + // time() - process_start_time_seconds + // rather than creating a custom uptime metric. + Counter counter = + Counter.builder() + .name("uptime_seconds_total") + .help("total number of seconds since this application was started") + .unit(Unit.SECONDS) + .register(); - OpenTelemetryExporter.builder() - .intervalSeconds(5) // ridiculously short interval for demo purposes - .buildAndStart(); + OpenTelemetryExporter.builder() + .intervalSeconds(5) // ridiculously short interval for demo purposes + .buildAndStart(); - while (true) { - Thread.sleep(1000); - counter.inc(); - } + while (true) { + Thread.sleep(1000); + counter.inc(); } + } } diff --git a/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/ManualCompleteMetricsTest.java b/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/ManualCompleteMetricsTest.java index 72e8dd107..b8b8ef98b 100644 --- a/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/ManualCompleteMetricsTest.java +++ b/examples/example-exporter-opentelemetry/src/main/java/io/prometheus/metrics/examples/opentelemetry/ManualCompleteMetricsTest.java @@ -19,127 +19,128 @@ public class ManualCompleteMetricsTest { - // This contains a complete set of all metric types, and target_info and otel_scope_info. - // I used this to expose in Prometheus format and OTLP format at the same time and compare the results. - // I'm keeping this as a backup for now, but this should be converted to an integration test. - // - // To run it, add prometheus-metrics-exporter-httpserver as a dependency and configure Prometheus - // to scrape from port 9400 in addition to receiving metrics via remote write. - - /* - public static void main(String[] args) throws Exception { - - Counter counter = Counter.newBuilder() - .withName("uptime_seconds_total") - .withHelp("total number of seconds since this application was started") - .withUnit(Unit.SECONDS) - .register(); - - Gauge gauge = Gauge.newBuilder() - .withName("temperature_celsius") - .withHelp("temperature in celsius") - .withUnit(Unit.CELSIUS) - .withLabelNames("location") - .register(); - - gauge.labelValues("inside").set(23.4); - gauge.labelValues("outside").set(9.3); - - // By default, the histogram will be exported as an exponential histogram in OpenTelemetry. 
- Histogram histogram = Histogram.newBuilder() - .withName("request_latency_seconds") - .withHelp("Request duration in seconds") - .withUnit(Unit.SECONDS) - .withLabelNames("http_status") - .register(); - - Random random = new Random(0); - for (int i = 0; i < 1000; i++) { - histogram.labelValues("200").observe(random.nextGaussian()); - } - - // Explicitly use a classic-only histogram to have an example of a classic histogram in OpenTelemetry - Histogram classicHistogram = Histogram.newBuilder() - .withName("request_size_bytes") - .withHelp("Request size in Bytes") - .withUnit(Unit.BYTES) - .withLabelNames("path") - .classicOnly() - .withClassicBuckets(128, 256, 512, 1024, 2048) - .register(); - - for (int i = 0; i < 15; i++) { - classicHistogram.labelValues("200").observe(random.nextInt(3000)); - } - - Summary summary = Summary.newBuilder() - .withName("response_latency_seconds") - .withHelp("Response latency seconds") - .withUnit(Unit.BYTES) - .withQuantile(0.95) - .withQuantile(0.99) - .register(); - - for (int i = 0; i < 1000; i++) { - summary.observe(random.nextGaussian()); - } - - Info targetInfo = Info.newBuilder() - .withName("target_info") - .withHelp("OTel resource") - .withLabelNames("service.version") - .register(); - targetInfo.setLabelValues("1.0.0"); - - Info scopeInfo = Info.newBuilder() - .withName("otel_scope_info") - .withLabelNames("otel.scope.name", "otel.scope.version", "library_mascot") - .register(); - - scopeInfo.setLabelValues("my.instrumentation.lib", "100.3", "bear"); - - Info info = Info.newBuilder() - .withName("java_runtime_info") - .withHelp("Java runtime info") - .withLabelNames("version", "vendor", "runtime") - .register(); - - String version = System.getProperty("java.runtime.version", "unknown"); - String vendor = System.getProperty("java.vm.vendor", "unknown"); - String runtime = System.getProperty("java.runtime.name", "unknown"); - - info.setLabelValues(version, vendor, runtime); - - StateSet stateSet = StateSet.newBuilder() - .withName("feature_flags") - .withLabelNames("env") - .withStates("feature1", "feature2") - .register(); - - stateSet.labelValues("dev").setFalse("feature1"); - stateSet.labelValues("dev").setTrue("feature2"); - - PrometheusRegistry.defaultRegistry.register(() -> UnknownSnapshot.newBuilder() - .withName("my_unknown_metric") - .addDataPoint(UnknownSnapshot.UnknownDataPointSnapshot.newBuilder() - .withLabels(Labels.of("a", "1", "b", "2")) - .withValue(3.0) - .build()) - .build()); - - HTTPServer server = HTTPServer.newBuilder() - .withPort(9400) - .buildAndStart(); - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - - OpenTelemetryExporter.newBuilder() - .withIntervalSeconds(5) - .buildAndStart(); - - while (true) { - Thread.sleep(1000); - counter.inc(); - } - } - */ + // This contains a complete set of all metric types, and target_info and otel_scope_info. + // I used this to expose in Prometheus format and OTLP format at the same time and compare the + // results. + // I'm keeping this as a backup for now, but this should be converted to an integration test. + // + // To run it, add prometheus-metrics-exporter-httpserver as a dependency and configure Prometheus + // to scrape from port 9400 in addition to receiving metrics via remote write. 
+ + /* + public static void main(String[] args) throws Exception { + + Counter counter = Counter.newBuilder() + .withName("uptime_seconds_total") + .withHelp("total number of seconds since this application was started") + .withUnit(Unit.SECONDS) + .register(); + + Gauge gauge = Gauge.newBuilder() + .withName("temperature_celsius") + .withHelp("temperature in celsius") + .withUnit(Unit.CELSIUS) + .withLabelNames("location") + .register(); + + gauge.labelValues("inside").set(23.4); + gauge.labelValues("outside").set(9.3); + + // By default, the histogram will be exported as an exponential histogram in OpenTelemetry. + Histogram histogram = Histogram.newBuilder() + .withName("request_latency_seconds") + .withHelp("Request duration in seconds") + .withUnit(Unit.SECONDS) + .withLabelNames("http_status") + .register(); + + Random random = new Random(0); + for (int i = 0; i < 1000; i++) { + histogram.labelValues("200").observe(random.nextGaussian()); + } + + // Explicitly use a classic-only histogram to have an example of a classic histogram in OpenTelemetry + Histogram classicHistogram = Histogram.newBuilder() + .withName("request_size_bytes") + .withHelp("Request size in Bytes") + .withUnit(Unit.BYTES) + .withLabelNames("path") + .classicOnly() + .withClassicBuckets(128, 256, 512, 1024, 2048) + .register(); + + for (int i = 0; i < 15; i++) { + classicHistogram.labelValues("200").observe(random.nextInt(3000)); + } + + Summary summary = Summary.newBuilder() + .withName("response_latency_seconds") + .withHelp("Response latency seconds") + .withUnit(Unit.BYTES) + .withQuantile(0.95) + .withQuantile(0.99) + .register(); + + for (int i = 0; i < 1000; i++) { + summary.observe(random.nextGaussian()); + } + + Info targetInfo = Info.newBuilder() + .withName("target_info") + .withHelp("OTel resource") + .withLabelNames("service.version") + .register(); + targetInfo.setLabelValues("1.0.0"); + + Info scopeInfo = Info.newBuilder() + .withName("otel_scope_info") + .withLabelNames("otel.scope.name", "otel.scope.version", "library_mascot") + .register(); + + scopeInfo.setLabelValues("my.instrumentation.lib", "100.3", "bear"); + + Info info = Info.newBuilder() + .withName("java_runtime_info") + .withHelp("Java runtime info") + .withLabelNames("version", "vendor", "runtime") + .register(); + + String version = System.getProperty("java.runtime.version", "unknown"); + String vendor = System.getProperty("java.vm.vendor", "unknown"); + String runtime = System.getProperty("java.runtime.name", "unknown"); + + info.setLabelValues(version, vendor, runtime); + + StateSet stateSet = StateSet.newBuilder() + .withName("feature_flags") + .withLabelNames("env") + .withStates("feature1", "feature2") + .register(); + + stateSet.labelValues("dev").setFalse("feature1"); + stateSet.labelValues("dev").setTrue("feature2"); + + PrometheusRegistry.defaultRegistry.register(() -> UnknownSnapshot.newBuilder() + .withName("my_unknown_metric") + .addDataPoint(UnknownSnapshot.UnknownDataPointSnapshot.newBuilder() + .withLabels(Labels.of("a", "1", "b", "2")) + .withValue(3.0) + .build()) + .build()); + + HTTPServer server = HTTPServer.newBuilder() + .withPort(9400) + .buildAndStart(); + System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + + OpenTelemetryExporter.newBuilder() + .withIntervalSeconds(5) + .buildAndStart(); + + while (true) { + Thread.sleep(1000); + counter.inc(); + } + } + */ } diff --git 
a/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/HelloWorldServlet.java b/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/HelloWorldServlet.java index d3d8f37e5..eb2fa4f19 100644 --- a/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/HelloWorldServlet.java +++ b/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/HelloWorldServlet.java @@ -1,57 +1,56 @@ package io.prometheus.metrics.examples.tomcat_servlet; +import static io.prometheus.metrics.model.snapshots.Unit.nanosToSeconds; + import io.prometheus.metrics.core.metrics.Counter; import io.prometheus.metrics.core.metrics.Histogram; import io.prometheus.metrics.model.snapshots.Unit; import jakarta.servlet.http.HttpServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; - import java.io.IOException; import java.util.Random; -import static io.prometheus.metrics.model.snapshots.Unit.nanosToSeconds; - -/** - * Hello World REST servlet, with an example counter and an example histogram. - */ +/** Hello World REST servlet, with an example counter and an example histogram. */ public class HelloWorldServlet extends HttpServlet { - private final Random random = new Random(0); - - // Note: The requests_total counter is not a great example, because the - // request_duration_seconds histogram below also has a count with the number of requests. - private final Counter counter = Counter.builder() - .name("requests_total") - .help("total number of requests") - .labelNames("http_status") - .register(); - - private final Histogram histogram = Histogram.builder() - .name("request_duration_seconds") - .help("request duration in seconds") - .unit(Unit.SECONDS) - .labelNames("http_status") - .register(); - - public HelloWorldServlet() { - counter.initLabelValues("200"); - histogram.initLabelValues("200"); - } - - @Override - protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { - long start = System.nanoTime(); - try { - Thread.sleep((long) (Math.abs((random.nextGaussian() + 1.0) * 100.0))); - resp.setStatus(200); - resp.setContentType("text/plain"); - resp.getWriter().println("Hello, World!"); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } finally { - counter.labelValues("200").inc(); - histogram.labelValues("200").observe(nanosToSeconds(System.nanoTime() - start)); - } + private final Random random = new Random(0); + + // Note: The requests_total counter is not a great example, because the + // request_duration_seconds histogram below also has a count with the number of requests. 
+ private final Counter counter = + Counter.builder() + .name("requests_total") + .help("total number of requests") + .labelNames("http_status") + .register(); + + private final Histogram histogram = + Histogram.builder() + .name("request_duration_seconds") + .help("request duration in seconds") + .unit(Unit.SECONDS) + .labelNames("http_status") + .register(); + + public HelloWorldServlet() { + counter.initLabelValues("200"); + histogram.initLabelValues("200"); + } + + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { + long start = System.nanoTime(); + try { + Thread.sleep((long) (Math.abs((random.nextGaussian() + 1.0) * 100.0))); + resp.setStatus(200); + resp.setContentType("text/plain"); + resp.getWriter().println("Hello, World!"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } finally { + counter.labelValues("200").inc(); + histogram.labelValues("200").observe(nanosToSeconds(System.nanoTime() - start)); } + } } diff --git a/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/Main.java b/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/Main.java index 4ce6353db..81bc2ac19 100644 --- a/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/Main.java +++ b/examples/example-exporter-servlet-tomcat/src/main/java/io/prometheus/metrics/examples/tomcat_servlet/Main.java @@ -2,38 +2,35 @@ import io.prometheus.metrics.exporter.servlet.jakarta.PrometheusMetricsServlet; import io.prometheus.metrics.instrumentation.jvm.JvmMetrics; -import org.apache.catalina.Context; -import org.apache.catalina.LifecycleException; -import org.apache.catalina.startup.Tomcat; - import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import org.apache.catalina.Context; +import org.apache.catalina.LifecycleException; +import org.apache.catalina.startup.Tomcat; -/** - * Simple example using embedded Tomcat and the {@link PrometheusMetricsServlet}. - */ +/** Simple example using embedded Tomcat and the {@link PrometheusMetricsServlet}. 
*/ public class Main { - public static void main(String[] args) throws LifecycleException, IOException { + public static void main(String[] args) throws LifecycleException, IOException { - JvmMetrics.builder().register(); + JvmMetrics.builder().register(); - Tomcat tomcat = new Tomcat(); - Path tmpDir = Files.createTempDirectory("prometheus-tomcat-servlet-example-"); - tomcat.setBaseDir(tmpDir.toFile().getAbsolutePath()); + Tomcat tomcat = new Tomcat(); + Path tmpDir = Files.createTempDirectory("prometheus-tomcat-servlet-example-"); + tomcat.setBaseDir(tmpDir.toFile().getAbsolutePath()); - Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); + Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); - Tomcat.addServlet(ctx, "hello", new HelloWorldServlet()); - ctx.addServletMappingDecoded("/*", "hello"); + Tomcat.addServlet(ctx, "hello", new HelloWorldServlet()); + ctx.addServletMappingDecoded("/*", "hello"); - Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); - ctx.addServletMappingDecoded("/metrics", "metrics"); + Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); + ctx.addServletMappingDecoded("/metrics", "metrics"); - tomcat.getConnector(); - tomcat.start(); - tomcat.getServer().await(); - } + tomcat.getConnector(); + tomcat.start(); + tomcat.getServer().await(); + } } diff --git a/examples/example-native-histogram/src/main/java/io/prometheus/metrics/examples/nativehistogram/Main.java b/examples/example-native-histogram/src/main/java/io/prometheus/metrics/examples/nativehistogram/Main.java index 591216fb7..b23fd054d 100644 --- a/examples/example-native-histogram/src/main/java/io/prometheus/metrics/examples/nativehistogram/Main.java +++ b/examples/example-native-histogram/src/main/java/io/prometheus/metrics/examples/nativehistogram/Main.java @@ -4,36 +4,35 @@ import io.prometheus.metrics.exporter.httpserver.HTTPServer; import io.prometheus.metrics.instrumentation.jvm.JvmMetrics; import io.prometheus.metrics.model.snapshots.Unit; - import java.io.IOException; import java.util.Random; public class Main { - public static void main(String[] args) throws IOException, InterruptedException { + public static void main(String[] args) throws IOException, InterruptedException { - JvmMetrics.builder().register(); + JvmMetrics.builder().register(); - Histogram histogram = Histogram.builder() - .name("request_latency_seconds") - .help("request latency in seconds") - .unit(Unit.SECONDS) - .labelNames("path", "status") - .register(); + Histogram histogram = + Histogram.builder() + .name("request_latency_seconds") + .help("request latency in seconds") + .unit(Unit.SECONDS) + .labelNames("path", "status") + .register(); - HTTPServer server = HTTPServer.builder() - .port(9400) - .buildAndStart(); + HTTPServer server = HTTPServer.builder().port(9400).buildAndStart(); - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + System.out.println( + "HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - Random random = new Random(0); + Random random = new Random(0); - while (true) { - double duration = Math.abs(random.nextGaussian() / 10.0 + 0.2); - String status = random.nextInt(100) < 20 ? "500" : "200"; - histogram.labelValues("/", status).observe(duration); - Thread.sleep(1000); - } + while (true) { + double duration = Math.abs(random.nextGaussian() / 10.0 + 0.2); + String status = random.nextInt(100) < 20 ? 
"500" : "200"; + histogram.labelValues("/", status).observe(duration); + Thread.sleep(1000); } + } } diff --git a/examples/example-prometheus-properties/src/main/java/io/prometheus/metrics/examples/prometheus_properties/Main.java b/examples/example-prometheus-properties/src/main/java/io/prometheus/metrics/examples/prometheus_properties/Main.java index 97611fe7b..e1f5954bc 100644 --- a/examples/example-prometheus-properties/src/main/java/io/prometheus/metrics/examples/prometheus_properties/Main.java +++ b/examples/example-prometheus-properties/src/main/java/io/prometheus/metrics/examples/prometheus_properties/Main.java @@ -4,42 +4,42 @@ import io.prometheus.metrics.exporter.httpserver.HTTPServer; import io.prometheus.metrics.instrumentation.jvm.JvmMetrics; import io.prometheus.metrics.model.snapshots.Unit; - import java.io.IOException; import java.util.Random; public class Main { - public static void main(String[] args) throws IOException, InterruptedException { + public static void main(String[] args) throws IOException, InterruptedException { - JvmMetrics.builder().register(); + JvmMetrics.builder().register(); - Histogram requestDuration = Histogram.builder() - .name("request_duration_seconds") - .help("request duration in seconds") - .unit(Unit.SECONDS) - .register(); + Histogram requestDuration = + Histogram.builder() + .name("request_duration_seconds") + .help("request duration in seconds") + .unit(Unit.SECONDS) + .register(); - Histogram requestSize = Histogram.builder() - .name("request_size_bytes") - .help("request size in bytes") - .unit(Unit.BYTES) - .register(); + Histogram requestSize = + Histogram.builder() + .name("request_size_bytes") + .help("request size in bytes") + .unit(Unit.BYTES) + .register(); - HTTPServer server = HTTPServer.builder() - .port(9400) - .buildAndStart(); + HTTPServer server = HTTPServer.builder().port(9400).buildAndStart(); - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + System.out.println( + "HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - Random random = new Random(0); + Random random = new Random(0); - while (true) { - double duration = Math.abs(random.nextGaussian() / 10.0 + 0.2); - double size = random.nextInt(1000) + 256; - requestDuration.observe(duration); - requestSize.observe(size); - Thread.sleep(1000); - } + while (true) { + double duration = Math.abs(random.nextGaussian() / 10.0 + 0.2); + double size = random.nextInt(1000) + 256; + requestDuration.observe(duration); + requestSize.observe(size); + Thread.sleep(1000); } + } } diff --git a/examples/example-simpleclient-bridge/src/main/java/io/prometheus/metrics/examples/simpleclient/Main.java b/examples/example-simpleclient-bridge/src/main/java/io/prometheus/metrics/examples/simpleclient/Main.java index cb0488d57..dd9dbcdcb 100644 --- a/examples/example-simpleclient-bridge/src/main/java/io/prometheus/metrics/examples/simpleclient/Main.java +++ b/examples/example-simpleclient-bridge/src/main/java/io/prometheus/metrics/examples/simpleclient/Main.java @@ -3,39 +3,34 @@ import io.prometheus.client.Counter; import io.prometheus.metrics.exporter.httpserver.HTTPServer; import io.prometheus.metrics.simpleclient.bridge.SimpleclientCollector; - import java.io.IOException; -/** - * Simple example of the simpleclient backwards compatibility module. - */ +/** Simple example of the simpleclient backwards compatibility module. 
*/ public class Main { - public static void main(String[] args) throws IOException, InterruptedException { + public static void main(String[] args) throws IOException, InterruptedException { - // The following call will register all metrics from the old CollectorRegistry.defaultRegistry - // with the new PrometheusRegistry.defaultRegistry. + // The following call will register all metrics from the old CollectorRegistry.defaultRegistry + // with the new PrometheusRegistry.defaultRegistry. - SimpleclientCollector.builder().register(); + SimpleclientCollector.builder().register(); - // Register a counter with the old CollectorRegistry. - // It doesn't matter whether the counter is registered before or after bridging with PrometheusRegistry. + // Register a counter with the old CollectorRegistry. + // It doesn't matter whether the counter is registered before or after bridging with + // PrometheusRegistry. - Counter simpleclientCounter = Counter.build() - .name("events_total") - .help("total number of events") - .register(); + Counter simpleclientCounter = + Counter.build().name("events_total").help("total number of events").register(); - simpleclientCounter.inc(); + simpleclientCounter.inc(); - // Expose metrics from the new PrometheusRegistry. This should contain the events_total metric. + // Expose metrics from the new PrometheusRegistry. This should contain the events_total metric. - HTTPServer server = HTTPServer.builder() - .port(9400) - .buildAndStart(); + HTTPServer server = HTTPServer.builder().port(9400).buildAndStart(); - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + System.out.println( + "HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - Thread.currentThread().join(); - } + Thread.currentThread().join(); + } } diff --git a/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/LogConsumer.java b/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/LogConsumer.java index d60e53979..0e6cfbcc6 100644 --- a/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/LogConsumer.java +++ b/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/LogConsumer.java @@ -1,36 +1,33 @@ package io.prometheus.client.it.common; -import org.testcontainers.containers.output.OutputFrame; - import java.util.function.Consumer; +import org.testcontainers.containers.output.OutputFrame; -/** - * Print Docker logs from TestContainers to stdout or stderr. - */ +/** Print Docker logs from TestContainers to stdout or stderr. 
*/
 public class LogConsumer implements Consumer<OutputFrame> {

-    private final String prefix;
+  private final String prefix;

-    private LogConsumer(String prefix) {
-        this.prefix = prefix;
-    }
+  private LogConsumer(String prefix) {
+    this.prefix = prefix;
+  }

-    public static LogConsumer withPrefix(String prefix) {
-        return new LogConsumer(prefix);
-    }
+  public static LogConsumer withPrefix(String prefix) {
+    return new LogConsumer(prefix);
+  }

-    @Override
-    public void accept(OutputFrame outputFrame) {
-        switch (outputFrame.getType()) {
-            case STDOUT:
-                System.out.print(prefix + " - " + outputFrame.getUtf8String());
-                break;
-            case END:
-                System.out.println(prefix + " - END");
-                break;
-            default: // STDERR or unexpected
-                System.err.print(prefix + " - " + outputFrame.getUtf8String());
-                break;
-        }
+  @Override
+  public void accept(OutputFrame outputFrame) {
+    switch (outputFrame.getType()) {
+      case STDOUT:
+        System.out.print(prefix + " - " + outputFrame.getUtf8String());
+        break;
+      case END:
+        System.out.println(prefix + " - END");
+        break;
+      default: // STDERR or unexpected
+        System.err.print(prefix + " - " + outputFrame.getUtf8String());
+        break;
    }
+  }
 }
diff --git a/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/Volume.java b/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/Volume.java
index dc9a32992..783ccb7a5 100644
--- a/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/Volume.java
+++ b/integration-tests/it-common/src/test/java/io/prometheus/client/it/common/Volume.java
@@ -1,6 +1,6 @@
 package io.prometheus.client.it.common;

-import org.junit.Assert;
+import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;

 import java.io.File;
 import java.io.IOException;
@@ -8,94 +8,96 @@
 import java.nio.file.*;
 import java.nio.file.attribute.BasicFileAttributes;
 import java.util.function.Predicate;
+import org.junit.Assert;

-import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
-
-/**
- * Temporary directory in ./target/ to be mounted as a volume in Docker containers.
- */
+/** Temporary directory in ./target/ to be mounted as a volume in Docker containers. */
 public class Volume {

-    private final Path tmpDir; // will be created in the ./target/ directory
+  private final Path tmpDir; // will be created in the ./target/ directory

-    private Volume(Path tmpDir) {
-        this.tmpDir = tmpDir;
-    }
+  private Volume(Path tmpDir) {
+    this.tmpDir = tmpDir;
+  }

-    public static Volume create(String prefix) throws IOException, URISyntaxException {
-        Path targetDir = Paths.get(Volume.class.getResource("/").toURI()).getParent();
-        Assert.assertEquals("failed to locate target/ directory", "target", targetDir.getFileName().toString());
-        return new Volume(Files.createTempDirectory(targetDir, prefix + "-"));
-    }
+  public static Volume create(String prefix) throws IOException, URISyntaxException {
+    Path targetDir = Paths.get(Volume.class.getResource("/").toURI()).getParent();
+    Assert.assertEquals(
+        "failed to locate target/ directory", "target", targetDir.getFileName().toString());
+    return new Volume(Files.createTempDirectory(targetDir, prefix + "-"));
+  }

-    /**
-     * Copy a file or directory to this volume.
-     * @param src is relative to {@code ./target/}
-     */
-    public Volume copy(String src) throws IOException {
-        Path srcPath = tmpDir.getParent().resolve(src);
-        if (Files.isRegularFile(srcPath)) {
-            Files.copy(srcPath, tmpDir.resolve(srcPath.getFileName()), REPLACE_EXISTING);
-        } else if (Files.isDirectory(srcPath)) {
-            Path dest = tmpDir.resolve(srcPath.getFileName());
-            Files.createDirectories(dest);
-            Files.walkFileTree(srcPath, new SimpleFileVisitor<Path>() {
+  /**
+   * Copy a file or directory to this volume.
+   *
+   * @param src is relative to {@code ./target/}
+   */
+  public Volume copy(String src) throws IOException {
+    Path srcPath = tmpDir.getParent().resolve(src);
+    if (Files.isRegularFile(srcPath)) {
+      Files.copy(srcPath, tmpDir.resolve(srcPath.getFileName()), REPLACE_EXISTING);
+    } else if (Files.isDirectory(srcPath)) {
+      Path dest = tmpDir.resolve(srcPath.getFileName());
+      Files.createDirectories(dest);
+      Files.walkFileTree(
+          srcPath,
+          new SimpleFileVisitor<Path>() {

-                // create parent directories
-                @Override
-                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
-                    Files.createDirectories(dest.resolve(srcPath.relativize(dir)));
-                    return FileVisitResult.CONTINUE;
-                }
+            // create parent directories
+            @Override
+            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
+                throws IOException {
+              Files.createDirectories(dest.resolve(srcPath.relativize(dir)));
+              return FileVisitResult.CONTINUE;
+            }

-                // copy file
-                @Override
-                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
-                    Files.copy(file, dest.resolve(srcPath.relativize(file)), REPLACE_EXISTING);
-                    return FileVisitResult.CONTINUE;
-                }
-            });
-        } else {
-            Assert.fail(src + ": No such file or directory");
-        }
-        return this;
+            // copy file
+            @Override
+            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
+                throws IOException {
+              Files.copy(file, dest.resolve(srcPath.relativize(file)), REPLACE_EXISTING);
+              return FileVisitResult.CONTINUE;
+            }
+          });
+    } else {
+      Assert.fail(src + ": No such file or directory");
    }
+    return this;
+  }

-    /**
-     * Remove files in tmpDir if they match the predicate.
-     */
-    public void rm(Predicate<Path> predicate) throws IOException {
-        Files.walkFileTree(tmpDir, new SimpleFileVisitor<Path>() {
-            @Override
-            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
-                if (predicate.test(file)) {
-                    Files.delete(file);
-                }
-                return FileVisitResult.CONTINUE;
+  /** Remove files in tmpDir if they match the predicate. */
+  public void rm(Predicate<Path> predicate) throws IOException {
+    Files.walkFileTree(
+        tmpDir,
+        new SimpleFileVisitor<Path>() {
+          @Override
+          public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
+              throws IOException {
+            if (predicate.test(file)) {
+              Files.delete(file);
            }
+            return FileVisitResult.CONTINUE;
+          }
        });
-    }
+  }

-    public String getHostPath() {
-        return tmpDir.toString();
-    }
+  public String getHostPath() {
+    return tmpDir.toString();
+  }

-    /**
-     * Recursively remove tmpDir and its contents.
-     */
-    public void remove() throws IOException {
-        if (!deleteRecursively(tmpDir.toFile())) {
-            throw new IOException(tmpDir + ": Failed to remove temporary test directory.");
-        }
+  /** Recursively remove tmpDir and its contents.
*/ + public void remove() throws IOException { + if (!deleteRecursively(tmpDir.toFile())) { + throw new IOException(tmpDir + ": Failed to remove temporary test directory."); } + } - private boolean deleteRecursively(File file) { - File[] allContents = file.listFiles(); - if (allContents != null) { - for (File child : allContents) { - deleteRecursively(child); - } - } - return file.delete(); + private boolean deleteRecursively(File file) { + File[] allContents = file.listFiles(); + if (allContents != null) { + for (File child : allContents) { + deleteRecursively(child); + } } + return file.delete(); + } } diff --git a/integration-tests/it-exporter/it-exporter-httpserver-sample/src/main/java/io/prometheus/metrics/it/exporter/httpserver/HTTPServerSample.java b/integration-tests/it-exporter/it-exporter-httpserver-sample/src/main/java/io/prometheus/metrics/it/exporter/httpserver/HTTPServerSample.java index b8cdc5141..4c665dd81 100644 --- a/integration-tests/it-exporter/it-exporter-httpserver-sample/src/main/java/io/prometheus/metrics/it/exporter/httpserver/HTTPServerSample.java +++ b/integration-tests/it-exporter/it-exporter-httpserver-sample/src/main/java/io/prometheus/metrics/it/exporter/httpserver/HTTPServerSample.java @@ -6,85 +6,87 @@ import io.prometheus.metrics.exporter.httpserver.HTTPServer; import io.prometheus.metrics.model.registry.Collector; import io.prometheus.metrics.model.registry.PrometheusRegistry; -import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.Unit; - import java.io.IOException; public class HTTPServerSample { - enum Mode { - success, - error - } - - public static void main(String[] args) throws IOException, InterruptedException { - - if (args.length != 2) { - System.err.println("Usage: java -jar exporter-httpserver-sample.jar "); - System.err.println("Where mode is \"success\" or \"error\"."); - System.exit(1); - } - - int port = parsePortOrExit(args[0]); - Mode mode = parseModeOrExit(args[1]); - - Counter counter = Counter.builder() - .name("uptime_seconds_total") - .help("total number of seconds since this application was started") - .unit(Unit.SECONDS) - .register(); - counter.inc(17); + enum Mode { + success, + error + } - Info info = Info.builder() - .name("integration_test_info") - .help("Info metric on this integration test") - .labelNames("test_name") - .register(); - info.addLabelValues("exporter-httpserver-sample"); + public static void main(String[] args) throws IOException, InterruptedException { - Gauge gauge = Gauge.builder() - .name("temperature_celsius") - .help("Temperature in Celsius") - .unit(Unit.CELSIUS) - .labelNames("location") - .register(); - gauge.labelValues("inside").set(23.0); - gauge.labelValues("outside").set(27.0); + if (args.length != 2) { + System.err.println("Usage: java -jar exporter-httpserver-sample.jar "); + System.err.println("Where mode is \"success\" or \"error\"."); + System.exit(1); + } - if (mode == Mode.error) { - Collector failingCollector = () -> { - throw new RuntimeException("Simulating an error."); - }; + int port = parsePortOrExit(args[0]); + Mode mode = parseModeOrExit(args[1]); + + Counter counter = + Counter.builder() + .name("uptime_seconds_total") + .help("total number of seconds since this application was started") + .unit(Unit.SECONDS) + .register(); + counter.inc(17); + + Info info = + Info.builder() + .name("integration_test_info") + .help("Info metric on this integration test") + .labelNames("test_name") + .register(); + 
info.addLabelValues("exporter-httpserver-sample"); + + Gauge gauge = + Gauge.builder() + .name("temperature_celsius") + .help("Temperature in Celsius") + .unit(Unit.CELSIUS) + .labelNames("location") + .register(); + gauge.labelValues("inside").set(23.0); + gauge.labelValues("outside").set(27.0); + + if (mode == Mode.error) { + Collector failingCollector = + () -> { + throw new RuntimeException("Simulating an error."); + }; + + PrometheusRegistry.defaultRegistry.register(failingCollector); + } - PrometheusRegistry.defaultRegistry.register(failingCollector); - } + HTTPServer server = HTTPServer.builder().port(port).buildAndStart(); - HTTPServer server = HTTPServer.builder() - .port(port) - .buildAndStart(); + System.out.println( + "HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + Thread.currentThread().join(); // wait forever + } - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - Thread.currentThread().join(); // wait forever + private static int parsePortOrExit(String port) { + try { + return Integer.parseInt(port); + } catch (NumberFormatException e) { + System.err.println("\"" + port + "\": Invalid port number."); + System.exit(1); } - - private static int parsePortOrExit(String port) { - try { - return Integer.parseInt(port); - } catch (NumberFormatException e) { - System.err.println("\"" + port + "\": Invalid port number."); - System.exit(1); - } - return 0; // this won't happen - } - - private static Mode parseModeOrExit(String mode) { - try { - return Mode.valueOf(mode); - } catch (IllegalArgumentException e) { - System.err.println("\"" + mode + "\": Invalid mode. Legal values are \"success\" and \"error\"."); - System.exit(1); - } - return null; // this won't happen + return 0; // this won't happen + } + + private static Mode parseModeOrExit(String mode) { + try { + return Mode.valueOf(mode); + } catch (IllegalArgumentException e) { + System.err.println( + "\"" + mode + "\": Invalid mode. Legal values are \"success\" and \"error\"."); + System.exit(1); } + return null; // this won't happen + } } diff --git a/integration-tests/it-exporter/it-exporter-servlet-jetty-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/jetty/ExporterServletJettySample.java b/integration-tests/it-exporter/it-exporter-servlet-jetty-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/jetty/ExporterServletJettySample.java index 9e1a22487..1cc16ee4b 100644 --- a/integration-tests/it-exporter/it-exporter-servlet-jetty-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/jetty/ExporterServletJettySample.java +++ b/integration-tests/it-exporter/it-exporter-servlet-jetty-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/jetty/ExporterServletJettySample.java @@ -6,99 +6,101 @@ import io.prometheus.metrics.exporter.servlet.jakarta.PrometheusMetricsServlet; import io.prometheus.metrics.model.registry.Collector; import io.prometheus.metrics.model.registry.PrometheusRegistry; -import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.Unit; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.ServletHandler; -/** - * Sample application using the {@link PrometheusMetricsServlet} in Jetty. - */ +/** Sample application using the {@link PrometheusMetricsServlet} in Jetty. 
*/ public class ExporterServletJettySample { - enum Mode { - success, - error - } + enum Mode { + success, + error + } + + public static void main(String[] args) throws Exception { - public static void main(String[] args) throws Exception { - - if (args.length != 2) { - System.err.println("Usage: java -jar exporter-servlet-jetty-sample.jar "); - System.err.println("Where mode is \"success\" or \"error\"."); - System.exit(1); - } - - int port = parsePortOrExit(args[0]); - Mode mode = parseModeOrExit(args[1]); - - Counter counter = Counter.builder() - .name("uptime_seconds_total") - .help("total number of seconds since this application was started") - .unit(Unit.SECONDS) - .register(); - counter.inc(17); - - Info info = Info.builder() - .name("integration_test_info") - .help("Info metric on this integration test") - .labelNames("test_name") - .register(); - info.addLabelValues("exporter-servlet-jetty-sample"); - - Gauge gauge = Gauge.builder() - .name("temperature_celsius") - .help("Temperature in Celsius") - .unit(Unit.CELSIUS) - .labelNames("location") - .register(); - gauge.labelValues("inside").set(23.0); - gauge.labelValues("outside").set(27.0); - - if (mode == Mode.error) { - Collector failingCollector = () -> { - throw new RuntimeException("Simulating an error."); - }; - - PrometheusRegistry.defaultRegistry.register(failingCollector); - } - - Server server = new Server(); - - // set port - ServerConnector connector = new ServerConnector(server); - connector.setPort(port); - server.setConnectors(new Connector[] {connector}); - - // register servlet - ServletHandler servletHandler = new ServletHandler(); - servletHandler.addServletWithMapping(PrometheusMetricsServlet.class, "/metrics"); - server.setHandler(servletHandler); - - System.out.println("Running on http://localhost:" + port + "/metrics"); - - // run - server.start(); + if (args.length != 2) { + System.err.println("Usage: java -jar exporter-servlet-jetty-sample.jar "); + System.err.println("Where mode is \"success\" or \"error\"."); + System.exit(1); } - private static int parsePortOrExit(String port) { - try { - return Integer.parseInt(port); - } catch (NumberFormatException e) { - System.err.println("\"" + port + "\": Invalid port number."); - System.exit(1); - } - return 0; // this won't happen + int port = parsePortOrExit(args[0]); + Mode mode = parseModeOrExit(args[1]); + + Counter counter = + Counter.builder() + .name("uptime_seconds_total") + .help("total number of seconds since this application was started") + .unit(Unit.SECONDS) + .register(); + counter.inc(17); + + Info info = + Info.builder() + .name("integration_test_info") + .help("Info metric on this integration test") + .labelNames("test_name") + .register(); + info.addLabelValues("exporter-servlet-jetty-sample"); + + Gauge gauge = + Gauge.builder() + .name("temperature_celsius") + .help("Temperature in Celsius") + .unit(Unit.CELSIUS) + .labelNames("location") + .register(); + gauge.labelValues("inside").set(23.0); + gauge.labelValues("outside").set(27.0); + + if (mode == Mode.error) { + Collector failingCollector = + () -> { + throw new RuntimeException("Simulating an error."); + }; + + PrometheusRegistry.defaultRegistry.register(failingCollector); } - private static Mode parseModeOrExit(String mode) { - try { - return Mode.valueOf(mode); - } catch (IllegalArgumentException e) { - System.err.println("\"" + mode + "\": Invalid mode. 
Legal values are \"success\" and \"error\"."); - System.exit(1); - } - return null; // this won't happen + Server server = new Server(); + + // set port + ServerConnector connector = new ServerConnector(server); + connector.setPort(port); + server.setConnectors(new Connector[] {connector}); + + // register servlet + ServletHandler servletHandler = new ServletHandler(); + servletHandler.addServletWithMapping(PrometheusMetricsServlet.class, "/metrics"); + server.setHandler(servletHandler); + + System.out.println("Running on http://localhost:" + port + "/metrics"); + + // run + server.start(); + } + + private static int parsePortOrExit(String port) { + try { + return Integer.parseInt(port); + } catch (NumberFormatException e) { + System.err.println("\"" + port + "\": Invalid port number."); + System.exit(1); + } + return 0; // this won't happen + } + + private static Mode parseModeOrExit(String mode) { + try { + return Mode.valueOf(mode); + } catch (IllegalArgumentException e) { + System.err.println( + "\"" + mode + "\": Invalid mode. Legal values are \"success\" and \"error\"."); + System.exit(1); } + return null; // this won't happen + } } diff --git a/integration-tests/it-exporter/it-exporter-servlet-tomcat-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/tomcat/ExporterServletTomcatSample.java b/integration-tests/it-exporter/it-exporter-servlet-tomcat-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/tomcat/ExporterServletTomcatSample.java index 8d13082b7..ead2ee88f 100644 --- a/integration-tests/it-exporter/it-exporter-servlet-tomcat-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/tomcat/ExporterServletTomcatSample.java +++ b/integration-tests/it-exporter/it-exporter-servlet-tomcat-sample/src/main/java/io/prometheus/metrics/it/exporter/servlet/tomcat/ExporterServletTomcatSample.java @@ -6,99 +6,100 @@ import io.prometheus.metrics.exporter.servlet.jakarta.PrometheusMetricsServlet; import io.prometheus.metrics.model.registry.Collector; import io.prometheus.metrics.model.registry.PrometheusRegistry; -import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.Unit; -import org.apache.catalina.Context; -import org.apache.catalina.LifecycleException; -import org.apache.catalina.startup.Tomcat; - import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import org.apache.catalina.Context; +import org.apache.catalina.LifecycleException; +import org.apache.catalina.startup.Tomcat; -/** - * Sample application using the {@link PrometheusMetricsServlet} in Tomcat. - */ +/** Sample application using the {@link PrometheusMetricsServlet} in Tomcat. 
*/ public class ExporterServletTomcatSample { - enum Mode { - success, - error - } - - public static void main(String[] args) throws LifecycleException, IOException { + enum Mode { + success, + error + } - if (args.length != 2) { - System.err.println("Usage: java -jar exporter-servlet-tomcat-sample.jar "); - System.err.println("Where mode is \"success\" or \"error\"."); - System.exit(1); - } + public static void main(String[] args) throws LifecycleException, IOException { - int port = parsePortOrExit(args[0]); - Mode mode = parseModeOrExit(args[1]); - - Counter counter = Counter.builder() - .name("uptime_seconds_total") - .help("total number of seconds since this application was started") - .unit(Unit.SECONDS) - .register(); - counter.inc(17); - - Info info = Info.builder() - .name("integration_test_info") - .help("Info metric on this integration test") - .labelNames("test_name") - .register(); - info.addLabelValues("exporter-servlet-tomcat-sample"); - - Gauge gauge = Gauge.builder() - .name("temperature_celsius") - .help("Temperature in Celsius") - .unit(Unit.CELSIUS) - .labelNames("location") - .register(); - gauge.labelValues("inside").set(23.0); - gauge.labelValues("outside").set(27.0); - - if (mode == Mode.error) { - Collector failingCollector = () -> { - throw new RuntimeException("Simulating an error."); - }; - - PrometheusRegistry.defaultRegistry.register(failingCollector); - } - - Tomcat tomcat = new Tomcat(); - tomcat.setPort(port); - - Path tmpDir = Files.createTempDirectory("exporter-servlet-tomcat-sample-"); - tomcat.setBaseDir(tmpDir.toFile().getAbsolutePath()); - Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); - Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); - ctx.addServletMappingDecoded("/metrics", "metrics"); - - tomcat.getConnector(); - tomcat.start(); - tomcat.getServer().await(); + if (args.length != 2) { + System.err.println("Usage: java -jar exporter-servlet-tomcat-sample.jar "); + System.err.println("Where mode is \"success\" or \"error\"."); + System.exit(1); } - private static int parsePortOrExit(String port) { - try { - return Integer.parseInt(port); - } catch (NumberFormatException e) { - System.err.println("\"" + port + "\": Invalid port number."); - System.exit(1); - } - return 0; // this won't happen + int port = parsePortOrExit(args[0]); + Mode mode = parseModeOrExit(args[1]); + + Counter counter = + Counter.builder() + .name("uptime_seconds_total") + .help("total number of seconds since this application was started") + .unit(Unit.SECONDS) + .register(); + counter.inc(17); + + Info info = + Info.builder() + .name("integration_test_info") + .help("Info metric on this integration test") + .labelNames("test_name") + .register(); + info.addLabelValues("exporter-servlet-tomcat-sample"); + + Gauge gauge = + Gauge.builder() + .name("temperature_celsius") + .help("Temperature in Celsius") + .unit(Unit.CELSIUS) + .labelNames("location") + .register(); + gauge.labelValues("inside").set(23.0); + gauge.labelValues("outside").set(27.0); + + if (mode == Mode.error) { + Collector failingCollector = + () -> { + throw new RuntimeException("Simulating an error."); + }; + + PrometheusRegistry.defaultRegistry.register(failingCollector); } - private static Mode parseModeOrExit(String mode) { - try { - return Mode.valueOf(mode); - } catch (IllegalArgumentException e) { - System.err.println("\"" + mode + "\": Invalid mode. 
Legal values are \"success\" and \"error\"."); - System.exit(1); - } - return null; // this won't happen + Tomcat tomcat = new Tomcat(); + tomcat.setPort(port); + + Path tmpDir = Files.createTempDirectory("exporter-servlet-tomcat-sample-"); + tomcat.setBaseDir(tmpDir.toFile().getAbsolutePath()); + Context ctx = tomcat.addContext("", new File(".").getAbsolutePath()); + Tomcat.addServlet(ctx, "metrics", new PrometheusMetricsServlet()); + ctx.addServletMappingDecoded("/metrics", "metrics"); + + tomcat.getConnector(); + tomcat.start(); + tomcat.getServer().await(); + } + + private static int parsePortOrExit(String port) { + try { + return Integer.parseInt(port); + } catch (NumberFormatException e) { + System.err.println("\"" + port + "\": Invalid port number."); + System.exit(1); + } + return 0; // this won't happen + } + + private static Mode parseModeOrExit(String mode) { + try { + return Mode.valueOf(mode); + } catch (IllegalArgumentException e) { + System.err.println( + "\"" + mode + "\": Invalid mode. Legal values are \"success\" and \"error\"."); + System.exit(1); } + return null; // this won't happen + } } diff --git a/integration-tests/it-exporter/it-exporter-test/src/test/java/io/prometheus/metrics/it/exporter/test/ExporterIT.java b/integration-tests/it-exporter/it-exporter-test/src/test/java/io/prometheus/metrics/it/exporter/test/ExporterIT.java index 657b890db..949f71840 100644 --- a/integration-tests/it-exporter/it-exporter-test/src/test/java/io/prometheus/metrics/it/exporter/test/ExporterIT.java +++ b/integration-tests/it-exporter/it-exporter-test/src/test/java/io/prometheus/metrics/it/exporter/test/ExporterIT.java @@ -1,17 +1,10 @@ package io.prometheus.metrics.it.exporter.test; +import static java.nio.charset.StandardCharsets.UTF_8; + import io.prometheus.client.it.common.LogConsumer; import io.prometheus.client.it.common.Volume; import io.prometheus.metrics.expositionformats.generated.com_google_protobuf_3_25_3.Metrics; -import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.testcontainers.containers.BindMode; -import org.testcontainers.containers.GenericContainer; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -26,317 +19,397 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.zip.GZIPInputStream; - -import static java.nio.charset.StandardCharsets.UTF_8; +import org.apache.commons.io.IOUtils; +import org.junit.After; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.testcontainers.containers.BindMode; +import org.testcontainers.containers.GenericContainer; @RunWith(Parameterized.class) public class ExporterIT { - private final GenericContainer sampleAppContainer; - private final Volume sampleAppVolume; - private final String sampleApp; + private final GenericContainer sampleAppContainer; + private final Volume sampleAppVolume; + private final String sampleApp; - @Parameterized.Parameters(name = "{0}") - public static String[] sampleApps() { - return new String[]{ - "exporter-httpserver-sample", - "exporter-servlet-tomcat-sample", - "exporter-servlet-jetty-sample", - }; - } + @Parameterized.Parameters(name = "{0}") + public static String[] sampleApps() { + return new String[] { + "exporter-httpserver-sample", + "exporter-servlet-tomcat-sample", + 
"exporter-servlet-jetty-sample", + }; + } - public ExporterIT(String sampleApp) throws IOException, URISyntaxException { - this.sampleApp = sampleApp; - this.sampleAppVolume = Volume.create("it-exporter") - .copy("../../it-" + sampleApp + "/target/" + sampleApp + ".jar"); - this.sampleAppContainer = new GenericContainer<>("openjdk:17") - .withFileSystemBind(sampleAppVolume.getHostPath(), "/app", BindMode.READ_ONLY) - .withWorkingDirectory("/app") - .withLogConsumer(LogConsumer.withPrefix(sampleApp)) - .withExposedPorts(9400); - } + public ExporterIT(String sampleApp) throws IOException, URISyntaxException { + this.sampleApp = sampleApp; + this.sampleAppVolume = + Volume.create("it-exporter") + .copy("../../it-" + sampleApp + "/target/" + sampleApp + ".jar"); + this.sampleAppContainer = + new GenericContainer<>("openjdk:17") + .withFileSystemBind(sampleAppVolume.getHostPath(), "/app", BindMode.READ_ONLY) + .withWorkingDirectory("/app") + .withLogConsumer(LogConsumer.withPrefix(sampleApp)) + .withExposedPorts(9400); + } - @After - public void tearDown() throws IOException { - sampleAppContainer.stop(); - sampleAppVolume.remove(); - } + @After + public void tearDown() throws IOException { + sampleAppContainer.stop(); + sampleAppVolume.remove(); + } - @Test - public void testOpenMetricsTextFormat() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", "", "Accept", "application/openmetrics-text; version=1.0.0; charset=utf-8"); - Assert.assertEquals(200, response.status); - assertContentType("application/openmetrics-text; version=1.0.0; charset=utf-8", response.getHeader("Content-Type")); - Assert.assertNull(response.getHeader("Content-Encoding")); - Assert.assertNull(response.getHeader("Transfer-Encoding")); - Assert.assertEquals(Integer.toString(response.body.length), response.getHeader("Content-Length")); - String bodyString = new String(response.body); - Assert.assertTrue(bodyString.contains("integration_test_info{test_name=\"" + sampleApp + "\"} 1")); - Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"inside\"} 23.0")); - Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"outside\"} 27.0")); - Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); - // OpenMetrics text format has a UNIT. 
- Assert.assertTrue(bodyString.contains("# UNIT uptime_seconds seconds")); - } + @Test + public void testOpenMetricsTextFormat() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = + scrape("GET", "", "Accept", "application/openmetrics-text; version=1.0.0; charset=utf-8"); + Assert.assertEquals(200, response.status); + assertContentType( + "application/openmetrics-text; version=1.0.0; charset=utf-8", + response.getHeader("Content-Type")); + Assert.assertNull(response.getHeader("Content-Encoding")); + Assert.assertNull(response.getHeader("Transfer-Encoding")); + Assert.assertEquals( + Integer.toString(response.body.length), response.getHeader("Content-Length")); + String bodyString = new String(response.body); + Assert.assertTrue( + bodyString.contains("integration_test_info{test_name=\"" + sampleApp + "\"} 1")); + Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"inside\"} 23.0")); + Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"outside\"} 27.0")); + Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); + // OpenMetrics text format has a UNIT. + Assert.assertTrue(bodyString.contains("# UNIT uptime_seconds seconds")); + } - @Test - public void testPrometheusTextFormat() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", ""); - Assert.assertEquals(200, response.status); - assertContentType("text/plain; version=0.0.4; charset=utf-8", response.getHeader("Content-Type")); - Assert.assertNull(response.getHeader("Content-Encoding")); - Assert.assertNull(response.getHeader("Transfer-Encoding")); - Assert.assertEquals(Integer.toString(response.body.length), response.getHeader("Content-Length")); - String bodyString = new String(response.body); - Assert.assertTrue(bodyString.contains("integration_test_info{test_name=\"" + sampleApp + "\"} 1")); - Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"inside\"} 23.0")); - Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"outside\"} 27.0")); - Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); - // Prometheus text format does not have a UNIT. - Assert.assertFalse(bodyString.contains("# UNIT uptime_seconds seconds")); - } + @Test + public void testPrometheusTextFormat() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = scrape("GET", ""); + Assert.assertEquals(200, response.status); + assertContentType( + "text/plain; version=0.0.4; charset=utf-8", response.getHeader("Content-Type")); + Assert.assertNull(response.getHeader("Content-Encoding")); + Assert.assertNull(response.getHeader("Transfer-Encoding")); + Assert.assertEquals( + Integer.toString(response.body.length), response.getHeader("Content-Length")); + String bodyString = new String(response.body); + Assert.assertTrue( + bodyString.contains("integration_test_info{test_name=\"" + sampleApp + "\"} 1")); + Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"inside\"} 23.0")); + Assert.assertTrue(bodyString.contains("temperature_celsius{location=\"outside\"} 27.0")); + Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); + // Prometheus text format does not have a UNIT. 
+    Assert.assertFalse(bodyString.contains("# UNIT uptime_seconds seconds"));
+  }

-    @Test
-    public void testPrometheusProtobufFormat() throws IOException {
-        sampleAppContainer
-                .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success")
-                .start();
-        Response response = scrape("GET", "", "Accept", "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited");
-        Assert.assertEquals(200, response.status);
-        assertContentType("application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited", response.getHeader("Content-Type"));
-        Assert.assertNull(response.getHeader("Content-Encoding"));
-        Assert.assertNull(response.getHeader("Transfer-Encoding"));
-        Assert.assertEquals(Integer.toString(response.body.length), response.getHeader("Content-Length"));
-        List<Metrics.MetricFamily> metrics = new ArrayList<>();
-        InputStream in = new ByteArrayInputStream(response.body);
-        while (in.available() > 0) {
-            metrics.add(Metrics.MetricFamily.parseDelimitedFrom(in));
-        }
-        Assert.assertEquals(3, metrics.size());
-        // metrics are sorted by name
-        Assert.assertEquals("integration_test_info", metrics.get(0).getName());
-        Assert.assertEquals("temperature_celsius", metrics.get(1).getName());
-        Assert.assertEquals("uptime_seconds_total", metrics.get(2).getName());
+  @Test
+  public void testPrometheusProtobufFormat() throws IOException {
+    sampleAppContainer
+        .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success")
+        .start();
+    Response response =
+        scrape(
+            "GET",
+            "",
+            "Accept",
+            "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited");
+    Assert.assertEquals(200, response.status);
+    assertContentType(
+        "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited",
+        response.getHeader("Content-Type"));
+    Assert.assertNull(response.getHeader("Content-Encoding"));
+    Assert.assertNull(response.getHeader("Transfer-Encoding"));
+    Assert.assertEquals(
+        Integer.toString(response.body.length), response.getHeader("Content-Length"));
+    List<Metrics.MetricFamily> metrics = new ArrayList<>();
+    InputStream in = new ByteArrayInputStream(response.body);
+    while (in.available() > 0) {
+      metrics.add(Metrics.MetricFamily.parseDelimitedFrom(in));
    }
+    Assert.assertEquals(3, metrics.size());
+    // metrics are sorted by name
+    Assert.assertEquals("integration_test_info", metrics.get(0).getName());
+    Assert.assertEquals("temperature_celsius", metrics.get(1).getName());
+    Assert.assertEquals("uptime_seconds_total", metrics.get(2).getName());
+  }

-    @Test
-    public void testCompression() throws IOException {
-        sampleAppContainer
-                .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success")
-                .start();
-        Response response = scrape("GET", "",
-                "Accept", "application/openmetrics-text; version=1.0.0; charset=utf-8",
-                "Accept-Encoding", "gzip");
-        Assert.assertEquals(200, response.status);
-        Assert.assertEquals("gzip", response.getHeader("Content-Encoding"));
-        if (response.getHeader("Content-Length") != null) {
-            // The servlet container might set a content length as the body is very small.
-            Assert.assertEquals(Integer.toString(response.body.length), response.getHeader("Content-Length"));
-            Assert.assertNull(response.getHeader("Transfer-Encoding"));
-        } else {
-            // If no content length is set, transfer-encoding chunked must be used.
- Assert.assertEquals("chunked", response.getHeader("Transfer-Encoding")); - } - assertContentType("application/openmetrics-text; version=1.0.0; charset=utf-8", response.getHeader("Content-Type")); - String body = new String(IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(response.body))), UTF_8); - Assert.assertTrue(body.contains("uptime_seconds_total 17.0")); + @Test + public void testCompression() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = + scrape( + "GET", + "", + "Accept", + "application/openmetrics-text; version=1.0.0; charset=utf-8", + "Accept-Encoding", + "gzip"); + Assert.assertEquals(200, response.status); + Assert.assertEquals("gzip", response.getHeader("Content-Encoding")); + if (response.getHeader("Content-Length") != null) { + // The servlet container might set a content length as the body is very small. + Assert.assertEquals( + Integer.toString(response.body.length), response.getHeader("Content-Length")); + Assert.assertNull(response.getHeader("Transfer-Encoding")); + } else { + // If no content length is set, transfer-encoding chunked must be used. + Assert.assertEquals("chunked", response.getHeader("Transfer-Encoding")); } + assertContentType( + "application/openmetrics-text; version=1.0.0; charset=utf-8", + response.getHeader("Content-Type")); + String body = + new String( + IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(response.body))), + UTF_8); + Assert.assertTrue(body.contains("uptime_seconds_total 17.0")); + } - @Test - public void testErrorHandling() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "error") - .start(); - Response response = scrape("GET", ""); - Assert.assertEquals(500, response.status); - Assert.assertTrue(new String(response.body, UTF_8).contains("Simulating an error.")); - } + @Test + public void testErrorHandling() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "error") + .start(); + Response response = scrape("GET", ""); + Assert.assertEquals(500, response.status); + Assert.assertTrue(new String(response.body, UTF_8).contains("Simulating an error.")); + } - @Test - public void testHeadRequest() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response fullResponse = scrape("GET", ""); - int size = fullResponse.body.length; - Assert.assertTrue(size > 0); - Response headResponse = scrape("HEAD", ""); - Assert.assertEquals(200, headResponse.status); - Assert.assertEquals(Integer.toString(size), headResponse.getHeader("Content-Length")); - Assert.assertEquals(0, headResponse.body.length); - } + @Test + public void testHeadRequest() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response fullResponse = scrape("GET", ""); + int size = fullResponse.body.length; + Assert.assertTrue(size > 0); + Response headResponse = scrape("HEAD", ""); + Assert.assertEquals(200, headResponse.status); + Assert.assertEquals(Integer.toString(size), headResponse.getHeader("Content-Length")); + Assert.assertEquals(0, headResponse.body.length); + } - @Test - public void testDebug() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response 
response = scrape("GET", "debug=openmetrics"); - Assert.assertEquals(200, response.status); - assertContentType("text/plain; charset=utf-8", response.getHeader("Content-Type")); - String bodyString = new String(response.body, UTF_8); - Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); - Assert.assertTrue(bodyString.contains("# UNIT uptime_seconds seconds")); - } + @Test + public void testDebug() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = scrape("GET", "debug=openmetrics"); + Assert.assertEquals(200, response.status); + assertContentType("text/plain; charset=utf-8", response.getHeader("Content-Type")); + String bodyString = new String(response.body, UTF_8); + Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); + Assert.assertTrue(bodyString.contains("# UNIT uptime_seconds seconds")); + } - @Test - public void testNameFilter() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", nameParam("integration_test_info") + "&" + nameParam("uptime_seconds_total"), - "Accept", "application/openmetrics-text; version=1.0.0; charset=utf-8"); - Assert.assertEquals(200, response.status); - assertContentType("application/openmetrics-text; version=1.0.0; charset=utf-8", response.getHeader("Content-Type")); - String bodyString = new String(response.body, UTF_8); - Assert.assertTrue(bodyString.contains("integration_test_info{test_name=\"" + sampleApp + "\"} 1")); - Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); - Assert.assertFalse(bodyString.contains("temperature_celsius")); - } + @Test + public void testNameFilter() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = + scrape( + "GET", + nameParam("integration_test_info") + "&" + nameParam("uptime_seconds_total"), + "Accept", + "application/openmetrics-text; version=1.0.0; charset=utf-8"); + Assert.assertEquals(200, response.status); + assertContentType( + "application/openmetrics-text; version=1.0.0; charset=utf-8", + response.getHeader("Content-Type")); + String bodyString = new String(response.body, UTF_8); + Assert.assertTrue( + bodyString.contains("integration_test_info{test_name=\"" + sampleApp + "\"} 1")); + Assert.assertTrue(bodyString.contains("uptime_seconds_total 17.0")); + Assert.assertFalse(bodyString.contains("temperature_celsius")); + } - @Test - public void testEmptyResponseOpenMetrics() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", nameParam("none_existing"), - "Accept", "application/openmetrics-text; version=1.0.0; charset=utf-8"); - Assert.assertEquals(200, response.status); - assertContentType("application/openmetrics-text; version=1.0.0; charset=utf-8", response.getHeader("Content-Type")); - Assert.assertEquals(Integer.toString(response.body.length), response.getHeader("Content-Length")); - Assert.assertEquals("# EOF\n", new String(response.body, UTF_8)); - } + @Test + public void testEmptyResponseOpenMetrics() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = + scrape( + "GET", + nameParam("none_existing"), + "Accept", + 
"application/openmetrics-text; version=1.0.0; charset=utf-8"); + Assert.assertEquals(200, response.status); + assertContentType( + "application/openmetrics-text; version=1.0.0; charset=utf-8", + response.getHeader("Content-Type")); + Assert.assertEquals( + Integer.toString(response.body.length), response.getHeader("Content-Length")); + Assert.assertEquals("# EOF\n", new String(response.body, UTF_8)); + } - @Test - public void testEmptyResponseText() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", nameParam("none_existing")); - Assert.assertEquals(200, response.status); - assertContentType("text/plain; version=0.0.4; charset=utf-8", response.getHeader("Content-Type")); - if (response.getHeader("Content-Length") != null) { // HTTPServer does not send a zero content length, which is ok - Assert.assertEquals("0", response.getHeader("Content-Length")); - } - Assert.assertEquals(0, response.body.length); + @Test + public void testEmptyResponseText() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = scrape("GET", nameParam("none_existing")); + Assert.assertEquals(200, response.status); + assertContentType( + "text/plain; version=0.0.4; charset=utf-8", response.getHeader("Content-Type")); + if (response.getHeader("Content-Length") + != null) { // HTTPServer does not send a zero content length, which is ok + Assert.assertEquals("0", response.getHeader("Content-Length")); } + Assert.assertEquals(0, response.body.length); + } - @Test - public void testEmptyResponseProtobuf() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", nameParam("none_existing"), - "Accept", "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited"); - Assert.assertEquals(200, response.status); - assertContentType("application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited", response.getHeader("Content-Type")); - Assert.assertEquals(0, response.body.length); - } + @Test + public void testEmptyResponseProtobuf() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = + scrape( + "GET", + nameParam("none_existing"), + "Accept", + "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited"); + Assert.assertEquals(200, response.status); + assertContentType( + "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited", + response.getHeader("Content-Type")); + Assert.assertEquals(0, response.body.length); + } - @Test - public void testEmptyResponseGzipOpenMetrics() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", nameParam("none_existing"), - "Accept", "application/openmetrics-text; version=1.0.0; charset=utf-8", - "Accept-Encoding", "gzip"); - Assert.assertEquals(200, response.status); - Assert.assertEquals("gzip", response.getHeader("Content-Encoding")); - String body = new String(IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(response.body))), UTF_8); - Assert.assertEquals("# EOF\n", body); - } + @Test + public void 
testEmptyResponseGzipOpenMetrics() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = + scrape( + "GET", + nameParam("none_existing"), + "Accept", + "application/openmetrics-text; version=1.0.0; charset=utf-8", + "Accept-Encoding", + "gzip"); + Assert.assertEquals(200, response.status); + Assert.assertEquals("gzip", response.getHeader("Content-Encoding")); + String body = + new String( + IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(response.body))), + UTF_8); + Assert.assertEquals("# EOF\n", body); + } - @Test - public void testEmptyResponseGzipText() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", nameParam("none_existing"), - "Accept-Encoding", "gzip"); - Assert.assertEquals(200, response.status); - Assert.assertEquals("gzip", response.getHeader("Content-Encoding")); - String body = new String(IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(response.body))), UTF_8); - Assert.assertEquals(0, body.length()); - } + @Test + public void testEmptyResponseGzipText() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = scrape("GET", nameParam("none_existing"), "Accept-Encoding", "gzip"); + Assert.assertEquals(200, response.status); + Assert.assertEquals("gzip", response.getHeader("Content-Encoding")); + String body = + new String( + IOUtils.toByteArray(new GZIPInputStream(new ByteArrayInputStream(response.body))), + UTF_8); + Assert.assertEquals(0, body.length()); + } - private String nameParam(String name) throws UnsupportedEncodingException { - return URLEncoder.encode("name[]", UTF_8.name()) + "=" + URLEncoder.encode(name, UTF_8.name()); - } + private String nameParam(String name) throws UnsupportedEncodingException { + return URLEncoder.encode("name[]", UTF_8.name()) + "=" + URLEncoder.encode(name, UTF_8.name()); + } - @Test - public void testDebugUnknown() throws IOException { - sampleAppContainer - .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") - .start(); - Response response = scrape("GET", "debug=unknown"); - Assert.assertEquals(500, response.status); - assertContentType("text/plain; charset=utf-8", response.getHeader("Content-Type")); - } + @Test + public void testDebugUnknown() throws IOException { + sampleAppContainer + .withCommand("java", "-jar", "/app/" + sampleApp + ".jar", "9400", "success") + .start(); + Response response = scrape("GET", "debug=unknown"); + Assert.assertEquals(500, response.status); + assertContentType("text/plain; charset=utf-8", response.getHeader("Content-Type")); + } - private void assertContentType(String expected, String actual) { - if (!expected.replace(" ", "").equals(actual)) { - Assert.assertEquals(expected, actual); - } + private void assertContentType(String expected, String actual) { + if (!expected.replace(" ", "").equals(actual)) { + Assert.assertEquals(expected, actual); } + } - private Response scrape(String method, String queryString, String... requestHeaders) throws IOException { - long timeoutMillis = TimeUnit.SECONDS.toMillis(5); - URL url = new URL("http://localhost:" + sampleAppContainer.getMappedPort(9400) + "/metrics?" 
+ queryString); - HttpURLConnection con = (HttpURLConnection) url.openConnection(); - con.setRequestMethod(method); - for (int i = 0; i < requestHeaders.length; i += 2) { - con.setRequestProperty(requestHeaders[i], requestHeaders[i + 1]); - } - long start = System.currentTimeMillis(); - Exception exception = null; - while (System.currentTimeMillis() - start < timeoutMillis) { - try { - if (con.getResponseCode() == 200) { - return new Response(con.getResponseCode(), con.getHeaderFields(), IOUtils.toByteArray(con.getInputStream())); - } else { - return new Response(con.getResponseCode(), con.getHeaderFields(), IOUtils.toByteArray(con.getErrorStream())); - } - } catch (Exception e) { - exception = e; - try { - Thread.sleep(100); - } catch (InterruptedException ignored) { - } - } + private Response scrape(String method, String queryString, String... requestHeaders) + throws IOException { + long timeoutMillis = TimeUnit.SECONDS.toMillis(5); + URL url = + new URL( + "http://localhost:" + + sampleAppContainer.getMappedPort(9400) + + "/metrics?" + + queryString); + HttpURLConnection con = (HttpURLConnection) url.openConnection(); + con.setRequestMethod(method); + for (int i = 0; i < requestHeaders.length; i += 2) { + con.setRequestProperty(requestHeaders[i], requestHeaders[i + 1]); + } + long start = System.currentTimeMillis(); + Exception exception = null; + while (System.currentTimeMillis() - start < timeoutMillis) { + try { + if (con.getResponseCode() == 200) { + return new Response( + con.getResponseCode(), + con.getHeaderFields(), + IOUtils.toByteArray(con.getInputStream())); + } else { + return new Response( + con.getResponseCode(), + con.getHeaderFields(), + IOUtils.toByteArray(con.getErrorStream())); } - if (exception != null) { - exception.printStackTrace(); + } catch (Exception e) { + exception = e; + try { + Thread.sleep(100); + } catch (InterruptedException ignored) { } - Assert.fail("timeout while getting metrics from " + url); - return null; // will not happen + } + } + if (exception != null) { + exception.printStackTrace(); } + Assert.fail("timeout while getting metrics from " + url); + return null; // will not happen + } - private static class Response { - private final int status; - private final Map headers; - private final byte[] body; + private static class Response { + private final int status; + private final Map headers; + private final byte[] body; - private Response(int status, Map> headers, byte[] body) { - this.status = status; - this.headers = new HashMap<>(headers.size()); - this.body = body; - for (Map.Entry> entry : headers.entrySet()) { - if (entry.getKey() != null) { // HttpUrlConnection uses pseudo key "null" for the status line - this.headers.put(entry.getKey().toLowerCase(), entry.getValue().get(0)); - } - } + private Response(int status, Map> headers, byte[] body) { + this.status = status; + this.headers = new HashMap<>(headers.size()); + this.body = body; + for (Map.Entry> entry : headers.entrySet()) { + if (entry.getKey() + != null) { // HttpUrlConnection uses pseudo key "null" for the status line + this.headers.put(entry.getKey().toLowerCase(), entry.getValue().get(0)); } + } + } - private String getHeader(String name) { - // HTTP headers are case-insensitive - return headers.get(name.toLowerCase()); - } + private String getHeader(String name) { + // HTTP headers are case-insensitive + return headers.get(name.toLowerCase()); } + } } diff --git a/integration-tests/it-pushgateway/src/main/java/io/prometheus/metrics/it/pushgateway/PushGatewayTestApp.java 
b/integration-tests/it-pushgateway/src/main/java/io/prometheus/metrics/it/pushgateway/PushGatewayTestApp.java index 69ef63081..6376b03fb 100644 --- a/integration-tests/it-pushgateway/src/main/java/io/prometheus/metrics/it/pushgateway/PushGatewayTestApp.java +++ b/integration-tests/it-pushgateway/src/main/java/io/prometheus/metrics/it/pushgateway/PushGatewayTestApp.java @@ -1,131 +1,126 @@ package io.prometheus.metrics.it.pushgateway; +import static io.prometheus.metrics.exporter.pushgateway.Scheme.HTTPS; + import io.prometheus.metrics.core.metrics.Gauge; import io.prometheus.metrics.core.metrics.Histogram; import io.prometheus.metrics.exporter.pushgateway.Format; import io.prometheus.metrics.exporter.pushgateway.HttpConnectionFactory; import io.prometheus.metrics.exporter.pushgateway.PushGateway; import io.prometheus.metrics.model.snapshots.Unit; - -import javax.net.ssl.HttpsURLConnection; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.X509TrustManager; import java.io.IOException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.cert.X509Certificate; +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; -import static io.prometheus.metrics.exporter.pushgateway.Scheme.HTTPS; - -/** - * Example application using the {@link PushGateway}. - */ +/** Example application using the {@link PushGateway}. */ public class PushGatewayTestApp { - public static void main(String[] args) throws IOException { - if (args.length != 1) { - System.err.println("Usage: java -jar pushgateway-test-app.jar "); - System.exit(-1); - } - switch (args[0]) { - case "simple": - runSimpleTest(); - break; - case "textFormat": - runTextFormatTest(); - break; - case "basicauth": - runBasicAuthTest(); - break; - case "ssl": - runSslTest(); - break; - default: - System.err.println(args[0] + ": Not implemented."); - System.exit(-1); - } + public static void main(String[] args) throws IOException { + if (args.length != 1) { + System.err.println("Usage: java -jar pushgateway-test-app.jar "); + System.exit(-1); } - - private static void runSimpleTest() throws IOException { - makeMetrics(); - PushGateway pg = PushGateway.builder().build(); - System.out.println("Pushing metrics..."); - pg.push(); - System.out.println("Push successful."); + switch (args[0]) { + case "simple": + runSimpleTest(); + break; + case "textFormat": + runTextFormatTest(); + break; + case "basicauth": + runBasicAuthTest(); + break; + case "ssl": + runSslTest(); + break; + default: + System.err.println(args[0] + ": Not implemented."); + System.exit(-1); } + } - private static void runTextFormatTest() throws IOException { - makeMetrics(); - PushGateway pg = PushGateway.builder().format(Format.PROMETHEUS_TEXT).build(); - System.out.println("Pushing metrics..."); - pg.push(); - System.out.println("Push successful."); - } + private static void runSimpleTest() throws IOException { + makeMetrics(); + PushGateway pg = PushGateway.builder().build(); + System.out.println("Pushing metrics..."); + pg.push(); + System.out.println("Push successful."); + } - private static void runBasicAuthTest() throws IOException { - makeMetrics(); - PushGateway pg = PushGateway.builder() - .basicAuth("my_user", "secret_password") - .build(); - System.out.println("Pushing metrics..."); - pg.push(); - System.out.println("Push successful."); - } + private static void runTextFormatTest() 
throws IOException { + makeMetrics(); + PushGateway pg = PushGateway.builder().format(Format.PROMETHEUS_TEXT).build(); + System.out.println("Pushing metrics..."); + pg.push(); + System.out.println("Push successful."); + } - private static void runSslTest() throws IOException { - makeMetrics(); - PushGateway pg = PushGateway.builder() - .scheme(HTTPS) - .connectionFactory(insecureConnectionFactory) - .build(); - System.out.println("Pushing metrics..."); - pg.push(); - System.out.println("Push successful."); - } + private static void runBasicAuthTest() throws IOException { + makeMetrics(); + PushGateway pg = PushGateway.builder().basicAuth("my_user", "secret_password").build(); + System.out.println("Pushing metrics..."); + pg.push(); + System.out.println("Push successful."); + } + + private static void runSslTest() throws IOException { + makeMetrics(); + PushGateway pg = + PushGateway.builder().scheme(HTTPS).connectionFactory(insecureConnectionFactory).build(); + System.out.println("Pushing metrics..."); + pg.push(); + System.out.println("Push successful."); + } - static TrustManager insecureTrustManager = new X509TrustManager() { + static TrustManager insecureTrustManager = + new X509TrustManager() { public java.security.cert.X509Certificate[] getAcceptedIssuers() { - return null; + return null; } @Override - public void checkClientTrusted(X509Certificate[] chain, String authType) { - } + public void checkClientTrusted(X509Certificate[] chain, String authType) {} @Override - public void checkServerTrusted(X509Certificate[] chain, String authType) { - } - }; + public void checkServerTrusted(X509Certificate[] chain, String authType) {} + }; - static HttpConnectionFactory insecureConnectionFactory = url -> { + static HttpConnectionFactory insecureConnectionFactory = + url -> { try { - SSLContext sslContext = SSLContext.getInstance("TLS"); - sslContext.init(null, new TrustManager[]{insecureTrustManager}, null); - SSLContext.setDefault(sslContext); + SSLContext sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, new TrustManager[] {insecureTrustManager}, null); + SSLContext.setDefault(sslContext); - HttpsURLConnection connection = (HttpsURLConnection) url.openConnection(); - connection.setHostnameVerifier((hostname, session) -> true); - return connection; + HttpsURLConnection connection = (HttpsURLConnection) url.openConnection(); + connection.setHostnameVerifier((hostname, session) -> true); + return connection; } catch (NoSuchAlgorithmException | KeyManagementException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }; + }; - private static void makeMetrics() { - Histogram sizes = Histogram.builder() - .name("file_sizes_bytes") - .classicUpperBounds(256, 512, 1024, 2048) - .unit(Unit.BYTES) - .register(); - sizes.observe(513); - sizes.observe(814); - sizes.observe(1553); - Gauge duration = Gauge.builder() - .name("my_batch_job_duration_seconds") - .help("Duration of my batch job in seconds.") - .unit(Unit.SECONDS) - .register(); - duration.set(0.5); - } + private static void makeMetrics() { + Histogram sizes = + Histogram.builder() + .name("file_sizes_bytes") + .classicUpperBounds(256, 512, 1024, 2048) + .unit(Unit.BYTES) + .register(); + sizes.observe(513); + sizes.observe(814); + sizes.observe(1553); + Gauge duration = + Gauge.builder() + .name("my_batch_job_duration_seconds") + .help("Duration of my batch job in seconds.") + .unit(Unit.SECONDS) + .register(); + duration.set(0.5); + } } diff --git 
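The test app above exercises PushGateway.builder() in its default, text-format, basic-auth, and TLS variants. A minimal standalone push, using the same metric registration as makeMetrics(), can look like the sketch below; the Pushgateway address is expected to come from the io.prometheus.exporter.pushgateway.address property (the integration test passes -Dio.prometheus.exporter.pushgateway.address=pushgateway:9091), with localhost:9091 as the documented default.

    import io.prometheus.metrics.core.metrics.Gauge;
    import io.prometheus.metrics.exporter.pushgateway.PushGateway;
    import io.prometheus.metrics.model.snapshots.Unit;
    import java.io.IOException;

    public class MinimalPushSketch {
      public static void main(String[] args) throws IOException {
        // Register a gauge in the default registry, mirroring makeMetrics().
        Gauge duration =
            Gauge.builder()
                .name("my_batch_job_duration_seconds")
                .help("Duration of my batch job in seconds.")
                .unit(Unit.SECONDS)
                .register();
        duration.set(0.5);

        // Address, job, and scheme are taken from io.prometheus.exporter.pushgateway.* properties,
        // e.g. -Dio.prometheus.exporter.pushgateway.address=pushgateway:9091.
        PushGateway pg = PushGateway.builder().build();
        pg.push();
      }
    }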
a/integration-tests/it-pushgateway/src/test/java/io/prometheus/metrics/it/pushgateway/PushGatewayIT.java b/integration-tests/it-pushgateway/src/test/java/io/prometheus/metrics/it/pushgateway/PushGatewayIT.java index 6b89a8c58..7e1198a55 100644 --- a/integration-tests/it-pushgateway/src/test/java/io/prometheus/metrics/it/pushgateway/PushGatewayIT.java +++ b/integration-tests/it-pushgateway/src/test/java/io/prometheus/metrics/it/pushgateway/PushGatewayIT.java @@ -6,6 +6,9 @@ import com.squareup.okhttp.*; import io.prometheus.client.it.common.LogConsumer; import io.prometheus.client.it.common.Volume; +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.concurrent.TimeUnit; import net.minidev.json.JSONArray; import org.junit.After; import org.junit.Assert; @@ -17,212 +20,247 @@ import org.testcontainers.containers.wait.strategy.Wait; import org.testcontainers.utility.MountableFile; -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.concurrent.TimeUnit; - public class PushGatewayIT { - private GenericContainer sampleAppContainer; - private GenericContainer pushGatewayContainer; - private GenericContainer prometheusContainer; - private Volume sampleAppVolume; - - @Before - public void setUp() throws IOException, URISyntaxException { - Network network = Network.newNetwork(); - sampleAppVolume = Volume.create("it-pushgateway") - .copy("pushgateway-test-app.jar"); - pushGatewayContainer = new GenericContainer<>("prom/pushgateway:v1.8.0") - .withExposedPorts(9091) - .withNetwork(network) - .withNetworkAliases("pushgateway") - .withLogConsumer(LogConsumer.withPrefix("pushgateway")) - .waitingFor(Wait.forListeningPort()); - sampleAppContainer = new GenericContainer<>("openjdk:17") - .withFileSystemBind(sampleAppVolume.getHostPath(), "/app", BindMode.READ_ONLY) - .withNetwork(network) - .withWorkingDirectory("/app") - .dependsOn(pushGatewayContainer) - .withLogConsumer(LogConsumer.withPrefix("test-app")); - prometheusContainer = new GenericContainer<>("prom/prometheus:v2.51.2") - .withNetwork(network) - .dependsOn(pushGatewayContainer) - .withExposedPorts(9090) - .withLogConsumer(LogConsumer.withPrefix("prometheus")); - } + private GenericContainer sampleAppContainer; + private GenericContainer pushGatewayContainer; + private GenericContainer prometheusContainer; + private Volume sampleAppVolume; - @After - public void tearDown() throws IOException { - prometheusContainer.stop(); - pushGatewayContainer.stop(); - sampleAppContainer.stop(); - sampleAppVolume.remove(); - } + @Before + public void setUp() throws IOException, URISyntaxException { + Network network = Network.newNetwork(); + sampleAppVolume = Volume.create("it-pushgateway").copy("pushgateway-test-app.jar"); + pushGatewayContainer = + new GenericContainer<>("prom/pushgateway:v1.8.0") + .withExposedPorts(9091) + .withNetwork(network) + .withNetworkAliases("pushgateway") + .withLogConsumer(LogConsumer.withPrefix("pushgateway")) + .waitingFor(Wait.forListeningPort()); + sampleAppContainer = + new GenericContainer<>("openjdk:17") + .withFileSystemBind(sampleAppVolume.getHostPath(), "/app", BindMode.READ_ONLY) + .withNetwork(network) + .withWorkingDirectory("/app") + .dependsOn(pushGatewayContainer) + .withLogConsumer(LogConsumer.withPrefix("test-app")); + prometheusContainer = + new GenericContainer<>("prom/prometheus:v2.51.2") + .withNetwork(network) + .dependsOn(pushGatewayContainer) + .withExposedPorts(9090) + .withLogConsumer(LogConsumer.withPrefix("prometheus")); + } - final 
OkHttpClient client = new OkHttpClient(); - - @Test - public void testSimple() throws IOException, InterruptedException { - pushGatewayContainer - .start(); - sampleAppContainer - .withCommand("java", - "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", - "-jar", - "/app/pushgateway-test-app.jar", - "simple" - ).start(); - prometheusContainer - .withCopyFileToContainer(MountableFile.forClasspathResource("/prometheus.yaml"), "/etc/prometheus/prometheus.yml") - .start(); - awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); - assertMetrics(); - } + @After + public void tearDown() throws IOException { + prometheusContainer.stop(); + pushGatewayContainer.stop(); + sampleAppContainer.stop(); + sampleAppVolume.remove(); + } - @Test - public void testTextFormat() throws IOException, InterruptedException { - pushGatewayContainer - .start(); - sampleAppContainer - .withCommand("java", - "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", - "-jar", - "/app/pushgateway-test-app.jar", - "textFormat" - ).start(); - prometheusContainer - .withCopyFileToContainer(MountableFile.forClasspathResource("/prometheus.yaml"), "/etc/prometheus/prometheus.yml") - .start(); - awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); - assertMetrics(); - } + final OkHttpClient client = new OkHttpClient(); - @Test - public void testBasicAuth() throws IOException, InterruptedException { - pushGatewayContainer - .withCopyFileToContainer(MountableFile.forClasspathResource("/pushgateway-basicauth.yaml"), "/pushgateway/pushgateway-basicauth.yaml") - .withCommand("--web.config.file", "pushgateway-basicauth.yaml") - .start(); - sampleAppContainer - .withCommand("java", - "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", - "-jar", - "/app/pushgateway-test-app.jar", - "basicauth" - ).start(); - prometheusContainer - .withCopyFileToContainer(MountableFile.forClasspathResource("/prometheus-basicauth.yaml"), "/etc/prometheus/prometheus.yml") - .start(); - awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); - assertMetrics(); - } + @Test + public void testSimple() throws IOException, InterruptedException { + pushGatewayContainer.start(); + sampleAppContainer + .withCommand( + "java", + "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", + "-jar", + "/app/pushgateway-test-app.jar", + "simple") + .start(); + prometheusContainer + .withCopyFileToContainer( + MountableFile.forClasspathResource("/prometheus.yaml"), + "/etc/prometheus/prometheus.yml") + .start(); + awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); + assertMetrics(); + } - @Test - public void testSsl() throws InterruptedException, IOException { - pushGatewayContainer - .withCopyFileToContainer(MountableFile.forClasspathResource("/pushgateway-ssl.yaml"), "/pushgateway/pushgateway-ssl.yaml") - .withCommand("--web.config.file", "pushgateway-ssl.yaml") - .start(); - sampleAppContainer - .withCommand("java", - "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", - "-jar", - "/app/pushgateway-test-app.jar", - "ssl" - ).start(); - prometheusContainer - .withCopyFileToContainer(MountableFile.forClasspathResource("/prometheus-ssl.yaml"), "/etc/prometheus/prometheus.yml") - .start(); - awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); - assertMetrics(); - } + @Test + public void testTextFormat() throws IOException, InterruptedException { + pushGatewayContainer.start(); + sampleAppContainer + .withCommand( + "java", + 
"-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", + "-jar", + "/app/pushgateway-test-app.jar", + "textFormat") + .start(); + prometheusContainer + .withCopyFileToContainer( + MountableFile.forClasspathResource("/prometheus.yaml"), + "/etc/prometheus/prometheus.yml") + .start(); + awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); + assertMetrics(); + } - @Test - public void testProtobuf() throws IOException, InterruptedException { - pushGatewayContainer - .start(); - sampleAppContainer - .withCommand("java", - "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", - "-jar", - "/app/pushgateway-test-app.jar", - "simple" - ).start(); - prometheusContainer - .withCommand("--enable-feature=native-histograms", "--config.file", "/etc/prometheus/prometheus.yml") - .withCopyFileToContainer(MountableFile.forClasspathResource("/prometheus.yaml"), "/etc/prometheus/prometheus.yml") - .start(); - awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); - assertNativeHistogram(); - } + @Test + public void testBasicAuth() throws IOException, InterruptedException { + pushGatewayContainer + .withCopyFileToContainer( + MountableFile.forClasspathResource("/pushgateway-basicauth.yaml"), + "/pushgateway/pushgateway-basicauth.yaml") + .withCommand("--web.config.file", "pushgateway-basicauth.yaml") + .start(); + sampleAppContainer + .withCommand( + "java", + "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", + "-jar", + "/app/pushgateway-test-app.jar", + "basicauth") + .start(); + prometheusContainer + .withCopyFileToContainer( + MountableFile.forClasspathResource("/prometheus-basicauth.yaml"), + "/etc/prometheus/prometheus.yml") + .start(); + awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); + assertMetrics(); + } - private void assertMetrics() throws IOException, InterruptedException { - double value = getValue("my_batch_job_duration_seconds", "job", "pushgateway-test-app"); - Assert.assertEquals(0.5, value, 0.0); - value = getValue("file_sizes_bytes_bucket", "job", "pushgateway-test-app", "le", "512"); - Assert.assertEquals(0.0, value, 0.0); - value = getValue("file_sizes_bytes_bucket", "job", "pushgateway-test-app", "le", "1024"); - Assert.assertEquals(2.0, value, 0.0); - value = getValue("file_sizes_bytes_bucket", "job", "pushgateway-test-app", "le", "+Inf"); - Assert.assertEquals(3.0, value, 0.0); - } + @Test + public void testSsl() throws InterruptedException, IOException { + pushGatewayContainer + .withCopyFileToContainer( + MountableFile.forClasspathResource("/pushgateway-ssl.yaml"), + "/pushgateway/pushgateway-ssl.yaml") + .withCommand("--web.config.file", "pushgateway-ssl.yaml") + .start(); + sampleAppContainer + .withCommand( + "java", + "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", + "-jar", + "/app/pushgateway-test-app.jar", + "ssl") + .start(); + prometheusContainer + .withCopyFileToContainer( + MountableFile.forClasspathResource("/prometheus-ssl.yaml"), + "/etc/prometheus/prometheus.yml") + .start(); + awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); + assertMetrics(); + } - private double getValue(String name, String... labels) throws IOException, InterruptedException { - String scrapeResponseJson = scrape(name); - Criteria criteria = Criteria.where("metric.__name__").eq(name); - for (int i = 0; i < labels.length; i += 2) { - criteria = criteria.and("metric." 
+ labels[i]).eq(labels[i + 1]); - } - JSONArray result = JsonPath.parse(scrapeResponseJson).read("$.data.result" + Filter.filter(criteria) + ".value[1]"); - Assert.assertEquals(1, result.size()); - return Double.valueOf(result.get(0).toString()); - } + @Test + public void testProtobuf() throws IOException, InterruptedException { + pushGatewayContainer.start(); + sampleAppContainer + .withCommand( + "java", + "-Dio.prometheus.exporter.pushgateway.address=pushgateway:9091", + "-jar", + "/app/pushgateway-test-app.jar", + "simple") + .start(); + prometheusContainer + .withCommand( + "--enable-feature=native-histograms", "--config.file", "/etc/prometheus/prometheus.yml") + .withCopyFileToContainer( + MountableFile.forClasspathResource("/prometheus.yaml"), + "/etc/prometheus/prometheus.yml") + .start(); + awaitTermination(sampleAppContainer, 10, TimeUnit.SECONDS); + assertNativeHistogram(); + } - private void assertNativeHistogram() throws IOException, InterruptedException { - double count = getNativeHistogramCount("file_sizes_bytes", "pushgateway-test-app"); - Assert.assertEquals(3, count, 0.0); - } + private void assertMetrics() throws IOException, InterruptedException { + double value = getValue("my_batch_job_duration_seconds", "job", "pushgateway-test-app"); + Assert.assertEquals(0.5, value, 0.0); + value = getValue("file_sizes_bytes_bucket", "job", "pushgateway-test-app", "le", "512"); + Assert.assertEquals(0.0, value, 0.0); + value = getValue("file_sizes_bytes_bucket", "job", "pushgateway-test-app", "le", "1024"); + Assert.assertEquals(2.0, value, 0.0); + value = getValue("file_sizes_bytes_bucket", "job", "pushgateway-test-app", "le", "+Inf"); + Assert.assertEquals(3.0, value, 0.0); + } - private double getNativeHistogramCount(String name, String job) throws IOException, InterruptedException { - String scrapeResponseJson = scrape("histogram_count(" + name + ")"); - Criteria criteria = Criteria.where("metric.job").eq(job); - JSONArray result = JsonPath.parse(scrapeResponseJson).read("$.data.result" + Filter.filter(criteria) + ".value[1]"); - return Double.valueOf(result.get(0).toString()); + private double getValue(String name, String... labels) throws IOException, InterruptedException { + String scrapeResponseJson = scrape(name); + Criteria criteria = Criteria.where("metric.__name__").eq(name); + for (int i = 0; i < labels.length; i += 2) { + criteria = criteria.and("metric." 
+ labels[i]).eq(labels[i + 1]); } + JSONArray result = + JsonPath.parse(scrapeResponseJson) + .read("$.data.result" + Filter.filter(criteria) + ".value[1]"); + Assert.assertEquals(1, result.size()); + return Double.valueOf(result.get(0).toString()); + } + + private void assertNativeHistogram() throws IOException, InterruptedException { + double count = getNativeHistogramCount("file_sizes_bytes", "pushgateway-test-app"); + Assert.assertEquals(3, count, 0.0); + } + + private double getNativeHistogramCount(String name, String job) + throws IOException, InterruptedException { + String scrapeResponseJson = scrape("histogram_count(" + name + ")"); + Criteria criteria = Criteria.where("metric.job").eq(job); + JSONArray result = + JsonPath.parse(scrapeResponseJson) + .read("$.data.result" + Filter.filter(criteria) + ".value[1]"); + return Double.valueOf(result.get(0).toString()); + } - private String scrape(String query) throws IOException, InterruptedException { - System.out.println("Querying http://" + prometheusContainer.getHost() + ":" + prometheusContainer.getMappedPort(9090)); - HttpUrl baseUrl = HttpUrl.parse("http://" + prometheusContainer.getHost() + ":" + prometheusContainer.getMappedPort(9090) + "/api/v1/query"); - HttpUrl url = baseUrl.newBuilder() - .addQueryParameter("query", query) - .build(); - long timeRemaining = TimeUnit.SECONDS.toMillis(15); - while (timeRemaining > 0) { - Request request = new Request.Builder().url(url).build(); - Call call = client.newCall(request); - Response response = call.execute(); - String body = response.body().string(); - if (!body.contains("\"result\":[]")) { - // Result when data is not available yet: - // {"status":"success","data":{"resultType":"vector","result":[]}} - return body; - } - Thread.sleep(250); - timeRemaining -= 250; - } - Assert.fail("timeout while scraping " + url); - return null; + private String scrape(String query) throws IOException, InterruptedException { + System.out.println( + "Querying http://" + + prometheusContainer.getHost() + + ":" + + prometheusContainer.getMappedPort(9090)); + HttpUrl baseUrl = + HttpUrl.parse( + "http://" + + prometheusContainer.getHost() + + ":" + + prometheusContainer.getMappedPort(9090) + + "/api/v1/query"); + HttpUrl url = baseUrl.newBuilder().addQueryParameter("query", query).build(); + long timeRemaining = TimeUnit.SECONDS.toMillis(15); + while (timeRemaining > 0) { + Request request = new Request.Builder().url(url).build(); + Call call = client.newCall(request); + Response response = call.execute(); + String body = response.body().string(); + if (!body.contains("\"result\":[]")) { + // Result when data is not available yet: + // {"status":"success","data":{"resultType":"vector","result":[]}} + return body; + } + Thread.sleep(250); + timeRemaining -= 250; } + Assert.fail("timeout while scraping " + url); + return null; + } - private void awaitTermination(GenericContainer container, long timeout, TimeUnit unit) throws InterruptedException { - long waitTimeMillis = 0; - while (container.isRunning()) { - if (waitTimeMillis > unit.toMillis(timeout)) { - Assert.fail(container.getContainerName() + " did not terminate after " + timeout + " " + unit + "."); - } - Thread.sleep(20); - waitTimeMillis += 20; - } + private void awaitTermination(GenericContainer container, long timeout, TimeUnit unit) + throws InterruptedException { + long waitTimeMillis = 0; + while (container.isRunning()) { + if (waitTimeMillis > unit.toMillis(timeout)) { + Assert.fail( + container.getContainerName() + + " did not terminate 
after " + + timeout + + " " + + unit + + "."); + } + Thread.sleep(20); + waitTimeMillis += 20; } + } } diff --git a/pom.xml b/pom.xml index 44eadc391..5ab8f42e2 100644 --- a/pom.xml +++ b/pom.xml @@ -169,7 +169,6 @@ com.diffplug.spotless spotless-maven-plugin - origin/main diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExemplarsProperties.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExemplarsProperties.java index ff955bc50..f660725bf 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExemplarsProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExemplarsProperties.java @@ -2,113 +2,135 @@ import java.util.Map; -/** - * Properties starting with io.prometheus.exemplars - */ +/** Properties starting with io.prometheus.exemplars */ public class ExemplarsProperties { - private static final String MIN_RETENTION_PERIOD_SECONDS = "minRetentionPeriodSeconds"; - private static final String MAX_RETENTION_PERIOD_SECONDS = "maxRetentionPeriodSeconds"; - private static final String SAMPLE_INTERVAL_MILLISECONDS = "sampleIntervalMilliseconds"; - - private final Integer minRetentionPeriodSeconds; - private final Integer maxRetentionPeriodSeconds; - private final Integer sampleIntervalMilliseconds; - - private ExemplarsProperties( - Integer minRetentionPeriodSeconds, - Integer maxRetentionPeriodSeconds, - Integer sampleIntervalMilliseconds) { - this.minRetentionPeriodSeconds = minRetentionPeriodSeconds; - this.maxRetentionPeriodSeconds = maxRetentionPeriodSeconds; - this.sampleIntervalMilliseconds = sampleIntervalMilliseconds; + private static final String MIN_RETENTION_PERIOD_SECONDS = "minRetentionPeriodSeconds"; + private static final String MAX_RETENTION_PERIOD_SECONDS = "maxRetentionPeriodSeconds"; + private static final String SAMPLE_INTERVAL_MILLISECONDS = "sampleIntervalMilliseconds"; + + private final Integer minRetentionPeriodSeconds; + private final Integer maxRetentionPeriodSeconds; + private final Integer sampleIntervalMilliseconds; + + private ExemplarsProperties( + Integer minRetentionPeriodSeconds, + Integer maxRetentionPeriodSeconds, + Integer sampleIntervalMilliseconds) { + this.minRetentionPeriodSeconds = minRetentionPeriodSeconds; + this.maxRetentionPeriodSeconds = maxRetentionPeriodSeconds; + this.sampleIntervalMilliseconds = sampleIntervalMilliseconds; + } + + /** + * Minimum time how long Exemplars are kept before they may be replaced by new Exemplars. + * + *
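getValue() and getNativeHistogramCount() above rely on rendering a JsonPath filter from Criteria and reading value[1] out of the Prometheus /api/v1/query vector result. The sketch below replays that parsing step against a hard-coded response body (the JSON literal is an invented example shaped like the API response), so it runs without containers.

    import com.jayway.jsonpath.Criteria;
    import com.jayway.jsonpath.Filter;
    import com.jayway.jsonpath.JsonPath;
    import net.minidev.json.JSONArray;

    public class PromQueryParseSketch {
      public static void main(String[] args) {
        // Hypothetical body returned by GET /api/v1/query?query=my_batch_job_duration_seconds
        String json =
            "{\"status\":\"success\",\"data\":{\"resultType\":\"vector\",\"result\":["
                + "{\"metric\":{\"__name__\":\"my_batch_job_duration_seconds\",\"job\":\"pushgateway-test-app\"},"
                + "\"value\":[1700000000.0,\"0.5\"]}]}}";

        // Same technique as getValue(): Filter.toString() renders as a JsonPath predicate.
        Criteria criteria =
            Criteria.where("metric.__name__")
                .eq("my_batch_job_duration_seconds")
                .and("metric.job")
                .eq("pushgateway-test-app");
        JSONArray result =
            JsonPath.parse(json).read("$.data.result" + Filter.filter(criteria) + ".value[1]");
        double value = Double.parseDouble(result.get(0).toString());
        System.out.println(value); // 0.5
      }
    }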

Default see {@code ExemplarSamplerConfig.DEFAULT_MIN_RETENTION_PERIOD_SECONDS} + */ + public Integer getMinRetentionPeriodSeconds() { + return minRetentionPeriodSeconds; + } + + /** + * Maximum time how long Exemplars are kept before they are evicted. + * + *

Default see {@code ExemplarSamplerConfig.DEFAULT_MAX_RETENTION_PERIOD_SECONDS} + */ + public Integer getMaxRetentionPeriodSeconds() { + return maxRetentionPeriodSeconds; + } + + /** + * Time between attempts to sample new Exemplars. This is a performance improvement for + * high-frequency applications, because with the sample interval we make sure that the exemplar + * sampler is not called for every single request. + * + *

Default see {@code ExemplarSamplerConfig.DEFAULT_SAMPLE_INTERVAL_MILLISECONDS} + */ + public Integer getSampleIntervalMilliseconds() { + return sampleIntervalMilliseconds; + } + + /** + * Note that this will remove entries from {@code properties}. This is because we want to know if + * there are unused properties remaining after all properties have been loaded. + */ + static ExemplarsProperties load(String prefix, Map properties) + throws PrometheusPropertiesException { + Integer minRetentionPeriodSeconds = + Util.loadInteger(prefix + "." + MIN_RETENTION_PERIOD_SECONDS, properties); + Integer maxRetentionPeriodSeconds = + Util.loadInteger(prefix + "." + MAX_RETENTION_PERIOD_SECONDS, properties); + Integer sampleIntervalMilliseconds = + Util.loadInteger(prefix + "." + SAMPLE_INTERVAL_MILLISECONDS, properties); + + Util.assertValue( + minRetentionPeriodSeconds, + t -> t > 0, + "Expecting value > 0.", + prefix, + MIN_RETENTION_PERIOD_SECONDS); + Util.assertValue( + minRetentionPeriodSeconds, + t -> t > 0, + "Expecting value > 0.", + prefix, + MAX_RETENTION_PERIOD_SECONDS); + Util.assertValue( + sampleIntervalMilliseconds, + t -> t > 0, + "Expecting value > 0.", + prefix, + SAMPLE_INTERVAL_MILLISECONDS); + + if (minRetentionPeriodSeconds != null && maxRetentionPeriodSeconds != null) { + if (minRetentionPeriodSeconds > maxRetentionPeriodSeconds) { + throw new PrometheusPropertiesException( + prefix + + "." + + MIN_RETENTION_PERIOD_SECONDS + + " must not be greater than " + + prefix + + "." + + MAX_RETENTION_PERIOD_SECONDS + + "."); + } } - /** - * Minimum time how long Exemplars are kept before they may be replaced by new Exemplars. - *

- * Default see {@code ExemplarSamplerConfig.DEFAULT_MIN_RETENTION_PERIOD_SECONDS} - */ - public Integer getMinRetentionPeriodSeconds() { - return minRetentionPeriodSeconds; - } - - /** - * Maximum time how long Exemplars are kept before they are evicted. - *

- * Default see {@code ExemplarSamplerConfig.DEFAULT_MAX_RETENTION_PERIOD_SECONDS} - */ - public Integer getMaxRetentionPeriodSeconds() { - return maxRetentionPeriodSeconds; - } + return new ExemplarsProperties( + minRetentionPeriodSeconds, maxRetentionPeriodSeconds, sampleIntervalMilliseconds); + } - /** - * Time between attempts to sample new Exemplars. This is a performance improvement for high-frequency - * applications, because with the sample interval we make sure that the exemplar sampler is not called - * for every single request. - *

- * Default see {@code ExemplarSamplerConfig.DEFAULT_SAMPLE_INTERVAL_MILLISECONDS} - */ - public Integer getSampleIntervalMilliseconds() { - return sampleIntervalMilliseconds; - } + public static Builder builder() { + return new Builder(); + } - /** - * Note that this will remove entries from {@code properties}. - * This is because we want to know if there are unused properties remaining after all properties have been loaded. - */ - static ExemplarsProperties load(String prefix, Map properties) throws PrometheusPropertiesException { - Integer minRetentionPeriodSeconds = Util.loadInteger(prefix + "." + MIN_RETENTION_PERIOD_SECONDS, properties); - Integer maxRetentionPeriodSeconds = Util.loadInteger(prefix + "." + MAX_RETENTION_PERIOD_SECONDS, properties); - Integer sampleIntervalMilliseconds = Util.loadInteger(prefix + "." + SAMPLE_INTERVAL_MILLISECONDS, properties); - - Util.assertValue(minRetentionPeriodSeconds, t -> t > 0, "Expecting value > 0.", prefix, MIN_RETENTION_PERIOD_SECONDS); - Util.assertValue(minRetentionPeriodSeconds, t -> t > 0, "Expecting value > 0.", prefix, MAX_RETENTION_PERIOD_SECONDS); - Util.assertValue(sampleIntervalMilliseconds, t -> t > 0, "Expecting value > 0.", prefix, SAMPLE_INTERVAL_MILLISECONDS); - - if (minRetentionPeriodSeconds != null && maxRetentionPeriodSeconds != null) { - if (minRetentionPeriodSeconds > maxRetentionPeriodSeconds) { - throw new PrometheusPropertiesException(prefix + "." + MIN_RETENTION_PERIOD_SECONDS + " must not be greater than " + prefix + "." + MAX_RETENTION_PERIOD_SECONDS + "."); - } - } - - return new ExemplarsProperties( - minRetentionPeriodSeconds, - maxRetentionPeriodSeconds, - sampleIntervalMilliseconds - ); - } + public static class Builder { - public static Builder builder() { - return new Builder(); - } + private Integer minRetentionPeriodSeconds; + private Integer maxRetentionPeriodSeconds; + private Integer sampleIntervalMilliseconds; - public static class Builder { + private Builder() {} - private Integer minRetentionPeriodSeconds; - private Integer maxRetentionPeriodSeconds; - private Integer sampleIntervalMilliseconds; - - private Builder() { - } - - public Builder minRetentionPeriodSeconds(int minRetentionPeriodSeconds) { - this.minRetentionPeriodSeconds = minRetentionPeriodSeconds; - return this; - } + public Builder minRetentionPeriodSeconds(int minRetentionPeriodSeconds) { + this.minRetentionPeriodSeconds = minRetentionPeriodSeconds; + return this; + } - public Builder maxRetentionPeriodSeconds(int maxRetentionPeriodSeconds) { - this.maxRetentionPeriodSeconds = maxRetentionPeriodSeconds; - return this; - } + public Builder maxRetentionPeriodSeconds(int maxRetentionPeriodSeconds) { + this.maxRetentionPeriodSeconds = maxRetentionPeriodSeconds; + return this; + } - public Builder sampleIntervalMilliseconds(int sampleIntervalMilliseconds) { - this.sampleIntervalMilliseconds = sampleIntervalMilliseconds; - return this; - } + public Builder sampleIntervalMilliseconds(int sampleIntervalMilliseconds) { + this.sampleIntervalMilliseconds = sampleIntervalMilliseconds; + return this; + } - public ExemplarsProperties build() { - return new ExemplarsProperties(minRetentionPeriodSeconds, maxRetentionPeriodSeconds, sampleIntervalMilliseconds); - } + public ExemplarsProperties build() { + return new ExemplarsProperties( + minRetentionPeriodSeconds, maxRetentionPeriodSeconds, sampleIntervalMilliseconds); } + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterFilterProperties.java 
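The exemplar settings above can also be built programmatically; the sketch below is the rough equivalent of setting the io.prometheus.exemplars.* properties, with arbitrary example values. Note that the range checks (values > 0, min not greater than max) are enforced in load(), not in the builder.

    import io.prometheus.metrics.config.ExemplarsProperties;

    public class ExemplarsConfigSketch {
      public static void main(String[] args) {
        // Roughly equivalent to:
        //   io.prometheus.exemplars.minRetentionPeriodSeconds=60
        //   io.prometheus.exemplars.maxRetentionPeriodSeconds=120
        //   io.prometheus.exemplars.sampleIntervalMilliseconds=20
        ExemplarsProperties exemplars =
            ExemplarsProperties.builder()
                .minRetentionPeriodSeconds(60)
                .maxRetentionPeriodSeconds(120)
                .sampleIntervalMilliseconds(20)
                .build();
        System.out.println(exemplars.getMinRetentionPeriodSeconds()); // 60
      }
    }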
b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterFilterProperties.java index e9cc46767..4758a48fe 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterFilterProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterFilterProperties.java @@ -6,112 +6,124 @@ import java.util.List; import java.util.Map; -/** - * Properties starting with io.prometheus.exporter.filter - */ +/** Properties starting with io.prometheus.exporter.filter */ public class ExporterFilterProperties { - public static final String METRIC_NAME_MUST_BE_EQUAL_TO = "metricNameMustBeEqualTo"; - public static final String METRIC_NAME_MUST_NOT_BE_EQUAL_TO = "metricNameMustNotBeEqualTo"; - public static final String METRIC_NAME_MUST_START_WITH = "metricNameMustStartWith"; - public static final String METRIC_NAME_MUST_NOT_START_WITH = "metricNameMustNotStartWith"; - - private final List allowedNames; - private final List excludedNames; - private final List allowedPrefixes; - private final List excludedPrefixes; - - private ExporterFilterProperties(List allowedNames, List excludedNames, List allowedPrefixes, List excludedPrefixes) { - this(allowedNames, excludedNames, allowedPrefixes, excludedPrefixes, ""); - } - - private ExporterFilterProperties(List allowedNames, List excludedNames, List allowedPrefixes, List excludedPrefixes, String prefix) { - this.allowedNames = allowedNames == null ? null : Collections.unmodifiableList(new ArrayList<>(allowedNames)); - this.excludedNames = excludedNames == null ? null : Collections.unmodifiableList(new ArrayList<>(excludedNames)); - this.allowedPrefixes = allowedPrefixes == null ? null : Collections.unmodifiableList(new ArrayList<>(allowedPrefixes)); - this.excludedPrefixes = excludedPrefixes == null ? null : Collections.unmodifiableList(new ArrayList<>(excludedPrefixes)); - validate(prefix); - } - - public List getAllowedMetricNames() { - return allowedNames; - } - - public List getExcludedMetricNames() { - return excludedNames; - } - - public List getAllowedMetricNamePrefixes() { - return allowedPrefixes; - } - - public List getExcludedMetricNamePrefixes() { - return excludedPrefixes; + public static final String METRIC_NAME_MUST_BE_EQUAL_TO = "metricNameMustBeEqualTo"; + public static final String METRIC_NAME_MUST_NOT_BE_EQUAL_TO = "metricNameMustNotBeEqualTo"; + public static final String METRIC_NAME_MUST_START_WITH = "metricNameMustStartWith"; + public static final String METRIC_NAME_MUST_NOT_START_WITH = "metricNameMustNotStartWith"; + + private final List allowedNames; + private final List excludedNames; + private final List allowedPrefixes; + private final List excludedPrefixes; + + private ExporterFilterProperties( + List allowedNames, + List excludedNames, + List allowedPrefixes, + List excludedPrefixes) { + this(allowedNames, excludedNames, allowedPrefixes, excludedPrefixes, ""); + } + + private ExporterFilterProperties( + List allowedNames, + List excludedNames, + List allowedPrefixes, + List excludedPrefixes, + String prefix) { + this.allowedNames = + allowedNames == null ? null : Collections.unmodifiableList(new ArrayList<>(allowedNames)); + this.excludedNames = + excludedNames == null ? null : Collections.unmodifiableList(new ArrayList<>(excludedNames)); + this.allowedPrefixes = + allowedPrefixes == null + ? null + : Collections.unmodifiableList(new ArrayList<>(allowedPrefixes)); + this.excludedPrefixes = + excludedPrefixes == null + ? 
null + : Collections.unmodifiableList(new ArrayList<>(excludedPrefixes)); + validate(prefix); + } + + public List getAllowedMetricNames() { + return allowedNames; + } + + public List getExcludedMetricNames() { + return excludedNames; + } + + public List getAllowedMetricNamePrefixes() { + return allowedPrefixes; + } + + public List getExcludedMetricNamePrefixes() { + return excludedPrefixes; + } + + private void validate(String prefix) throws PrometheusPropertiesException {} + + /** + * Note that this will remove entries from {@code properties}. This is because we want to know if + * there are unused properties remaining after all properties have been loaded. + */ + static ExporterFilterProperties load(String prefix, Map properties) + throws PrometheusPropertiesException { + List allowedNames = + Util.loadStringList(prefix + "." + METRIC_NAME_MUST_BE_EQUAL_TO, properties); + List excludedNames = + Util.loadStringList(prefix + "." + METRIC_NAME_MUST_NOT_BE_EQUAL_TO, properties); + List allowedPrefixes = + Util.loadStringList(prefix + "." + METRIC_NAME_MUST_START_WITH, properties); + List excludedPrefixes = + Util.loadStringList(prefix + "." + METRIC_NAME_MUST_NOT_START_WITH, properties); + return new ExporterFilterProperties( + allowedNames, excludedNames, allowedPrefixes, excludedPrefixes, prefix); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private List allowedNames; + private List excludedNames; + private List allowedPrefixes; + private List excludedPrefixes; + + private Builder() {} + + /** Only allowed metric names will be exposed. */ + public Builder allowedNames(String... allowedNames) { + this.allowedNames = Arrays.asList(allowedNames); + return this; } - private void validate(String prefix) throws PrometheusPropertiesException { + /** Excluded metric names will not be exposed. */ + public Builder excludedNames(String... excludedNames) { + this.excludedNames = Arrays.asList(excludedNames); + return this; } - /** - * Note that this will remove entries from {@code properties}. - * This is because we want to know if there are unused properties remaining after all properties have been loaded. - */ - static ExporterFilterProperties load(String prefix, Map properties) throws PrometheusPropertiesException { - List allowedNames = Util.loadStringList(prefix + "." + METRIC_NAME_MUST_BE_EQUAL_TO, properties); - List excludedNames = Util.loadStringList(prefix + "." + METRIC_NAME_MUST_NOT_BE_EQUAL_TO, properties); - List allowedPrefixes = Util.loadStringList(prefix + "." + METRIC_NAME_MUST_START_WITH, properties); - List excludedPrefixes = Util.loadStringList(prefix + "." + METRIC_NAME_MUST_NOT_START_WITH, properties); - return new ExporterFilterProperties(allowedNames, excludedNames, allowedPrefixes, excludedPrefixes, prefix); + /** Only metrics with a name starting with an allowed prefix will be exposed. */ + public Builder allowedPrefixes(String... allowedPrefixes) { + this.allowedPrefixes = Arrays.asList(allowedPrefixes); + return this; } - public static Builder builder() { - return new Builder(); + /** Metrics with a name starting with an excluded prefix will not be exposed. */ + public Builder excludedPrefixes(String... 
excludedPrefixes) { + this.excludedPrefixes = Arrays.asList(excludedPrefixes); + return this; } - public static class Builder { - - private List allowedNames; - private List excludedNames; - private List allowedPrefixes; - private List excludedPrefixes; - - private Builder() { - } - - /** - * Only allowed metric names will be exposed. - */ - public Builder allowedNames(String... allowedNames) { - this.allowedNames = Arrays.asList(allowedNames); - return this; - } - - /** - * Excluded metric names will not be exposed. - */ - public Builder excludedNames(String... excludedNames) { - this.excludedNames = Arrays.asList(excludedNames); - return this; - } - - /** - * Only metrics with a name starting with an allowed prefix will be exposed. - */ - public Builder allowedPrefixes(String... allowedPrefixes) { - this.allowedPrefixes = Arrays.asList(allowedPrefixes); - return this; - } - - /** - * Metrics with a name starting with an excluded prefix will not be exposed. - */ - public Builder excludedPrefixes(String... excludedPrefixes) { - this.excludedPrefixes = Arrays.asList(excludedPrefixes); - return this; - } - - public ExporterFilterProperties build() { - return new ExporterFilterProperties(allowedNames, excludedNames, allowedPrefixes, excludedPrefixes); - } + public ExporterFilterProperties build() { + return new ExporterFilterProperties( + allowedNames, excludedNames, allowedPrefixes, excludedPrefixes); } + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterHttpServerProperties.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterHttpServerProperties.java index c458d69d1..fe048ac6b 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterHttpServerProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterHttpServerProperties.java @@ -2,49 +2,48 @@ import java.util.Map; -/** - * Properties starting with io.prometheus.exporter.httpServer - */ +/** Properties starting with io.prometheus.exporter.httpServer */ public class ExporterHttpServerProperties { - private static final String PORT = "port"; - private final Integer port; + private static final String PORT = "port"; + private final Integer port; - private ExporterHttpServerProperties(Integer port) { - this.port = port; - } + private ExporterHttpServerProperties(Integer port) { + this.port = port; + } - public Integer getPort() { - return port; - } + public Integer getPort() { + return port; + } - /** - * Note that this will remove entries from {@code properties}. - * This is because we want to know if there are unused properties remaining after all properties have been loaded. - */ - static ExporterHttpServerProperties load(String prefix, Map properties) throws PrometheusPropertiesException { - Integer port = Util.loadInteger(prefix + "." + PORT, properties); - Util.assertValue(port, t -> t > 0, "Expecting value > 0", prefix, PORT); - return new ExporterHttpServerProperties(port); - } + /** + * Note that this will remove entries from {@code properties}. This is because we want to know if + * there are unused properties remaining after all properties have been loaded. + */ + static ExporterHttpServerProperties load(String prefix, Map properties) + throws PrometheusPropertiesException { + Integer port = Util.loadInteger(prefix + "." 
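A short usage sketch for the filter configuration above; the metric names and prefixes are made-up examples. The same settings can be supplied as io.prometheus.exporter.filter.* properties using the metricNameMustBeEqualTo / metricNameMustStartWith style keys defined in the class.

    import io.prometheus.metrics.config.ExporterFilterProperties;

    public class FilterConfigSketch {
      public static void main(String[] args) {
        // Expose one application metric plus everything starting with jvm_,
        // but keep a hypothetical jvm_debug_ prefix out of the scrape.
        ExporterFilterProperties filter =
            ExporterFilterProperties.builder()
                .allowedNames("my_batch_job_duration_seconds")
                .allowedPrefixes("jvm_")
                .excludedPrefixes("jvm_debug_")
                .build();
        System.out.println(filter.getAllowedMetricNamePrefixes()); // [jvm_]
      }
    }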
+ PORT, properties); + Util.assertValue(port, t -> t > 0, "Expecting value > 0", prefix, PORT); + return new ExporterHttpServerProperties(port); + } - public static Builder builder() { - return new Builder(); - } + public static Builder builder() { + return new Builder(); + } - public static class Builder { + public static class Builder { - private Integer port; + private Integer port; - private Builder() {} + private Builder() {} - public Builder port(int port) { - this.port = port; - return this; - } + public Builder port(int port) { + this.port = port; + return this; + } - public ExporterHttpServerProperties build() { - return new ExporterHttpServerProperties(port); - } + public ExporterHttpServerProperties build() { + return new ExporterHttpServerProperties(port); } + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterOpenTelemetryProperties.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterOpenTelemetryProperties.java index 77bcc21cb..b8498d6c5 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterOpenTelemetryProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterOpenTelemetryProperties.java @@ -6,188 +6,222 @@ // TODO: JavaDoc is currently only in OpenTelemetryExporter.Builder. Look there for reference. public class ExporterOpenTelemetryProperties { - // See https://github.com/open-telemetry/opentelemetry-java/blob/main/sdk-extensions/autoconfigure/README.md - private static String PROTOCOL = "protocol"; // otel.exporter.otlp.protocol - private static String ENDPOINT = "endpoint"; // otel.exporter.otlp.endpoint - private static String HEADERS = "headers"; // otel.exporter.otlp.headers - private static String INTERVAL_SECONDS = "intervalSeconds"; // otel.metric.export.interval - private static String TIMEOUT_SECONDS = "timeoutSeconds"; // otel.exporter.otlp.timeout - private static String SERVICE_NAME = "serviceName"; // otel.service.name - private static String SERVICE_NAMESPACE = "serviceNamespace"; - private static String SERVICE_INSTANCE_ID = "serviceInstanceId"; - private static String SERVICE_VERSION = "serviceVersion"; - private static String RESOURCE_ATTRIBUTES = "resourceAttributes"; // otel.resource.attributes - - private final String protocol; - private final String endpoint; - private final Map headers; - private final Integer intervalSeconds; - private final Integer timeoutSeconds; - private final String serviceName; - private final String serviceNamespace; - private final String serviceInstanceId; - private final String serviceVersion; - private final Map resourceAttributes; - - private ExporterOpenTelemetryProperties(String protocol, String endpoint, Map headers, Integer intervalSeconds, Integer timeoutSeconds, String serviceName, String serviceNamespace, String serviceInstanceId, String serviceVersion, Map resourceAttributes) { - this.protocol = protocol; - this.endpoint = endpoint; - this.headers = headers; - this.intervalSeconds = intervalSeconds; - this.timeoutSeconds = timeoutSeconds; - this.serviceName = serviceName; - this.serviceNamespace = serviceNamespace; - this.serviceInstanceId = serviceInstanceId; - this.serviceVersion = serviceVersion; - this.resourceAttributes = resourceAttributes; + // See + // https://github.com/open-telemetry/opentelemetry-java/blob/main/sdk-extensions/autoconfigure/README.md + private static String PROTOCOL = "protocol"; // otel.exporter.otlp.protocol + private static String 
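The HTTP server counterpart is a single setting; the sketch below is the programmatic equivalent of io.prometheus.exporter.httpServer.port=9401, where 9401 is just an example value.

    import io.prometheus.metrics.config.ExporterHttpServerProperties;

    public class HttpServerConfigSketch {
      public static void main(String[] args) {
        // load() rejects non-positive ports; the builder stores the value as given.
        ExporterHttpServerProperties httpServer =
            ExporterHttpServerProperties.builder().port(9401).build();
        System.out.println(httpServer.getPort()); // 9401
      }
    }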
ENDPOINT = "endpoint"; // otel.exporter.otlp.endpoint + private static String HEADERS = "headers"; // otel.exporter.otlp.headers + private static String INTERVAL_SECONDS = "intervalSeconds"; // otel.metric.export.interval + private static String TIMEOUT_SECONDS = "timeoutSeconds"; // otel.exporter.otlp.timeout + private static String SERVICE_NAME = "serviceName"; // otel.service.name + private static String SERVICE_NAMESPACE = "serviceNamespace"; + private static String SERVICE_INSTANCE_ID = "serviceInstanceId"; + private static String SERVICE_VERSION = "serviceVersion"; + private static String RESOURCE_ATTRIBUTES = "resourceAttributes"; // otel.resource.attributes + + private final String protocol; + private final String endpoint; + private final Map headers; + private final Integer intervalSeconds; + private final Integer timeoutSeconds; + private final String serviceName; + private final String serviceNamespace; + private final String serviceInstanceId; + private final String serviceVersion; + private final Map resourceAttributes; + + private ExporterOpenTelemetryProperties( + String protocol, + String endpoint, + Map headers, + Integer intervalSeconds, + Integer timeoutSeconds, + String serviceName, + String serviceNamespace, + String serviceInstanceId, + String serviceVersion, + Map resourceAttributes) { + this.protocol = protocol; + this.endpoint = endpoint; + this.headers = headers; + this.intervalSeconds = intervalSeconds; + this.timeoutSeconds = timeoutSeconds; + this.serviceName = serviceName; + this.serviceNamespace = serviceNamespace; + this.serviceInstanceId = serviceInstanceId; + this.serviceVersion = serviceVersion; + this.resourceAttributes = resourceAttributes; + } + + public String getProtocol() { + return protocol; + } + + public String getEndpoint() { + return endpoint; + } + + public Map getHeaders() { + return headers; + } + + public Integer getIntervalSeconds() { + return intervalSeconds; + } + + public Integer getTimeoutSeconds() { + return timeoutSeconds; + } + + public String getServiceName() { + return serviceName; + } + + public String getServiceNamespace() { + return serviceNamespace; + } + + public String getServiceInstanceId() { + return serviceInstanceId; + } + + public String getServiceVersion() { + return serviceVersion; + } + + public Map getResourceAttributes() { + return resourceAttributes; + } + + /** + * Note that this will remove entries from {@code properties}. This is because we want to know if + * there are unused properties remaining after all properties have been loaded. + */ + static ExporterOpenTelemetryProperties load(String prefix, Map properties) + throws PrometheusPropertiesException { + String protocol = Util.loadString(prefix + "." + PROTOCOL, properties); + String endpoint = Util.loadString(prefix + "." + ENDPOINT, properties); + Map headers = Util.loadMap(prefix + "." + HEADERS, properties); + Integer intervalSeconds = Util.loadInteger(prefix + "." + INTERVAL_SECONDS, properties); + Integer timeoutSeconds = Util.loadInteger(prefix + "." + TIMEOUT_SECONDS, properties); + String serviceName = Util.loadString(prefix + "." + SERVICE_NAME, properties); + String serviceNamespace = Util.loadString(prefix + "." + SERVICE_NAMESPACE, properties); + String serviceInstanceId = Util.loadString(prefix + "." + SERVICE_INSTANCE_ID, properties); + String serviceVersion = Util.loadString(prefix + "." + SERVICE_VERSION, properties); + Map resourceAttributes = + Util.loadMap(prefix + "." 
+ RESOURCE_ATTRIBUTES, properties); + Util.assertValue(intervalSeconds, t -> t > 0, "Expecting value > 0", prefix, INTERVAL_SECONDS); + Util.assertValue(timeoutSeconds, t -> t > 0, "Expecting value > 0", prefix, TIMEOUT_SECONDS); + if (protocol != null && !protocol.equals("grpc") && !protocol.equals("http/protobuf")) { + throw new PrometheusPropertiesException( + protocol + + ": Unsupported OpenTelemetry exporter protocol. Expecting grpc or http/protobuf"); } - - public String getProtocol() { - return protocol; - } - - public String getEndpoint() { - return endpoint; - } - - public Map getHeaders() { - return headers; + return new ExporterOpenTelemetryProperties( + protocol, + endpoint, + headers, + intervalSeconds, + timeoutSeconds, + serviceName, + serviceNamespace, + serviceInstanceId, + serviceVersion, + resourceAttributes); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private String protocol; + private String endpoint; + private Map headers = new HashMap<>(); + private Integer intervalSeconds; + private Integer timeoutSeconds; + private String serviceName; + private String serviceNamespace; + private String serviceInstanceId; + private String serviceVersion; + private Map resourceAttributes = new HashMap<>(); + + private Builder() {} + + public Builder protocol(String protocol) { + if (!protocol.equals("grpc") && !protocol.equals("http/protobuf")) { + throw new IllegalArgumentException( + protocol + ": Unsupported protocol. Expecting grpc or http/protobuf"); + } + this.protocol = protocol; + return this; } - public Integer getIntervalSeconds() { - return intervalSeconds; + public Builder endpoint(String endpoint) { + this.endpoint = endpoint; + return this; } - public Integer getTimeoutSeconds() { - return timeoutSeconds; + /** Add a request header. Call multiple times to add multiple headers. */ + public Builder header(String name, String value) { + this.headers.put(name, value); + return this; } - public String getServiceName() { - return serviceName; + public Builder intervalSeconds(int intervalSeconds) { + if (intervalSeconds <= 0) { + throw new IllegalArgumentException(intervalSeconds + ": Expecting intervalSeconds > 0"); + } + this.intervalSeconds = intervalSeconds; + return this; } - public String getServiceNamespace() { - return serviceNamespace; + public Builder timeoutSeconds(int timeoutSeconds) { + if (timeoutSeconds <= 0) { + throw new IllegalArgumentException(timeoutSeconds + ": Expecting timeoutSeconds > 0"); + } + this.timeoutSeconds = timeoutSeconds; + return this; } - public String getServiceInstanceId() { - return serviceInstanceId; + public Builder serviceName(String serviceName) { + this.serviceName = serviceName; + return this; } - public String getServiceVersion() { - return serviceVersion; + public Builder serviceNamespace(String serviceNamespace) { + this.serviceNamespace = serviceNamespace; + return this; } - public Map getResourceAttributes() { - return resourceAttributes; + public Builder serviceInstanceId(String serviceInstanceId) { + this.serviceInstanceId = serviceInstanceId; + return this; } - /** - * Note that this will remove entries from {@code properties}. - * This is because we want to know if there are unused properties remaining after all properties have been loaded. - */ - static ExporterOpenTelemetryProperties load(String prefix, Map properties) throws PrometheusPropertiesException { - String protocol = Util.loadString(prefix + "." 
+ PROTOCOL, properties); - String endpoint = Util.loadString(prefix + "." + ENDPOINT, properties); - Map headers = Util.loadMap(prefix + "." + HEADERS, properties); - Integer intervalSeconds = Util.loadInteger(prefix + "." + INTERVAL_SECONDS, properties); - Integer timeoutSeconds = Util.loadInteger(prefix + "." + TIMEOUT_SECONDS, properties); - String serviceName = Util.loadString(prefix + "." + SERVICE_NAME, properties); - String serviceNamespace = Util.loadString(prefix + "." + SERVICE_NAMESPACE, properties); - String serviceInstanceId = Util.loadString(prefix + "." + SERVICE_INSTANCE_ID, properties); - String serviceVersion = Util.loadString(prefix + "." + SERVICE_VERSION, properties); - Map resourceAttributes = Util.loadMap(prefix + "." + RESOURCE_ATTRIBUTES, properties); - Util.assertValue(intervalSeconds, t -> t > 0, "Expecting value > 0", prefix, INTERVAL_SECONDS); - Util.assertValue(timeoutSeconds, t -> t > 0, "Expecting value > 0", prefix, TIMEOUT_SECONDS); - if (protocol != null && !protocol.equals("grpc") && !protocol.equals("http/protobuf")) { - throw new PrometheusPropertiesException(protocol + ": Unsupported OpenTelemetry exporter protocol. Expecting grpc or http/protobuf"); - } - return new ExporterOpenTelemetryProperties(protocol, endpoint, headers, intervalSeconds, timeoutSeconds, serviceName, serviceNamespace, serviceInstanceId, serviceVersion, resourceAttributes); + public Builder serviceVersion(String serviceVersion) { + this.serviceVersion = serviceVersion; + return this; } - public static Builder builder() { - return new Builder(); + public Builder resourceAttribute(String name, String value) { + this.resourceAttributes.put(name, value); + return this; } - public static class Builder { - - private String protocol; - private String endpoint; - private Map headers = new HashMap<>(); - private Integer intervalSeconds; - private Integer timeoutSeconds; - private String serviceName; - private String serviceNamespace; - private String serviceInstanceId; - private String serviceVersion; - private Map resourceAttributes = new HashMap<>(); - - private Builder() {} - - public Builder protocol(String protocol) { - if (!protocol.equals("grpc") && !protocol.equals("http/protobuf")) { - throw new IllegalArgumentException(protocol + ": Unsupported protocol. Expecting grpc or http/protobuf"); - } - this.protocol = protocol; - return this; - } - - public Builder endpoint(String endpoint) { - this.endpoint = endpoint; - return this; - } - - /** - * Add a request header. Call multiple times to add multiple headers. 
- */ - public Builder header(String name, String value) { - this.headers.put(name, value); - return this; - } - - public Builder intervalSeconds(int intervalSeconds) { - if (intervalSeconds <= 0) { - throw new IllegalArgumentException(intervalSeconds + ": Expecting intervalSeconds > 0"); - } - this.intervalSeconds = intervalSeconds; - return this; - } - - public Builder timeoutSeconds(int timeoutSeconds) { - if (timeoutSeconds <= 0) { - throw new IllegalArgumentException(timeoutSeconds + ": Expecting timeoutSeconds > 0"); - } - this.timeoutSeconds = timeoutSeconds; - return this; - } - - public Builder serviceName(String serviceName) { - this.serviceName = serviceName; - return this; - } - - public Builder serviceNamespace(String serviceNamespace) { - this.serviceNamespace = serviceNamespace; - return this; - } - - public Builder serviceInstanceId(String serviceInstanceId) { - this.serviceInstanceId = serviceInstanceId; - return this; - } - - public Builder serviceVersion(String serviceVersion) { - this.serviceVersion = serviceVersion; - return this; - } - - public Builder resourceAttribute(String name, String value) { - this.resourceAttributes.put(name, value); - return this; - } - - public ExporterOpenTelemetryProperties build() { - return new ExporterOpenTelemetryProperties(protocol, endpoint, headers, intervalSeconds, timeoutSeconds, serviceName, serviceNamespace, serviceInstanceId, serviceVersion, resourceAttributes); - } + public ExporterOpenTelemetryProperties build() { + return new ExporterOpenTelemetryProperties( + protocol, + endpoint, + headers, + intervalSeconds, + timeoutSeconds, + serviceName, + serviceNamespace, + serviceInstanceId, + serviceVersion, + resourceAttributes); } + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterProperties.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterProperties.java index 9493b571c..dd0606e6d 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterProperties.java @@ -2,77 +2,71 @@ import java.util.Map; -/** - * Properties starting with io.prometheus.exporter - */ +/** Properties starting with io.prometheus.exporter */ public class ExporterProperties { - private static final String INCLUDE_CREATED_TIMESTAMPS = "includeCreatedTimestamps"; - private static final String EXEMPLARS_ON_ALL_METRIC_TYPES = "exemplarsOnAllMetricTypes"; + private static final String INCLUDE_CREATED_TIMESTAMPS = "includeCreatedTimestamps"; + private static final String EXEMPLARS_ON_ALL_METRIC_TYPES = "exemplarsOnAllMetricTypes"; - private final Boolean includeCreatedTimestamps; - private final Boolean exemplarsOnAllMetricTypes; + private final Boolean includeCreatedTimestamps; + private final Boolean exemplarsOnAllMetricTypes; - private ExporterProperties(Boolean includeCreatedTimestamps, Boolean exemplarsOnAllMetricTypes) { - this.includeCreatedTimestamps = includeCreatedTimestamps; - this.exemplarsOnAllMetricTypes = exemplarsOnAllMetricTypes; - } + private ExporterProperties(Boolean includeCreatedTimestamps, Boolean exemplarsOnAllMetricTypes) { + this.includeCreatedTimestamps = includeCreatedTimestamps; + this.exemplarsOnAllMetricTypes = exemplarsOnAllMetricTypes; + } - /** - * Include the {@code _created} timestamps in text format? Default is {@code false}. 
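A usage sketch for the OpenTelemetry exporter configuration above. The endpoint, header, and resource attribute values are illustrative assumptions; protocol() accepts only "grpc" or "http/protobuf", and intervalSeconds()/timeoutSeconds() reject non-positive values, as the builder enforces.

    import io.prometheus.metrics.config.ExporterOpenTelemetryProperties;

    public class OtelExporterConfigSketch {
      public static void main(String[] args) {
        ExporterOpenTelemetryProperties otel =
            ExporterOpenTelemetryProperties.builder()
                .protocol("http/protobuf") // "grpc" is the only other accepted value
                .endpoint("http://localhost:4318") // hypothetical collector endpoint
                .header("Authorization", "Bearer dummy-token") // hypothetical header
                .intervalSeconds(60)
                .timeoutSeconds(10)
                .serviceName("my-service")
                .resourceAttribute("deployment.environment", "test")
                .build();
        System.out.println(otel.getProtocol()); // http/protobuf
      }
    }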
- */ - public boolean getIncludeCreatedTimestamps() { - return includeCreatedTimestamps != null && includeCreatedTimestamps; - } + /** Include the {@code _created} timestamps in text format? Default is {@code false}. */ + public boolean getIncludeCreatedTimestamps() { + return includeCreatedTimestamps != null && includeCreatedTimestamps; + } - /** - * Allow Exemplars on all metric types in OpenMetrics format? - * Default is {@code false}, which means Exemplars will only be added for Counters and Histogram buckets. - */ - public boolean getExemplarsOnAllMetricTypes() { - return exemplarsOnAllMetricTypes != null && exemplarsOnAllMetricTypes; - } + /** + * Allow Exemplars on all metric types in OpenMetrics format? Default is {@code false}, which + * means Exemplars will only be added for Counters and Histogram buckets. + */ + public boolean getExemplarsOnAllMetricTypes() { + return exemplarsOnAllMetricTypes != null && exemplarsOnAllMetricTypes; + } - /** - * Note that this will remove entries from {@code properties}. - * This is because we want to know if there are unused properties remaining after all properties have been loaded. - */ - static ExporterProperties load(String prefix, Map properties) throws PrometheusPropertiesException { - Boolean includeCreatedTimestamps = Util.loadBoolean(prefix + "." + INCLUDE_CREATED_TIMESTAMPS, properties); - Boolean exemplarsOnAllMetricTypes = Util.loadBoolean(prefix + "." + EXEMPLARS_ON_ALL_METRIC_TYPES, properties); - return new ExporterProperties(includeCreatedTimestamps, exemplarsOnAllMetricTypes); - } + /** + * Note that this will remove entries from {@code properties}. This is because we want to know if + * there are unused properties remaining after all properties have been loaded. + */ + static ExporterProperties load(String prefix, Map properties) + throws PrometheusPropertiesException { + Boolean includeCreatedTimestamps = + Util.loadBoolean(prefix + "." + INCLUDE_CREATED_TIMESTAMPS, properties); + Boolean exemplarsOnAllMetricTypes = + Util.loadBoolean(prefix + "." + EXEMPLARS_ON_ALL_METRIC_TYPES, properties); + return new ExporterProperties(includeCreatedTimestamps, exemplarsOnAllMetricTypes); + } - public static Builder builder() { - return new Builder(); - } + public static Builder builder() { + return new Builder(); + } - public static class Builder { + public static class Builder { - private Boolean includeCreatedTimestamps; - private Boolean exemplarsOnAllMetricTypes; + private Boolean includeCreatedTimestamps; + private Boolean exemplarsOnAllMetricTypes; - private Builder() { - } + private Builder() {} - /** - * See {@link #getIncludeCreatedTimestamps()} - */ - public Builder includeCreatedTimestamps(boolean includeCreatedTimestamps) { - this.includeCreatedTimestamps = includeCreatedTimestamps; - return this; - } + /** See {@link #getIncludeCreatedTimestamps()} */ + public Builder includeCreatedTimestamps(boolean includeCreatedTimestamps) { + this.includeCreatedTimestamps = includeCreatedTimestamps; + return this; + } - /** - * See {@link #getExemplarsOnAllMetricTypes()}. - */ - public Builder exemplarsOnAllMetricTypes(boolean exemplarsOnAllMetricTypes) { - this.exemplarsOnAllMetricTypes = exemplarsOnAllMetricTypes; - return this; - } + /** See {@link #getExemplarsOnAllMetricTypes()}. 
*/ + public Builder exemplarsOnAllMetricTypes(boolean exemplarsOnAllMetricTypes) { + this.exemplarsOnAllMetricTypes = exemplarsOnAllMetricTypes; + return this; + } - public ExporterProperties build() { - return new ExporterProperties(includeCreatedTimestamps, exemplarsOnAllMetricTypes); - } + public ExporterProperties build() { + return new ExporterProperties(includeCreatedTimestamps, exemplarsOnAllMetricTypes); } + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterPushgatewayProperties.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterPushgatewayProperties.java index f38a7f4af..4a794eba2 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterPushgatewayProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/ExporterPushgatewayProperties.java @@ -4,56 +4,55 @@ public class ExporterPushgatewayProperties { - private static final String ADDRESS = "address"; - private static final String JOB = "job"; - private static final String SCHEME = "scheme"; - private final String scheme; - private final String address; - private final String job; - - private ExporterPushgatewayProperties(String address, String job, String scheme) { - this.address = address; - this.job = job; - this.scheme = scheme; - } - - /** - * Address of the Pushgateway in the form {@code host:port}. - * Default is {@code localhost:9091} - */ - public String getAddress() { - return address; - } - - /** - * {@code job} label for metrics being pushed. - * Default is the name of the JAR file that is running. - */ - public String getJob() { - return job; - } - - /** - * Scheme to be used when pushing metrics to the pushgateway. - * Must be "http" or "https". Default is "http". - */ - public String getScheme() { - return scheme; - } - - /** - * Note that this will remove entries from {@code properties}. - * This is because we want to know if there are unused properties remaining after all properties have been loaded. - */ - static ExporterPushgatewayProperties load(String prefix, Map properties) throws PrometheusPropertiesException { - String address = Util.loadString(prefix + "." + ADDRESS, properties); - String job = Util.loadString(prefix + "." + JOB, properties); - String scheme = Util.loadString(prefix + "." + SCHEME, properties); - if (scheme != null) { - if (!scheme.equals("http") && !scheme.equals("https")) { - throw new PrometheusPropertiesException(prefix + "." + SCHEME + "=" + scheme + ": Illegal value. Expecting 'http' or 'https'."); - } - } - return new ExporterPushgatewayProperties(address, job, scheme); + private static final String ADDRESS = "address"; + private static final String JOB = "job"; + private static final String SCHEME = "scheme"; + private final String scheme; + private final String address; + private final String job; + + private ExporterPushgatewayProperties(String address, String job, String scheme) { + this.address = address; + this.job = job; + this.scheme = scheme; + } + + /** Address of the Pushgateway in the form {@code host:port}. Default is {@code localhost:9091} */ + public String getAddress() { + return address; + } + + /** + * {@code job} label for metrics being pushed. Default is the name of the JAR file that is + * running. + */ + public String getJob() { + return job; + } + + /** + * Scheme to be used when pushing metrics to the pushgateway. Must be "http" or "https". Default + * is "http". 
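To make the Pushgateway settings above concrete, a hedged sketch that overrides them through the external-properties entry point of the loader shown later in this diff; the host name is a placeholder and java.util.Properties is assumed to be imported.

Properties overrides = new Properties();
overrides.setProperty(
    "io.prometheus.exporter.pushgateway.address", "pushgateway.example.com:9091");
overrides.setProperty("io.prometheus.exporter.pushgateway.scheme", "https"); // "http" or "https"
// "job" is left unset here, so it falls back to the name of the running JAR file.
PrometheusProperties config = PrometheusPropertiesLoader.load(overrides);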
+ */ + public String getScheme() { + return scheme; + } + + /** + * Note that this will remove entries from {@code properties}. This is because we want to know if + * there are unused properties remaining after all properties have been loaded. + */ + static ExporterPushgatewayProperties load(String prefix, Map properties) + throws PrometheusPropertiesException { + String address = Util.loadString(prefix + "." + ADDRESS, properties); + String job = Util.loadString(prefix + "." + JOB, properties); + String scheme = Util.loadString(prefix + "." + SCHEME, properties); + if (scheme != null) { + if (!scheme.equals("http") && !scheme.equals("https")) { + throw new PrometheusPropertiesException( + prefix + "." + SCHEME + "=" + scheme + ": Illegal value. Expecting 'http' or 'https'."); + } } + return new ExporterPushgatewayProperties(address, job, scheme); + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/MetricsProperties.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/MetricsProperties.java index 4574d0198..744822005 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/MetricsProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/MetricsProperties.java @@ -1,432 +1,455 @@ package io.prometheus.metrics.config; +import static java.util.Collections.unmodifiableList; + import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; -import static java.util.Collections.unmodifiableList; - -/** - * Properties starting with io.prometheus.metrics - */ +/** Properties starting with io.prometheus.metrics */ public class MetricsProperties { - private static final String EXEMPLARS_ENABLED = "exemplarsEnabled"; - private static final String HISTOGRAM_NATIVE_ONLY = "histogramNativeOnly"; - private static final String HISTOGRAM_CLASSIC_ONLY = "histogramClassicOnly"; - private static final String HISTOGRAM_CLASSIC_UPPER_BOUNDS = "histogramClassicUpperBounds"; - private static final String HISTOGRAM_NATIVE_INITIAL_SCHEMA = "histogramNativeInitialSchema"; - private static final String HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD = "histogramNativeMinZeroThreshold"; - private static final String HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD = "histogramNativeMaxZeroThreshold"; - private static final String HISTOGRAM_NATIVE_MAX_NUMBER_OF_BUCKETS = "histogramNativeMaxNumberOfBuckets"; // 0 means unlimited number of buckets - private static final String HISTOGRAM_NATIVE_RESET_DURATION_SECONDS = "histogramNativeResetDurationSeconds"; // 0 means no reset - private static final String SUMMARY_QUANTILES = "summaryQuantiles"; - private static final String SUMMARY_QUANTILE_ERRORS = "summaryQuantileErrors"; - private static final String SUMMARY_MAX_AGE_SECONDS = "summaryMaxAgeSeconds"; - private static final String SUMMARY_NUMBER_OF_AGE_BUCKETS = "summaryNumberOfAgeBuckets"; - - private final Boolean exemplarsEnabled; - private final Boolean histogramNativeOnly; - private final Boolean histogramClassicOnly; - private final List histogramClassicUpperBounds; - private final Integer histogramNativeInitialSchema; - private final Double histogramNativeMinZeroThreshold; - private final Double histogramNativeMaxZeroThreshold; - private final Integer histogramNativeMaxNumberOfBuckets; - private final Long histogramNativeResetDurationSeconds; - private final List summaryQuantiles; - private final List summaryQuantileErrors; - private final Long summaryMaxAgeSeconds; - private final 
Integer summaryNumberOfAgeBuckets; - - public MetricsProperties( - Boolean exemplarsEnabled, - Boolean histogramNativeOnly, - Boolean histogramClassicOnly, - List histogramClassicUpperBounds, - Integer histogramNativeInitialSchema, - Double histogramNativeMinZeroThreshold, - Double histogramNativeMaxZeroThreshold, - Integer histogramNativeMaxNumberOfBuckets, - Long histogramNativeResetDurationSeconds, - List summaryQuantiles, - List summaryQuantileErrors, - Long summaryMaxAgeSeconds, - Integer summaryNumberOfAgeBuckets) { - this(exemplarsEnabled, - histogramNativeOnly, - histogramClassicOnly, - histogramClassicUpperBounds, - histogramNativeInitialSchema, - histogramNativeMinZeroThreshold, - histogramNativeMaxZeroThreshold, - histogramNativeMaxNumberOfBuckets, - histogramNativeResetDurationSeconds, - summaryQuantiles, - summaryQuantileErrors, - summaryMaxAgeSeconds, - summaryNumberOfAgeBuckets, - ""); + private static final String EXEMPLARS_ENABLED = "exemplarsEnabled"; + private static final String HISTOGRAM_NATIVE_ONLY = "histogramNativeOnly"; + private static final String HISTOGRAM_CLASSIC_ONLY = "histogramClassicOnly"; + private static final String HISTOGRAM_CLASSIC_UPPER_BOUNDS = "histogramClassicUpperBounds"; + private static final String HISTOGRAM_NATIVE_INITIAL_SCHEMA = "histogramNativeInitialSchema"; + private static final String HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD = + "histogramNativeMinZeroThreshold"; + private static final String HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD = + "histogramNativeMaxZeroThreshold"; + private static final String HISTOGRAM_NATIVE_MAX_NUMBER_OF_BUCKETS = + "histogramNativeMaxNumberOfBuckets"; // 0 means unlimited number of buckets + private static final String HISTOGRAM_NATIVE_RESET_DURATION_SECONDS = + "histogramNativeResetDurationSeconds"; // 0 means no reset + private static final String SUMMARY_QUANTILES = "summaryQuantiles"; + private static final String SUMMARY_QUANTILE_ERRORS = "summaryQuantileErrors"; + private static final String SUMMARY_MAX_AGE_SECONDS = "summaryMaxAgeSeconds"; + private static final String SUMMARY_NUMBER_OF_AGE_BUCKETS = "summaryNumberOfAgeBuckets"; + + private final Boolean exemplarsEnabled; + private final Boolean histogramNativeOnly; + private final Boolean histogramClassicOnly; + private final List histogramClassicUpperBounds; + private final Integer histogramNativeInitialSchema; + private final Double histogramNativeMinZeroThreshold; + private final Double histogramNativeMaxZeroThreshold; + private final Integer histogramNativeMaxNumberOfBuckets; + private final Long histogramNativeResetDurationSeconds; + private final List summaryQuantiles; + private final List summaryQuantileErrors; + private final Long summaryMaxAgeSeconds; + private final Integer summaryNumberOfAgeBuckets; + + public MetricsProperties( + Boolean exemplarsEnabled, + Boolean histogramNativeOnly, + Boolean histogramClassicOnly, + List histogramClassicUpperBounds, + Integer histogramNativeInitialSchema, + Double histogramNativeMinZeroThreshold, + Double histogramNativeMaxZeroThreshold, + Integer histogramNativeMaxNumberOfBuckets, + Long histogramNativeResetDurationSeconds, + List summaryQuantiles, + List summaryQuantileErrors, + Long summaryMaxAgeSeconds, + Integer summaryNumberOfAgeBuckets) { + this( + exemplarsEnabled, + histogramNativeOnly, + histogramClassicOnly, + histogramClassicUpperBounds, + histogramNativeInitialSchema, + histogramNativeMinZeroThreshold, + histogramNativeMaxZeroThreshold, + histogramNativeMaxNumberOfBuckets, + 
histogramNativeResetDurationSeconds, + summaryQuantiles, + summaryQuantileErrors, + summaryMaxAgeSeconds, + summaryNumberOfAgeBuckets, + ""); + } + + private MetricsProperties( + Boolean exemplarsEnabled, + Boolean histogramNativeOnly, + Boolean histogramClassicOnly, + List histogramClassicUpperBounds, + Integer histogramNativeInitialSchema, + Double histogramNativeMinZeroThreshold, + Double histogramNativeMaxZeroThreshold, + Integer histogramNativeMaxNumberOfBuckets, + Long histogramNativeResetDurationSeconds, + List summaryQuantiles, + List summaryQuantileErrors, + Long summaryMaxAgeSeconds, + Integer summaryNumberOfAgeBuckets, + String configPropertyPrefix) { + this.exemplarsEnabled = exemplarsEnabled; + this.histogramNativeOnly = isHistogramNativeOnly(histogramClassicOnly, histogramNativeOnly); + this.histogramClassicOnly = isHistogramClassicOnly(histogramClassicOnly, histogramNativeOnly); + this.histogramClassicUpperBounds = + histogramClassicUpperBounds == null + ? null + : unmodifiableList(new ArrayList<>(histogramClassicUpperBounds)); + this.histogramNativeInitialSchema = histogramNativeInitialSchema; + this.histogramNativeMinZeroThreshold = histogramNativeMinZeroThreshold; + this.histogramNativeMaxZeroThreshold = histogramNativeMaxZeroThreshold; + this.histogramNativeMaxNumberOfBuckets = histogramNativeMaxNumberOfBuckets; + this.histogramNativeResetDurationSeconds = histogramNativeResetDurationSeconds; + this.summaryQuantiles = + summaryQuantiles == null ? null : unmodifiableList(new ArrayList<>(summaryQuantiles)); + this.summaryQuantileErrors = + summaryQuantileErrors == null + ? null + : unmodifiableList(new ArrayList<>(summaryQuantileErrors)); + this.summaryMaxAgeSeconds = summaryMaxAgeSeconds; + this.summaryNumberOfAgeBuckets = summaryNumberOfAgeBuckets; + validate(configPropertyPrefix); + } + + private Boolean isHistogramClassicOnly( + Boolean histogramClassicOnly, Boolean histogramNativeOnly) { + if (histogramClassicOnly == null && histogramNativeOnly == null) { + return null; } - - private MetricsProperties( - Boolean exemplarsEnabled, - Boolean histogramNativeOnly, - Boolean histogramClassicOnly, - List histogramClassicUpperBounds, - Integer histogramNativeInitialSchema, - Double histogramNativeMinZeroThreshold, - Double histogramNativeMaxZeroThreshold, - Integer histogramNativeMaxNumberOfBuckets, - Long histogramNativeResetDurationSeconds, - List summaryQuantiles, - List summaryQuantileErrors, - Long summaryMaxAgeSeconds, - Integer summaryNumberOfAgeBuckets, - String configPropertyPrefix) { - this.exemplarsEnabled = exemplarsEnabled; - this.histogramNativeOnly = isHistogramNativeOnly(histogramClassicOnly, histogramNativeOnly); - this.histogramClassicOnly = isHistogramClassicOnly(histogramClassicOnly, histogramNativeOnly); - this.histogramClassicUpperBounds = histogramClassicUpperBounds == null ? null : unmodifiableList(new ArrayList<>(histogramClassicUpperBounds)); - this.histogramNativeInitialSchema = histogramNativeInitialSchema; - this.histogramNativeMinZeroThreshold = histogramNativeMinZeroThreshold; - this.histogramNativeMaxZeroThreshold = histogramNativeMaxZeroThreshold; - this.histogramNativeMaxNumberOfBuckets = histogramNativeMaxNumberOfBuckets; - this.histogramNativeResetDurationSeconds = histogramNativeResetDurationSeconds; - this.summaryQuantiles = summaryQuantiles == null ? null : unmodifiableList(new ArrayList<>(summaryQuantiles)); - this.summaryQuantileErrors = summaryQuantileErrors == null ? 
null : unmodifiableList(new ArrayList<>(summaryQuantileErrors)); - this.summaryMaxAgeSeconds = summaryMaxAgeSeconds; - this.summaryNumberOfAgeBuckets = summaryNumberOfAgeBuckets; - validate(configPropertyPrefix); + if (histogramClassicOnly != null) { + return histogramClassicOnly; } + return !histogramNativeOnly; + } - - private Boolean isHistogramClassicOnly(Boolean histogramClassicOnly, Boolean histogramNativeOnly) { - if (histogramClassicOnly == null && histogramNativeOnly == null) { - return null; - } - if (histogramClassicOnly != null) { - return histogramClassicOnly; - } - return !histogramNativeOnly; + private Boolean isHistogramNativeOnly(Boolean histogramClassicOnly, Boolean histogramNativeOnly) { + if (histogramClassicOnly == null && histogramNativeOnly == null) { + return null; } - - private Boolean isHistogramNativeOnly(Boolean histogramClassicOnly, Boolean histogramNativeOnly) { - if (histogramClassicOnly == null && histogramNativeOnly == null) { - return null; - } - if (histogramNativeOnly != null) { - return histogramNativeOnly; - } - return !histogramClassicOnly; + if (histogramNativeOnly != null) { + return histogramNativeOnly; + } + return !histogramClassicOnly; + } + + private void validate(String prefix) throws PrometheusPropertiesException { + Util.assertValue( + histogramNativeInitialSchema, + s -> s >= -4 && s <= 8, + "Expecting number between -4 and +8.", + prefix, + HISTOGRAM_NATIVE_INITIAL_SCHEMA); + Util.assertValue( + histogramNativeMinZeroThreshold, + t -> t >= 0, + "Expecting value >= 0.", + prefix, + HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD); + Util.assertValue( + histogramNativeMaxZeroThreshold, + t -> t >= 0, + "Expecting value >= 0.", + prefix, + HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD); + Util.assertValue( + histogramNativeMaxNumberOfBuckets, + n -> n >= 0, + "Expecting value >= 0.", + prefix, + HISTOGRAM_NATIVE_MAX_NUMBER_OF_BUCKETS); + Util.assertValue( + histogramNativeResetDurationSeconds, + t -> t >= 0, + "Expecting value >= 0.", + prefix, + HISTOGRAM_NATIVE_RESET_DURATION_SECONDS); + Util.assertValue( + summaryMaxAgeSeconds, t -> t > 0, "Expecting value > 0", prefix, SUMMARY_MAX_AGE_SECONDS); + Util.assertValue( + summaryNumberOfAgeBuckets, + t -> t > 0, + "Expecting value > 0", + prefix, + SUMMARY_NUMBER_OF_AGE_BUCKETS); + + if (Boolean.TRUE.equals(histogramNativeOnly) && Boolean.TRUE.equals(histogramClassicOnly)) { + throw new PrometheusPropertiesException( + prefix + + "." + + HISTOGRAM_NATIVE_ONLY + + " and " + + prefix + + "." 
+ + HISTOGRAM_CLASSIC_ONLY + + " cannot both be true"); } - private void validate(String prefix) throws PrometheusPropertiesException { - Util.assertValue(histogramNativeInitialSchema, s -> s >= -4 && s <= 8, "Expecting number between -4 and +8.", prefix, HISTOGRAM_NATIVE_INITIAL_SCHEMA); - Util.assertValue(histogramNativeMinZeroThreshold, t -> t >= 0, "Expecting value >= 0.", prefix, HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD); - Util.assertValue(histogramNativeMaxZeroThreshold, t -> t >= 0, "Expecting value >= 0.", prefix, HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD); - Util.assertValue(histogramNativeMaxNumberOfBuckets, n -> n >= 0, "Expecting value >= 0.", prefix, HISTOGRAM_NATIVE_MAX_NUMBER_OF_BUCKETS); - Util.assertValue(histogramNativeResetDurationSeconds, t -> t >= 0, "Expecting value >= 0.", prefix, HISTOGRAM_NATIVE_RESET_DURATION_SECONDS); - Util.assertValue(summaryMaxAgeSeconds, t -> t > 0, "Expecting value > 0", prefix, SUMMARY_MAX_AGE_SECONDS); - Util.assertValue(summaryNumberOfAgeBuckets, t -> t > 0, "Expecting value > 0", prefix, SUMMARY_NUMBER_OF_AGE_BUCKETS); - - if (Boolean.TRUE.equals(histogramNativeOnly) && Boolean.TRUE.equals(histogramClassicOnly)) { - throw new PrometheusPropertiesException(prefix + "." + HISTOGRAM_NATIVE_ONLY + " and " + prefix + "." + HISTOGRAM_CLASSIC_ONLY + " cannot both be true"); - } - - if (histogramNativeMinZeroThreshold != null && histogramNativeMaxZeroThreshold != null) { - if (histogramNativeMinZeroThreshold > histogramNativeMaxZeroThreshold) { - throw new PrometheusPropertiesException(prefix + "." + HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD + " cannot be greater than " + prefix + "." + HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD); - } - } + if (histogramNativeMinZeroThreshold != null && histogramNativeMaxZeroThreshold != null) { + if (histogramNativeMinZeroThreshold > histogramNativeMaxZeroThreshold) { + throw new PrometheusPropertiesException( + prefix + + "." + + HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD + + " cannot be greater than " + + prefix + + "." + + HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD); + } + } - if (summaryQuantiles != null) { - for (double quantile : summaryQuantiles) { - if (quantile < 0 || quantile > 1) { - throw new PrometheusPropertiesException(prefix + "." + SUMMARY_QUANTILES + ": Expecting 0.0 <= quantile <= 1.0"); - } - } + if (summaryQuantiles != null) { + for (double quantile : summaryQuantiles) { + if (quantile < 0 || quantile > 1) { + throw new PrometheusPropertiesException( + prefix + "." + SUMMARY_QUANTILES + ": Expecting 0.0 <= quantile <= 1.0"); } + } + } - if (summaryQuantileErrors != null) { - if (summaryQuantiles == null) { - throw new PrometheusPropertiesException(prefix + "." + SUMMARY_QUANTILE_ERRORS + ": Can't configure " + SUMMARY_QUANTILE_ERRORS + " without configuring " + SUMMARY_QUANTILES); - } - if (summaryQuantileErrors.size() != summaryQuantiles.size()) { - throw new PrometheusPropertiesException(prefix + "." + SUMMARY_QUANTILE_ERRORS + ": must have the same length as " + SUMMARY_QUANTILES); - } - for (double error : summaryQuantileErrors) { - if (error < 0 || error > 1) { - throw new PrometheusPropertiesException(prefix + "." + SUMMARY_QUANTILE_ERRORS + ": Expecting 0.0 <= error <= 1.0"); - } - } + if (summaryQuantileErrors != null) { + if (summaryQuantiles == null) { + throw new PrometheusPropertiesException( + prefix + + "." 
+ + SUMMARY_QUANTILE_ERRORS + + ": Can't configure " + + SUMMARY_QUANTILE_ERRORS + + " without configuring " + + SUMMARY_QUANTILES); + } + if (summaryQuantileErrors.size() != summaryQuantiles.size()) { + throw new PrometheusPropertiesException( + prefix + + "." + + SUMMARY_QUANTILE_ERRORS + + ": must have the same length as " + + SUMMARY_QUANTILES); + } + for (double error : summaryQuantileErrors) { + if (error < 0 || error > 1) { + throw new PrometheusPropertiesException( + prefix + "." + SUMMARY_QUANTILE_ERRORS + ": Expecting 0.0 <= error <= 1.0"); } + } } - - /** - * This is the only configuration property that can be applied to all metric types. - * You can use it to turn Exemplar support off. Default is {@code true}. - */ - public Boolean getExemplarsEnabled() { - return exemplarsEnabled; + } + + /** + * This is the only configuration property that can be applied to all metric types. You can use it + * to turn Exemplar support off. Default is {@code true}. + */ + public Boolean getExemplarsEnabled() { + return exemplarsEnabled; + } + + /** See {@code Histogram.Builder.nativeOnly()} */ + public Boolean getHistogramNativeOnly() { + return histogramNativeOnly; + } + + /** See {@code Histogram.Builder.classicOnly()} */ + public Boolean getHistogramClassicOnly() { + return histogramClassicOnly; + } + + /** See {@code Histogram.Builder.classicBuckets()} */ + public List getHistogramClassicUpperBounds() { + return histogramClassicUpperBounds; + } + + /** See {@code Histogram.Builder.nativeInitialSchema()} */ + public Integer getHistogramNativeInitialSchema() { + return histogramNativeInitialSchema; + } + + /** See {@code Histogram.Builder.nativeMinZeroThreshold()} */ + public Double getHistogramNativeMinZeroThreshold() { + return histogramNativeMinZeroThreshold; + } + + /** See {@code Histogram.Builder.nativeMaxZeroThreshold()} */ + public Double getHistogramNativeMaxZeroThreshold() { + return histogramNativeMaxZeroThreshold; + } + + /** See {@code Histogram.Builder.nativeMaxNumberOfBuckets()} */ + public Integer getHistogramNativeMaxNumberOfBuckets() { + return histogramNativeMaxNumberOfBuckets; + } + + /** See {@code Histogram.Builder.nativeResetDuration()} */ + public Long getHistogramNativeResetDurationSeconds() { + return histogramNativeResetDurationSeconds; + } + + /** See {@code Summary.Builder.quantile()} */ + public List getSummaryQuantiles() { + return summaryQuantiles; + } + + /** + * See {@code Summary.Builder.quantile()} + * + *

Returns {@code null} only if {@link #getSummaryQuantiles()} is also {@code null}. Returns an + * empty list if {@link #getSummaryQuantiles()} are specified without specifying errors. If the + * list is not empty, it has the same size as {@link #getSummaryQuantiles()}. + */ + public List getSummaryQuantileErrors() { + if (summaryQuantiles != null) { + if (summaryQuantileErrors == null) { + return Collections.emptyList(); + } } - - /** - * See {@code Histogram.Builder.nativeOnly()} - */ - public Boolean getHistogramNativeOnly() { - return histogramNativeOnly; + return summaryQuantileErrors; + } + + /** See {@code Summary.Builder.maxAgeSeconds()} */ + public Long getSummaryMaxAgeSeconds() { + return summaryMaxAgeSeconds; + } + + /** See {@code Summary.Builder.numberOfAgeBuckets()} */ + public Integer getSummaryNumberOfAgeBuckets() { + return summaryNumberOfAgeBuckets; + } + + /** + * Note that this will remove entries from {@code properties}. This is because we want to know if + * there are unused properties remaining after all properties have been loaded. + */ + static MetricsProperties load(String prefix, Map properties) + throws PrometheusPropertiesException { + return new MetricsProperties( + Util.loadBoolean(prefix + "." + EXEMPLARS_ENABLED, properties), + Util.loadBoolean(prefix + "." + HISTOGRAM_NATIVE_ONLY, properties), + Util.loadBoolean(prefix + "." + HISTOGRAM_CLASSIC_ONLY, properties), + Util.loadDoubleList(prefix + "." + HISTOGRAM_CLASSIC_UPPER_BOUNDS, properties), + Util.loadInteger(prefix + "." + HISTOGRAM_NATIVE_INITIAL_SCHEMA, properties), + Util.loadDouble(prefix + "." + HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD, properties), + Util.loadDouble(prefix + "." + HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD, properties), + Util.loadInteger(prefix + "." + HISTOGRAM_NATIVE_MAX_NUMBER_OF_BUCKETS, properties), + Util.loadLong(prefix + "." + HISTOGRAM_NATIVE_RESET_DURATION_SECONDS, properties), + Util.loadDoubleList(prefix + "." + SUMMARY_QUANTILES, properties), + Util.loadDoubleList(prefix + "." + SUMMARY_QUANTILE_ERRORS, properties), + Util.loadLong(prefix + "." + SUMMARY_MAX_AGE_SECONDS, properties), + Util.loadInteger(prefix + "." 
+ SUMMARY_NUMBER_OF_AGE_BUCKETS, properties), + prefix); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private Boolean exemplarsEnabled; + private Boolean histogramNativeOnly; + private Boolean histogramClassicOnly; + private List histogramClassicUpperBounds; + private Integer histogramNativeInitialSchema; + private Double histogramNativeMinZeroThreshold; + private Double histogramNativeMaxZeroThreshold; + private Integer histogramNativeMaxNumberOfBuckets; + private Long histogramNativeResetDurationSeconds; + private List summaryQuantiles; + private List summaryQuantileErrors; + private Long summaryMaxAgeSeconds; + private Integer summaryNumberOfAgeBuckets; + + private Builder() {} + + public MetricsProperties build() { + return new MetricsProperties( + exemplarsEnabled, + histogramNativeOnly, + histogramClassicOnly, + histogramClassicUpperBounds, + histogramNativeInitialSchema, + histogramNativeMinZeroThreshold, + histogramNativeMaxZeroThreshold, + histogramNativeMaxNumberOfBuckets, + histogramNativeResetDurationSeconds, + summaryQuantiles, + summaryQuantileErrors, + summaryMaxAgeSeconds, + summaryNumberOfAgeBuckets); } - /** - * See {@code Histogram.Builder.classicOnly()} - */ - public Boolean getHistogramClassicOnly() { - return histogramClassicOnly; + /** See {@link MetricsProperties#getExemplarsEnabled()} */ + public Builder exemplarsEnabled(Boolean exemplarsEnabled) { + this.exemplarsEnabled = exemplarsEnabled; + return this; } - /** - * See {@code Histogram.Builder.classicBuckets()} - */ - public List getHistogramClassicUpperBounds() { - return histogramClassicUpperBounds; + /** See {@link MetricsProperties#getHistogramNativeOnly()} */ + public Builder histogramNativeOnly(Boolean histogramNativeOnly) { + this.histogramNativeOnly = histogramNativeOnly; + return this; } - /** - * See {@code Histogram.Builder.nativeInitialSchema()} - */ - public Integer getHistogramNativeInitialSchema() { - return histogramNativeInitialSchema; + /** See {@link MetricsProperties#getHistogramClassicOnly()} */ + public Builder histogramClassicOnly(Boolean histogramClassicOnly) { + this.histogramClassicOnly = histogramClassicOnly; + return this; } - /** - * See {@code Histogram.Builder.nativeMinZeroThreshold()} - */ - public Double getHistogramNativeMinZeroThreshold() { - return histogramNativeMinZeroThreshold; + /** See {@link MetricsProperties#getHistogramClassicUpperBounds()} */ + public Builder histogramClassicUpperBounds(double... 
histogramClassicUpperBounds) { + this.histogramClassicUpperBounds = Util.toList(histogramClassicUpperBounds); + return this; } - /** - * See {@code Histogram.Builder.nativeMaxZeroThreshold()} - */ - public Double getHistogramNativeMaxZeroThreshold() { - return histogramNativeMaxZeroThreshold; + /** See {@link MetricsProperties#getHistogramNativeInitialSchema()} */ + public Builder histogramNativeInitialSchema(Integer histogramNativeInitialSchema) { + this.histogramNativeInitialSchema = histogramNativeInitialSchema; + return this; } - /** - * See {@code Histogram.Builder.nativeMaxNumberOfBuckets()} - */ - public Integer getHistogramNativeMaxNumberOfBuckets() { - return histogramNativeMaxNumberOfBuckets; + /** See {@link MetricsProperties#getHistogramNativeMinZeroThreshold()} */ + public Builder histogramNativeMinZeroThreshold(Double histogramNativeMinZeroThreshold) { + this.histogramNativeMinZeroThreshold = histogramNativeMinZeroThreshold; + return this; } - /** - * See {@code Histogram.Builder.nativeResetDuration()} - */ - public Long getHistogramNativeResetDurationSeconds() { - return histogramNativeResetDurationSeconds; + /** See {@link MetricsProperties#getHistogramNativeMaxZeroThreshold()} */ + public Builder histogramNativeMaxZeroThreshold(Double histogramNativeMaxZeroThreshold) { + this.histogramNativeMaxZeroThreshold = histogramNativeMaxZeroThreshold; + return this; } - /** - * See {@code Summary.Builder.quantile()} - */ - public List getSummaryQuantiles() { - return summaryQuantiles; + /** See {@link MetricsProperties#getHistogramNativeMaxNumberOfBuckets()} */ + public Builder histogramNativeMaxNumberOfBuckets(Integer histogramNativeMaxNumberOfBuckets) { + this.histogramNativeMaxNumberOfBuckets = histogramNativeMaxNumberOfBuckets; + return this; } - /** - * See {@code Summary.Builder.quantile()} - *

- * Returns {@code null} only if {@link #getSummaryQuantiles()} is also {@code null}. - * Returns an empty list if {@link #getSummaryQuantiles()} are specified without specifying errors. - * If the list is not empty, it has the same size as {@link #getSummaryQuantiles()}. - */ - public List getSummaryQuantileErrors() { - if (summaryQuantiles != null) { - if (summaryQuantileErrors == null) { - return Collections.emptyList(); - } - } - return summaryQuantileErrors; + /** See {@link MetricsProperties#getHistogramNativeResetDurationSeconds()} */ + public Builder histogramNativeResetDurationSeconds(Long histogramNativeResetDurationSeconds) { + this.histogramNativeResetDurationSeconds = histogramNativeResetDurationSeconds; + return this; } - /** - * See {@code Summary.Builder.maxAgeSeconds()} - */ - public Long getSummaryMaxAgeSeconds() { - return summaryMaxAgeSeconds; + /** See {@link MetricsProperties#getSummaryQuantiles()} */ + public Builder summaryQuantiles(double... summaryQuantiles) { + this.summaryQuantiles = Util.toList(summaryQuantiles); + return this; } - /** - * See {@code Summary.Builder.numberOfAgeBuckets()} - */ - public Integer getSummaryNumberOfAgeBuckets() { - return summaryNumberOfAgeBuckets; + /** See {@link MetricsProperties#getSummaryQuantileErrors()} */ + public Builder summaryQuantileErrors(double... summaryQuantileErrors) { + this.summaryQuantileErrors = Util.toList(summaryQuantileErrors); + return this; } - /** - * Note that this will remove entries from {@code properties}. - * This is because we want to know if there are unused properties remaining after all properties have been loaded. - */ - static MetricsProperties load(String prefix, Map properties) throws PrometheusPropertiesException { - return new MetricsProperties( - Util.loadBoolean(prefix + "." + EXEMPLARS_ENABLED, properties), - Util.loadBoolean(prefix + "." + HISTOGRAM_NATIVE_ONLY, properties), - Util.loadBoolean(prefix + "." + HISTOGRAM_CLASSIC_ONLY, properties), - Util.loadDoubleList(prefix + "." + HISTOGRAM_CLASSIC_UPPER_BOUNDS, properties), - Util.loadInteger(prefix + "." + HISTOGRAM_NATIVE_INITIAL_SCHEMA, properties), - Util.loadDouble(prefix + "." + HISTOGRAM_NATIVE_MIN_ZERO_THRESHOLD, properties), - Util.loadDouble(prefix + "." + HISTOGRAM_NATIVE_MAX_ZERO_THRESHOLD, properties), - Util.loadInteger(prefix + "." + HISTOGRAM_NATIVE_MAX_NUMBER_OF_BUCKETS, properties), - Util.loadLong(prefix + "." + HISTOGRAM_NATIVE_RESET_DURATION_SECONDS, properties), - Util.loadDoubleList(prefix + "." + SUMMARY_QUANTILES, properties), - Util.loadDoubleList(prefix + "." + SUMMARY_QUANTILE_ERRORS, properties), - Util.loadLong(prefix + "." + SUMMARY_MAX_AGE_SECONDS, properties), - Util.loadInteger(prefix + "." 
+ SUMMARY_NUMBER_OF_AGE_BUCKETS, properties), - prefix); + /** See {@link MetricsProperties#getSummaryMaxAgeSeconds()} */ + public Builder summaryMaxAgeSeconds(Long summaryMaxAgeSeconds) { + this.summaryMaxAgeSeconds = summaryMaxAgeSeconds; + return this; } - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private Boolean exemplarsEnabled; - private Boolean histogramNativeOnly; - private Boolean histogramClassicOnly; - private List histogramClassicUpperBounds; - private Integer histogramNativeInitialSchema; - private Double histogramNativeMinZeroThreshold; - private Double histogramNativeMaxZeroThreshold; - private Integer histogramNativeMaxNumberOfBuckets; - private Long histogramNativeResetDurationSeconds; - private List summaryQuantiles; - private List summaryQuantileErrors; - private Long summaryMaxAgeSeconds; - private Integer summaryNumberOfAgeBuckets; - - private Builder() { - } - - public MetricsProperties build() { - return new MetricsProperties(exemplarsEnabled, - histogramNativeOnly, - histogramClassicOnly, - histogramClassicUpperBounds, - histogramNativeInitialSchema, - histogramNativeMinZeroThreshold, - histogramNativeMaxZeroThreshold, - histogramNativeMaxNumberOfBuckets, - histogramNativeResetDurationSeconds, - summaryQuantiles, - summaryQuantileErrors, - summaryMaxAgeSeconds, - summaryNumberOfAgeBuckets); - } - - /** - * See {@link MetricsProperties#getExemplarsEnabled()} - */ - public Builder exemplarsEnabled(Boolean exemplarsEnabled) { - this.exemplarsEnabled = exemplarsEnabled; - return this; - } - - /** - * See {@link MetricsProperties#getHistogramNativeOnly()} - */ - public Builder histogramNativeOnly(Boolean histogramNativeOnly) { - this.histogramNativeOnly = histogramNativeOnly; - return this; - } - - /** - * See {@link MetricsProperties#getHistogramClassicOnly()} - */ - public Builder histogramClassicOnly(Boolean histogramClassicOnly) { - this.histogramClassicOnly = histogramClassicOnly; - return this; - } - - /** - * See {@link MetricsProperties#getHistogramClassicUpperBounds()} - */ - public Builder histogramClassicUpperBounds(double... 
histogramClassicUpperBounds) { - this.histogramClassicUpperBounds = Util.toList(histogramClassicUpperBounds); - return this; - } - - /** - * See {@link MetricsProperties#getHistogramNativeInitialSchema()} - */ - public Builder histogramNativeInitialSchema(Integer histogramNativeInitialSchema) { - this.histogramNativeInitialSchema = histogramNativeInitialSchema; - return this; - } - - /** - * See {@link MetricsProperties#getHistogramNativeMinZeroThreshold()} - */ - public Builder histogramNativeMinZeroThreshold(Double histogramNativeMinZeroThreshold) { - this.histogramNativeMinZeroThreshold = histogramNativeMinZeroThreshold; - return this; - } - - /** - * See {@link MetricsProperties#getHistogramNativeMaxZeroThreshold()} - */ - public Builder histogramNativeMaxZeroThreshold(Double histogramNativeMaxZeroThreshold) { - this.histogramNativeMaxZeroThreshold = histogramNativeMaxZeroThreshold; - return this; - } - - /** - * See {@link MetricsProperties#getHistogramNativeMaxNumberOfBuckets()} - */ - public Builder histogramNativeMaxNumberOfBuckets(Integer histogramNativeMaxNumberOfBuckets) { - this.histogramNativeMaxNumberOfBuckets = histogramNativeMaxNumberOfBuckets; - return this; - } - - /** - * See {@link MetricsProperties#getHistogramNativeResetDurationSeconds()} - */ - public Builder histogramNativeResetDurationSeconds(Long histogramNativeResetDurationSeconds) { - this.histogramNativeResetDurationSeconds = histogramNativeResetDurationSeconds; - return this; - } - - /** - * See {@link MetricsProperties#getSummaryQuantiles()} - */ - public Builder summaryQuantiles(double... summaryQuantiles) { - this.summaryQuantiles = Util.toList(summaryQuantiles); - return this; - } - - /** - * See {@link MetricsProperties#getSummaryQuantileErrors()} - */ - public Builder summaryQuantileErrors(double... summaryQuantileErrors) { - this.summaryQuantileErrors = Util.toList(summaryQuantileErrors); - return this; - } - - /** - * See {@link MetricsProperties#getSummaryMaxAgeSeconds()} - */ - public Builder summaryMaxAgeSeconds(Long summaryMaxAgeSeconds) { - this.summaryMaxAgeSeconds = summaryMaxAgeSeconds; - return this; - } - - /** - * See {@link MetricsProperties#getSummaryNumberOfAgeBuckets()} - */ - public Builder summaryNumberOfAgeBuckets(Integer summaryNumberOfAgeBuckets) { - this.summaryNumberOfAgeBuckets = summaryNumberOfAgeBuckets; - return this; - } + /** See {@link MetricsProperties#getSummaryNumberOfAgeBuckets()} */ + public Builder summaryNumberOfAgeBuckets(Integer summaryNumberOfAgeBuckets) { + this.summaryNumberOfAgeBuckets = summaryNumberOfAgeBuckets; + return this; } + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusProperties.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusProperties.java index 2e26a4c7b..da31fe8cc 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusProperties.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusProperties.java @@ -5,89 +5,94 @@ /** * The Prometheus Java client library can be configured at runtime (e.g. using a properties file). - *

- * This class represents the runtime configuration. + * + *

This class represents the runtime configuration. */ public class PrometheusProperties { - private static final PrometheusProperties instance = PrometheusPropertiesLoader.load(); + private static final PrometheusProperties instance = PrometheusPropertiesLoader.load(); - private final MetricsProperties defaultMetricsProperties; - private final Map metricProperties = new HashMap<>(); - private final ExemplarsProperties exemplarProperties; - private final ExporterProperties exporterProperties; - private final ExporterFilterProperties exporterFilterProperties; - private final ExporterHttpServerProperties exporterHttpServerProperties; - private final ExporterOpenTelemetryProperties exporterOpenTelemetryProperties; - private final ExporterPushgatewayProperties exporterPushgatewayProperties; + private final MetricsProperties defaultMetricsProperties; + private final Map metricProperties = new HashMap<>(); + private final ExemplarsProperties exemplarProperties; + private final ExporterProperties exporterProperties; + private final ExporterFilterProperties exporterFilterProperties; + private final ExporterHttpServerProperties exporterHttpServerProperties; + private final ExporterOpenTelemetryProperties exporterOpenTelemetryProperties; + private final ExporterPushgatewayProperties exporterPushgatewayProperties; - /** - * Get the properties instance. When called for the first time, {@code get()} loads the properties from the following locations: - *

- */ - public static PrometheusProperties get() throws PrometheusPropertiesException { - return instance; - } + /** + * Get the properties instance. When called for the first time, {@code get()} loads the properties + * from the following locations: + * + * + */ + public static PrometheusProperties get() throws PrometheusPropertiesException { + return instance; + } - public PrometheusProperties( - MetricsProperties defaultMetricsProperties, - Map metricProperties, - ExemplarsProperties exemplarProperties, - ExporterProperties exporterProperties, - ExporterFilterProperties exporterFilterProperties, - ExporterHttpServerProperties httpServerConfig, - ExporterPushgatewayProperties pushgatewayProperties, - ExporterOpenTelemetryProperties otelConfig) { - this.defaultMetricsProperties = defaultMetricsProperties; - this.metricProperties.putAll(metricProperties); - this.exemplarProperties = exemplarProperties; - this.exporterProperties = exporterProperties; - this.exporterFilterProperties = exporterFilterProperties; - this.exporterHttpServerProperties = httpServerConfig; - this.exporterPushgatewayProperties = pushgatewayProperties; - this.exporterOpenTelemetryProperties = otelConfig; - } + public PrometheusProperties( + MetricsProperties defaultMetricsProperties, + Map metricProperties, + ExemplarsProperties exemplarProperties, + ExporterProperties exporterProperties, + ExporterFilterProperties exporterFilterProperties, + ExporterHttpServerProperties httpServerConfig, + ExporterPushgatewayProperties pushgatewayProperties, + ExporterOpenTelemetryProperties otelConfig) { + this.defaultMetricsProperties = defaultMetricsProperties; + this.metricProperties.putAll(metricProperties); + this.exemplarProperties = exemplarProperties; + this.exporterProperties = exporterProperties; + this.exporterFilterProperties = exporterFilterProperties; + this.exporterHttpServerProperties = httpServerConfig; + this.exporterPushgatewayProperties = pushgatewayProperties; + this.exporterOpenTelemetryProperties = otelConfig; + } - /** - * The default metric properties apply for metrics where {@link #getMetricProperties(String)} is {@code null}. - */ - public MetricsProperties getDefaultMetricProperties() { - return defaultMetricsProperties; - } + /** + * The default metric properties apply for metrics where {@link #getMetricProperties(String)} is + * {@code null}. + */ + public MetricsProperties getDefaultMetricProperties() { + return defaultMetricsProperties; + } - /** - * Properties specific for one metric. Should be merged with {@link #getDefaultMetricProperties()}. - * May return {@code null} if no metric-specific properties are configured for a metric name. - */ - public MetricsProperties getMetricProperties(String metricName) { - return metricProperties.get(metricName.replace(".", "_")); - } + /** + * Properties specific for one metric. Should be merged with {@link + * #getDefaultMetricProperties()}. May return {@code null} if no metric-specific properties are + * configured for a metric name. 
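A brief sketch of the lookup rule in this javadoc; the metric name is hypothetical, and the last statement stands in for the field-by-field merge that callers are expected to perform.

PrometheusProperties config = PrometheusProperties.get();
// A metric registered as "http.server.duration" is looked up under the key "http_server_duration".
MetricsProperties perMetric = config.getMetricProperties("http.server.duration");
MetricsProperties defaults = config.getDefaultMetricProperties();
Boolean exemplarsEnabled =
    perMetric != null && perMetric.getExemplarsEnabled() != null
        ? perMetric.getExemplarsEnabled()
        : defaults.getExemplarsEnabled();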
+ */ + public MetricsProperties getMetricProperties(String metricName) { + return metricProperties.get(metricName.replace(".", "_")); + } - public ExemplarsProperties getExemplarProperties() { - return exemplarProperties; - } + public ExemplarsProperties getExemplarProperties() { + return exemplarProperties; + } - public ExporterProperties getExporterProperties() { - return exporterProperties; - } + public ExporterProperties getExporterProperties() { + return exporterProperties; + } - public ExporterFilterProperties getExporterFilterProperties() { - return exporterFilterProperties; - } + public ExporterFilterProperties getExporterFilterProperties() { + return exporterFilterProperties; + } - public ExporterHttpServerProperties getExporterHttpServerProperties() { - return exporterHttpServerProperties; - } + public ExporterHttpServerProperties getExporterHttpServerProperties() { + return exporterHttpServerProperties; + } - public ExporterPushgatewayProperties getExporterPushgatewayProperties() { - return exporterPushgatewayProperties; - } + public ExporterPushgatewayProperties getExporterPushgatewayProperties() { + return exporterPushgatewayProperties; + } - public ExporterOpenTelemetryProperties getExporterOpenTelemetryProperties() { - return exporterOpenTelemetryProperties; - } + public ExporterOpenTelemetryProperties getExporterOpenTelemetryProperties() { + return exporterOpenTelemetryProperties; + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesException.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesException.java index e41009ca2..5024cc45a 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesException.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesException.java @@ -2,11 +2,11 @@ public class PrometheusPropertiesException extends RuntimeException { - public PrometheusPropertiesException(String msg) { - super(msg); - } + public PrometheusPropertiesException(String msg) { + super(msg); + } - public PrometheusPropertiesException(String msg, Exception cause) { - super(msg, cause); - } + public PrometheusPropertiesException(String msg, Exception cause) { + super(msg, cause); + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesLoader.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesLoader.java index bb9b3c2cb..9d1fde4ab 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesLoader.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/PrometheusPropertiesLoader.java @@ -14,101 +14,123 @@ /** * The Properties Loader is early stages. - *

- * It would be great to implement a subset of - * Spring Boot's Externalized Configuration, - * like support for YAML, Properties, and env vars, or support for Spring's naming conventions for properties. + * + *

It would be great to implement a subset of Spring + * Boot's Externalized Configuration, like support for YAML, Properties, and env vars, or + * support for Spring's naming conventions for properties. */ public class PrometheusPropertiesLoader { - /** - * See {@link PrometheusProperties#get()}. - */ - public static PrometheusProperties load() throws PrometheusPropertiesException { - return load(new Properties()); - } + /** See {@link PrometheusProperties#get()}. */ + public static PrometheusProperties load() throws PrometheusPropertiesException { + return load(new Properties()); + } - public static PrometheusProperties load(Map externalProperties) throws PrometheusPropertiesException { - Map properties = loadProperties(externalProperties); - Map metricsConfigs = loadMetricsConfigs(properties); - MetricsProperties defaultMetricsProperties = MetricsProperties.load("io.prometheus.metrics", properties); - ExemplarsProperties exemplarConfig = ExemplarsProperties.load("io.prometheus.exemplars", properties); - ExporterProperties exporterProperties = ExporterProperties.load("io.prometheus.exporter", properties); - ExporterFilterProperties exporterFilterProperties = ExporterFilterProperties.load("io.prometheus.exporter.filter", properties); - ExporterHttpServerProperties exporterHttpServerProperties = ExporterHttpServerProperties.load("io.prometheus.exporter.httpServer", properties); - ExporterPushgatewayProperties exporterPushgatewayProperties = ExporterPushgatewayProperties.load("io.prometheus.exporter.pushgateway", properties); - ExporterOpenTelemetryProperties exporterOpenTelemetryProperties = ExporterOpenTelemetryProperties.load("io.prometheus.exporter.opentelemetry", properties); - validateAllPropertiesProcessed(properties); - return new PrometheusProperties(defaultMetricsProperties, metricsConfigs, exemplarConfig, exporterProperties, exporterFilterProperties, exporterHttpServerProperties, exporterPushgatewayProperties, exporterOpenTelemetryProperties); - } + public static PrometheusProperties load(Map externalProperties) + throws PrometheusPropertiesException { + Map properties = loadProperties(externalProperties); + Map metricsConfigs = loadMetricsConfigs(properties); + MetricsProperties defaultMetricsProperties = + MetricsProperties.load("io.prometheus.metrics", properties); + ExemplarsProperties exemplarConfig = + ExemplarsProperties.load("io.prometheus.exemplars", properties); + ExporterProperties exporterProperties = + ExporterProperties.load("io.prometheus.exporter", properties); + ExporterFilterProperties exporterFilterProperties = + ExporterFilterProperties.load("io.prometheus.exporter.filter", properties); + ExporterHttpServerProperties exporterHttpServerProperties = + ExporterHttpServerProperties.load("io.prometheus.exporter.httpServer", properties); + ExporterPushgatewayProperties exporterPushgatewayProperties = + ExporterPushgatewayProperties.load("io.prometheus.exporter.pushgateway", properties); + ExporterOpenTelemetryProperties exporterOpenTelemetryProperties = + ExporterOpenTelemetryProperties.load("io.prometheus.exporter.opentelemetry", properties); + validateAllPropertiesProcessed(properties); + return new PrometheusProperties( + defaultMetricsProperties, + metricsConfigs, + exemplarConfig, + exporterProperties, + exporterFilterProperties, + exporterHttpServerProperties, + exporterPushgatewayProperties, + exporterOpenTelemetryProperties); + } - // This will remove entries from properties when they are processed. 
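By way of example (not part of this change), the load(Map) overload above lets callers pass their own entries, which take precedence over the classpath file, the prometheus.config file, and system properties merged in loadProperties(...) further down; the value is only illustrative.

Properties external = new Properties();
external.setProperty("io.prometheus.metrics.exemplarsEnabled", "false");
PrometheusProperties config = PrometheusPropertiesLoader.load(external);
// config.getDefaultMetricProperties().getExemplarsEnabled() now returns false,
// regardless of what any lower-priority source provides.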
- private static Map loadMetricsConfigs(Map properties) { - Map result = new HashMap<>(); - // Note that the metric name in the properties file must be as exposed in the Prometheus exposition formats, - // i.e. all dots replaced with underscores. - Pattern pattern = Pattern.compile("io\\.prometheus\\.metrics\\.([^.]+)\\."); - // Create a copy of the keySet() for iterating. We cannot iterate directly over keySet() - // because entries are removed when MetricsConfig.load(...) is called. - Set propertyNames = new HashSet<>(); - for (Object key : properties.keySet()) { - propertyNames.add(key.toString()); - } - for (String propertyName : propertyNames) { - Matcher matcher = pattern.matcher(propertyName); - if (matcher.find()) { - String metricName = matcher.group(1).replace(".", "_"); - if (!result.containsKey(metricName)) { - result.put(metricName, MetricsProperties.load("io.prometheus.metrics." + metricName, properties)); - } - } - } - return result; + // This will remove entries from properties when they are processed. + private static Map loadMetricsConfigs(Map properties) { + Map result = new HashMap<>(); + // Note that the metric name in the properties file must be as exposed in the Prometheus + // exposition formats, + // i.e. all dots replaced with underscores. + Pattern pattern = Pattern.compile("io\\.prometheus\\.metrics\\.([^.]+)\\."); + // Create a copy of the keySet() for iterating. We cannot iterate directly over keySet() + // because entries are removed when MetricsConfig.load(...) is called. + Set propertyNames = new HashSet<>(); + for (Object key : properties.keySet()) { + propertyNames.add(key.toString()); } - - // If there are properties left starting with io.prometheus it's likely a typo, - // because we didn't use that property. - // Throw a config error to let the user know that this property doesn't exist. - private static void validateAllPropertiesProcessed(Map properties) { - for (Object key : properties.keySet()) { - if (key.toString().startsWith("io.prometheus")) { - throw new PrometheusPropertiesException(key + ": Unknown property"); - } + for (String propertyName : propertyNames) { + Matcher matcher = pattern.matcher(propertyName); + if (matcher.find()) { + String metricName = matcher.group(1).replace(".", "_"); + if (!result.containsKey(metricName)) { + result.put( + metricName, + MetricsProperties.load("io.prometheus.metrics." + metricName, properties)); } + } } + return result; + } - private static Map loadProperties(Map externalProperties) { - Map properties = new HashMap<>(); - properties.putAll(loadPropertiesFromClasspath()); - properties.putAll(loadPropertiesFromFile()); // overriding the entries from the classpath file - properties.putAll(System.getProperties()); // overriding the entries from the properties file - properties.putAll(externalProperties); // overriding all the entries above - // TODO: Add environment variables like EXEMPLARS_ENABLED. - return properties; + // If there are properties left starting with io.prometheus it's likely a typo, + // because we didn't use that property. + // Throw a config error to let the user know that this property doesn't exist. 
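The helpers above drive per-metric configuration and typo detection; the following hedged sketch shows what that means in practice, using a made-up metric name.

Properties props = new Properties();
// Per-metric override: write the metric name as it is exposed, i.e. with underscores.
props.setProperty(
    "io.prometheus.metrics.request_duration_seconds.histogramClassicUpperBounds",
    "0.005, 0.01, 0.05, 0.1, 1.0, +Inf");
PrometheusProperties config = PrometheusPropertiesLoader.load(props);
MetricsProperties perMetric = config.getMetricProperties("request_duration_seconds");

// A misspelled key such as io.prometheus.metrics.request_duration_seconds.histogramBuckets
// would not be consumed by any loader and would make validateAllPropertiesProcessed(...) throw
// a PrometheusPropertiesException ending in ": Unknown property".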
+ private static void validateAllPropertiesProcessed(Map properties) { + for (Object key : properties.keySet()) { + if (key.toString().startsWith("io.prometheus")) { + throw new PrometheusPropertiesException(key + ": Unknown property"); + } } + } - private static Properties loadPropertiesFromClasspath() { - Properties properties = new Properties(); - try (InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("prometheus.properties")) { - properties.load(stream); - } catch (Exception ignored) { - } - return properties; + private static Map loadProperties(Map externalProperties) { + Map properties = new HashMap<>(); + properties.putAll(loadPropertiesFromClasspath()); + properties.putAll(loadPropertiesFromFile()); // overriding the entries from the classpath file + properties.putAll(System.getProperties()); // overriding the entries from the properties file + properties.putAll(externalProperties); // overriding all the entries above + // TODO: Add environment variables like EXEMPLARS_ENABLED. + return properties; + } + + private static Properties loadPropertiesFromClasspath() { + Properties properties = new Properties(); + try (InputStream stream = + Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("prometheus.properties")) { + properties.load(stream); + } catch (Exception ignored) { } + return properties; + } - private static Properties loadPropertiesFromFile() throws PrometheusPropertiesException { - Properties properties = new Properties(); - String path = System.getProperty("prometheus.config"); - if (System.getenv("PROMETHEUS_CONFIG") != null) { - path = System.getenv("PROMETHEUS_CONFIG"); - } - if (path != null) { - try (InputStream stream = Files.newInputStream(Paths.get(path))) { - properties.load(stream); - } catch (IOException e) { - throw new PrometheusPropertiesException("Failed to read Prometheus properties from " + path + ": " + e.getMessage(), e); - } - } - return properties; + private static Properties loadPropertiesFromFile() throws PrometheusPropertiesException { + Properties properties = new Properties(); + String path = System.getProperty("prometheus.config"); + if (System.getenv("PROMETHEUS_CONFIG") != null) { + path = System.getenv("PROMETHEUS_CONFIG"); + } + if (path != null) { + try (InputStream stream = Files.newInputStream(Paths.get(path))) { + properties.load(stream); + } catch (IOException e) { + throw new PrometheusPropertiesException( + "Failed to read Prometheus properties from " + path + ": " + e.getMessage(), e); + } } + return properties; + } } diff --git a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/Util.java b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/Util.java index 5b3f97252..66d6b2d92 100644 --- a/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/Util.java +++ b/prometheus-metrics-config/src/main/java/io/prometheus/metrics/config/Util.java @@ -9,131 +9,145 @@ class Util { - private static String getProperty(String name, Map properties) { - Object object = properties.remove(name); - if (object != null) { - return object.toString(); - } - return null; + private static String getProperty(String name, Map properties) { + Object object = properties.remove(name); + if (object != null) { + return object.toString(); } + return null; + } - static Boolean loadBoolean(String name, Map properties) throws PrometheusPropertiesException { - String property = getProperty(name, properties); - if (property != null) { - if 
(!"true".equalsIgnoreCase(property) && !"false".equalsIgnoreCase(property)) { - throw new PrometheusPropertiesException(name + "=" + property + ": Expecting 'true' or 'false'"); - } - return Boolean.parseBoolean(property); - } - return null; + static Boolean loadBoolean(String name, Map properties) + throws PrometheusPropertiesException { + String property = getProperty(name, properties); + if (property != null) { + if (!"true".equalsIgnoreCase(property) && !"false".equalsIgnoreCase(property)) { + throw new PrometheusPropertiesException( + name + "=" + property + ": Expecting 'true' or 'false'"); + } + return Boolean.parseBoolean(property); } + return null; + } - static List toList(double... values) { - if (values == null) { - return null; - } - List result = new ArrayList<>(values.length); - for (double value : values) { - result.add(value); - } - return result; + static List toList(double... values) { + if (values == null) { + return null; } - - static String loadString(String name, Map properties) throws PrometheusPropertiesException { - return getProperty(name, properties); + List result = new ArrayList<>(values.length); + for (double value : values) { + result.add(value); } + return result; + } - static List loadStringList(String name, Map properties) throws PrometheusPropertiesException { - String property = getProperty(name, properties); - if (property != null) { - return Arrays.asList(property.split("\\s*,\\s*")); - } - return null; + static String loadString(String name, Map properties) + throws PrometheusPropertiesException { + return getProperty(name, properties); + } + + static List loadStringList(String name, Map properties) + throws PrometheusPropertiesException { + String property = getProperty(name, properties); + if (property != null) { + return Arrays.asList(property.split("\\s*,\\s*")); } + return null; + } - static List loadDoubleList(String name, Map properties) throws PrometheusPropertiesException { - String property = getProperty(name, properties); - if (property != null) { - String[] numbers = property.split("\\s*,\\s*"); - Double[] result = new Double[numbers.length]; - for (int i = 0; i < numbers.length; i++) { - try { - if ("+Inf".equals(numbers[i].trim())) { - result[i] = Double.POSITIVE_INFINITY; - } else { - result[i] = Double.parseDouble(numbers[i]); - } - } catch (NumberFormatException e) { - throw new PrometheusPropertiesException(name + "=" + property + ": Expecting comma separated list of double values"); - } - } - return Arrays.asList(result); + static List loadDoubleList(String name, Map properties) + throws PrometheusPropertiesException { + String property = getProperty(name, properties); + if (property != null) { + String[] numbers = property.split("\\s*,\\s*"); + Double[] result = new Double[numbers.length]; + for (int i = 0; i < numbers.length; i++) { + try { + if ("+Inf".equals(numbers[i].trim())) { + result[i] = Double.POSITIVE_INFINITY; + } else { + result[i] = Double.parseDouble(numbers[i]); + } + } catch (NumberFormatException e) { + throw new PrometheusPropertiesException( + name + "=" + property + ": Expecting comma separated list of double values"); } - return null; + } + return Arrays.asList(result); } + return null; + } - // Map is represented as "key1=value1,key2=value2" - static Map loadMap(String name, Map properties) throws PrometheusPropertiesException { - Map result = new HashMap<>(); - String property = getProperty(name, properties); - if (property != null) { - String[] pairs = property.split(","); - for (String pair : pairs) { - if 
(pair.contains("=")) { - String[] keyValue = pair.split("=", 1); - if (keyValue.length == 2) { - String key = keyValue[0].trim(); - String value = keyValue[1].trim(); - if (key.length() > 0 && value.length() > 0) { - result.putIfAbsent(key, value); - } - } - } + // Map is represented as "key1=value1,key2=value2" + static Map loadMap(String name, Map properties) + throws PrometheusPropertiesException { + Map result = new HashMap<>(); + String property = getProperty(name, properties); + if (property != null) { + String[] pairs = property.split(","); + for (String pair : pairs) { + if (pair.contains("=")) { + String[] keyValue = pair.split("=", 1); + if (keyValue.length == 2) { + String key = keyValue[0].trim(); + String value = keyValue[1].trim(); + if (key.length() > 0 && value.length() > 0) { + result.putIfAbsent(key, value); } + } } - return result; + } } + return result; + } - static Integer loadInteger(String name, Map properties) throws PrometheusPropertiesException { - String property = getProperty(name, properties); - if (property != null) { - try { - return Integer.parseInt(property); - } catch (NumberFormatException e) { - throw new PrometheusPropertiesException(name + "=" + property + ": Expecting integer value"); - } - } - return null; + static Integer loadInteger(String name, Map properties) + throws PrometheusPropertiesException { + String property = getProperty(name, properties); + if (property != null) { + try { + return Integer.parseInt(property); + } catch (NumberFormatException e) { + throw new PrometheusPropertiesException( + name + "=" + property + ": Expecting integer value"); + } } + return null; + } - static Double loadDouble(String name, Map properties) throws PrometheusPropertiesException { - String property = getProperty(name, properties); - if (property != null) { - try { - return Double.parseDouble(property); - } catch (NumberFormatException e) { - throw new PrometheusPropertiesException(name + "=" + property + ": Expecting double value"); - } - } - return null; + static Double loadDouble(String name, Map properties) + throws PrometheusPropertiesException { + String property = getProperty(name, properties); + if (property != null) { + try { + return Double.parseDouble(property); + } catch (NumberFormatException e) { + throw new PrometheusPropertiesException(name + "=" + property + ": Expecting double value"); + } } + return null; + } - static Long loadLong(String name, Map properties) throws PrometheusPropertiesException { - String property = getProperty(name, properties); - if (property != null) { - try { - return Long.parseLong(property); - } catch (NumberFormatException e) { - throw new PrometheusPropertiesException(name + "=" + property + ": Expecting long value"); - } - } - return null; + static Long loadLong(String name, Map properties) + throws PrometheusPropertiesException { + String property = getProperty(name, properties); + if (property != null) { + try { + return Long.parseLong(property); + } catch (NumberFormatException e) { + throw new PrometheusPropertiesException(name + "=" + property + ": Expecting long value"); + } } + return null; + } - static void assertValue(T number, Predicate predicate, String message, String prefix, String name) throws PrometheusPropertiesException { - if (number != null && !predicate.test(number)) { - String fullMessage = prefix == null ? name + ": " + message : prefix + "." 
+ name + ": " + message; - throw new PrometheusPropertiesException(fullMessage); - } + static void assertValue( + T number, Predicate predicate, String message, String prefix, String name) + throws PrometheusPropertiesException { + if (number != null && !predicate.test(number)) { + String fullMessage = + prefix == null ? name + ": " + message : prefix + "." + name + ": " + message; + throw new PrometheusPropertiesException(fullMessage); } + } } diff --git a/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesLoaderTests.java b/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesLoaderTests.java index 79c20702c..8d16eadb7 100644 --- a/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesLoaderTests.java +++ b/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesLoaderTests.java @@ -1,33 +1,46 @@ package io.prometheus.metrics.config; import java.util.Properties; - import org.junit.Assert; import org.junit.Test; -/** - * Tests for {@link PrometheusPropertiesLoader}. - */ +/** Tests for {@link PrometheusPropertiesLoader}. */ public class PrometheusPropertiesLoaderTests { - @Test - public void propertiesShouldBeLoadedFromPropertiesFile() { - PrometheusProperties prometheusProperties = PrometheusPropertiesLoader.load(); - Assert.assertEquals(11, prometheusProperties.getDefaultMetricProperties().getHistogramClassicUpperBounds().size()); - Assert.assertEquals(4, prometheusProperties.getMetricProperties("http_duration_seconds").getHistogramClassicUpperBounds().size()); - Assert.assertTrue(prometheusProperties.getExporterProperties().getExemplarsOnAllMetricTypes()); - } + @Test + public void propertiesShouldBeLoadedFromPropertiesFile() { + PrometheusProperties prometheusProperties = PrometheusPropertiesLoader.load(); + Assert.assertEquals( + 11, + prometheusProperties.getDefaultMetricProperties().getHistogramClassicUpperBounds().size()); + Assert.assertEquals( + 4, + prometheusProperties + .getMetricProperties("http_duration_seconds") + .getHistogramClassicUpperBounds() + .size()); + Assert.assertTrue(prometheusProperties.getExporterProperties().getExemplarsOnAllMetricTypes()); + } - @Test - public void externalPropertiesShouldOverridePropertiesFile() { - Properties properties = new Properties(); - properties.setProperty("io.prometheus.metrics.histogramClassicUpperBounds", ".005, .01"); - properties.setProperty("io.prometheus.metrics.http_duration_seconds.histogramClassicUpperBounds", ".005, .01, .015"); - properties.setProperty("io.prometheus.exporter.exemplarsOnAllMetricTypes", "false"); + @Test + public void externalPropertiesShouldOverridePropertiesFile() { + Properties properties = new Properties(); + properties.setProperty("io.prometheus.metrics.histogramClassicUpperBounds", ".005, .01"); + properties.setProperty( + "io.prometheus.metrics.http_duration_seconds.histogramClassicUpperBounds", + ".005, .01, .015"); + properties.setProperty("io.prometheus.exporter.exemplarsOnAllMetricTypes", "false"); - PrometheusProperties prometheusProperties = PrometheusPropertiesLoader.load(properties); - Assert.assertEquals(2, prometheusProperties.getDefaultMetricProperties().getHistogramClassicUpperBounds().size()); - Assert.assertEquals(3, prometheusProperties.getMetricProperties("http_duration_seconds").getHistogramClassicUpperBounds().size()); - Assert.assertFalse(prometheusProperties.getExporterProperties().getExemplarsOnAllMetricTypes()); - } + 
PrometheusProperties prometheusProperties = PrometheusPropertiesLoader.load(properties); + Assert.assertEquals( + 2, + prometheusProperties.getDefaultMetricProperties().getHistogramClassicUpperBounds().size()); + Assert.assertEquals( + 3, + prometheusProperties + .getMetricProperties("http_duration_seconds") + .getHistogramClassicUpperBounds() + .size()); + Assert.assertFalse(prometheusProperties.getExporterProperties().getExemplarsOnAllMetricTypes()); + } } diff --git a/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesTest.java b/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesTest.java index 45fec1be0..f9ef25a7a 100644 --- a/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesTest.java +++ b/prometheus-metrics-config/src/test/java/io/prometheus/metrics/config/PrometheusPropertiesTest.java @@ -1,29 +1,37 @@ package io.prometheus.metrics.config; -import org.junit.Assert; -import org.junit.Test; - import java.io.IOException; import java.io.InputStream; import java.util.Properties; +import org.junit.Assert; +import org.junit.Test; public class PrometheusPropertiesTest { - @Test - public void testPrometheusConfig() { - PrometheusProperties result = PrometheusProperties.get(); - Assert.assertEquals(11, result.getDefaultMetricProperties().getHistogramClassicUpperBounds().size()); - Assert.assertEquals(4, result.getMetricProperties("http_duration_seconds").getHistogramClassicUpperBounds().size()); - } + @Test + public void testPrometheusConfig() { + PrometheusProperties result = PrometheusProperties.get(); + Assert.assertEquals( + 11, result.getDefaultMetricProperties().getHistogramClassicUpperBounds().size()); + Assert.assertEquals( + 4, + result + .getMetricProperties("http_duration_seconds") + .getHistogramClassicUpperBounds() + .size()); + } - @Test - public void testEmptyUpperBounds() throws IOException { - Properties properties = new Properties(); - try (InputStream stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("emptyUpperBounds.properties")) { - properties.load(stream); - } - Assert.assertEquals(1, properties.size()); - MetricsProperties.load("io.prometheus.metrics", properties); - Assert.assertEquals(0, properties.size()); + @Test + public void testEmptyUpperBounds() throws IOException { + Properties properties = new Properties(); + try (InputStream stream = + Thread.currentThread() + .getContextClassLoader() + .getResourceAsStream("emptyUpperBounds.properties")) { + properties.load(stream); } + Assert.assertEquals(1, properties.size()); + MetricsProperties.load("io.prometheus.metrics", properties); + Assert.assertEquals(0, properties.size()); + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/CounterDataPoint.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/CounterDataPoint.java index 51b45c62a..7055d7565 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/CounterDataPoint.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/CounterDataPoint.java @@ -3,9 +3,11 @@ import io.prometheus.metrics.model.snapshots.Labels; /** - * Represents a single counter data point, i.e. a single line for a counter metric in Prometheus text format. - *

- * Example usage: + * Represents a single counter data point, i.e. a single line for a counter metric in Prometheus + * text format. + * + *

Example usage: + * *

{@code
  * Counter counter = Counter.builder()
  *     .name("tasks_total")
@@ -15,84 +17,86 @@
  * CounterDataPoint pendingTasks = counter.labelValues("pending");
  * CounterDataPoint completedTasks = counter.labelValues("completed");
  * }
- *

- * Using {@code DataPoint} directly improves performance. If you increment a counter like this: + * + *

Using {@code DataPoint} directly improves performance. If you increment a counter like this: + * *

{@code
  * counter.labelValues("pending").inc();
  * }
- * the label value {@code "pending"} needs to be looked up every single time. - * Using the {@code CounterDataPoint} like this: + * + * the label value {@code "pending"} needs to be looked up every single time. Using the {@code + * CounterDataPoint} like this: + * *
{@code
  * CounterDataPoint pendingTasks = counter.labelValues("pending");
  * pendingTasks.inc();
  * }
- * allows you to look up the label value only once, and then use the {@code CounterDataPoint} directly. - * This is a worthwhile performance improvement when instrumenting a performance-critical code path. - *

- * If you have a counter without labels like this: + * + * allows you to look up the label value only once, and then use the {@code CounterDataPoint} + * directly. This is a worthwhile performance improvement when instrumenting a performance-critical + * code path. + * + *

If you have a counter without labels like this: + * *

{@code
  * Counter counterWithoutLabels = Counter.builder()
  *     .name("events_total")
  *     .register();
  * }
+ * * You can use it as a {@code CounterDataPoint} directly. So the following: + * *
{@code
  * CounterDataPoint counterData = counterWithoutLabels.labelValues(); // empty label values
  * }
+ * * is equivalent to + * *
{@code
  * CounterDataPoint counterData = counterWithoutLabels;
  * }
*/ public interface CounterDataPoint extends DataPoint { - /** - * Add one. - */ - default void inc() { - inc(1L); - } + /** Add one. */ + default void inc() { + inc(1L); + } - /** - * Add {@code amount}. Throws an {@link IllegalArgumentException} if {@code amount} is negative. - */ - default void inc(long amount) { - inc((double) amount); - } + /** + * Add {@code amount}. Throws an {@link IllegalArgumentException} if {@code amount} is negative. + */ + default void inc(long amount) { + inc((double) amount); + } - /** - * Add {@code amount}. Throws an {@link IllegalArgumentException} if {@code amount} is negative. - */ - void inc(double amount); + /** + * Add {@code amount}. Throws an {@link IllegalArgumentException} if {@code amount} is negative. + */ + void inc(double amount); - /** - * Add one, and create a custom exemplar with the given labels. - */ - default void incWithExemplar(Labels labels) { - incWithExemplar(1.0, labels); - } + /** Add one, and create a custom exemplar with the given labels. */ + default void incWithExemplar(Labels labels) { + incWithExemplar(1.0, labels); + } - /** - * Add {@code amount}, and create a custom exemplar with the given labels. - * Throws an {@link IllegalArgumentException} if {@code amount} is negative. - */ - default void incWithExemplar(long amount, Labels labels) { - inc((double) amount); - } + /** + * Add {@code amount}, and create a custom exemplar with the given labels. Throws an {@link + * IllegalArgumentException} if {@code amount} is negative. + */ + default void incWithExemplar(long amount, Labels labels) { + inc((double) amount); + } - /** - * Add {@code amount}, and create a custom exemplar with the given labels. - * Throws an {@link IllegalArgumentException} if {@code amount} is negative. - */ - void incWithExemplar(double amount, Labels labels); + /** + * Add {@code amount}, and create a custom exemplar with the given labels. Throws an {@link + * IllegalArgumentException} if {@code amount} is negative. + */ + void incWithExemplar(double amount, Labels labels); - /** - * Get the current value. - */ - double get(); + /** Get the current value. */ + double get(); - /** - * Get the current value as a {@code long}. Decimal places will be discarded. - */ - long getLongValue(); + /** Get the current value as a {@code long}. Decimal places will be discarded. */ + long getLongValue(); } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DataPoint.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DataPoint.java index 8dd277d81..5b6017ddf 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DataPoint.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DataPoint.java @@ -1,4 +1,3 @@ package io.prometheus.metrics.core.datapoints; -public interface DataPoint { -} +public interface DataPoint {} diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DistributionDataPoint.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DistributionDataPoint.java index f08ccae9c..57e7a3e86 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DistributionDataPoint.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/DistributionDataPoint.java @@ -4,33 +4,29 @@ /** * Represents a single data point of a histogram or a summary metric. - *

- * Single data point means identified label values like {@code {method="GET", path="/", status_code="200"}}, - * ignoring the {@code "le"} label for histograms or the {@code "quantile"} label for summaries. - *

- * This interface is named DistributionDataPoint because both histograms and summaries are used to observe - * distributions, like latency distributions or distributions of request sizes. Therefore - * DistributionDataPoint is a good name for a common interface implemented by histogram data points - * and summary data points. - *

- * See JavaDoc of {@link CounterDataPoint} on how using data points directly can improve performance. + * + *

Single data point means identified label values like {@code {method="GET", path="/", + * status_code="200"}}, ignoring the {@code "le"} label for histograms or the {@code "quantile"} + * label for summaries. + * + *

This interface is named DistributionDataPoint because both histograms and summaries are + * used to observe distributions, like latency distributions or distributions of request sizes. + * Therefore DistributionDataPoint is a good name for a common interface implemented by + * histogram data points and summary data points. + * + *

See JavaDoc of {@link CounterDataPoint} on how using data points directly can improve + * performance. */ public interface DistributionDataPoint extends DataPoint, TimerApi { - /** - * Observe {@code value}. - */ - void observe(double value); + /** Observe {@code value}. */ + void observe(double value); - /** - * Observe {@code value}, and create a custom exemplar with the given labels. - */ - void observeWithExemplar(double value, Labels labels); + /** Observe {@code value}, and create a custom exemplar with the given labels. */ + void observeWithExemplar(double value, Labels labels); - /** - * {@inheritDoc} - */ - default Timer startTimer() { - return new Timer(this::observe); - } + /** {@inheritDoc} */ + default Timer startTimer() { + return new Timer(this::observe); + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/GaugeDataPoint.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/GaugeDataPoint.java index c208d5414..003e3e5e8 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/GaugeDataPoint.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/GaugeDataPoint.java @@ -3,83 +3,61 @@ import io.prometheus.metrics.model.snapshots.Labels; /** - * Represents a single gauge data point, i.e. a single line for a gauge metric in Prometheus text format. - *

- * See JavaDoc of {@link CounterDataPoint} on how using data points directly can improve performance. + * Represents a single gauge data point, i.e. a single line for a gauge metric in Prometheus text + * format. + * + *

See JavaDoc of {@link CounterDataPoint} on how using data points directly can improve + * performance. */ public interface GaugeDataPoint extends DataPoint, TimerApi { - /** - * Add one. - */ - default void inc() { - inc(1.0); - } + /** Add one. */ + default void inc() { + inc(1.0); + } - /** - * Add {@code amount}. - */ - void inc(double amount); + /** Add {@code amount}. */ + void inc(double amount); - /** - * Add one, and create a custom exemplar with the given labels. - */ - default void incWithExemplar(Labels labels) { - incWithExemplar(1.0, labels); - } + /** Add one, and create a custom exemplar with the given labels. */ + default void incWithExemplar(Labels labels) { + incWithExemplar(1.0, labels); + } - /** - * Add {@code amount}, and create a custom exemplar with the given labels. - */ - void incWithExemplar(double amount, Labels labels); + /** Add {@code amount}, and create a custom exemplar with the given labels. */ + void incWithExemplar(double amount, Labels labels); - /** - * Subtract one. - */ - default void dec() { - inc(-1.0); - } + /** Subtract one. */ + default void dec() { + inc(-1.0); + } - /** - * Subtract {@code amount}. - */ - default void dec(double amount) { - inc(-amount); - } + /** Subtract {@code amount}. */ + default void dec(double amount) { + inc(-amount); + } - /** - * Subtract one, and create a custom exemplar with the given labels. - */ - default void decWithExemplar(Labels labels) { - incWithExemplar(-1.0, labels); - } + /** Subtract one, and create a custom exemplar with the given labels. */ + default void decWithExemplar(Labels labels) { + incWithExemplar(-1.0, labels); + } - /** - * Subtract {@code amount}, and create a custom exemplar with the given labels. - */ - default void decWithExemplar(double amount, Labels labels) { - incWithExemplar(-amount, labels); - } + /** Subtract {@code amount}, and create a custom exemplar with the given labels. */ + default void decWithExemplar(double amount, Labels labels) { + incWithExemplar(-amount, labels); + } - /** - * Set the gauge to {@code value}. - */ - void set(double value); + /** Set the gauge to {@code value}. */ + void set(double value); - /** - * Get the current value. - */ - double get(); + /** Get the current value. */ + double get(); - /** - * Set the gauge to {@code value}, and create a custom exemplar with the given labels. - */ - void setWithExemplar(double value, Labels labels); + /** Set the gauge to {@code value}, and create a custom exemplar with the given labels. */ + void setWithExemplar(double value, Labels labels); - /** - * {@inheritDoc} - */ - default Timer startTimer() { - return new Timer(this::set); - } + /** {@inheritDoc} */ + default Timer startTimer() { + return new Timer(this::set); + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/StateSetDataPoint.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/StateSetDataPoint.java index 7dd8f68a4..61b458ba8 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/StateSetDataPoint.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/StateSetDataPoint.java @@ -2,32 +2,37 @@ /** * Represents a single StateSet data point. - *

- * See JavaDoc of {@link CounterDataPoint} on how using data points directly can improve performance. + * + *

See JavaDoc of {@link CounterDataPoint} on how using data points directly can improve + * performance. */ public interface StateSetDataPoint extends DataPoint { - /** - * {@code state} must be one of the states from when the {@code StateSet} was created with {@link io.prometheus.metrics.core.metrics.StateSet.Builder#states(String...)}. - */ - void setTrue(String state); + /** + * {@code state} must be one of the states from when the {@code StateSet} was created with {@link + * io.prometheus.metrics.core.metrics.StateSet.Builder#states(String...)}. + */ + void setTrue(String state); - /** - * {@code state} must be one of the states from when the {@code StateSet} was created with {@link io.prometheus.metrics.core.metrics.StateSet.Builder#states(String...)}. - */ - void setFalse(String state); + /** + * {@code state} must be one of the states from when the {@code StateSet} was created with {@link + * io.prometheus.metrics.core.metrics.StateSet.Builder#states(String...)}. + */ + void setFalse(String state); - /** - * {@code state} must be one of the states from when the {@code StateSet} was created with {@link io.prometheus.metrics.core.metrics.StateSet.Builder#states(Class)}. - */ - default void setTrue(Enum state) { - setTrue(state.toString()); - } + /** + * {@code state} must be one of the states from when the {@code StateSet} was created with {@link + * io.prometheus.metrics.core.metrics.StateSet.Builder#states(Class)}. + */ + default void setTrue(Enum state) { + setTrue(state.toString()); + } - /** - * {@code state} must be one of the states from when the {@code StateSet} was created with {@link io.prometheus.metrics.core.metrics.StateSet.Builder#states(Class)}. - */ - default void setFalse(Enum state) { - setFalse(state.toString()); - } + /** + * {@code state} must be one of the states from when the {@code StateSet} was created with {@link + * io.prometheus.metrics.core.metrics.StateSet.Builder#states(Class)}. + */ + default void setFalse(Enum state) { + setFalse(state.toString()); + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/Timer.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/Timer.java index fd6461869..d860bfa08 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/Timer.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/Timer.java @@ -1,40 +1,37 @@ package io.prometheus.metrics.core.datapoints; import io.prometheus.metrics.model.snapshots.Unit; - import java.io.Closeable; import java.util.function.DoubleConsumer; -/** - * Helper class for observing durations. - */ +/** Helper class for observing durations. */ public class Timer implements Closeable { - private final DoubleConsumer observeFunction; - private final long startTimeNanos = System.nanoTime(); + private final DoubleConsumer observeFunction; + private final long startTimeNanos = System.nanoTime(); - /** - * Constructor is package private. Use the {@link TimerApi} provided by the implementation of the {@link DataPoint}. - */ - Timer(DoubleConsumer observeFunction) { - this.observeFunction = observeFunction; - } + /** + * Constructor is package private. Use the {@link TimerApi} provided by the implementation of the + * {@link DataPoint}. + */ + Timer(DoubleConsumer observeFunction) { + this.observeFunction = observeFunction; + } - /** - * Records the observed duration in seconds since this {@code Timer} instance was created. - * @return the observed duration in seconds. 
- */ - public double observeDuration() { - double elapsed = Unit.nanosToSeconds(System.nanoTime() - startTimeNanos); - observeFunction.accept(elapsed); - return elapsed; - } + /** + * Records the observed duration in seconds since this {@code Timer} instance was created. + * + * @return the observed duration in seconds. + */ + public double observeDuration() { + double elapsed = Unit.nanosToSeconds(System.nanoTime() - startTimeNanos); + observeFunction.accept(elapsed); + return elapsed; + } - /** - * Same as {@link #observeDuration()}. - */ - @Override - public void close() { - observeDuration(); - } + /** Same as {@link #observeDuration()}. */ + @Override + public void close() { + observeDuration(); + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/TimerApi.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/TimerApi.java index 27245bbcb..d4266c04c 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/TimerApi.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/datapoints/TimerApi.java @@ -5,70 +5,75 @@ /** * Convenience API for timing durations. - *

- * Durations are recorded in seconds. The Prometheus instrumentation guidelines say: - * "Metrics must use base units (e.g. seconds, bytes) and leave converting them to something more readable to graphing tools". + * + *

Durations are recorded in seconds. The Prometheus instrumentation guidelines say: "Metrics + * must use base units (e.g. seconds, bytes) and leave converting them to something more readable to + * graphing tools". */ public interface TimerApi { - /** - * Start a {@code Timer}. Example: - *

{@code
-     * Histogram histogram = Histogram.builder()
-     *         .name("http_request_duration_seconds")
-     *         .help("HTTP request service time in seconds")
-     *         .unit(SECONDS)
-     *         .labelNames("method", "path")
-     *         .register();
-     *
-     * try (Timer timer = histogram.labelValues("GET", "/").startTimer()) {
-     *     // duration of this code block will be observed.
-     * }
-     * }
- * Durations are recorded in seconds. The Prometheus instrumentation guidelines say: - * "Metrics must use base units (e.g. seconds, bytes) and leave converting them to something more readable to graphing tools". - */ - Timer startTimer(); + /** + * Start a {@code Timer}. Example: + * + *
{@code
+   * Histogram histogram = Histogram.builder()
+   *         .name("http_request_duration_seconds")
+   *         .help("HTTP request service time in seconds")
+   *         .unit(SECONDS)
+   *         .labelNames("method", "path")
+   *         .register();
+   *
+   * try (Timer timer = histogram.labelValues("GET", "/").startTimer()) {
+   *     // duration of this code block will be observed.
+   * }
+   * }
+ * + * Durations are recorded in seconds. The Prometheus instrumentation guidelines say: "Metrics + * must use base units (e.g. seconds, bytes) and leave converting them to something more readable + * to graphing tools". + */ + Timer startTimer(); - /** - * Observe the duration of the {@code func} call. Example: - *
{@code
-     * Histogram histogram = Histogram.builder()
-     *         .name("request_duration_seconds")
-     *         .help("HTTP request service time in seconds")
-     *         .unit(SECONDS)
-     *         .labelNames("method", "path")
-     *         .register();
-     *
-     * histogram2.labelValues("GET", "/").time(() -> {
-     *     // duration of this code block will be observed.
-     * });
-     * }
- *

- * Durations are recorded in seconds. The Prometheus instrumentation guidelines say: - * "Metrics must use base units (e.g. seconds, bytes) and leave converting them to something more readable to graphing tools". - */ - default void time(Runnable func) { - try (Timer timer = startTimer()) { - func.run(); - } + /** + * Observe the duration of the {@code func} call. Example: + * + *

{@code
+   * Histogram histogram = Histogram.builder()
+   *         .name("request_duration_seconds")
+   *         .help("HTTP request service time in seconds")
+   *         .unit(SECONDS)
+   *         .labelNames("method", "path")
+   *         .register();
+   *
+   * histogram2.labelValues("GET", "/").time(() -> {
+   *     // duration of this code block will be observed.
+   * });
+   * }
+ * + *

Durations are recorded in seconds. The Prometheus instrumentation guidelines say: "Metrics + * must use base units (e.g. seconds, bytes) and leave converting them to something more readable + * to graphing tools". + */ + default void time(Runnable func) { + try (Timer timer = startTimer()) { + func.run(); } + } - /** - * Like {@link #time(Runnable)}, but returns the return value of {@code func}. - */ - default T time(Supplier func) { - try (Timer timer = startTimer()) { - return func.get(); - } + /** Like {@link #time(Runnable)}, but returns the return value of {@code func}. */ + default T time(Supplier func) { + try (Timer timer = startTimer()) { + return func.get(); } + } - /** - * Like {@link #time(Supplier)}, but {@code func} may throw a checked {@code Exception}. - */ - default T timeChecked(Callable func) throws Exception { - try (Timer timer = startTimer()) { - return func.call(); - } + /** Like {@link #time(Supplier)}, but {@code func} may throw a checked {@code Exception}. */ + default T timeChecked(Callable func) throws Exception { + try (Timer timer = startTimer()) { + return func.call(); } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSampler.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSampler.java index 7c46c244d..b606d5239 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSampler.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSampler.java @@ -1,11 +1,10 @@ package io.prometheus.metrics.core.exemplars; -import io.prometheus.metrics.tracer.common.SpanContext; +import io.prometheus.metrics.core.util.Scheduler; import io.prometheus.metrics.model.snapshots.Exemplar; import io.prometheus.metrics.model.snapshots.Exemplars; import io.prometheus.metrics.model.snapshots.Labels; -import io.prometheus.metrics.core.util.Scheduler; - +import io.prometheus.metrics.tracer.common.SpanContext; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -14,330 +13,346 @@ /** * The ExemplarSampler selects Spans as exemplars. - *

- * There are two types of Exemplars: Regular exemplars are sampled implicitly if a supported tracing - * library is detected. Custom exemplars are provided explicitly in code, for example if a developer - * wants to make sure an Exemplar is created for a specific code path. - *

- * Spans will be marked as being an Exemplar by calling {@link SpanContext#markCurrentSpanAsExemplar()}. - * The tracer implementation should set a Span attribute to mark the current Span as an Exemplar. - * This attribute can be used by a trace sampling algorithm to make sure traces with Exemplars are sampled. - *

- * The ExemplarSample is rate-limited, so only a small fraction of Spans will be marked as Exemplars in - * an application with a large number of requests. - *

- * See {@link ExemplarSamplerConfig} for configuration options. + * + *

There are two types of Exemplars: Regular exemplars are sampled implicitly if a supported + * tracing library is detected. Custom exemplars are provided explicitly in code, for example if a + * developer wants to make sure an Exemplar is created for a specific code path. + * + *

Spans will be marked as being an Exemplar by calling {@link + * SpanContext#markCurrentSpanAsExemplar()}. The tracer implementation should set a Span attribute + * to mark the current Span as an Exemplar. This attribute can be used by a trace sampling algorithm + * to make sure traces with Exemplars are sampled. + * + *

The ExemplarSampler is rate-limited, so only a small fraction of Spans will be marked as + * Exemplars in an application with a large number of requests. + * + *
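In code, regular exemplars are sampled when a metric calls {@code observe(value)}, while custom + * exemplars are passed in explicitly via {@code observeWithExemplar(value, labels)}. + * + *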

See {@link ExemplarSamplerConfig} for configuration options. */ public class ExemplarSampler { - private final ExemplarSamplerConfig config; - private final Exemplar[] exemplars; - private final Exemplar[] customExemplars; // Separate from exemplars, because we don't want custom exemplars - // to be overwritten by automatic exemplar sampling. exemplars.lengt == customExemplars.length - private final AtomicBoolean acceptingNewExemplars = new AtomicBoolean(true); - private final AtomicBoolean acceptingNewCustomExemplars = new AtomicBoolean(true); - private final SpanContext spanContext; // may be null, in that case SpanContextSupplier.getSpanContext() is used. + private final ExemplarSamplerConfig config; + private final Exemplar[] exemplars; + private final Exemplar[] + customExemplars; // Separate from exemplars, because we don't want custom exemplars + // to be overwritten by automatic exemplar sampling. exemplars.lengt == customExemplars.length + private final AtomicBoolean acceptingNewExemplars = new AtomicBoolean(true); + private final AtomicBoolean acceptingNewCustomExemplars = new AtomicBoolean(true); + private final SpanContext + spanContext; // may be null, in that case SpanContextSupplier.getSpanContext() is used. - public ExemplarSampler(ExemplarSamplerConfig config) { - this(config, null); - } + public ExemplarSampler(ExemplarSamplerConfig config) { + this(config, null); + } - /** - * Constructor with an additional {code spanContext} argument. - * This is useful for testing, but may also be useful in some production scenarios. - * If {@code spanContext != null} that spanContext is used and - * {@link io.prometheus.metrics.tracer.initializer.SpanContextSupplier SpanContextSupplier} is not used. - * If {@code spanContext == null} - * {@link io.prometheus.metrics.tracer.initializer.SpanContextSupplier#getSpanContext() SpanContextSupplier.getSpanContext()} - * is called to find a span context. - */ - public ExemplarSampler(ExemplarSamplerConfig config, SpanContext spanContext) { - this.config = config; - this.exemplars = new Exemplar[config.getNumberOfExemplars()]; - this.customExemplars = new Exemplar[exemplars.length]; - this.spanContext = spanContext; - } + /** + * Constructor with an additional {code spanContext} argument. This is useful for testing, but may + * also be useful in some production scenarios. If {@code spanContext != null} that spanContext is + * used and {@link io.prometheus.metrics.tracer.initializer.SpanContextSupplier + * SpanContextSupplier} is not used. If {@code spanContext == null} {@link + * io.prometheus.metrics.tracer.initializer.SpanContextSupplier#getSpanContext() + * SpanContextSupplier.getSpanContext()} is called to find a span context. 
+ */ + public ExemplarSampler(ExemplarSamplerConfig config, SpanContext spanContext) { + this.config = config; + this.exemplars = new Exemplar[config.getNumberOfExemplars()]; + this.customExemplars = new Exemplar[exemplars.length]; + this.spanContext = spanContext; + } - public Exemplars collect() { - // this may run in parallel with observe() - long now = System.currentTimeMillis(); - List result = new ArrayList<>(exemplars.length); - for (int i = 0; i < customExemplars.length; i++) { - Exemplar exemplar = customExemplars[i]; - if (exemplar != null) { - if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { - customExemplars[i] = null; - } else { - result.add(exemplar); - } - } + public Exemplars collect() { + // this may run in parallel with observe() + long now = System.currentTimeMillis(); + List result = new ArrayList<>(exemplars.length); + for (int i = 0; i < customExemplars.length; i++) { + Exemplar exemplar = customExemplars[i]; + if (exemplar != null) { + if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { + customExemplars[i] = null; + } else { + result.add(exemplar); } - for (int i = 0; i < exemplars.length && result.size() < exemplars.length; i++) { - Exemplar exemplar = exemplars[i]; - if (exemplar != null) { - if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { - exemplars[i] = null; - } else { - result.add(exemplar); - } - } + } + } + for (int i = 0; i < exemplars.length && result.size() < exemplars.length; i++) { + Exemplar exemplar = exemplars[i]; + if (exemplar != null) { + if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { + exemplars[i] = null; + } else { + result.add(exemplar); } - return Exemplars.of(result); + } } + return Exemplars.of(result); + } - public void reset() { - for (int i = 0; i < exemplars.length; i++) { - exemplars[i] = null; - customExemplars[i] = null; - } + public void reset() { + for (int i = 0; i < exemplars.length; i++) { + exemplars[i] = null; + customExemplars[i] = null; } + } - public void observe(double value) { - if (!acceptingNewExemplars.get()) { - return; // This is the hot path in a high-throughput application and should be as efficient as possible. - } - rateLimitedObserve(acceptingNewExemplars, value, exemplars, () -> doObserve(value)); + public void observe(double value) { + if (!acceptingNewExemplars.get()) { + return; // This is the hot path in a high-throughput application and should be as efficient as + // possible. } + rateLimitedObserve(acceptingNewExemplars, value, exemplars, () -> doObserve(value)); + } - public void observeWithExemplar(double value, Labels labels) { - if (!acceptingNewCustomExemplars.get()) { - return; // This is the hot path in a high-throughput application and should be as efficient as possible. - } - rateLimitedObserve(acceptingNewCustomExemplars, value, customExemplars, () -> doObserveWithExemplar(value, labels)); + public void observeWithExemplar(double value, Labels labels) { + if (!acceptingNewCustomExemplars.get()) { + return; // This is the hot path in a high-throughput application and should be as efficient as + // possible. 
} + rateLimitedObserve( + acceptingNewCustomExemplars, + value, + customExemplars, + () -> doObserveWithExemplar(value, labels)); + } - private long doObserve(double value) { - if (exemplars.length == 1) { - return doObserveSingleExemplar(value); - } else if (config.getHistogramClassicUpperBounds() != null) { - return doObserveWithUpperBounds(value); - } else { - return doObserveWithoutUpperBounds(value); - } + private long doObserve(double value) { + if (exemplars.length == 1) { + return doObserveSingleExemplar(value); + } else if (config.getHistogramClassicUpperBounds() != null) { + return doObserveWithUpperBounds(value); + } else { + return doObserveWithoutUpperBounds(value); } + } - private long doObserveSingleExemplar(double value) { - long now = System.currentTimeMillis(); - Exemplar current = exemplars[0]; - if (current == null || now - current.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { - return updateExemplar(0, value, now); - } - return 0; + private long doObserveSingleExemplar(double value) { + long now = System.currentTimeMillis(); + Exemplar current = exemplars[0]; + if (current == null + || now - current.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { + return updateExemplar(0, value, now); } + return 0; + } - private long doObserveWithUpperBounds(double value) { - long now = System.currentTimeMillis(); - double[] upperBounds = config.getHistogramClassicUpperBounds(); - for (int i = 0; i < upperBounds.length; i++) { - if (value <= upperBounds[i]) { - Exemplar previous = exemplars[i]; - if (previous == null || now - previous.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { - return updateExemplar(i, value, now); - } else { - return 0; - } - } + private long doObserveWithUpperBounds(double value) { + long now = System.currentTimeMillis(); + double[] upperBounds = config.getHistogramClassicUpperBounds(); + for (int i = 0; i < upperBounds.length; i++) { + if (value <= upperBounds[i]) { + Exemplar previous = exemplars[i]; + if (previous == null + || now - previous.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { + return updateExemplar(i, value, now); + } else { + return 0; } - return 0; // will never happen, as upperBounds contains +Inf + } } + return 0; // will never happen, as upperBounds contains +Inf + } - private long doObserveWithoutUpperBounds(double value) { - final long now = System.currentTimeMillis(); - Exemplar smallest = null; - int smallestIndex = -1; - Exemplar largest = null; - int largestIndex = -1; - int nullIndex = -1; - for (int i = exemplars.length - 1; i >= 0; i--) { - Exemplar exemplar = exemplars[i]; - if (exemplar == null) { - nullIndex = i; - } else if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { - exemplars[i] = null; - nullIndex = i; - } else { - if (smallest == null || exemplar.getValue() < smallest.getValue()) { - smallest = exemplar; - smallestIndex = i; - } - if (largest == null || exemplar.getValue() > largest.getValue()) { - largest = exemplar; - largestIndex = i; - } - } - } - if (nullIndex >= 0) { - return updateExemplar(nullIndex, value, now); - } - if (now - smallest.getTimestampMillis() > config.getMinRetentionPeriodMillis() && value < smallest.getValue()) { - return updateExemplar(smallestIndex, value, now); - } - if (now - largest.getTimestampMillis() > config.getMinRetentionPeriodMillis() && value > largest.getValue()) { - return updateExemplar(largestIndex, value, now); - } - long oldestTimestamp = 0; - int oldestIndex = -1; - for (int i = 0; 
i < exemplars.length; i++) { - Exemplar exemplar = exemplars[i]; - if (exemplar != null && exemplar != smallest && exemplar != largest) { - if (oldestTimestamp == 0 || exemplar.getTimestampMillis() < oldestTimestamp) { - oldestTimestamp = exemplar.getTimestampMillis(); - oldestIndex = i; - } - } + private long doObserveWithoutUpperBounds(double value) { + final long now = System.currentTimeMillis(); + Exemplar smallest = null; + int smallestIndex = -1; + Exemplar largest = null; + int largestIndex = -1; + int nullIndex = -1; + for (int i = exemplars.length - 1; i >= 0; i--) { + Exemplar exemplar = exemplars[i]; + if (exemplar == null) { + nullIndex = i; + } else if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { + exemplars[i] = null; + nullIndex = i; + } else { + if (smallest == null || exemplar.getValue() < smallest.getValue()) { + smallest = exemplar; + smallestIndex = i; } - if (oldestIndex != -1 && now - oldestTimestamp > config.getMinRetentionPeriodMillis()) { - return updateExemplar(oldestIndex, value, now); + if (largest == null || exemplar.getValue() > largest.getValue()) { + largest = exemplar; + largestIndex = i; } - return 0; + } } - - // Returns the timestamp of the newly added Exemplar (which is System.currentTimeMillis()) - // or 0 if no Exemplar was added. - private long doObserveWithExemplar(double amount, Labels labels) { - if (customExemplars.length == 1) { - return doObserveSingleExemplar(amount, labels); - } else if (config.getHistogramClassicUpperBounds() != null) { - return doObserveWithExemplarWithUpperBounds(amount, labels); - } else { - return doObserveWithExemplarWithoutUpperBounds(amount, labels); + if (nullIndex >= 0) { + return updateExemplar(nullIndex, value, now); + } + if (now - smallest.getTimestampMillis() > config.getMinRetentionPeriodMillis() + && value < smallest.getValue()) { + return updateExemplar(smallestIndex, value, now); + } + if (now - largest.getTimestampMillis() > config.getMinRetentionPeriodMillis() + && value > largest.getValue()) { + return updateExemplar(largestIndex, value, now); + } + long oldestTimestamp = 0; + int oldestIndex = -1; + for (int i = 0; i < exemplars.length; i++) { + Exemplar exemplar = exemplars[i]; + if (exemplar != null && exemplar != smallest && exemplar != largest) { + if (oldestTimestamp == 0 || exemplar.getTimestampMillis() < oldestTimestamp) { + oldestTimestamp = exemplar.getTimestampMillis(); + oldestIndex = i; } + } } + if (oldestIndex != -1 && now - oldestTimestamp > config.getMinRetentionPeriodMillis()) { + return updateExemplar(oldestIndex, value, now); + } + return 0; + } - private long doObserveSingleExemplar(double amount, Labels labels) { - long now = System.currentTimeMillis(); - Exemplar current = customExemplars[0]; - if (current == null || now - current.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { - return updateCustomExemplar(0, amount, labels, now); - } - return 0; + // Returns the timestamp of the newly added Exemplar (which is System.currentTimeMillis()) + // or 0 if no Exemplar was added. 
+ private long doObserveWithExemplar(double amount, Labels labels) { + if (customExemplars.length == 1) { + return doObserveSingleExemplar(amount, labels); + } else if (config.getHistogramClassicUpperBounds() != null) { + return doObserveWithExemplarWithUpperBounds(amount, labels); + } else { + return doObserveWithExemplarWithoutUpperBounds(amount, labels); } + } - private long doObserveWithExemplarWithUpperBounds(double value, Labels labels) { - long now = System.currentTimeMillis(); - double[] upperBounds = config.getHistogramClassicUpperBounds(); - for (int i = 0; i < upperBounds.length; i++) { - if (value <= upperBounds[i]) { - Exemplar previous = customExemplars[i]; - if (previous == null || now - previous.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { - return updateCustomExemplar(i, value, labels, now); - } else { - return 0; - } - } - } - return 0; // will never happen, as upperBounds contains +Inf + private long doObserveSingleExemplar(double amount, Labels labels) { + long now = System.currentTimeMillis(); + Exemplar current = customExemplars[0]; + if (current == null + || now - current.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { + return updateCustomExemplar(0, amount, labels, now); } + return 0; + } - private long doObserveWithExemplarWithoutUpperBounds(double amount, Labels labels) { - final long now = System.currentTimeMillis(); - int nullPos = -1; - int oldestPos = -1; - Exemplar oldest = null; - for (int i = customExemplars.length - 1; i >= 0; i--) { - Exemplar exemplar = customExemplars[i]; - if (exemplar == null) { - nullPos = i; - } else if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { - customExemplars[i] = null; - nullPos = i; - } else { - if (oldest == null || exemplar.getTimestampMillis() < oldest.getTimestampMillis()) { - oldest = exemplar; - oldestPos = i; - } - } - } - if (nullPos != -1) { - return updateCustomExemplar(nullPos, amount, labels, now); - } else if (now - oldest.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { - return updateCustomExemplar(oldestPos, amount, labels, now); + private long doObserveWithExemplarWithUpperBounds(double value, Labels labels) { + long now = System.currentTimeMillis(); + double[] upperBounds = config.getHistogramClassicUpperBounds(); + for (int i = 0; i < upperBounds.length; i++) { + if (value <= upperBounds[i]) { + Exemplar previous = customExemplars[i]; + if (previous == null + || now - previous.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { + return updateCustomExemplar(i, value, labels, now); } else { - return 0; + return 0; } + } } + return 0; // will never happen, as upperBounds contains +Inf + } - /** - * Observing requires a system call to {@link System#currentTimeMillis()}, - * and it requires iterating over the existing exemplars to check if one of the existing - * exemplars can be replaced. - *

- * To avoid performance issues, we rate limit observing exemplars to - * {@link ExemplarSamplerConfig#getSampleIntervalMillis()} milliseconds. - */ - private void rateLimitedObserve(AtomicBoolean accepting, double value, Exemplar[] exemplars, LongSupplier observeFunc) { - if (Double.isNaN(value)) { - return; - } - if (!accepting.compareAndSet(true, false)) { - return; + private long doObserveWithExemplarWithoutUpperBounds(double amount, Labels labels) { + final long now = System.currentTimeMillis(); + int nullPos = -1; + int oldestPos = -1; + Exemplar oldest = null; + for (int i = customExemplars.length - 1; i >= 0; i--) { + Exemplar exemplar = customExemplars[i]; + if (exemplar == null) { + nullPos = i; + } else if (now - exemplar.getTimestampMillis() > config.getMaxRetentionPeriodMillis()) { + customExemplars[i] = null; + nullPos = i; + } else { + if (oldest == null || exemplar.getTimestampMillis() < oldest.getTimestampMillis()) { + oldest = exemplar; + oldestPos = i; } - // observeFunc returns the current timestamp or 0 if no Exemplar was added. - long now = observeFunc.getAsLong(); - long sleepTime = now == 0 ? config.getSampleIntervalMillis() : durationUntilNextExemplarExpires(now); - Scheduler.schedule(() -> accepting.compareAndSet(false, true), sleepTime, TimeUnit.MILLISECONDS); + } } + if (nullPos != -1) { + return updateCustomExemplar(nullPos, amount, labels, now); + } else if (now - oldest.getTimestampMillis() > config.getMinRetentionPeriodMillis()) { + return updateCustomExemplar(oldestPos, amount, labels, now); + } else { + return 0; + } + } - private long durationUntilNextExemplarExpires(long now) { - long oldestTimestamp = now; - for (Exemplar exemplar : exemplars) { - if (exemplar == null) { - return config.getSampleIntervalMillis(); - } else if (exemplar.getTimestampMillis() < oldestTimestamp) { - oldestTimestamp = exemplar.getTimestampMillis(); - } - } - long oldestAge = now - oldestTimestamp; - if (oldestAge < config.getMinRetentionPeriodMillis()) { - return config.getMinRetentionPeriodMillis() - oldestAge; - } + /** + * Observing requires a system call to {@link System#currentTimeMillis()}, and it requires + * iterating over the existing exemplars to check if one of the existing exemplars can be + * replaced. + * + *

To avoid performance issues, we rate limit observing exemplars to {@link + * ExemplarSamplerConfig#getSampleIntervalMillis()} milliseconds. + */ + private void rateLimitedObserve( + AtomicBoolean accepting, double value, Exemplar[] exemplars, LongSupplier observeFunc) { + if (Double.isNaN(value)) { + return; + } + if (!accepting.compareAndSet(true, false)) { + return; + } + // observeFunc returns the current timestamp or 0 if no Exemplar was added. + long now = observeFunc.getAsLong(); + long sleepTime = + now == 0 ? config.getSampleIntervalMillis() : durationUntilNextExemplarExpires(now); + Scheduler.schedule( + () -> accepting.compareAndSet(false, true), sleepTime, TimeUnit.MILLISECONDS); + } + + private long durationUntilNextExemplarExpires(long now) { + long oldestTimestamp = now; + for (Exemplar exemplar : exemplars) { + if (exemplar == null) { return config.getSampleIntervalMillis(); + } else if (exemplar.getTimestampMillis() < oldestTimestamp) { + oldestTimestamp = exemplar.getTimestampMillis(); + } + } + long oldestAge = now - oldestTimestamp; + if (oldestAge < config.getMinRetentionPeriodMillis()) { + return config.getMinRetentionPeriodMillis() - oldestAge; } + return config.getSampleIntervalMillis(); + } - private long updateCustomExemplar(int index, double value, Labels labels, long now) { - if (!labels.contains(Exemplar.TRACE_ID) && !labels.contains(Exemplar.SPAN_ID)) { - labels = labels.merge(doSampleExemplar()); - } - customExemplars[index] = Exemplar.builder() - .value(value) - .labels(labels) - .timestampMillis(now) - .build(); - return now; + private long updateCustomExemplar(int index, double value, Labels labels, long now) { + if (!labels.contains(Exemplar.TRACE_ID) && !labels.contains(Exemplar.SPAN_ID)) { + labels = labels.merge(doSampleExemplar()); } + customExemplars[index] = + Exemplar.builder().value(value).labels(labels).timestampMillis(now).build(); + return now; + } - private long updateExemplar(int index, double value, long now) { - Labels traceLabels = doSampleExemplar(); - if (!traceLabels.isEmpty()) { - exemplars[index] = Exemplar.builder() - .value(value) - .labels(traceLabels) - .timestampMillis(now) - .build(); - return now; - } else { - return 0; - } + private long updateExemplar(int index, double value, long now) { + Labels traceLabels = doSampleExemplar(); + if (!traceLabels.isEmpty()) { + exemplars[index] = + Exemplar.builder().value(value).labels(traceLabels).timestampMillis(now).build(); + return now; + } else { + return 0; } + } - private Labels doSampleExemplar() { - // Using the qualified name so that Micrometer can exclude the dependency on prometheus-metrics-tracer-initializer - // as they provide their own implementation of SpanContextSupplier. - // If we had an import statement for SpanContextSupplier the dependency would be needed in any case. - SpanContext spanContext = this.spanContext != null ? 
this.spanContext : io.prometheus.metrics.tracer.initializer.SpanContextSupplier.getSpanContext(); - try { - if (spanContext != null) { - if (spanContext.isCurrentSpanSampled()) { - String spanId = spanContext.getCurrentSpanId(); - String traceId = spanContext.getCurrentTraceId(); - if (spanId != null && traceId != null) { - spanContext.markCurrentSpanAsExemplar(); - return Labels.of(Exemplar.TRACE_ID, traceId, Exemplar.SPAN_ID, spanId); - } - } - } - } catch (NoClassDefFoundError ignored) { + private Labels doSampleExemplar() { + // Using the qualified name so that Micrometer can exclude the dependency on + // prometheus-metrics-tracer-initializer + // as they provide their own implementation of SpanContextSupplier. + // If we had an import statement for SpanContextSupplier the dependency would be needed in any + // case. + SpanContext spanContext = + this.spanContext != null + ? this.spanContext + : io.prometheus.metrics.tracer.initializer.SpanContextSupplier.getSpanContext(); + try { + if (spanContext != null) { + if (spanContext.isCurrentSpanSampled()) { + String spanId = spanContext.getCurrentSpanId(); + String traceId = spanContext.getCurrentTraceId(); + if (spanId != null && traceId != null) { + spanContext.markCurrentSpanAsExemplar(); + return Labels.of(Exemplar.TRACE_ID, traceId, Exemplar.SPAN_ID, spanId); + } } - return Labels.EMPTY; + } + } catch (NoClassDefFoundError ignored) { } + return Labels.EMPTY; + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfig.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfig.java index 9712f29df..7acfbec22 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfig.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfig.java @@ -2,134 +2,143 @@ import io.prometheus.metrics.config.ExemplarsProperties; import io.prometheus.metrics.config.PrometheusProperties; - import java.util.concurrent.TimeUnit; public class ExemplarSamplerConfig { - /** - * See {@link ExemplarsProperties#getMinRetentionPeriodSeconds()} - */ - public static final int DEFAULT_MIN_RETENTION_PERIOD_SECONDS = 7; - - /** - * See {@link ExemplarsProperties#getMaxRetentionPeriodSeconds()} - */ - public static final int DEFAULT_MAX_RETENTION_PERIOD_SECONDS = 70; - - /** - * See {@link ExemplarsProperties#getSampleIntervalMilliseconds()} - */ - private static final int DEFAULT_SAMPLE_INTERVAL_MILLISECONDS = 90; - - private final long minRetentionPeriodMillis; - private final long maxRetentionPeriodMillis; - private final long sampleIntervalMillis; - private final double[] histogramClassicUpperBounds; // null unless it's a classic histogram - private final int numberOfExemplars; // if histogramClassicUpperBounds != null, then numberOfExemplars == histogramClassicUpperBounds.length - - /** - * Constructor for all metric types except classic histograms. - * - * @param properties See {@link PrometheusProperties#getExemplarProperties()}. - * @param numberOfExemplars Counters have 1 Exemplar, native histograms and summaries have 4 Exemplars by default. - * For classic histogram use {@link #ExemplarSamplerConfig(ExemplarsProperties, double[])}. - */ - public ExemplarSamplerConfig(ExemplarsProperties properties, int numberOfExemplars) { - this(properties, numberOfExemplars, null); - } - - /** - * Constructor for classic histogram metrics. 
- * - * @param properties See {@link PrometheusProperties#getExemplarProperties()}. - * @param histogramClassicUpperBounds the ExemplarSampler will provide one Exemplar per histogram bucket. - * Must be sorted, and must include the +Inf bucket. - */ - public ExemplarSamplerConfig(ExemplarsProperties properties, double[] histogramClassicUpperBounds) { - this(properties, histogramClassicUpperBounds.length, histogramClassicUpperBounds); + /** See {@link ExemplarsProperties#getMinRetentionPeriodSeconds()} */ + public static final int DEFAULT_MIN_RETENTION_PERIOD_SECONDS = 7; + + /** See {@link ExemplarsProperties#getMaxRetentionPeriodSeconds()} */ + public static final int DEFAULT_MAX_RETENTION_PERIOD_SECONDS = 70; + + /** See {@link ExemplarsProperties#getSampleIntervalMilliseconds()} */ + private static final int DEFAULT_SAMPLE_INTERVAL_MILLISECONDS = 90; + + private final long minRetentionPeriodMillis; + private final long maxRetentionPeriodMillis; + private final long sampleIntervalMillis; + private final double[] histogramClassicUpperBounds; // null unless it's a classic histogram + private final int + numberOfExemplars; // if histogramClassicUpperBounds != null, then numberOfExemplars == + + // histogramClassicUpperBounds.length + + /** + * Constructor for all metric types except classic histograms. + * + * @param properties See {@link PrometheusProperties#getExemplarProperties()}. + * @param numberOfExemplars Counters have 1 Exemplar, native histograms and summaries have 4 + * Exemplars by default. For classic histogram use {@link + * #ExemplarSamplerConfig(ExemplarsProperties, double[])}. + */ + public ExemplarSamplerConfig(ExemplarsProperties properties, int numberOfExemplars) { + this(properties, numberOfExemplars, null); + } + + /** + * Constructor for classic histogram metrics. + * + * @param properties See {@link PrometheusProperties#getExemplarProperties()}. + * @param histogramClassicUpperBounds the ExemplarSampler will provide one Exemplar per histogram + * bucket. Must be sorted, and must include the +Inf bucket. 
+ */ + public ExemplarSamplerConfig( + ExemplarsProperties properties, double[] histogramClassicUpperBounds) { + this(properties, histogramClassicUpperBounds.length, histogramClassicUpperBounds); + } + + private ExemplarSamplerConfig( + ExemplarsProperties properties, int numberOfExemplars, double[] histogramClassicUpperBounds) { + this( + TimeUnit.SECONDS.toMillis( + getOrDefault( + properties.getMinRetentionPeriodSeconds(), DEFAULT_MIN_RETENTION_PERIOD_SECONDS)), + TimeUnit.SECONDS.toMillis( + getOrDefault( + properties.getMaxRetentionPeriodSeconds(), DEFAULT_MAX_RETENTION_PERIOD_SECONDS)), + getOrDefault( + properties.getSampleIntervalMilliseconds(), DEFAULT_SAMPLE_INTERVAL_MILLISECONDS), + numberOfExemplars, + histogramClassicUpperBounds); + } + + ExemplarSamplerConfig( + long minRetentionPeriodMillis, + long maxRetentionPeriodMillis, + long sampleIntervalMillis, + int numberOfExemplars, + double[] histogramClassicUpperBounds) { + this.minRetentionPeriodMillis = minRetentionPeriodMillis; + this.maxRetentionPeriodMillis = maxRetentionPeriodMillis; + this.sampleIntervalMillis = sampleIntervalMillis; + this.numberOfExemplars = numberOfExemplars; + this.histogramClassicUpperBounds = histogramClassicUpperBounds; + validate(); + } + + private void validate() { + if (minRetentionPeriodMillis <= 0) { + throw new IllegalArgumentException( + minRetentionPeriodMillis + ": minRetentionPeriod must be > 0."); } - - private ExemplarSamplerConfig(ExemplarsProperties properties, int numberOfExemplars, double[] histogramClassicUpperBounds) { - this( - TimeUnit.SECONDS.toMillis(getOrDefault(properties.getMinRetentionPeriodSeconds(), DEFAULT_MIN_RETENTION_PERIOD_SECONDS)), - TimeUnit.SECONDS.toMillis(getOrDefault(properties.getMaxRetentionPeriodSeconds(), DEFAULT_MAX_RETENTION_PERIOD_SECONDS)), - getOrDefault(properties.getSampleIntervalMilliseconds(), DEFAULT_SAMPLE_INTERVAL_MILLISECONDS), - numberOfExemplars, - histogramClassicUpperBounds); - } - - ExemplarSamplerConfig(long minRetentionPeriodMillis, long maxRetentionPeriodMillis, long sampleIntervalMillis, int numberOfExemplars, double[] histogramClassicUpperBounds) { - this.minRetentionPeriodMillis = minRetentionPeriodMillis; - this.maxRetentionPeriodMillis = maxRetentionPeriodMillis; - this.sampleIntervalMillis = sampleIntervalMillis; - this.numberOfExemplars = numberOfExemplars; - this.histogramClassicUpperBounds = histogramClassicUpperBounds; - validate(); + if (maxRetentionPeriodMillis <= 0) { + throw new IllegalArgumentException( + maxRetentionPeriodMillis + ": maxRetentionPeriod must be > 0."); } - - private void validate() { - if (minRetentionPeriodMillis <= 0) { - throw new IllegalArgumentException(minRetentionPeriodMillis + ": minRetentionPeriod must be > 0."); - } - if (maxRetentionPeriodMillis <= 0) { - throw new IllegalArgumentException(maxRetentionPeriodMillis + ": maxRetentionPeriod must be > 0."); - } - if (histogramClassicUpperBounds != null) { - if (histogramClassicUpperBounds.length == 0 || histogramClassicUpperBounds[histogramClassicUpperBounds.length - 1] != Double.POSITIVE_INFINITY) { - throw new IllegalArgumentException("histogramClassicUpperBounds must contain the +Inf bucket."); - } - if (histogramClassicUpperBounds.length != numberOfExemplars) { - throw new IllegalArgumentException("histogramClassicUpperBounds.length must be equal to numberOfExemplars."); - } - double bound = histogramClassicUpperBounds[0]; - for (int i = 1; i < histogramClassicUpperBounds.length; i++) { - if (bound >= histogramClassicUpperBounds[i]) { - throw 
new IllegalArgumentException("histogramClassicUpperBounds must be sorted and must not contain duplicates."); - } - } - } - if (numberOfExemplars <= 0) { - throw new IllegalArgumentException(numberOfExemplars + ": numberOfExemplars must be > 0."); + if (histogramClassicUpperBounds != null) { + if (histogramClassicUpperBounds.length == 0 + || histogramClassicUpperBounds[histogramClassicUpperBounds.length - 1] + != Double.POSITIVE_INFINITY) { + throw new IllegalArgumentException( + "histogramClassicUpperBounds must contain the +Inf bucket."); + } + if (histogramClassicUpperBounds.length != numberOfExemplars) { + throw new IllegalArgumentException( + "histogramClassicUpperBounds.length must be equal to numberOfExemplars."); + } + double bound = histogramClassicUpperBounds[0]; + for (int i = 1; i < histogramClassicUpperBounds.length; i++) { + if (bound >= histogramClassicUpperBounds[i]) { + throw new IllegalArgumentException( + "histogramClassicUpperBounds must be sorted and must not contain duplicates."); } + } } - - private static T getOrDefault(T result, T defaultValue) { - return result != null ? result : defaultValue; - } - - /** - * May be {@code null}. - */ - public double[] getHistogramClassicUpperBounds() { - return histogramClassicUpperBounds; - } - - /** - * See {@link ExemplarsProperties#getMinRetentionPeriodSeconds()} - */ - public long getMinRetentionPeriodMillis() { - return minRetentionPeriodMillis; - } - - /** - * See {@link ExemplarsProperties#getMaxRetentionPeriodSeconds()} - */ - public long getMaxRetentionPeriodMillis() { - return maxRetentionPeriodMillis; - } - - /** - * See {@link ExemplarsProperties#getSampleIntervalMilliseconds()} - */ - public long getSampleIntervalMillis() { - return sampleIntervalMillis; - } - - /** - * Defaults: Counters have one Exemplar, native histograms and summaries have 4 Exemplars, classic histograms have one Exemplar per bucket. - */ - public int getNumberOfExemplars() { - return numberOfExemplars; + if (numberOfExemplars <= 0) { + throw new IllegalArgumentException(numberOfExemplars + ": numberOfExemplars must be > 0."); } + } + + private static T getOrDefault(T result, T defaultValue) { + return result != null ? result : defaultValue; + } + + /** May be {@code null}. */ + public double[] getHistogramClassicUpperBounds() { + return histogramClassicUpperBounds; + } + + /** See {@link ExemplarsProperties#getMinRetentionPeriodSeconds()} */ + public long getMinRetentionPeriodMillis() { + return minRetentionPeriodMillis; + } + + /** See {@link ExemplarsProperties#getMaxRetentionPeriodSeconds()} */ + public long getMaxRetentionPeriodMillis() { + return maxRetentionPeriodMillis; + } + + /** See {@link ExemplarsProperties#getSampleIntervalMilliseconds()} */ + public long getSampleIntervalMillis() { + return sampleIntervalMillis; + } + + /** + * Defaults: Counters have one Exemplar, native histograms and summaries have 4 Exemplars, classic + * histograms have one Exemplar per bucket. 
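A minimal construction sketch may make the validation rules and defaults above easier to follow. The wrapper class name is hypothetical; the constructors, getters, and default values are the ones shown in this diff, and the properties accessors are the ones used by the metric classes below.

import io.prometheus.metrics.config.ExemplarsProperties;
import io.prometheus.metrics.config.PrometheusProperties;
import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfig;

class ExemplarSamplerConfigSketch { // hypothetical, not part of the library
  static void example() {
    ExemplarsProperties props = PrometheusProperties.get().getExemplarProperties();

    // Counter-style config: a single exemplar. Retention and sample interval fall back
    // to the defaults above (7s min, 70s max, 90ms sample interval) unless the
    // properties override them.
    ExemplarSamplerConfig counterConfig = new ExemplarSamplerConfig(props, 1);

    // Classic-histogram-style config: one exemplar per bucket. The bounds must be
    // sorted, free of duplicates, and end with +Inf, otherwise validate() throws.
    double[] upperBounds = {0.1, 0.5, 1.0, Double.POSITIVE_INFINITY};
    ExemplarSamplerConfig histogramConfig = new ExemplarSamplerConfig(props, upperBounds);

    counterConfig.getSampleIntervalMillis(); // 90 unless overridden via properties
    histogramConfig.getNumberOfExemplars(); // 4, one per bucket
  }
}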
+ */ + public int getNumberOfExemplars() { + return numberOfExemplars; + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Buffer.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Buffer.java index 390a2bc2b..113a85d55 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Buffer.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Buffer.java @@ -1,7 +1,6 @@ package io.prometheus.metrics.core.metrics; import io.prometheus.metrics.model.snapshots.DataPointSnapshot; - import java.util.Arrays; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; @@ -10,75 +9,74 @@ /** * Metrics support concurrent write and scrape operations. - *

- * This is implemented by switching to a Buffer when the scrape starts, - * and applying the values from the buffer after the scrape ends. + * + *

This is implemented by switching to a Buffer when the scrape starts, and applying the values + * from the buffer after the scrape ends. */ class Buffer { - private static final long signBit = 1L << 63; - private final AtomicLong observationCount = new AtomicLong(0); - private double[] observationBuffer = new double[0]; - private int bufferPos = 0; - private boolean reset = false; - private final Object appendLock = new Object(); - private final Object runLock = new Object(); + private static final long signBit = 1L << 63; + private final AtomicLong observationCount = new AtomicLong(0); + private double[] observationBuffer = new double[0]; + private int bufferPos = 0; + private boolean reset = false; + private final Object appendLock = new Object(); + private final Object runLock = new Object(); - boolean append(double value) { - long count = observationCount.incrementAndGet(); - if ((count & signBit) == 0) { - return false; // sign bit not set -> buffer not active. - } else { - doAppend(value); - return true; - } + boolean append(double value) { + long count = observationCount.incrementAndGet(); + if ((count & signBit) == 0) { + return false; // sign bit not set -> buffer not active. + } else { + doAppend(value); + return true; } + } - private void doAppend(double amount) { - synchronized (appendLock) { - if (bufferPos >= observationBuffer.length) { - observationBuffer = Arrays.copyOf(observationBuffer, observationBuffer.length + 128); - } - observationBuffer[bufferPos] = amount; - bufferPos++; - } + private void doAppend(double amount) { + synchronized (appendLock) { + if (bufferPos >= observationBuffer.length) { + observationBuffer = Arrays.copyOf(observationBuffer, observationBuffer.length + 128); + } + observationBuffer[bufferPos] = amount; + bufferPos++; } + } - /** - * Must be called by the runnable in the run() method. - */ - void reset() { - reset = true; - } + /** Must be called by the runnable in the run() method. 
*/ + void reset() { + reset = true; + } - T run(Function complete, Supplier runnable, Consumer observeFunction) { - double[] buffer; - int bufferSize; - T result; - synchronized (runLock) { - Long count = observationCount.getAndAdd(signBit); - while (!complete.apply(count)) { - Thread.yield(); - } - result = runnable.get(); - int expectedBufferSize; - if (reset) { - expectedBufferSize = (int) ((observationCount.getAndSet(0) & ~signBit) - count); - reset = false; - } else { - expectedBufferSize = (int) (observationCount.addAndGet(signBit) - count); - } - while (bufferPos != expectedBufferSize) { - Thread.yield(); - } - buffer = observationBuffer; - bufferSize = bufferPos; - observationBuffer = new double[0]; - bufferPos = 0; - } - for (int i = 0; i < bufferSize; i++) { - observeFunction.accept(buffer[i]); - } - return result; + T run( + Function complete, Supplier runnable, Consumer observeFunction) { + double[] buffer; + int bufferSize; + T result; + synchronized (runLock) { + Long count = observationCount.getAndAdd(signBit); + while (!complete.apply(count)) { + Thread.yield(); + } + result = runnable.get(); + int expectedBufferSize; + if (reset) { + expectedBufferSize = (int) ((observationCount.getAndSet(0) & ~signBit) - count); + reset = false; + } else { + expectedBufferSize = (int) (observationCount.addAndGet(signBit) - count); + } + while (bufferPos != expectedBufferSize) { + Thread.yield(); + } + buffer = observationBuffer; + bufferSize = bufferPos; + observationBuffer = new double[0]; + bufferPos = 0; + } + for (int i = 0; i < bufferSize; i++) { + observeFunction.accept(buffer[i]); } + return result; + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CKMSQuantiles.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CKMSQuantiles.java index 22662c62e..c86f17c21 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CKMSQuantiles.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CKMSQuantiles.java @@ -6,20 +6,20 @@ // However, it has been heavily refactored in the meantime. /* - Copyright 2012 Andrew Wang (andrew@umbrant.com) +Copyright 2012 Andrew Wang (andrew@umbrant.com) - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 +http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - */ +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
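The sign-bit trick used by the Buffer class above can be shown in isolation. This sketch is illustration only, not library API; it merely mirrors the AtomicLong bookkeeping done by append() and run().

import java.util.concurrent.atomic.AtomicLong;

class SignBitSketch { // illustration only, not part of the library
  private static final long SIGN_BIT = 1L << 63;
  private final AtomicLong observationCount = new AtomicLong(0);

  // Writers do a single atomic increment; the sign bit tells them whether a scrape is
  // running (write into the buffer) or not (write into the live data point).
  boolean observationGoesToBuffer() {
    long count = observationCount.incrementAndGet();
    return (count & SIGN_BIT) != 0;
  }

  // The scrape sets the sign bit and remembers how many observations happened before it.
  long startScrape() {
    return observationCount.getAndAdd(SIGN_BIT);
  }

  // Adding the sign bit again overflows it away; the difference to startScrape()'s return
  // value is the number of observations that were diverted into the buffer meanwhile.
  long observationsDuringScrape(long countAtStart) {
    return observationCount.addAndGet(SIGN_BIT) - countAtStart;
  }
}

The point of the single counter is that the hot path (an observation) never takes a lock to decide which mode it is in.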
+*/ import java.util.Arrays; import java.util.Iterator; @@ -27,270 +27,248 @@ import java.util.ListIterator; /** - * Algorithm solving the "Targeted Quantile Problem" as described in - * "Effective Computation of Biased Quantiles over Data Streams" - * by Cormode, Korn, Muthukrishnan, and Srivastava. - * + * Algorithm solving the "Targeted Quantile Problem" as described in "Effective Computation of + * Biased Quantiles over Data Streams" by Cormode, Korn, Muthukrishnan, and Srivastava. */ final class CKMSQuantiles { - final Quantile[] quantiles; + final Quantile[] quantiles; - /** - * Total number of observations (not including those that are still in the buffer). - */ - int n = 0; + /** Total number of observations (not including those that are still in the buffer). */ + int n = 0; - /** - * List of sampled observations, ordered by Sample.value. - */ - final LinkedList samples = new LinkedList(); - - /** - * Compress is called every compressInterval inserts. - * Note that the buffer is flushed whenever get() is called, so we - * cannot just wait until the buffer is full before we call compress. - */ - private final int compressInterval = 128; - private int insertsSinceLastCompress = 0; + /** List of sampled observations, ordered by Sample.value. */ + final LinkedList samples = new LinkedList(); - /** - * Note that the buffer size could as well be less than the compressInterval. - * However, the buffer size should not be greater than the compressInterval, - * because the compressInterval is not respected in flush(), so if you want - * to compress more often than calling flush() that won't work. - */ - private final double[] buffer = new double[compressInterval]; - private int bufferPos = 0; + /** + * Compress is called every compressInterval inserts. Note that the buffer is flushed whenever + * get() is called, so we cannot just wait until the buffer is full before we call compress. + */ + private final int compressInterval = 128; - public CKMSQuantiles(Quantile... quantiles) { - if (quantiles.length == 0) { - throw new IllegalArgumentException("quantiles cannot be empty"); - } - this.quantiles = quantiles; - } + private int insertsSinceLastCompress = 0; - /** - * Add an observed value - */ - public void insert(double value) { - buffer[bufferPos++] = value; + /** + * Note that the buffer size could as well be less than the compressInterval. However, the buffer + * size should not be greater than the compressInterval, because the compressInterval is not + * respected in flush(), so if you want to compress more often than calling flush() that won't + * work. + */ + private final double[] buffer = new double[compressInterval]; - if (bufferPos == buffer.length) { - flush(); - } + private int bufferPos = 0; - if (++insertsSinceLastCompress == compressInterval) { - compress(); - insertsSinceLastCompress = 0; - } + public CKMSQuantiles(Quantile... quantiles) { + if (quantiles.length == 0) { + throw new IllegalArgumentException("quantiles cannot be empty"); } + this.quantiles = quantiles; + } - private void flush() { - Arrays.sort(buffer, 0, bufferPos); - insertBatch(buffer, bufferPos); - bufferPos = 0; - } + /** Add an observed value */ + public void insert(double value) { + buffer[bufferPos++] = value; - /** - * Inserts the elements from index 0 to index toIndex from the sortedBuffer. 
- */ - void insertBatch(double[] sortedBuffer, int toIndex) { - if (toIndex == 0) { - return; - } - ListIterator iterator = samples.listIterator(); - int i = 0; // position in buffer - int r = 0; // sum of g's left of the current sample - while (iterator.hasNext() && i < toIndex) { - Sample item = iterator.next(); - while (i < toIndex) { - if (sortedBuffer[i] > item.value) { - break; - } - insertBefore(iterator, sortedBuffer[i], r); - r++; // new item with g=1 was inserted before, so increment r - i++; - n++; - } - r += item.g; - } - while (i < toIndex) { - samples.add(new Sample(sortedBuffer[i], 0)); - i++; - n++; - } + if (bufferPos == buffer.length) { + flush(); } - private void insertBefore(ListIterator iterator, double value, int r) { - if (!iterator.hasPrevious()) { - samples.addFirst(new Sample(value, 0)); - } else { - iterator.previous(); - iterator.add(new Sample(value, f(r) - 1)); - iterator.next(); + if (++insertsSinceLastCompress == compressInterval) { + compress(); + insertsSinceLastCompress = 0; + } + } + + private void flush() { + Arrays.sort(buffer, 0, bufferPos); + insertBatch(buffer, bufferPos); + bufferPos = 0; + } + + /** Inserts the elements from index 0 to index toIndex from the sortedBuffer. */ + void insertBatch(double[] sortedBuffer, int toIndex) { + if (toIndex == 0) { + return; + } + ListIterator iterator = samples.listIterator(); + int i = 0; // position in buffer + int r = 0; // sum of g's left of the current sample + while (iterator.hasNext() && i < toIndex) { + Sample item = iterator.next(); + while (i < toIndex) { + if (sortedBuffer[i] > item.value) { + break; } + insertBefore(iterator, sortedBuffer[i], r); + r++; // new item with g=1 was inserted before, so increment r + i++; + n++; + } + r += item.g; } + while (i < toIndex) { + samples.add(new Sample(sortedBuffer[i], 0)); + i++; + n++; + } + } + + private void insertBefore(ListIterator iterator, double value, int r) { + if (!iterator.hasPrevious()) { + samples.addFirst(new Sample(value, 0)); + } else { + iterator.previous(); + iterator.add(new Sample(value, f(r) - 1)); + iterator.next(); + } + } - /** - * Get the estimated value at the specified quantile. - */ - public double get(double q) { - flush(); - - if (samples.size() == 0) { - return Double.NaN; - } + /** Get the estimated value at the specified quantile. */ + public double get(double q) { + flush(); - if (q == 0.0) { - return samples.getFirst().value; - } + if (samples.size() == 0) { + return Double.NaN; + } - if (q == 1.0) { - return samples.getLast().value; - } + if (q == 0.0) { + return samples.getFirst().value; + } - int r = 0; // sum of g's left of the current sample - int desiredRank = (int) Math.ceil(q * n); - int upperBound = desiredRank + f(desiredRank) / 2; - - ListIterator iterator = samples.listIterator(); - while (iterator.hasNext()) { - Sample sample = iterator.next(); - if (r + sample.g + sample.delta > upperBound) { - iterator.previous(); // roll back the item.next() above - if (iterator.hasPrevious()) { - Sample result = iterator.previous(); - return result.value; - } else { - return sample.value; - } - } - r += sample.g; - } - return samples.getLast().value; + if (q == 1.0) { + return samples.getLast().value; } - /** - * Error function, as in definition 5 of the paper. 
- */ - int f(int r) { - int minResult = Integer.MAX_VALUE; - for (Quantile q : quantiles) { - if (q.quantile == 0 || q.quantile == 1) { - continue; - } - int result; - // We had a numerical error here with the following example: - // quantile = 0.95, epsilon = 0.01, (n-r) = 30. - // The expected result of (2*0.01*30)/(1-0.95) is 12. The actual result is 11.99999999999999. - // To avoid running into these types of error we add 0.00000000001 before rounding down. - if (r >= q.quantile * n) { - result = (int) (q.v * r + 0.00000000001); - } else { - result = (int) (q.u * (n - r) + 0.00000000001); - } - if (result < minResult) { - minResult = result; - } + int r = 0; // sum of g's left of the current sample + int desiredRank = (int) Math.ceil(q * n); + int upperBound = desiredRank + f(desiredRank) / 2; + + ListIterator iterator = samples.listIterator(); + while (iterator.hasNext()) { + Sample sample = iterator.next(); + if (r + sample.g + sample.delta > upperBound) { + iterator.previous(); // roll back the item.next() above + if (iterator.hasPrevious()) { + Sample result = iterator.previous(); + return result.value; + } else { + return sample.value; } - return Math.max(minResult, 1); + } + r += sample.g; } + return samples.getLast().value; + } + + /** Error function, as in definition 5 of the paper. */ + int f(int r) { + int minResult = Integer.MAX_VALUE; + for (Quantile q : quantiles) { + if (q.quantile == 0 || q.quantile == 1) { + continue; + } + int result; + // We had a numerical error here with the following example: + // quantile = 0.95, epsilon = 0.01, (n-r) = 30. + // The expected result of (2*0.01*30)/(1-0.95) is 12. The actual result is 11.99999999999999. + // To avoid running into these types of error we add 0.00000000001 before rounding down. + if (r >= q.quantile * n) { + result = (int) (q.v * r + 0.00000000001); + } else { + result = (int) (q.u * (n - r) + 0.00000000001); + } + if (result < minResult) { + minResult = result; + } + } + return Math.max(minResult, 1); + } - /** - * Merge pairs of consecutive samples if this doesn't violate the error function. - */ - void compress() { - if (samples.size() < 3) { - return; - } - Iterator descendingIterator = samples.descendingIterator(); - int r = n; // n is equal to the sum of the g's of all samples - - Sample right; - Sample left = descendingIterator.next(); - r -= left.g; - - while (descendingIterator.hasNext()) { - right = left; - left = descendingIterator.next(); - r = r - left.g; - if (left == samples.getFirst()) { - // The min sample must never be merged. - break; - } - if (left.g + right.g + right.delta < f(r)) { - right.g += left.g; - descendingIterator.remove(); - left = right; - } - } + /** Merge pairs of consecutive samples if this doesn't violate the error function. */ + void compress() { + if (samples.size() < 3) { + return; } + Iterator descendingIterator = samples.descendingIterator(); + int r = n; // n is equal to the sum of the g's of all samples + + Sample right; + Sample left = descendingIterator.next(); + r -= left.g; + + while (descendingIterator.hasNext()) { + right = left; + left = descendingIterator.next(); + r = r - left.g; + if (left == samples.getFirst()) { + // The min sample must never be merged. + break; + } + if (left.g + right.g + right.delta < f(r)) { + right.g += left.g; + descendingIterator.remove(); + left = right; + } + } + } - static class Sample { + static class Sample { - /** - * Observed value. - */ - final double value; + /** Observed value. 
*/ + final double value; - /** - * Difference between the lowest possible rank of this sample and its predecessor. - * This always starts with 1, but will be updated when compress() merges Samples. - */ - int g = 1; + /** + * Difference between the lowest possible rank of this sample and its predecessor. This always + * starts with 1, but will be updated when compress() merges Samples. + */ + int g = 1; - /** - * Difference between the greatest possible rank of this sample and the lowest possible rank of this sample. - */ - final int delta; + /** + * Difference between the greatest possible rank of this sample and the lowest possible rank of + * this sample. + */ + final int delta; - Sample(double value, int delta) { - this.value = value; - this.delta = delta; - } + Sample(double value, int delta) { + this.value = value; + this.delta = delta; + } - @Override - public String toString() { - return String.format("Sample{val=%.3f, g=%d, delta=%d}", value, g, delta); - } + @Override + public String toString() { + return String.format("Sample{val=%.3f, g=%d, delta=%d}", value, g, delta); } + } - static class Quantile { + static class Quantile { - /** - * Quantile. Must be between 0 and 1. - */ - final double quantile; + /** Quantile. Must be between 0 and 1. */ + final double quantile; - /** - * Allowed error. Must be between 0 and 1. - */ - final double epsilon; + /** Allowed error. Must be between 0 and 1. */ + final double epsilon; - /** - * Helper used in the error function f(), see definition 5 in the paper. - */ - final double u; + /** Helper used in the error function f(), see definition 5 in the paper. */ + final double u; - /** - * Helper used in the error function f(), see definition 5 in the paper. - */ - final double v; + /** Helper used in the error function f(), see definition 5 in the paper. 
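A usage sketch for the class above. The helper is hypothetical and must live in the same package, because CKMSQuantiles and Quantile are package-private.

package io.prometheus.metrics.core.metrics; // required: the classes are package-private

class CkmsQuantilesSketch { // hypothetical, not part of the library
  static double p95OfFirstN(int n) {
    CKMSQuantiles quantiles =
        new CKMSQuantiles(
            new CKMSQuantiles.Quantile(0.5, 0.05), // median, 5% allowed error
            new CKMSQuantiles.Quantile(0.95, 0.01)); // 95th percentile, 1% allowed error
    for (int i = 1; i <= n; i++) {
      quantiles.insert(i); // values are buffered; compress() runs every 128 inserts
    }
    return quantiles.get(0.95); // get() flushes the buffer before walking the sample list
  }
}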
*/ + final double v; - Quantile(double quantile, double epsilon) { - if (quantile < 0.0 || quantile > 1.0) throw new IllegalArgumentException("Quantile must be between 0 and 1"); - if (epsilon < 0.0 || epsilon > 1.0) throw new IllegalArgumentException("Epsilon must be between 0 and 1"); + Quantile(double quantile, double epsilon) { + if (quantile < 0.0 || quantile > 1.0) + throw new IllegalArgumentException("Quantile must be between 0 and 1"); + if (epsilon < 0.0 || epsilon > 1.0) + throw new IllegalArgumentException("Epsilon must be between 0 and 1"); - this.quantile = quantile; - this.epsilon = epsilon; - u = 2.0 * epsilon / (1.0 - quantile); // if quantile == 1 this will be Double.NaN - v = 2.0 * epsilon / quantile; // if quantile == 0 this will be Double.NaN - } + this.quantile = quantile; + this.epsilon = epsilon; + u = 2.0 * epsilon / (1.0 - quantile); // if quantile == 1 this will be Double.NaN + v = 2.0 * epsilon / quantile; // if quantile == 0 this will be Double.NaN + } - @Override - public String toString() { - return String.format("Quantile{q=%.3f, epsilon=%.3f}", quantile, epsilon); - } + @Override + public String toString() { + return String.format("Quantile{q=%.3f, epsilon=%.3f}", quantile, epsilon); } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CallbackMetric.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CallbackMetric.java index 2d9eb0b54..cf25e6527 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CallbackMetric.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CallbackMetric.java @@ -2,41 +2,52 @@ import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.model.snapshots.Labels; - import java.util.List; /** * There are two kinds of metrics: {@code StatefulMetric} and {@code CallbackMetric}. - *

- * See JavaDoc on {@link StatefulMetric} for more info. + * + *

See JavaDoc on {@link StatefulMetric} for more info. */ abstract class CallbackMetric extends MetricWithFixedMetadata { - protected CallbackMetric(Builder builder) { - super(builder); - } + protected CallbackMetric(Builder builder) { + super(builder); + } - protected Labels makeLabels(String... labelValues) { - if (labelNames.length == 0) { - if (labelValues != null && labelValues.length > 0) { - throw new IllegalArgumentException("Cannot pass label values to a " + this.getClass().getSimpleName() + " that was created without label names."); - } - return constLabels; - } else { - if (labelValues == null) { - throw new IllegalArgumentException(this.getClass().getSimpleName() + " was created with label names, but the callback was called without label values."); - } - if (labelValues.length != labelNames.length) { - throw new IllegalArgumentException(this.getClass().getSimpleName() + " was created with " + labelNames.length + " label names, but the callback was called with " + labelValues.length + " label values."); - } - return constLabels.merge(Labels.of(labelNames, labelValues)); - } + protected Labels makeLabels(String... labelValues) { + if (labelNames.length == 0) { + if (labelValues != null && labelValues.length > 0) { + throw new IllegalArgumentException( + "Cannot pass label values to a " + + this.getClass().getSimpleName() + + " that was created without label names."); + } + return constLabels; + } else { + if (labelValues == null) { + throw new IllegalArgumentException( + this.getClass().getSimpleName() + + " was created with label names, but the callback was called without label values."); + } + if (labelValues.length != labelNames.length) { + throw new IllegalArgumentException( + this.getClass().getSimpleName() + + " was created with " + + labelNames.length + + " label names, but the callback was called with " + + labelValues.length + + " label values."); + } + return constLabels.merge(Labels.of(labelNames, labelValues)); } + } - static abstract class Builder, M extends CallbackMetric> extends MetricWithFixedMetadata.Builder { + abstract static class Builder, M extends CallbackMetric> + extends MetricWithFixedMetadata.Builder { - protected Builder(List illegalLabelNames, PrometheusProperties properties) { - super(illegalLabelNames, properties); - } + protected Builder(List illegalLabelNames, PrometheusProperties properties) { + super(illegalLabelNames, properties); } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Counter.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Counter.java index c5a55ca78..5e435f695 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Counter.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Counter.java @@ -8,7 +8,6 @@ import io.prometheus.metrics.model.snapshots.CounterSnapshot; import io.prometheus.metrics.model.snapshots.Exemplar; import io.prometheus.metrics.model.snapshots.Labels; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -17,8 +16,9 @@ /** * Counter metric. - *

- * Example usage: + * + *

Example usage: + * *

{@code
  * Counter requestCount = Counter.builder()
  *     .name("requests_total")
@@ -29,251 +29,234 @@
  * requestCount.labelValues("/hello-world", "500").inc();
  * }
*/ -public class Counter extends StatefulMetric implements CounterDataPoint { - - private final boolean exemplarsEnabled; - private final ExemplarSamplerConfig exemplarSamplerConfig; - - private Counter(Builder builder, PrometheusProperties prometheusProperties) { - super(builder); - MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); - exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); - if (exemplarsEnabled) { - exemplarSamplerConfig = new ExemplarSamplerConfig(prometheusProperties.getExemplarProperties(), 1); - } else { - exemplarSamplerConfig = null; - } +public class Counter extends StatefulMetric + implements CounterDataPoint { + + private final boolean exemplarsEnabled; + private final ExemplarSamplerConfig exemplarSamplerConfig; + + private Counter(Builder builder, PrometheusProperties prometheusProperties) { + super(builder); + MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); + exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); + if (exemplarsEnabled) { + exemplarSamplerConfig = + new ExemplarSamplerConfig(prometheusProperties.getExemplarProperties(), 1); + } else { + exemplarSamplerConfig = null; } - - /** - * {@inheritDoc} - */ - @Override - public void inc(long amount) { - getNoLabels().inc(amount); + } + + /** {@inheritDoc} */ + @Override + public void inc(long amount) { + getNoLabels().inc(amount); + } + + /** {@inheritDoc} */ + @Override + public void inc(double amount) { + getNoLabels().inc(amount); + } + + /** {@inheritDoc} */ + @Override + public void incWithExemplar(long amount, Labels labels) { + getNoLabels().incWithExemplar(amount, labels); + } + + /** {@inheritDoc} */ + @Override + public void incWithExemplar(double amount, Labels labels) { + getNoLabels().incWithExemplar(amount, labels); + } + + /** {@inheritDoc} */ + public double get() { + return getNoLabels().get(); + } + + /** {@inheritDoc} */ + public long getLongValue() { + return getNoLabels().getLongValue(); + } + + /** {@inheritDoc} */ + @Override + public CounterSnapshot collect() { + return (CounterSnapshot) super.collect(); + } + + @Override + protected boolean isExemplarsEnabled() { + return exemplarsEnabled; + } + + @Override + protected DataPoint newDataPoint() { + if (isExemplarsEnabled()) { + return new DataPoint(new ExemplarSampler(exemplarSamplerConfig)); + } else { + return new DataPoint(null); } + } - /** - * {@inheritDoc} - */ - @Override - public void inc(double amount) { - getNoLabels().inc(amount); + @Override + protected CounterSnapshot collect(List labels, List metricData) { + List data = new ArrayList<>(labels.size()); + for (int i = 0; i < labels.size(); i++) { + data.add(metricData.get(i).collect(labels.get(i))); } + return new CounterSnapshot(getMetadata(), data); + } - /** - * {@inheritDoc} - */ - @Override - public void incWithExemplar(long amount, Labels labels) { - getNoLabels().incWithExemplar(amount, labels); + static String stripTotalSuffix(String name) { + if (name != null && (name.endsWith("_total") || name.endsWith(".total"))) { + name = name.substring(0, name.length() - 6); } + return name; + } - /** - * {@inheritDoc} - */ - @Override - public void incWithExemplar(double amount, Labels labels) { - getNoLabels().incWithExemplar(amount, labels); + class DataPoint implements CounterDataPoint { + + private final DoubleAdder doubleValue = new DoubleAdder(); + // LongAdder is 20% faster than DoubleAdder. 
So let's use the LongAdder for long observations, + // and DoubleAdder for double observations. If the user doesn't observe any double at all, + // we will be using the LongAdder and get the best performance. + private final LongAdder longValue = new LongAdder(); + private final long createdTimeMillis = System.currentTimeMillis(); + private final ExemplarSampler exemplarSampler; // null if isExemplarsEnabled() is false + + private DataPoint(ExemplarSampler exemplarSampler) { + this.exemplarSampler = exemplarSampler; } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ public double get() { - return getNoLabels().get(); + return longValue.sum() + doubleValue.sum(); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ public long getLongValue() { - return getNoLabels().getLongValue(); + return longValue.sum() + (long) doubleValue.sum(); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override - public CounterSnapshot collect() { - return (CounterSnapshot) super.collect(); + public void inc(long amount) { + validateAndAdd(amount); + if (isExemplarsEnabled()) { + exemplarSampler.observe(amount); + } } + /** {@inheritDoc} */ @Override - protected boolean isExemplarsEnabled() { - return exemplarsEnabled; + public void inc(double amount) { + validateAndAdd(amount); + if (isExemplarsEnabled()) { + exemplarSampler.observe(amount); + } } + /** {@inheritDoc} */ @Override - protected DataPoint newDataPoint() { - if (isExemplarsEnabled()) { - return new DataPoint(new ExemplarSampler(exemplarSamplerConfig)); - } else { - return new DataPoint(null); - } + public void incWithExemplar(long amount, Labels labels) { + validateAndAdd(amount); + if (isExemplarsEnabled()) { + exemplarSampler.observeWithExemplar(amount, labels); + } } + /** {@inheritDoc} */ @Override - protected CounterSnapshot collect(List labels, List metricData) { - List data = new ArrayList<>(labels.size()); - for (int i = 0; i < labels.size(); i++) { - data.add(metricData.get(i).collect(labels.get(i))); - } - return new CounterSnapshot(getMetadata(), data); + public void incWithExemplar(double amount, Labels labels) { + validateAndAdd(amount); + if (isExemplarsEnabled()) { + exemplarSampler.observeWithExemplar(amount, labels); + } } - static String stripTotalSuffix(String name) { - if (name != null && (name.endsWith("_total") || name.endsWith(".total"))) { - name = name.substring(0, name.length() - 6); - } - return name; + private void validateAndAdd(long amount) { + if (amount < 0) { + throw new IllegalArgumentException( + "Negative increment " + amount + " is illegal for Counter metrics."); + } + longValue.add(amount); } - class DataPoint implements CounterDataPoint { - - private final DoubleAdder doubleValue = new DoubleAdder(); - // LongAdder is 20% faster than DoubleAdder. So let's use the LongAdder for long observations, - // and DoubleAdder for double observations. If the user doesn't observe any double at all, - // we will be using the LongAdder and get the best performance. 
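How the exemplar handling above looks from the caller's side, as a rough sketch. The label names, values, and the trace/span ids are invented; the API calls are the ones defined in this class and in Labels and Exemplar.

import io.prometheus.metrics.core.metrics.Counter;
import io.prometheus.metrics.model.snapshots.Exemplar;
import io.prometheus.metrics.model.snapshots.Labels;

class CounterExemplarSketch { // hypothetical, not part of the library
  static void example() {
    Counter requests =
        Counter.builder().name("requests_total").help("total requests").labelNames("path").build();

    // Plain increment: if exemplars are enabled, the ExemplarSampler may attach trace/span
    // ids from the current SpanContext on its own, rate limited as described earlier for
    // ExemplarSampler.
    requests.labelValues("/hello-world").inc();

    // Explicit exemplar labels: since TRACE_ID and SPAN_ID are already present, the sampler
    // keeps them instead of asking the SpanContext (see updateCustomExemplar).
    requests
        .labelValues("/hello-world")
        .incWithExemplar(
            1.0,
            Labels.of(
                Exemplar.TRACE_ID, "0af7651916cd43dd8448eb211c80319c",
                Exemplar.SPAN_ID, "b7ad6b7169203331"));
  }
}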
- private final LongAdder longValue = new LongAdder(); - private final long createdTimeMillis = System.currentTimeMillis(); - private final ExemplarSampler exemplarSampler; // null if isExemplarsEnabled() is false - - private DataPoint(ExemplarSampler exemplarSampler) { - this.exemplarSampler = exemplarSampler; - } - - /** - * {@inheritDoc} - */ - public double get() { - return longValue.sum() + doubleValue.sum(); - } - - /** - * {@inheritDoc} - */ - public long getLongValue() { - return longValue.sum() + (long) doubleValue.sum(); - } - - /** - * {@inheritDoc} - */ - @Override - public void inc(long amount) { - validateAndAdd(amount); - if (isExemplarsEnabled()) { - exemplarSampler.observe(amount); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void inc(double amount) { - validateAndAdd(amount); - if (isExemplarsEnabled()) { - exemplarSampler.observe(amount); - } - } + private void validateAndAdd(double amount) { + if (amount < 0) { + throw new IllegalArgumentException( + "Negative increment " + amount + " is illegal for Counter metrics."); + } + doubleValue.add(amount); + } - /** - * {@inheritDoc} - */ - @Override - public void incWithExemplar(long amount, Labels labels) { - validateAndAdd(amount); - if (isExemplarsEnabled()) { - exemplarSampler.observeWithExemplar(amount, labels); - } + private CounterSnapshot.CounterDataPointSnapshot collect(Labels labels) { + // Read the exemplar first. Otherwise, there is a race condition where you might + // see an Exemplar for a value that's not counted yet. + // If there are multiple Exemplars (by default it's just one), use the newest. + Exemplar latestExemplar = null; + if (exemplarSampler != null) { + for (Exemplar exemplar : exemplarSampler.collect()) { + if (latestExemplar == null + || exemplar.getTimestampMillis() > latestExemplar.getTimestampMillis()) { + latestExemplar = exemplar; + } } + } + return new CounterSnapshot.CounterDataPointSnapshot( + get(), labels, latestExemplar, createdTimeMillis); + } + } - /** - * {@inheritDoc} - */ - @Override - public void incWithExemplar(double amount, Labels labels) { - validateAndAdd(amount); - if (isExemplarsEnabled()) { - exemplarSampler.observeWithExemplar(amount, labels); - } - } + public static Builder builder() { + return new Builder(PrometheusProperties.get()); + } - private void validateAndAdd(long amount) { - if (amount < 0) { - throw new IllegalArgumentException("Negative increment " + amount + " is illegal for Counter metrics."); - } - longValue.add(amount); - } + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } - private void validateAndAdd(double amount) { - if (amount < 0) { - throw new IllegalArgumentException("Negative increment " + amount + " is illegal for Counter metrics."); - } - doubleValue.add(amount); - } + public static class Builder extends StatefulMetric.Builder { - private CounterSnapshot.CounterDataPointSnapshot collect(Labels labels) { - // Read the exemplar first. Otherwise, there is a race condition where you might - // see an Exemplar for a value that's not counted yet. - // If there are multiple Exemplars (by default it's just one), use the newest. 
- Exemplar latestExemplar = null; - if (exemplarSampler != null) { - for (Exemplar exemplar : exemplarSampler.collect()) { - if (latestExemplar == null || exemplar.getTimestampMillis() > latestExemplar.getTimestampMillis()) { - latestExemplar = exemplar; - } - } - } - return new CounterSnapshot.CounterDataPointSnapshot(get(), labels, latestExemplar, createdTimeMillis); - } + private Builder(PrometheusProperties properties) { + super(Collections.emptyList(), properties); } - public static Builder builder() { - return new Builder(PrometheusProperties.get()); + /** + * The {@code _total} suffix will automatically be appended if it's missing. + * + *
{@code
+     * Counter c1 = Counter.builder()
+     *     .name("events_total")
+     *     .build();
+     * Counter c2 = Counter.builder()
+     *     .name("events")
+     *     .build();
+     * }
+ * + * In the example above both {@code c1} and {@code c2} would be named {@code "events_total"} in + * Prometheus. + * + *

Throws an {@link IllegalArgumentException} if {@link + * io.prometheus.metrics.model.snapshots.PrometheusNaming#isValidMetricName(String) + * MetricMetadata.isValidMetricName(name)} is {@code false}. + */ + @Override + public Builder name(String name) { + return super.name(stripTotalSuffix(name)); } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + @Override + public Counter build() { + return new Counter(this, properties); } - public static class Builder extends StatefulMetric.Builder { - - private Builder(PrometheusProperties properties) { - super(Collections.emptyList(), properties); - } - - /** - * The {@code _total} suffix will automatically be appended if it's missing. - *

{@code
-         * Counter c1 = Counter.builder()
-         *     .name("events_total")
-         *     .build();
-         * Counter c2 = Counter.builder()
-         *     .name("events")
-         *     .build();
-         * }
- * In the example above both {@code c1} and {@code c2} would be named {@code "events_total"} in Prometheus. - *

- * Throws an {@link IllegalArgumentException} if - * {@link io.prometheus.metrics.model.snapshots.PrometheusNaming#isValidMetricName(String) MetricMetadata.isValidMetricName(name)} - * is {@code false}. - */ - @Override - public Builder name(String name) { - return super.name(stripTotalSuffix(name)); - } - - @Override - public Counter build() { - return new Counter(this, properties); - } - - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CounterWithCallback.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CounterWithCallback.java index 76ce68f23..5dc533d58 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CounterWithCallback.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/CounterWithCallback.java @@ -2,7 +2,6 @@ import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.model.snapshots.CounterSnapshot; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -10,6 +9,7 @@ /** * Example: + * *

{@code
  * ClassLoadingMXBean classLoadingMXBean = ManagementFactory.getClassLoadingMXBean();
  *
@@ -22,80 +22,87 @@
  */
 public class CounterWithCallback extends CallbackMetric {
 
-    @FunctionalInterface
-    public interface Callback {
-        void call(double value, String... labelValues);
-    }
+  @FunctionalInterface
+  public interface Callback {
+    void call(double value, String... labelValues);
+  }
 
-    private final Consumer callback;
+  private final Consumer callback;
 
-    private CounterWithCallback(Builder builder) {
-        super(builder);
-        this.callback = builder.callback;
-        if (callback == null) {
-            throw new IllegalArgumentException("callback cannot be null");
-        }
+  private CounterWithCallback(Builder builder) {
+    super(builder);
+    this.callback = builder.callback;
+    if (callback == null) {
+      throw new IllegalArgumentException("callback cannot be null");
     }
-
-    @Override
-    public CounterSnapshot collect() {
-        List dataPoints = new ArrayList<>();
-        callback.accept((value, labelValues) -> {
-            dataPoints.add(new CounterSnapshot.CounterDataPointSnapshot(value, makeLabels(labelValues), null, 0L));
+  }
+
+  @Override
+  public CounterSnapshot collect() {
+    List dataPoints = new ArrayList<>();
+    callback.accept(
+        (value, labelValues) -> {
+          dataPoints.add(
+              new CounterSnapshot.CounterDataPointSnapshot(
+                  value, makeLabels(labelValues), null, 0L));
         });
-        return new CounterSnapshot(getMetadata(), dataPoints);
-    }
+    return new CounterSnapshot(getMetadata(), dataPoints);
+  }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
-    }
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
 
-    public static Builder builder(PrometheusProperties properties) {
-        return new Builder(properties);
-    }
+  public static Builder builder(PrometheusProperties properties) {
+    return new Builder(properties);
+  }
 
-    public static class Builder extends CallbackMetric.Builder {
+  public static class Builder
+      extends CallbackMetric.Builder {
 
-        private Consumer callback;
+    private Consumer callback;
 
-        public Builder callback(Consumer callback) {
-            this.callback = callback;
-            return self();
-        }
+    public Builder callback(Consumer callback) {
+      this.callback = callback;
+      return self();
+    }
 
-        private Builder(PrometheusProperties properties) {
-            super(Collections.emptyList(), properties);
-        }
+    private Builder(PrometheusProperties properties) {
+      super(Collections.emptyList(), properties);
+    }
 
-        /**
-         * The {@code _total} suffix will automatically be appended if it's missing.
-         * 
{@code
-         * CounterWithCallback c1 = CounterWithCallback.builder()
-         *     .name("events_total")
-         *     .build();
-         * CounterWithCallback c2 = CounterWithCallback.builder()
-         *     .name("events")
-         *     .build();
-         * }
- * In the example above both {@code c1} and {@code c2} would be named {@code "events_total"} in Prometheus. - *

- * Throws an {@link IllegalArgumentException} if - * {@link io.prometheus.metrics.model.snapshots.PrometheusNaming#isValidMetricName(String) MetricMetadata.isValidMetricName(name)} - * is {@code false}. - */ - @Override - public Builder name(String name) { - return super.name(Counter.stripTotalSuffix(name)); - } + /** + * The {@code _total} suffix will automatically be appended if it's missing. + * + *

{@code
+     * CounterWithCallback c1 = CounterWithCallback.builder()
+     *     .name("events_total")
+     *     .build();
+     * CounterWithCallback c2 = CounterWithCallback.builder()
+     *     .name("events")
+     *     .build();
+     * }
+ * + * In the example above both {@code c1} and {@code c2} would be named {@code "events_total"} in + * Prometheus. + * + *

Throws an {@link IllegalArgumentException} if {@link + * io.prometheus.metrics.model.snapshots.PrometheusNaming#isValidMetricName(String) + * MetricMetadata.isValidMetricName(name)} is {@code false}. + */ + @Override + public Builder name(String name) { + return super.name(Counter.stripTotalSuffix(name)); + } - @Override - public CounterWithCallback build() { - return new CounterWithCallback(this); - } + @Override + public CounterWithCallback build() { + return new CounterWithCallback(this); + } - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Gauge.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Gauge.java index 1c46435f6..a3f7e290d 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Gauge.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Gauge.java @@ -8,7 +8,6 @@ import io.prometheus.metrics.model.snapshots.Exemplar; import io.prometheus.metrics.model.snapshots.GaugeSnapshot; import io.prometheus.metrics.model.snapshots.Labels; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -16,8 +15,9 @@ /** * Gauge metric. - *
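A rough wiring sketch for CounterWithCallback above, in the spirit of the ClassLoadingMXBean snippet from its class-level javadoc. The metric name and help text are invented.

import io.prometheus.metrics.core.metrics.CounterWithCallback;
import java.lang.management.ManagementFactory;

class CounterWithCallbackSketch { // hypothetical, not part of the library
  static CounterWithCallback classesLoaded() {
    return CounterWithCallback.builder()
        .name("jvm_classes_loaded_total") // per the javadoc above, "jvm_classes_loaded" yields the same exported name
        .help("classes loaded since JVM start")
        .callback(
            callback ->
                callback.call(
                    ManagementFactory.getClassLoadingMXBean().getTotalLoadedClassCount()))
        .build();
  }
}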

- * Example usage: + * + *

Example usage: + * *

{@code
  * Gauge currentActiveUsers = Gauge.builder()
  *     .name("current_active_users")
@@ -36,194 +36,176 @@
  * }
  * }
*/ -public class Gauge extends StatefulMetric implements GaugeDataPoint { - - private final boolean exemplarsEnabled; - private final ExemplarSamplerConfig exemplarSamplerConfig; - - private Gauge(Builder builder, PrometheusProperties prometheusProperties) { - super(builder); - MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); - exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); - if (exemplarsEnabled) { - exemplarSamplerConfig = new ExemplarSamplerConfig(prometheusProperties.getExemplarProperties(), 1); - } else { - exemplarSamplerConfig = null; - } +public class Gauge extends StatefulMetric + implements GaugeDataPoint { + + private final boolean exemplarsEnabled; + private final ExemplarSamplerConfig exemplarSamplerConfig; + + private Gauge(Builder builder, PrometheusProperties prometheusProperties) { + super(builder); + MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); + exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); + if (exemplarsEnabled) { + exemplarSamplerConfig = + new ExemplarSamplerConfig(prometheusProperties.getExemplarProperties(), 1); + } else { + exemplarSamplerConfig = null; } - - /** - * {@inheritDoc} - */ - @Override - public void inc(double amount) { - getNoLabels().inc(amount); + } + + /** {@inheritDoc} */ + @Override + public void inc(double amount) { + getNoLabels().inc(amount); + } + + /** {@inheritDoc} */ + @Override + public double get() { + return getNoLabels().get(); + } + + /** {@inheritDoc} */ + @Override + public void incWithExemplar(double amount, Labels labels) { + getNoLabels().incWithExemplar(amount, labels); + } + + /** {@inheritDoc} */ + @Override + public void set(double value) { + getNoLabels().set(value); + } + + /** {@inheritDoc} */ + @Override + public void setWithExemplar(double value, Labels labels) { + getNoLabels().setWithExemplar(value, labels); + } + + /** {@inheritDoc} */ + @Override + public GaugeSnapshot collect() { + return (GaugeSnapshot) super.collect(); + } + + @Override + protected GaugeSnapshot collect(List labels, List metricData) { + List dataPointSnapshots = new ArrayList<>(labels.size()); + for (int i = 0; i < labels.size(); i++) { + dataPointSnapshots.add(metricData.get(i).collect(labels.get(i))); } - - /** - * {@inheritDoc} - */ - @Override - public double get() { - return getNoLabels().get(); + return new GaugeSnapshot(getMetadata(), dataPointSnapshots); + } + + @Override + protected DataPoint newDataPoint() { + if (isExemplarsEnabled()) { + return new DataPoint(new ExemplarSampler(exemplarSamplerConfig)); + } else { + return new DataPoint(null); } + } - /** - * {@inheritDoc} - */ - @Override - public void incWithExemplar(double amount, Labels labels) { - getNoLabels().incWithExemplar(amount, labels); - } + @Override + protected boolean isExemplarsEnabled() { + return exemplarsEnabled; + } - /** - * {@inheritDoc} - */ - @Override - public void set(double value) { - getNoLabels().set(value); + class DataPoint implements GaugeDataPoint { + + private final ExemplarSampler exemplarSampler; // null if isExemplarsEnabled() is false + + private DataPoint(ExemplarSampler exemplarSampler) { + this.exemplarSampler = exemplarSampler; } - /** - * {@inheritDoc} - */ + private final AtomicLong value = new AtomicLong(Double.doubleToRawLongBits(0)); + + /** {@inheritDoc} */ @Override - public void setWithExemplar(double value, Labels labels) { - getNoLabels().setWithExemplar(value, labels); + 
public void inc(double amount) { + long next = + value.updateAndGet(l -> Double.doubleToRawLongBits(Double.longBitsToDouble(l) + amount)); + if (isExemplarsEnabled()) { + exemplarSampler.observe(Double.longBitsToDouble(next)); + } } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override - public GaugeSnapshot collect() { - return (GaugeSnapshot) super.collect(); + public void incWithExemplar(double amount, Labels labels) { + long next = + value.updateAndGet(l -> Double.doubleToRawLongBits(Double.longBitsToDouble(l) + amount)); + if (isExemplarsEnabled()) { + exemplarSampler.observeWithExemplar(Double.longBitsToDouble(next), labels); + } } + /** {@inheritDoc} */ @Override - protected GaugeSnapshot collect(List labels, List metricData) { - List dataPointSnapshots = new ArrayList<>(labels.size()); - for (int i = 0; i < labels.size(); i++) { - dataPointSnapshots.add(metricData.get(i).collect(labels.get(i))); - } - return new GaugeSnapshot(getMetadata(), dataPointSnapshots); + public void set(double value) { + this.value.set(Double.doubleToRawLongBits(value)); + if (isExemplarsEnabled()) { + exemplarSampler.observe(value); + } } + /** {@inheritDoc} */ @Override - protected DataPoint newDataPoint() { - if (isExemplarsEnabled()) { - return new DataPoint(new ExemplarSampler(exemplarSamplerConfig)); - } else { - return new DataPoint(null); - } + public double get() { + return Double.longBitsToDouble(value.get()); } + /** {@inheritDoc} */ @Override - protected boolean isExemplarsEnabled() { - return exemplarsEnabled; + public void setWithExemplar(double value, Labels labels) { + this.value.set(Double.doubleToRawLongBits(value)); + if (isExemplarsEnabled()) { + exemplarSampler.observeWithExemplar(value, labels); + } } - class DataPoint implements GaugeDataPoint { - - private final ExemplarSampler exemplarSampler; // null if isExemplarsEnabled() is false - - private DataPoint(ExemplarSampler exemplarSampler) { - this.exemplarSampler = exemplarSampler; - } - - private final AtomicLong value = new AtomicLong(Double.doubleToRawLongBits(0)); - - /** - * {@inheritDoc} - */ - @Override - public void inc(double amount) { - long next = value.updateAndGet(l -> Double.doubleToRawLongBits(Double.longBitsToDouble(l) + amount)); - if (isExemplarsEnabled()) { - exemplarSampler.observe(Double.longBitsToDouble(next)); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void incWithExemplar(double amount, Labels labels) { - long next = value.updateAndGet(l -> Double.doubleToRawLongBits(Double.longBitsToDouble(l) + amount)); - if (isExemplarsEnabled()) { - exemplarSampler.observeWithExemplar(Double.longBitsToDouble(next), labels); - } + private GaugeSnapshot.GaugeDataPointSnapshot collect(Labels labels) { + // Read the exemplar first. Otherwise, there is a race condition where you might + // see an Exemplar for a value that's not represented in getValue() yet. + // If there are multiple Exemplars (by default it's just one), use the oldest + // so that we don't violate min age. 
+ Exemplar oldest = null; + if (isExemplarsEnabled()) { + for (Exemplar exemplar : exemplarSampler.collect()) { + if (oldest == null || exemplar.getTimestampMillis() < oldest.getTimestampMillis()) { + oldest = exemplar; + } } + } + return new GaugeSnapshot.GaugeDataPointSnapshot(get(), labels, oldest); + } + } - /** - * {@inheritDoc} - */ - @Override - public void set(double value) { - this.value.set(Double.doubleToRawLongBits(value)); - if (isExemplarsEnabled()) { - exemplarSampler.observe(value); - } - } + public static Builder builder() { + return new Builder(PrometheusProperties.get()); + } - /** - * {@inheritDoc} - */ - @Override - public double get() { - return Double.longBitsToDouble(value.get()); - } + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } - /** - * {@inheritDoc} - */ - @Override - public void setWithExemplar(double value, Labels labels) { - this.value.set(Double.doubleToRawLongBits(value)); - if (isExemplarsEnabled()) { - exemplarSampler.observeWithExemplar(value, labels); - } - } + public static class Builder extends StatefulMetric.Builder { - private GaugeSnapshot.GaugeDataPointSnapshot collect(Labels labels) { - // Read the exemplar first. Otherwise, there is a race condition where you might - // see an Exemplar for a value that's not represented in getValue() yet. - // If there are multiple Exemplars (by default it's just one), use the oldest - // so that we don't violate min age. - Exemplar oldest = null; - if (isExemplarsEnabled()) { - for (Exemplar exemplar : exemplarSampler.collect()) { - if (oldest == null || exemplar.getTimestampMillis() < oldest.getTimestampMillis()) { - oldest = exemplar; - } - } - } - return new GaugeSnapshot.GaugeDataPointSnapshot(get(), labels, oldest); - } - } - - public static Builder builder() { - return new Builder(PrometheusProperties.get()); + private Builder(PrometheusProperties config) { + super(Collections.emptyList(), config); } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + @Override + public Gauge build() { + return new Gauge(this, properties); } - public static class Builder extends StatefulMetric.Builder { - - private Builder(PrometheusProperties config) { - super(Collections.emptyList(), config); - } - - @Override - public Gauge build() { - return new Gauge(this, properties); - } - - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/GaugeWithCallback.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/GaugeWithCallback.java index a88a9acca..8b2d7a0ba 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/GaugeWithCallback.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/GaugeWithCallback.java @@ -2,7 +2,6 @@ import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.model.snapshots.GaugeSnapshot; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -10,6 +9,7 @@ /** * Example: + * *
{@code
  * MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
  *
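The Javadoc example above starts from a MemoryMXBean. As a rough usage sketch, not part of this diff, here is how such a callback gauge is typically registered; the metric name, help text, and label values below are illustrative assumptions:

    import io.prometheus.metrics.core.metrics.GaugeWithCallback;
    import java.lang.management.ManagementFactory;
    import java.lang.management.MemoryMXBean;

    public class MemoryGaugeExample {
      public static void main(String[] args) {
        MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
        GaugeWithCallback.builder()
            .name("jvm_memory_used_bytes") // illustrative name, not taken from this diff
            .help("Used JVM memory in bytes")
            .labelNames("area")
            // The callback runs at scrape time; each call() produces one data point.
            .callback(callback -> {
              callback.call(memoryBean.getHeapMemoryUsage().getUsed(), "heap");
              callback.call(memoryBean.getNonHeapMemoryUsage().getUsed(), "nonheap");
            })
            .register();
      }
    }

Because values are read inside the callback at scrape time, there is no set() or inc() on this metric; the Builder.callback(...) method shown further down is the only way to supply values.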
@@ -27,59 +27,62 @@
  */
 public class GaugeWithCallback extends CallbackMetric {
 
-    @FunctionalInterface
-    public interface Callback {
-        void call(double value, String... labelValues);
-    }
+  @FunctionalInterface
+  public interface Callback {
+    void call(double value, String... labelValues);
+  }
 
-    private final Consumer<Callback> callback;
+  private final Consumer<Callback> callback;
 
-    private GaugeWithCallback(Builder builder) {
-        super(builder);
-        this.callback = builder.callback;
-        if (callback == null) {
-            throw new IllegalArgumentException("callback cannot be null");
-        }
+  private GaugeWithCallback(Builder builder) {
+    super(builder);
+    this.callback = builder.callback;
+    if (callback == null) {
+      throw new IllegalArgumentException("callback cannot be null");
     }
+  }
 
-    @Override
-    public GaugeSnapshot collect() {
-        List<GaugeSnapshot.GaugeDataPointSnapshot> dataPoints = new ArrayList<>();
-        callback.accept((value, labelValues) -> {
-            dataPoints.add(new GaugeSnapshot.GaugeDataPointSnapshot(value, makeLabels(labelValues), null, 0L));
+  @Override
+  public GaugeSnapshot collect() {
+    List<GaugeSnapshot.GaugeDataPointSnapshot> dataPoints = new ArrayList<>();
+    callback.accept(
+        (value, labelValues) -> {
+          dataPoints.add(
+              new GaugeSnapshot.GaugeDataPointSnapshot(value, makeLabels(labelValues), null, 0L));
         });
-        return new GaugeSnapshot(getMetadata(), dataPoints);
-    }
+    return new GaugeSnapshot(getMetadata(), dataPoints);
+  }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
-    }
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
 
-    public static Builder builder(PrometheusProperties properties) {
-        return new Builder(properties);
-    }
+  public static Builder builder(PrometheusProperties properties) {
+    return new Builder(properties);
+  }
 
-    public static class Builder extends CallbackMetric.Builder<GaugeWithCallback.Builder, GaugeWithCallback> {
+  public static class Builder
+      extends CallbackMetric.Builder<GaugeWithCallback.Builder, GaugeWithCallback> {
 
-        private Consumer<Callback> callback;
+    private Consumer<Callback> callback;
 
-        public Builder callback(Consumer<Callback> callback) {
-            this.callback = callback;
-            return self();
-        }
+    public Builder callback(Consumer<Callback> callback) {
+      this.callback = callback;
+      return self();
+    }
 
-        private Builder(PrometheusProperties properties) {
-            super(Collections.emptyList(), properties);
-        }
+    private Builder(PrometheusProperties properties) {
+      super(Collections.emptyList(), properties);
+    }
 
-        @Override
-        public GaugeWithCallback build() {
-            return new GaugeWithCallback(this);
-        }
+    @Override
+    public GaugeWithCallback build() {
+      return new GaugeWithCallback(this);
+    }
 
-        @Override
-        protected Builder self() {
-            return this;
-        }
+    @Override
+    protected Builder self() {
+      return this;
     }
+  }
 }
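The Gauge.java hunk earlier in this diff keeps each data point's value as raw double bits inside an AtomicLong, so inc() and set() work without locks. A minimal standalone sketch of that pattern, with illustrative class and method names that are not part of the library API:

    import java.util.concurrent.atomic.AtomicLong;

    class LockFreeDouble {
      // Store the double as its raw 64-bit representation.
      private final AtomicLong bits = new AtomicLong(Double.doubleToRawLongBits(0.0));

      void add(double amount) {
        // updateAndGet retries a compare-and-set until the read-modify-write succeeds.
        bits.updateAndGet(l -> Double.doubleToRawLongBits(Double.longBitsToDouble(l) + amount));
      }

      void set(double value) {
        bits.set(Double.doubleToRawLongBits(value));
      }

      double get() {
        return Double.longBitsToDouble(bits.get());
      }
    }

Compared to a plain volatile double, this avoids lost updates under concurrent add() calls, at the cost of a CAS retry loop.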
diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Histogram.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Histogram.java
index 0cad6376c..494f7fded 100644
--- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Histogram.java
+++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Histogram.java
@@ -3,16 +3,15 @@
 import io.prometheus.metrics.config.ExemplarsProperties;
 import io.prometheus.metrics.config.MetricsProperties;
 import io.prometheus.metrics.config.PrometheusProperties;
+import io.prometheus.metrics.core.datapoints.DistributionDataPoint;
 import io.prometheus.metrics.core.exemplars.ExemplarSampler;
 import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfig;
+import io.prometheus.metrics.core.util.Scheduler;
 import io.prometheus.metrics.model.snapshots.ClassicHistogramBuckets;
 import io.prometheus.metrics.model.snapshots.Exemplars;
 import io.prometheus.metrics.model.snapshots.HistogramSnapshot;
 import io.prometheus.metrics.model.snapshots.Labels;
 import io.prometheus.metrics.model.snapshots.NativeHistogramBuckets;
-import io.prometheus.metrics.core.datapoints.DistributionDataPoint;
-import io.prometheus.metrics.core.util.Scheduler;
-
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -28,6 +27,7 @@
 
 /**
  * Histogram metric. Example usage:
+ *
  * <pre>{@code
  * Histogram histogram = Histogram.builder()
  *         .name("http_request_duration_seconds")
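The Javadoc above shows the Histogram builder. A rough end-to-end usage sketch, not part of this diff; the metric name, unit, labels, and the decision to rely on the default buckets are illustrative assumptions:

    import io.prometheus.metrics.core.metrics.Histogram;
    import io.prometheus.metrics.model.snapshots.Unit;

    public class HistogramExample {
      public static void main(String[] args) {
        Histogram histogram = Histogram.builder()
            .name("http_request_duration_seconds")
            .help("HTTP request duration in seconds")
            .unit(Unit.SECONDS)
            .labelNames("method", "path", "status")
            .register();

        long start = System.nanoTime();
        // ... handle the request ...
        histogram.labelValues("GET", "/", "200")
            .observe(Unit.nanosToSeconds(System.nanoTime() - start));
      }
    }

By default this maintains both the classic and the native representation described in the next hunk; Histogram.builder().nativeOnly() or .classicOnly() restricts it to one of them, and .classicUpperBounds(...) overrides the default bucket boundaries.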
@@ -40,888 +40,921 @@
  * // do something
  * histogram.labelValues("GET", "/", "200").observe(Unit.nanosToSeconds(System.nanoTime() - start));
  * }</pre>
+ *
  * Prometheus supports two internal representations of histograms:
+ *
  * <ol>
- *     <li>Classic Histograms have a fixed number of buckets with fixed bucket boundaries.</li>
- *     <li>Native Histograms have an infinite number of buckets with a dynamic resolution.
- *         Prometheus native histograms are the same as OpenTelemetry's exponential histograms.</li>
+ *   <li>Classic Histograms have a fixed number of buckets with fixed bucket boundaries.
+ *   <li>Native Histograms have an infinite number of buckets with a dynamic resolution.
+ *       Prometheus native histograms are the same as OpenTelemetry's exponential histograms.
  * </ol>
- * By default, a histogram maintains both representations, i.e. the example above will maintain a classic - * histogram representation with Prometheus' default bucket boundaries as well as native histogram representation. - * Which representation is used depends on the exposition format, i.e. which content type the Prometheus server - * accepts when scraping. Exposition format "Text" exposes the classic histogram, exposition format "Protobuf" - * exposes both representations. This is great for migrating from classic histograms to native histograms. - *

- * If you want the classic representation only, use {@link Histogram.Builder#classicOnly}. - * If you want the native representation only, use {@link Histogram.Builder#nativeOnly}. + * + * By default, a histogram maintains both representations, i.e. the example above will maintain a + * classic histogram representation with Prometheus' default bucket boundaries as well as native + * histogram representation. Which representation is used depends on the exposition format, i.e. + * which content type the Prometheus server accepts when scraping. Exposition format "Text" exposes + * the classic histogram, exposition format "Protobuf" exposes both representations. This is great + * for migrating from classic histograms to native histograms. + * + *

If you want the classic representation only, use {@link Histogram.Builder#classicOnly}. If you + * want the native representation only, use {@link Histogram.Builder#nativeOnly}. */ -public class Histogram extends StatefulMetric implements DistributionDataPoint { - - // nativeSchema == CLASSIC_HISTOGRAM indicates that this is a classic histogram only. - private final int CLASSIC_HISTOGRAM = Integer.MIN_VALUE; - - // NATIVE_BOUNDS is used to look up the native bucket index depending on the current schema. - private static final double[][] NATIVE_BOUNDS; - - private final boolean exemplarsEnabled; - private final ExemplarSamplerConfig exemplarSamplerConfig; - - // Upper bounds for the classic histogram buckets. Contains at least +Inf. - // An empty array indicates that this is a native histogram only. - private final double[] classicUpperBounds; - - // The schema defines the resolution of the native histogram. - // Schema is Prometheus terminology, in OpenTelemetry it's named "scale". - // The formula for the bucket boundaries at position "index" is: - // - // base := base = (2^(2^-scale)) - // lowerBound := base^(index-1) - // upperBound := base^(index) - // - // Note that this is off-by-one compared to OpenTelemetry. - // - // Example: With schema 0 the bucket boundaries are ... 1/16, 1/8, 1/4, 1/2, 1, 2, 4, 8, 16, ... - // Each increment in schema doubles the number of buckets. - // - // The initialNativeSchema is the schema we start with. The histogram will automatically scale down - // if the number of native histogram buckets exceeds nativeMaxBuckets. - private final int nativeInitialSchema; // integer in [-4, 8] - - // Native histogram buckets get smaller and smaller the closer they get to zero. - // To avoid wasting a lot of buckets for observations fluctuating around zero, we consider all - // values in [-zeroThreshold, +zeroThreshold] to be equal to zero. - // - // The zeroThreshold is initialized with minZeroThreshold, and will grow up to maxZeroThreshold if - // the number of native histogram buckets exceeds nativeMaxBuckets. - private final double nativeMinZeroThreshold; - private final double nativeMaxZeroThreshold; - - // When the number of native histogram buckets becomes larger than nativeMaxBuckets, - // an attempt is made to reduce the number of buckets: - // (1) Reset if the last reset is longer than the reset duration ago - // (2) Increase the zero bucket width if it's smaller than nativeMaxZeroThreshold - // (3) Decrease the nativeSchema, i.e. merge pairs of neighboring buckets into one - private final int nativeMaxBuckets; - - // If the number of native histogram buckets exceeds nativeMaxBuckets, - // the histogram may reset (all values set to zero) after nativeResetDurationSeconds is expired. - private final long nativeResetDurationSeconds; // 0 indicates no reset - - private Histogram(Histogram.Builder builder, PrometheusProperties prometheusProperties) { - super(builder); - MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); - exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); - nativeInitialSchema = getConfigProperty(properties, props -> { - if (Boolean.TRUE.equals(props.getHistogramClassicOnly())) { +public class Histogram extends StatefulMetric + implements DistributionDataPoint { + + // nativeSchema == CLASSIC_HISTOGRAM indicates that this is a classic histogram only. 
+ private final int CLASSIC_HISTOGRAM = Integer.MIN_VALUE; + + // NATIVE_BOUNDS is used to look up the native bucket index depending on the current schema. + private static final double[][] NATIVE_BOUNDS; + + private final boolean exemplarsEnabled; + private final ExemplarSamplerConfig exemplarSamplerConfig; + + // Upper bounds for the classic histogram buckets. Contains at least +Inf. + // An empty array indicates that this is a native histogram only. + private final double[] classicUpperBounds; + + // The schema defines the resolution of the native histogram. + // Schema is Prometheus terminology, in OpenTelemetry it's named "scale". + // The formula for the bucket boundaries at position "index" is: + // + // base := base = (2^(2^-scale)) + // lowerBound := base^(index-1) + // upperBound := base^(index) + // + // Note that this is off-by-one compared to OpenTelemetry. + // + // Example: With schema 0 the bucket boundaries are ... 1/16, 1/8, 1/4, 1/2, 1, 2, 4, 8, 16, ... + // Each increment in schema doubles the number of buckets. + // + // The initialNativeSchema is the schema we start with. The histogram will automatically scale + // down + // if the number of native histogram buckets exceeds nativeMaxBuckets. + private final int nativeInitialSchema; // integer in [-4, 8] + + // Native histogram buckets get smaller and smaller the closer they get to zero. + // To avoid wasting a lot of buckets for observations fluctuating around zero, we consider all + // values in [-zeroThreshold, +zeroThreshold] to be equal to zero. + // + // The zeroThreshold is initialized with minZeroThreshold, and will grow up to maxZeroThreshold if + // the number of native histogram buckets exceeds nativeMaxBuckets. + private final double nativeMinZeroThreshold; + private final double nativeMaxZeroThreshold; + + // When the number of native histogram buckets becomes larger than nativeMaxBuckets, + // an attempt is made to reduce the number of buckets: + // (1) Reset if the last reset is longer than the reset duration ago + // (2) Increase the zero bucket width if it's smaller than nativeMaxZeroThreshold + // (3) Decrease the nativeSchema, i.e. merge pairs of neighboring buckets into one + private final int nativeMaxBuckets; + + // If the number of native histogram buckets exceeds nativeMaxBuckets, + // the histogram may reset (all values set to zero) after nativeResetDurationSeconds is expired. 
+ private final long nativeResetDurationSeconds; // 0 indicates no reset + + private Histogram(Histogram.Builder builder, PrometheusProperties prometheusProperties) { + super(builder); + MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); + exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); + nativeInitialSchema = + getConfigProperty( + properties, + props -> { + if (Boolean.TRUE.equals(props.getHistogramClassicOnly())) { return CLASSIC_HISTOGRAM; - } else { + } else { return props.getHistogramNativeInitialSchema(); - } - }); - classicUpperBounds = getConfigProperty(properties, props -> { - if (Boolean.TRUE.equals(props.getHistogramNativeOnly())) { - return new double[]{}; - } else if (props.getHistogramClassicUpperBounds() != null) { - SortedSet upperBounds = new TreeSet<>(props.getHistogramClassicUpperBounds()); + } + }); + classicUpperBounds = + getConfigProperty( + properties, + props -> { + if (Boolean.TRUE.equals(props.getHistogramNativeOnly())) { + return new double[] {}; + } else if (props.getHistogramClassicUpperBounds() != null) { + SortedSet upperBounds = + new TreeSet<>(props.getHistogramClassicUpperBounds()); upperBounds.add(Double.POSITIVE_INFINITY); double[] result = new double[upperBounds.size()]; int i = 0; for (double upperBound : upperBounds) { - result[i++] = upperBound; + result[i++] = upperBound; } return result; - } else { + } else { return null; - } - }); - double max = getConfigProperty(properties, MetricsProperties::getHistogramNativeMaxZeroThreshold); - double min = getConfigProperty(properties, MetricsProperties::getHistogramNativeMinZeroThreshold); - nativeMaxZeroThreshold = max == builder.DEFAULT_NATIVE_MAX_ZERO_THRESHOLD && min > max ? min : max; - nativeMinZeroThreshold = Math.min(min, nativeMaxZeroThreshold); - nativeMaxBuckets = getConfigProperty(properties, MetricsProperties::getHistogramNativeMaxNumberOfBuckets); - nativeResetDurationSeconds = getConfigProperty(properties, MetricsProperties::getHistogramNativeResetDurationSeconds); - ExemplarsProperties exemplarsProperties = prometheusProperties.getExemplarProperties(); - exemplarSamplerConfig = classicUpperBounds.length == 0 ? - new ExemplarSamplerConfig(exemplarsProperties, 4) : - new ExemplarSamplerConfig(exemplarsProperties, classicUpperBounds); + } + }); + double max = + getConfigProperty(properties, MetricsProperties::getHistogramNativeMaxZeroThreshold); + double min = + getConfigProperty(properties, MetricsProperties::getHistogramNativeMinZeroThreshold); + nativeMaxZeroThreshold = + max == builder.DEFAULT_NATIVE_MAX_ZERO_THRESHOLD && min > max ? min : max; + nativeMinZeroThreshold = Math.min(min, nativeMaxZeroThreshold); + nativeMaxBuckets = + getConfigProperty(properties, MetricsProperties::getHistogramNativeMaxNumberOfBuckets); + nativeResetDurationSeconds = + getConfigProperty(properties, MetricsProperties::getHistogramNativeResetDurationSeconds); + ExemplarsProperties exemplarsProperties = prometheusProperties.getExemplarProperties(); + exemplarSamplerConfig = + classicUpperBounds.length == 0 + ? 
new ExemplarSamplerConfig(exemplarsProperties, 4) + : new ExemplarSamplerConfig(exemplarsProperties, classicUpperBounds); + } + + /** {@inheritDoc} */ + @Override + public void observe(double amount) { + getNoLabels().observe(amount); + } + + /** {@inheritDoc} */ + @Override + public void observeWithExemplar(double amount, Labels labels) { + getNoLabels().observeWithExemplar(amount, labels); + } + + @Override + protected boolean isExemplarsEnabled() { + return exemplarsEnabled; + } + + public class DataPoint implements DistributionDataPoint { + private final LongAdder[] classicBuckets; + private final ConcurrentHashMap nativeBucketsForPositiveValues = + new ConcurrentHashMap<>(); + private final ConcurrentHashMap nativeBucketsForNegativeValues = + new ConcurrentHashMap<>(); + private final LongAdder nativeZeroCount = new LongAdder(); + private final LongAdder count = new LongAdder(); + private final DoubleAdder sum = new DoubleAdder(); + private volatile int nativeSchema = + nativeInitialSchema; // integer in [-4, 8] or CLASSIC_HISTOGRAM + private volatile double nativeZeroThreshold = Histogram.this.nativeMinZeroThreshold; + private volatile long createdTimeMillis = System.currentTimeMillis(); + private final Buffer buffer = new Buffer(); + private volatile boolean resetDurationExpired = false; + private final ExemplarSampler exemplarSampler; + + private DataPoint() { + if (exemplarsEnabled) { + exemplarSampler = new ExemplarSampler(exemplarSamplerConfig); + } else { + exemplarSampler = null; + } + classicBuckets = new LongAdder[classicUpperBounds.length]; + for (int i = 0; i < classicUpperBounds.length; i++) { + classicBuckets[i] = new LongAdder(); + } + maybeScheduleNextReset(); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override - public void observe(double amount) { - getNoLabels().observe(amount); + public void observe(double value) { + if (Double.isNaN(value)) { + // See https://github.com/prometheus/client_golang/issues/1275 on ignoring NaN observations. + return; + } + if (!buffer.append(value)) { + doObserve(value, false); + } + if (isExemplarsEnabled()) { + exemplarSampler.observe(value); + } } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override - public void observeWithExemplar(double amount, Labels labels) { - getNoLabels().observeWithExemplar(amount, labels); + public void observeWithExemplar(double value, Labels labels) { + if (Double.isNaN(value)) { + // See https://github.com/prometheus/client_golang/issues/1275 on ignoring NaN observations. 
+ return; + } + if (!buffer.append(value)) { + doObserve(value, false); + } + if (isExemplarsEnabled()) { + exemplarSampler.observeWithExemplar(value, labels); + } } - @Override - protected boolean isExemplarsEnabled() { - return exemplarsEnabled; - } - - public class DataPoint implements DistributionDataPoint { - private final LongAdder[] classicBuckets; - private final ConcurrentHashMap nativeBucketsForPositiveValues = new ConcurrentHashMap<>(); - private final ConcurrentHashMap nativeBucketsForNegativeValues = new ConcurrentHashMap<>(); - private final LongAdder nativeZeroCount = new LongAdder(); - private final LongAdder count = new LongAdder(); - private final DoubleAdder sum = new DoubleAdder(); - private volatile int nativeSchema = nativeInitialSchema; // integer in [-4, 8] or CLASSIC_HISTOGRAM - private volatile double nativeZeroThreshold = Histogram.this.nativeMinZeroThreshold; - private volatile long createdTimeMillis = System.currentTimeMillis(); - private final Buffer buffer = new Buffer(); - private volatile boolean resetDurationExpired = false; - private final ExemplarSampler exemplarSampler; - - private DataPoint() { - if (exemplarsEnabled) { - exemplarSampler = new ExemplarSampler(exemplarSamplerConfig); - } else { - exemplarSampler = null; - } - classicBuckets = new LongAdder[classicUpperBounds.length]; - for (int i = 0; i < classicUpperBounds.length; i++) { - classicBuckets[i] = new LongAdder(); - } - maybeScheduleNextReset(); - } - - /** - * {@inheritDoc} - */ - @Override - public void observe(double value) { - if (Double.isNaN(value)) { - // See https://github.com/prometheus/client_golang/issues/1275 on ignoring NaN observations. - return; - } - if (!buffer.append(value)) { - doObserve(value, false); - } - if (isExemplarsEnabled()) { - exemplarSampler.observe(value); - } - } - - /** - * {@inheritDoc} - */ - @Override - public void observeWithExemplar(double value, Labels labels) { - if (Double.isNaN(value)) { - // See https://github.com/prometheus/client_golang/issues/1275 on ignoring NaN observations. - return; - } - if (!buffer.append(value)) { - doObserve(value, false); - } - if (isExemplarsEnabled()) { - exemplarSampler.observeWithExemplar(value, labels); - } - } - - private void doObserve(double value, boolean fromBuffer) { - // classicUpperBounds is an empty array if this is a native histogram only. - for (int i = 0; i < classicUpperBounds.length; ++i) { - // The last bucket is +Inf, so we always increment. - if (value <= classicUpperBounds[i]) { - classicBuckets[i].add(1); - break; - } - } - boolean nativeBucketCreated = false; - if (Histogram.this.nativeInitialSchema != CLASSIC_HISTOGRAM) { - if (value > nativeZeroThreshold) { - nativeBucketCreated = addToNativeBucket(value, nativeBucketsForPositiveValues); - } else if (value < -nativeZeroThreshold) { - nativeBucketCreated = addToNativeBucket(-value, nativeBucketsForNegativeValues); - } else { - nativeZeroCount.add(1); - } - } - sum.add(value); - count.increment(); // must be the last step, because count is used to signal that the operation is complete. - if (!fromBuffer) { - // maybeResetOrScaleDown will switch to the buffer, - // which won't work if we are currently still processing observations from the buffer. - // The reason is that before switching to the buffer we wait for all pending observations to be counted. - // If we do this while still applying observations from the buffer, the pending observations from - // the buffer will never be counted, and the buffer.run() method will wait forever. 
- maybeResetOrScaleDown(value, nativeBucketCreated); - } - } - - private HistogramSnapshot.HistogramDataPointSnapshot collect(Labels labels) { - Exemplars exemplars = exemplarSampler != null ? exemplarSampler.collect() : Exemplars.EMPTY; - return buffer.run( - expectedCount -> count.sum() == expectedCount, - () -> { - if (classicUpperBounds.length == 0) { - // native only - return new HistogramSnapshot.HistogramDataPointSnapshot( - nativeSchema, - nativeZeroCount.sum(), - nativeZeroThreshold, - toBucketList(nativeBucketsForPositiveValues), - toBucketList(nativeBucketsForNegativeValues), - sum.sum(), - labels, - exemplars, - createdTimeMillis); - } else if (Histogram.this.nativeInitialSchema == CLASSIC_HISTOGRAM) { - // classic only - return new HistogramSnapshot.HistogramDataPointSnapshot( - ClassicHistogramBuckets.of(classicUpperBounds, classicBuckets), - sum.sum(), - labels, - exemplars, - createdTimeMillis); - } else { - // hybrid: classic and native - return new HistogramSnapshot.HistogramDataPointSnapshot( - ClassicHistogramBuckets.of(classicUpperBounds, classicBuckets), - nativeSchema, - nativeZeroCount.sum(), - nativeZeroThreshold, - toBucketList(nativeBucketsForPositiveValues), - toBucketList(nativeBucketsForNegativeValues), - sum.sum(), - labels, - exemplars, - createdTimeMillis); - } - }, - v -> doObserve(v, true) - ); - } + private void doObserve(double value, boolean fromBuffer) { + // classicUpperBounds is an empty array if this is a native histogram only. + for (int i = 0; i < classicUpperBounds.length; ++i) { + // The last bucket is +Inf, so we always increment. + if (value <= classicUpperBounds[i]) { + classicBuckets[i].add(1); + break; + } + } + boolean nativeBucketCreated = false; + if (Histogram.this.nativeInitialSchema != CLASSIC_HISTOGRAM) { + if (value > nativeZeroThreshold) { + nativeBucketCreated = addToNativeBucket(value, nativeBucketsForPositiveValues); + } else if (value < -nativeZeroThreshold) { + nativeBucketCreated = addToNativeBucket(-value, nativeBucketsForNegativeValues); + } else { + nativeZeroCount.add(1); + } + } + sum.add(value); + count + .increment(); // must be the last step, because count is used to signal that the operation + // is complete. + if (!fromBuffer) { + // maybeResetOrScaleDown will switch to the buffer, + // which won't work if we are currently still processing observations from the buffer. + // The reason is that before switching to the buffer we wait for all pending observations to + // be counted. + // If we do this while still applying observations from the buffer, the pending observations + // from + // the buffer will never be counted, and the buffer.run() method will wait forever. + maybeResetOrScaleDown(value, nativeBucketCreated); + } + } - private boolean addToNativeBucket(double value, ConcurrentHashMap buckets) { - boolean newBucketCreated = false; - int bucketIndex; - if (Double.isInfinite(value)) { - bucketIndex = findBucketIndex(Double.MAX_VALUE) + 1; + private HistogramSnapshot.HistogramDataPointSnapshot collect(Labels labels) { + Exemplars exemplars = exemplarSampler != null ? 
exemplarSampler.collect() : Exemplars.EMPTY; + return buffer.run( + expectedCount -> count.sum() == expectedCount, + () -> { + if (classicUpperBounds.length == 0) { + // native only + return new HistogramSnapshot.HistogramDataPointSnapshot( + nativeSchema, + nativeZeroCount.sum(), + nativeZeroThreshold, + toBucketList(nativeBucketsForPositiveValues), + toBucketList(nativeBucketsForNegativeValues), + sum.sum(), + labels, + exemplars, + createdTimeMillis); + } else if (Histogram.this.nativeInitialSchema == CLASSIC_HISTOGRAM) { + // classic only + return new HistogramSnapshot.HistogramDataPointSnapshot( + ClassicHistogramBuckets.of(classicUpperBounds, classicBuckets), + sum.sum(), + labels, + exemplars, + createdTimeMillis); } else { - bucketIndex = findBucketIndex(value); - } - LongAdder bucketCount = buckets.get(bucketIndex); - if (bucketCount == null) { - LongAdder newBucketCount = new LongAdder(); - LongAdder existingBucketCount = buckets.putIfAbsent(bucketIndex, newBucketCount); - if (existingBucketCount == null) { - newBucketCreated = true; - bucketCount = newBucketCount; - } else { - bucketCount = existingBucketCount; - } - } - bucketCount.increment(); - return newBucketCreated; - } + // hybrid: classic and native + return new HistogramSnapshot.HistogramDataPointSnapshot( + ClassicHistogramBuckets.of(classicUpperBounds, classicBuckets), + nativeSchema, + nativeZeroCount.sum(), + nativeZeroThreshold, + toBucketList(nativeBucketsForPositiveValues), + toBucketList(nativeBucketsForNegativeValues), + sum.sum(), + labels, + exemplars, + createdTimeMillis); + } + }, + v -> doObserve(v, true)); + } - private int findBucketIndex(double value) { - // Preconditions: - // Double.isNan(value) is false; - // Double.isInfinite(value) is false; - // value > 0 - // --- - // The following is a naive implementation of C's frexp() function. - // Performance can be improved by using the internal Bit representation of floating point numbers. - // More info on the Bit representation of floating point numbers: - // https://stackoverflow.com/questions/8341395/what-is-a-subnormal-floating-point-number - // Result: value == frac * 2^exp where frac in [0.5, 1). 
- double frac = value; - int exp = 0; - while (frac < 0.5) { - frac *= 2.0; - exp--; - } - while (frac >= 1.0) { - frac /= 2.0; - exp++; - } - // end of frexp() + private boolean addToNativeBucket(double value, ConcurrentHashMap buckets) { + boolean newBucketCreated = false; + int bucketIndex; + if (Double.isInfinite(value)) { + bucketIndex = findBucketIndex(Double.MAX_VALUE) + 1; + } else { + bucketIndex = findBucketIndex(value); + } + LongAdder bucketCount = buckets.get(bucketIndex); + if (bucketCount == null) { + LongAdder newBucketCount = new LongAdder(); + LongAdder existingBucketCount = buckets.putIfAbsent(bucketIndex, newBucketCount); + if (existingBucketCount == null) { + newBucketCreated = true; + bucketCount = newBucketCount; + } else { + bucketCount = existingBucketCount; + } + } + bucketCount.increment(); + return newBucketCreated; + } - if (nativeSchema >= 1) { - return findIndex(NATIVE_BOUNDS[nativeSchema - 1], frac) + (exp - 1) * NATIVE_BOUNDS[nativeSchema - 1].length; - } else { - int bucketIndex = exp; - if (frac == 0.5) { - bucketIndex--; - } - int offset = (1 << -nativeSchema) - 1; - bucketIndex = (bucketIndex + offset) >> -nativeSchema; - return bucketIndex; - } - } + private int findBucketIndex(double value) { + // Preconditions: + // Double.isNan(value) is false; + // Double.isInfinite(value) is false; + // value > 0 + // --- + // The following is a naive implementation of C's frexp() function. + // Performance can be improved by using the internal Bit representation of floating point + // numbers. + // More info on the Bit representation of floating point numbers: + // https://stackoverflow.com/questions/8341395/what-is-a-subnormal-floating-point-number + // Result: value == frac * 2^exp where frac in [0.5, 1). + double frac = value; + int exp = 0; + while (frac < 0.5) { + frac *= 2.0; + exp--; + } + while (frac >= 1.0) { + frac /= 2.0; + exp++; + } + // end of frexp() + + if (nativeSchema >= 1) { + return findIndex(NATIVE_BOUNDS[nativeSchema - 1], frac) + + (exp - 1) * NATIVE_BOUNDS[nativeSchema - 1].length; + } else { + int bucketIndex = exp; + if (frac == 0.5) { + bucketIndex--; + } + int offset = (1 << -nativeSchema) - 1; + bucketIndex = (bucketIndex + offset) >> -nativeSchema; + return bucketIndex; + } + } - private int findIndex(double[] bounds, double frac) { - // The following is the equivalent of golang's sort.SearchFloat64s(bounds, frac) - // See https://pkg.go.dev/sort#SearchFloat64s - int first = 0; - int last = bounds.length - 1; - while (first <= last) { - int mid = (first + last) / 2; - if (bounds[mid] == frac) { - return mid; - } else if (bounds[mid] < frac) { - first = mid + 1; - } else { - last = mid - 1; - } - } - return last + 1; - } + private int findIndex(double[] bounds, double frac) { + // The following is the equivalent of golang's sort.SearchFloat64s(bounds, frac) + // See https://pkg.go.dev/sort#SearchFloat64s + int first = 0; + int last = bounds.length - 1; + while (first <= last) { + int mid = (first + last) / 2; + if (bounds[mid] == frac) { + return mid; + } else if (bounds[mid] < frac) { + first = mid + 1; + } else { + last = mid - 1; + } + } + return last + 1; + } - /** - * Makes sure that the number of native buckets does not exceed nativeMaxBuckets. - *

-         * <ul>
-         *     <li>If the histogram has already been scaled down (nativeSchema < initialSchema)
-         *     reset after resetIntervalExpired to get back to the original schema.</li>
-         *     <li>If a new bucket was created and we now exceed nativeMaxBuckets
-         *     run maybeScaleDown() to scale down</li>
-         * </ul>
- */ - private void maybeResetOrScaleDown(double value, boolean nativeBucketCreated) { - AtomicBoolean wasReset = new AtomicBoolean(false); - if (resetDurationExpired && nativeSchema < nativeInitialSchema) { - // If nativeSchema < initialNativeSchema the histogram has been scaled down. - // So if resetDurationExpired we will reset it to restore the original native schema. - buffer.run(expectedCount -> count.sum() == expectedCount, - () -> { - if (maybeReset()) { - wasReset.set(true); - } - return null; - }, - v -> doObserve(v, true)); - } else if (nativeBucketCreated) { - // If a new bucket was created we need to check if nativeMaxBuckets is exceeded - // and scale down if so. - maybeScaleDown(wasReset); - } - if (wasReset.get()) { - // We just discarded the newly observed value. Observe it again. - if (!buffer.append(value)) { - doObserve(value, true); - } - } - } + /** + * Makes sure that the number of native buckets does not exceed nativeMaxBuckets. + * + *
+     * <ul>
+     *   <li>If the histogram has already been scaled down (nativeSchema < initialSchema) reset
+     *       after resetIntervalExpired to get back to the original schema.
+     *   <li>If a new bucket was created and we now exceed nativeMaxBuckets run maybeScaleDown() to
+     *       scale down
+     * </ul>
+ */ + private void maybeResetOrScaleDown(double value, boolean nativeBucketCreated) { + AtomicBoolean wasReset = new AtomicBoolean(false); + if (resetDurationExpired && nativeSchema < nativeInitialSchema) { + // If nativeSchema < initialNativeSchema the histogram has been scaled down. + // So if resetDurationExpired we will reset it to restore the original native schema. + buffer.run( + expectedCount -> count.sum() == expectedCount, + () -> { + if (maybeReset()) { + wasReset.set(true); + } + return null; + }, + v -> doObserve(v, true)); + } else if (nativeBucketCreated) { + // If a new bucket was created we need to check if nativeMaxBuckets is exceeded + // and scale down if so. + maybeScaleDown(wasReset); + } + if (wasReset.get()) { + // We just discarded the newly observed value. Observe it again. + if (!buffer.append(value)) { + doObserve(value, true); + } + } + } - private void maybeScaleDown(AtomicBoolean wasReset) { - if (nativeMaxBuckets == 0 || nativeSchema == -4) { - return; - } - int numberOfBuckets = nativeBucketsForPositiveValues.size() + nativeBucketsForNegativeValues.size(); - if (numberOfBuckets <= nativeMaxBuckets) { - return; - } - buffer.run( - expectedCount -> count.sum() == expectedCount, - () -> { - // Now we are in the synchronized block while new observations go into the buffer. - // Check again if we need to limit the bucket size, because another thread might - // have limited it in the meantime. - int nBuckets = nativeBucketsForPositiveValues.size() + nativeBucketsForNegativeValues.size(); - if (nBuckets <= nativeMaxBuckets || nativeSchema == -4) { - return null; - } - if (maybeReset()) { - wasReset.set(true); - return null; - } - if (maybeWidenZeroBucket()) { - return null; - } - doubleBucketWidth(); - return null; - }, - v -> doObserve(v, true) - ); - } + private void maybeScaleDown(AtomicBoolean wasReset) { + if (nativeMaxBuckets == 0 || nativeSchema == -4) { + return; + } + int numberOfBuckets = + nativeBucketsForPositiveValues.size() + nativeBucketsForNegativeValues.size(); + if (numberOfBuckets <= nativeMaxBuckets) { + return; + } + buffer.run( + expectedCount -> count.sum() == expectedCount, + () -> { + // Now we are in the synchronized block while new observations go into the buffer. + // Check again if we need to limit the bucket size, because another thread might + // have limited it in the meantime. + int nBuckets = + nativeBucketsForPositiveValues.size() + nativeBucketsForNegativeValues.size(); + if (nBuckets <= nativeMaxBuckets || nativeSchema == -4) { + return null; + } + if (maybeReset()) { + wasReset.set(true); + return null; + } + if (maybeWidenZeroBucket()) { + return null; + } + doubleBucketWidth(); + return null; + }, + v -> doObserve(v, true)); + } - // maybeReset is called in the synchronized block while new observations go into the buffer. 
- private boolean maybeReset() { - if (!resetDurationExpired) { - return false; - } - resetDurationExpired = false; - buffer.reset(); - nativeBucketsForPositiveValues.clear(); - nativeBucketsForNegativeValues.clear(); - nativeZeroCount.reset(); - count.reset(); - sum.reset(); - for (int i = 0; i < classicBuckets.length; i++) { - classicBuckets[i].reset(); - } - nativeZeroThreshold = nativeMinZeroThreshold; - nativeSchema = Histogram.this.nativeInitialSchema; - createdTimeMillis = System.currentTimeMillis(); - if (exemplarSampler != null) { - exemplarSampler.reset(); - } - maybeScheduleNextReset(); - return true; - } + // maybeReset is called in the synchronized block while new observations go into the buffer. + private boolean maybeReset() { + if (!resetDurationExpired) { + return false; + } + resetDurationExpired = false; + buffer.reset(); + nativeBucketsForPositiveValues.clear(); + nativeBucketsForNegativeValues.clear(); + nativeZeroCount.reset(); + count.reset(); + sum.reset(); + for (int i = 0; i < classicBuckets.length; i++) { + classicBuckets[i].reset(); + } + nativeZeroThreshold = nativeMinZeroThreshold; + nativeSchema = Histogram.this.nativeInitialSchema; + createdTimeMillis = System.currentTimeMillis(); + if (exemplarSampler != null) { + exemplarSampler.reset(); + } + maybeScheduleNextReset(); + return true; + } - // maybeWidenZeroBucket is called in the synchronized block while new observations go into the buffer. - private boolean maybeWidenZeroBucket() { - if (nativeZeroThreshold >= nativeMaxZeroThreshold) { - return false; - } - int smallestIndex = findSmallestIndex(nativeBucketsForPositiveValues); - int smallestNegativeIndex = findSmallestIndex(nativeBucketsForNegativeValues); - if (smallestNegativeIndex < smallestIndex) { - smallestIndex = smallestNegativeIndex; - } - if (smallestIndex == Integer.MAX_VALUE) { - return false; - } - double newZeroThreshold = nativeBucketIndexToUpperBound(nativeSchema, smallestIndex); - if (newZeroThreshold > nativeMaxZeroThreshold) { - return false; - } - mergeWithZeroBucket(smallestIndex, nativeBucketsForPositiveValues); - mergeWithZeroBucket(smallestIndex, nativeBucketsForNegativeValues); - nativeZeroThreshold = newZeroThreshold; - return true; - } + // maybeWidenZeroBucket is called in the synchronized block while new observations go into the + // buffer. 
+ private boolean maybeWidenZeroBucket() { + if (nativeZeroThreshold >= nativeMaxZeroThreshold) { + return false; + } + int smallestIndex = findSmallestIndex(nativeBucketsForPositiveValues); + int smallestNegativeIndex = findSmallestIndex(nativeBucketsForNegativeValues); + if (smallestNegativeIndex < smallestIndex) { + smallestIndex = smallestNegativeIndex; + } + if (smallestIndex == Integer.MAX_VALUE) { + return false; + } + double newZeroThreshold = nativeBucketIndexToUpperBound(nativeSchema, smallestIndex); + if (newZeroThreshold > nativeMaxZeroThreshold) { + return false; + } + mergeWithZeroBucket(smallestIndex, nativeBucketsForPositiveValues); + mergeWithZeroBucket(smallestIndex, nativeBucketsForNegativeValues); + nativeZeroThreshold = newZeroThreshold; + return true; + } - private void mergeWithZeroBucket(int index, Map buckets) { - LongAdder count = buckets.remove(index); - if (count != null) { - nativeZeroCount.add(count.sum()); - } - } + private void mergeWithZeroBucket(int index, Map buckets) { + LongAdder count = buckets.remove(index); + if (count != null) { + nativeZeroCount.add(count.sum()); + } + } - private double nativeBucketIndexToUpperBound(int schema, int index) { - double result = calcUpperBound(schema, index); - if (Double.isInfinite(result)) { - // The last bucket boundary should always be MAX_VALUE, so that the +Inf bucket counts only - // actual +Inf observations. - // However, MAX_VALUE is not a natural bucket boundary, so we introduce MAX_VALUE - // as an artificial boundary before +Inf. - double previousBucketBoundary = calcUpperBound(schema, index - 1); - if (Double.isFinite(previousBucketBoundary) && previousBucketBoundary < Double.MAX_VALUE) { - return Double.MAX_VALUE; - } - } - return result; - } + private double nativeBucketIndexToUpperBound(int schema, int index) { + double result = calcUpperBound(schema, index); + if (Double.isInfinite(result)) { + // The last bucket boundary should always be MAX_VALUE, so that the +Inf bucket counts only + // actual +Inf observations. + // However, MAX_VALUE is not a natural bucket boundary, so we introduce MAX_VALUE + // as an artificial boundary before +Inf. + double previousBucketBoundary = calcUpperBound(schema, index - 1); + if (Double.isFinite(previousBucketBoundary) && previousBucketBoundary < Double.MAX_VALUE) { + return Double.MAX_VALUE; + } + } + return result; + } - private double calcUpperBound(int schema, int index) { - // The actual formula is: - // --- - // base := 2^(2^-schema); - // upperBound := base^index; - // --- - // The following implementation reduces the numerical error for index > 0. - // It's not very efficient. We should refactor and use an algorithm as in client_golang's getLe() - double factor = 1.0; - while (index > 0) { - if (index % 2 == 0) { - index /= 2; - schema -= 1; - } else { - index -= 1; - factor *= Math.pow(2, Math.pow(2, -schema)); - } - } - return factor * Math.pow(2, index * Math.pow(2, -schema)); - } + private double calcUpperBound(int schema, int index) { + // The actual formula is: + // --- + // base := 2^(2^-schema); + // upperBound := base^index; + // --- + // The following implementation reduces the numerical error for index > 0. + // It's not very efficient. 
We should refactor and use an algorithm as in client_golang's + // getLe() + double factor = 1.0; + while (index > 0) { + if (index % 2 == 0) { + index /= 2; + schema -= 1; + } else { + index -= 1; + factor *= Math.pow(2, Math.pow(2, -schema)); + } + } + return factor * Math.pow(2, index * Math.pow(2, -schema)); + } - private int findSmallestIndex(Map nativeBuckets) { - int result = Integer.MAX_VALUE; - for (int key : nativeBuckets.keySet()) { - if (key < result) { - result = key; - } - } - return result; + private int findSmallestIndex(Map nativeBuckets) { + int result = Integer.MAX_VALUE; + for (int key : nativeBuckets.keySet()) { + if (key < result) { + result = key; } + } + return result; + } - // doubleBucketWidth is called in the synchronized block while new observations go into the buffer. - private void doubleBucketWidth() { - doubleBucketWidth(nativeBucketsForPositiveValues); - doubleBucketWidth(nativeBucketsForNegativeValues); - nativeSchema--; - } + // doubleBucketWidth is called in the synchronized block while new observations go into the + // buffer. + private void doubleBucketWidth() { + doubleBucketWidth(nativeBucketsForPositiveValues); + doubleBucketWidth(nativeBucketsForNegativeValues); + nativeSchema--; + } - private void doubleBucketWidth(Map buckets) { - int[] keys = new int[buckets.size()]; - long[] values = new long[keys.length]; - int i = 0; - for (Map.Entry entry : buckets.entrySet()) { - keys[i] = entry.getKey(); - values[i] = entry.getValue().sum(); - i++; - } - buckets.clear(); - for (i = 0; i < keys.length; i++) { - int index = (keys[i] + 1) / 2; - LongAdder count = buckets.get(index); - if (count == null) { - count = new LongAdder(); - buckets.put(index, count); - } - count.add(values[i]); - } - } + private void doubleBucketWidth(Map buckets) { + int[] keys = new int[buckets.size()]; + long[] values = new long[keys.length]; + int i = 0; + for (Map.Entry entry : buckets.entrySet()) { + keys[i] = entry.getKey(); + values[i] = entry.getValue().sum(); + i++; + } + buckets.clear(); + for (i = 0; i < keys.length; i++) { + int index = (keys[i] + 1) / 2; + LongAdder count = buckets.get(index); + if (count == null) { + count = new LongAdder(); + buckets.put(index, count); + } + count.add(values[i]); + } + } - private NativeHistogramBuckets toBucketList(ConcurrentHashMap map) { - int[] bucketIndexes = new int[map.size()]; - long[] counts = new long[map.size()]; - int i = 0; - for (Map.Entry entry : map.entrySet()) { - bucketIndexes[i] = entry.getKey(); - counts[i] = entry.getValue().sum(); - i++; - } - return NativeHistogramBuckets.of(bucketIndexes, counts); - } + private NativeHistogramBuckets toBucketList(ConcurrentHashMap map) { + int[] bucketIndexes = new int[map.size()]; + long[] counts = new long[map.size()]; + int i = 0; + for (Map.Entry entry : map.entrySet()) { + bucketIndexes[i] = entry.getKey(); + counts[i] = entry.getValue().sum(); + i++; + } + return NativeHistogramBuckets.of(bucketIndexes, counts); + } - private void maybeScheduleNextReset() { - if (nativeResetDurationSeconds > 0) { - Scheduler.schedule(() -> resetDurationExpired = true, nativeResetDurationSeconds, TimeUnit.SECONDS); - } - } + private void maybeScheduleNextReset() { + if (nativeResetDurationSeconds > 0) { + Scheduler.schedule( + () -> resetDurationExpired = true, nativeResetDurationSeconds, TimeUnit.SECONDS); + } } + } + + /** {@inheritDoc} */ + @Override + public HistogramSnapshot collect() { + return (HistogramSnapshot) super.collect(); + } + + @Override + protected HistogramSnapshot 
collect(List labels, List metricData) { + List data = new ArrayList<>(labels.size()); + for (int i = 0; i < labels.size(); i++) { + data.add(metricData.get(i).collect(labels.get(i))); + } + return new HistogramSnapshot(getMetadata(), data); + } + + @Override + protected DataPoint newDataPoint() { + return new DataPoint(); + } + + static { + // See bounds in client_golang's histogram implementation. + NATIVE_BOUNDS = new double[8][]; + for (int schema = 1; schema <= 8; schema++) { + NATIVE_BOUNDS[schema - 1] = new double[1 << schema]; + NATIVE_BOUNDS[schema - 1][0] = 0.5; + // https://github.com/open-telemetry/opentelemetry-proto/blob/main/opentelemetry/proto/metrics/v1/metrics.proto#L501 + double base = Math.pow(2, Math.pow(2, -schema)); + for (int i = 1; i < NATIVE_BOUNDS[schema - 1].length; i++) { + if (i % 2 == 0 && schema > 1) { + // Use previously calculated value for increased precision, see comment in client_golang's + // implementation. + NATIVE_BOUNDS[schema - 1][i] = NATIVE_BOUNDS[schema - 2][i / 2]; + } else { + NATIVE_BOUNDS[schema - 1][i] = NATIVE_BOUNDS[schema - 1][i - 1] * base; + } + } + } + } + + public static Builder builder() { + return new Builder(PrometheusProperties.get()); + } + + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } + + public static class Builder extends StatefulMetric.Builder { + + public static final double[] DEFAULT_CLASSIC_UPPER_BOUNDS = + new double[] {.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10}; + private final double DEFAULT_NATIVE_MIN_ZERO_THRESHOLD = Math.pow(2.0, -128); + private final double DEFAULT_NATIVE_MAX_ZERO_THRESHOLD = Math.pow(2.0, -128); + private final int DEFAULT_NATIVE_INITIAL_SCHEMA = 5; + private final int DEFAULT_NATIVE_MAX_NUMBER_OF_BUCKETS = 160; + private final long DEFAULT_NATIVE_RESET_DURATION_SECONDS = 0; // 0 means no reset + + private Boolean nativeOnly; + private Boolean classicOnly; + private double[] classicUpperBounds; + private Integer nativeInitialSchema; + private Double nativeMaxZeroThreshold; + private Double nativeMinZeroThreshold; + private Integer nativeMaxNumberOfBuckets; + private Long nativeResetDurationSeconds; - /** - * {@inheritDoc} - */ @Override - public HistogramSnapshot collect() { - return (HistogramSnapshot) super.collect(); + public Histogram build() { + return new Histogram(this, properties); } @Override - protected HistogramSnapshot collect(List labels, List metricData) { - List data = new ArrayList<>(labels.size()); - for (int i = 0; i < labels.size(); i++) { - data.add(metricData.get(i).collect(labels.get(i))); - } - return new HistogramSnapshot(getMetadata(), data); + protected MetricsProperties toProperties() { + return MetricsProperties.builder() + .exemplarsEnabled(exemplarsEnabled) + .histogramNativeOnly(nativeOnly) + .histogramClassicOnly(classicOnly) + .histogramClassicUpperBounds(classicUpperBounds) + .histogramNativeInitialSchema(nativeInitialSchema) + .histogramNativeMinZeroThreshold(nativeMinZeroThreshold) + .histogramNativeMaxZeroThreshold(nativeMaxZeroThreshold) + .histogramNativeMaxNumberOfBuckets(nativeMaxNumberOfBuckets) + .histogramNativeResetDurationSeconds(nativeResetDurationSeconds) + .build(); } + /** Default properties for histogram metrics. */ @Override - protected DataPoint newDataPoint() { - return new DataPoint(); - } - - static { - // See bounds in client_golang's histogram implementation. 
- NATIVE_BOUNDS = new double[8][]; - for (int schema = 1; schema <= 8; schema++) { - NATIVE_BOUNDS[schema - 1] = new double[1 << schema]; - NATIVE_BOUNDS[schema - 1][0] = 0.5; - // https://github.com/open-telemetry/opentelemetry-proto/blob/main/opentelemetry/proto/metrics/v1/metrics.proto#L501 - double base = Math.pow(2, Math.pow(2, -schema)); - for (int i = 1; i < NATIVE_BOUNDS[schema - 1].length; i++) { - if (i % 2 == 0 && schema > 1) { - // Use previously calculated value for increased precision, see comment in client_golang's implementation. - NATIVE_BOUNDS[schema - 1][i] = NATIVE_BOUNDS[schema - 2][i / 2]; - } else { - NATIVE_BOUNDS[schema - 1][i] = NATIVE_BOUNDS[schema - 1][i - 1] * base; - } - } - } + public MetricsProperties getDefaultProperties() { + return MetricsProperties.builder() + .exemplarsEnabled(true) + .histogramNativeOnly(false) + .histogramClassicOnly(false) + .histogramClassicUpperBounds(DEFAULT_CLASSIC_UPPER_BOUNDS) + .histogramNativeInitialSchema(DEFAULT_NATIVE_INITIAL_SCHEMA) + .histogramNativeMinZeroThreshold(DEFAULT_NATIVE_MIN_ZERO_THRESHOLD) + .histogramNativeMaxZeroThreshold(DEFAULT_NATIVE_MAX_ZERO_THRESHOLD) + .histogramNativeMaxNumberOfBuckets(DEFAULT_NATIVE_MAX_NUMBER_OF_BUCKETS) + .histogramNativeResetDurationSeconds(DEFAULT_NATIVE_RESET_DURATION_SECONDS) + .build(); } - public static Builder builder() { - return new Builder(PrometheusProperties.get()); + private Builder(PrometheusProperties config) { + super(Collections.singletonList("le"), config); } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + /** + * Use the native histogram representation only, i.e. don't maintain classic histogram buckets. + * See {@link Histogram} for more info. + */ + public Builder nativeOnly() { + if (Boolean.TRUE.equals(classicOnly)) { + throw new IllegalArgumentException("Cannot call nativeOnly() after calling classicOnly()."); + } + nativeOnly = true; + return this; } - public static class Builder extends StatefulMetric.Builder { - - public static final double[] DEFAULT_CLASSIC_UPPER_BOUNDS = new double[]{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10}; - private final double DEFAULT_NATIVE_MIN_ZERO_THRESHOLD = Math.pow(2.0, -128); - private final double DEFAULT_NATIVE_MAX_ZERO_THRESHOLD = Math.pow(2.0, -128); - private final int DEFAULT_NATIVE_INITIAL_SCHEMA = 5; - private final int DEFAULT_NATIVE_MAX_NUMBER_OF_BUCKETS = 160; - private final long DEFAULT_NATIVE_RESET_DURATION_SECONDS = 0; // 0 means no reset - - private Boolean nativeOnly; - private Boolean classicOnly; - private double[] classicUpperBounds; - private Integer nativeInitialSchema; - private Double nativeMaxZeroThreshold; - private Double nativeMinZeroThreshold; - private Integer nativeMaxNumberOfBuckets; - private Long nativeResetDurationSeconds; - - @Override - public Histogram build() { - return new Histogram(this, properties); - } - - @Override - protected MetricsProperties toProperties() { - return MetricsProperties.builder() - .exemplarsEnabled(exemplarsEnabled) - .histogramNativeOnly(nativeOnly) - .histogramClassicOnly(classicOnly) - .histogramClassicUpperBounds(classicUpperBounds) - .histogramNativeInitialSchema(nativeInitialSchema) - .histogramNativeMinZeroThreshold(nativeMinZeroThreshold) - .histogramNativeMaxZeroThreshold(nativeMaxZeroThreshold) - .histogramNativeMaxNumberOfBuckets(nativeMaxNumberOfBuckets) - .histogramNativeResetDurationSeconds(nativeResetDurationSeconds) - .build(); - } - - /** - * Default properties for histogram metrics. 
- */ - @Override - public MetricsProperties getDefaultProperties() { - return MetricsProperties.builder() - .exemplarsEnabled(true) - .histogramNativeOnly(false) - .histogramClassicOnly(false) - .histogramClassicUpperBounds(DEFAULT_CLASSIC_UPPER_BOUNDS) - .histogramNativeInitialSchema(DEFAULT_NATIVE_INITIAL_SCHEMA) - .histogramNativeMinZeroThreshold(DEFAULT_NATIVE_MIN_ZERO_THRESHOLD) - .histogramNativeMaxZeroThreshold(DEFAULT_NATIVE_MAX_ZERO_THRESHOLD) - .histogramNativeMaxNumberOfBuckets(DEFAULT_NATIVE_MAX_NUMBER_OF_BUCKETS) - .histogramNativeResetDurationSeconds(DEFAULT_NATIVE_RESET_DURATION_SECONDS) - .build(); - } - - private Builder(PrometheusProperties config) { - super(Collections.singletonList("le"), config); - } - - /** - * Use the native histogram representation only, i.e. don't maintain classic histogram buckets. - * See {@link Histogram} for more info. - */ - public Builder nativeOnly() { - if (Boolean.TRUE.equals(classicOnly)) { - throw new IllegalArgumentException("Cannot call nativeOnly() after calling classicOnly()."); - } - nativeOnly = true; - return this; - } - - /** - * Use the classic histogram representation only, i.e. don't maintain native histogram buckets. - * See {@link Histogram} for more info. - */ - public Builder classicOnly() { - if (Boolean.TRUE.equals(nativeOnly)) { - throw new IllegalArgumentException("Cannot call classicOnly() after calling nativeOnly()."); - } - classicOnly = true; - return this; - } - + /** + * Use the classic histogram representation only, i.e. don't maintain native histogram buckets. + * See {@link Histogram} for more info. + */ + public Builder classicOnly() { + if (Boolean.TRUE.equals(nativeOnly)) { + throw new IllegalArgumentException("Cannot call classicOnly() after calling nativeOnly()."); + } + classicOnly = true; + return this; + } - /** - * Set the upper bounds for the classic histogram buckets. - * Default is {@link Builder#DEFAULT_CLASSIC_UPPER_BOUNDS}. - * If the +Inf bucket is missing it will be added. - * If upperBounds contains duplicates the duplicates will be removed. - */ - public Builder classicUpperBounds(double... upperBounds) { - this.classicUpperBounds = upperBounds; - for (double bound : upperBounds) { - if (Double.isNaN(bound)) { - throw new IllegalArgumentException("Cannot use NaN as upper bound for a histogram"); - } - } - return this; - } + /** + * Set the upper bounds for the classic histogram buckets. Default is {@link + * Builder#DEFAULT_CLASSIC_UPPER_BOUNDS}. If the +Inf bucket is missing it will be added. If + * upperBounds contains duplicates the duplicates will be removed. + */ + public Builder classicUpperBounds(double... upperBounds) { + this.classicUpperBounds = upperBounds; + for (double bound : upperBounds) { + if (Double.isNaN(bound)) { + throw new IllegalArgumentException("Cannot use NaN as upper bound for a histogram"); + } + } + return this; + } - /** - * Create classic histogram buckets with linear bucket boundaries. - *

- * Example: {@code withClassicLinearBuckets(1.0, 0.5, 10)} creates bucket boundaries - * {@code [[1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5]}. - * - * @param start is the first bucket boundary - * @param width is the width of each bucket - * @param count is the total number of buckets, including start - */ - public Builder classicLinearUpperBounds(double start, double width, int count) { - this.classicUpperBounds = new double[count]; - // Use BigDecimal to avoid weird bucket boundaries like 0.7000000000000001. - BigDecimal s = new BigDecimal(Double.toString(start)); - BigDecimal w = new BigDecimal(Double.toString(width)); - for (int i = 0; i < count; i++) { - classicUpperBounds[i] = s.add(w.multiply(new BigDecimal(i))).doubleValue(); - } - return this; - } + /** + * Create classic histogram buckets with linear bucket boundaries. + * + *

Example: {@code withClassicLinearBuckets(1.0, 0.5, 10)} creates bucket boundaries {@code + * [1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 5.5]}. + * + * @param start is the first bucket boundary + * @param width is the width of each bucket + * @param count is the total number of buckets, including start + */ + public Builder classicLinearUpperBounds(double start, double width, int count) { + this.classicUpperBounds = new double[count]; + // Use BigDecimal to avoid weird bucket boundaries like 0.7000000000000001. + BigDecimal s = new BigDecimal(Double.toString(start)); + BigDecimal w = new BigDecimal(Double.toString(width)); + for (int i = 0; i < count; i++) { + classicUpperBounds[i] = s.add(w.multiply(new BigDecimal(i))).doubleValue(); + } + return this; + } - /** - * Create classic histogram bucxkets with exponential boundaries. - *

- * Example: {@code withClassicExponentialBuckets(1.0, 2.0, 10)} creates bucket bounaries - * {@code [1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0]} - * - * @param start is the first bucket boundary - * @param factor growth factor - * @param count total number of buckets, including start - */ - public Builder classicExponentialUpperBounds(double start, double factor, int count) { - classicUpperBounds = new double[count]; - for (int i = 0; i < count; i++) { - classicUpperBounds[i] = start * Math.pow(factor, i); - } - return this; - } + /** + * Create classic histogram buckets with exponential boundaries. + * + *

Example: {@code withClassicExponentialBuckets(1.0, 2.0, 10)} creates bucket boundaries + * {@code [1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0]} + * + * @param start is the first bucket boundary + * @param factor growth factor + * @param count total number of buckets, including start + */ + public Builder classicExponentialUpperBounds(double start, double factor, int count) { + classicUpperBounds = new double[count]; + for (int i = 0; i < count; i++) { + classicUpperBounds[i] = start * Math.pow(factor, i); + } + return this; + } - /** - * The schema is a number in [-4, 8] defining the resolution of the native histogram. - * Default is {@link Builder#DEFAULT_NATIVE_INITIAL_SCHEMA}. - *

- * The higher the schema, the finer the resolution. - * Schema is Prometheus terminology. In OpenTelemetry it's called "scale". - *

- * Note that the schema for a histogram may be automatically decreased at runtime if the number - * of native histogram buckets exceeds {@link #nativeMaxNumberOfBuckets(int)}. - *

- * The following table shows: - *

    - *
  • factor: The growth factor for bucket boundaries, i.e. next bucket boundary = growth factor * previous bucket boundary. - *
  • max quantile error: The maximum error for quantiles calculated using the Prometheus histogram_quantile() function, relative to the observed value, assuming harmonic mean. - *
- * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
max quantile errors for different growth factors
schema | factor    | max quantile error
  -4   | 65.536    | 99%
  -3   | 256       | 99%
  -2   | 16        | 88%
  -1   | 4         | 60%
   0   | 2         | 33%
   1   | 1.4142... | 17%
   2   | 1.1892... | 9%
   3   | 1.1090... | 4%
   4   | 1.0442... | 2%
   5   | 1.0218... | 1%
   6   | 1.0108... | 0.5%
   7   | 1.0054... | 0.3%
   8   | 1.0027... | 0.1%
- */ - public Builder nativeInitialSchema(int nativeSchema) { - if (nativeSchema < -4 || nativeSchema > 8) { - throw new IllegalArgumentException("Unsupported native histogram schema " + nativeSchema + ": expecting -4 <= schema <= 8."); - } - this.nativeInitialSchema = nativeSchema; - return this; - } + /** + * The schema is a number in [-4, 8] defining the resolution of the native histogram. Default is + * {@link Builder#DEFAULT_NATIVE_INITIAL_SCHEMA}. + * + *

The higher the schema, the finer the resolution. Schema is Prometheus terminology. In + * OpenTelemetry it's called "scale". + * + *

Note that the schema for a histogram may be automatically decreased at runtime if the + * number of native histogram buckets exceeds {@link #nativeMaxNumberOfBuckets(int)}. + * + *

The following table shows: + * + *

    + *
  • factor: The growth factor for bucket boundaries, i.e. next bucket boundary = growth + * factor * previous bucket boundary. + *
  • max quantile error: The maximum error for quantiles calculated using the Prometheus + * histogram_quantile() function, relative to the observed value, assuming harmonic mean. + *
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
max quantile errors for different growth factors
schema | factor    | max quantile error
  -4   | 65.536    | 99%
  -3   | 256       | 99%
  -2   | 16        | 88%
  -1   | 4         | 60%
   0   | 2         | 33%
   1   | 1.4142... | 17%
   2   | 1.1892... | 9%
   3   | 1.1090... | 4%
   4   | 1.0442... | 2%
   5   | 1.0218... | 1%
   6   | 1.0108... | 0.5%
   7   | 1.0054... | 0.3%
   8   | 1.0027... | 0.1%
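// Usage sketch (illustrative only; the metric name is hypothetical): the growth factor in the
// table above is base = 2^(2^-schema), the same formula used to fill NATIVE_BOUNDS earlier in
// this file, and nativeInitialSchema() pins the starting resolution.
import io.prometheus.metrics.core.metrics.Histogram;

class NativeSchemaExample {
  public static void main(String[] args) {
    int schema = 5; // DEFAULT_NATIVE_INITIAL_SCHEMA
    double factor = Math.pow(2, Math.pow(2, -schema));
    System.out.printf("schema %d -> growth factor %.4f%n", schema, factor); // ~1.0219, the 1.0218... row

    Histogram latency =
        Histogram.builder()
            .name("request_latency_seconds") // hypothetical name
            .help("Request latency")
            .nativeOnly()                    // maintain native buckets only
            .nativeInitialSchema(schema)     // resolution as per the table above
            .build();
    latency.observe(0.42); // observe() on the no-labels data point, as elsewhere in this library
  }
}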
+ */ + public Builder nativeInitialSchema(int nativeSchema) { + if (nativeSchema < -4 || nativeSchema > 8) { + throw new IllegalArgumentException( + "Unsupported native histogram schema " + + nativeSchema + + ": expecting -4 <= schema <= 8."); + } + this.nativeInitialSchema = nativeSchema; + return this; + } - /** - * Native histogram buckets get smaller and smaller the closer they get to zero. - * To avoid wasting a lot of buckets for observations fluctuating around zero, we consider all - * values in [-zeroThreshold, +zeroThreshold] to be equal to zero. - *

- * The zeroThreshold is initialized with minZeroThreshold, and will grow up to maxZeroThreshold if - * the number of native histogram buckets exceeds nativeMaxBuckets. - *

- * Default is {@link Builder#DEFAULT_NATIVE_MAX_NUMBER_OF_BUCKETS}. - */ - public Builder nativeMaxZeroThreshold(double nativeMaxZeroThreshold) { - if (nativeMaxZeroThreshold < 0) { - throw new IllegalArgumentException("Illegal native max zero threshold " + nativeMaxZeroThreshold + ": must be >= 0"); - } - this.nativeMaxZeroThreshold = nativeMaxZeroThreshold; - return this; - } + /** + * Native histogram buckets get smaller and smaller the closer they get to zero. To avoid + * wasting a lot of buckets for observations fluctuating around zero, we consider all values in + * [-zeroThreshold, +zeroThreshold] to be equal to zero. + * + *

The zeroThreshold is initialized with minZeroThreshold, and will grow up to + * maxZeroThreshold if the number of native histogram buckets exceeds nativeMaxBuckets. + * + *

Default is {@link Builder#DEFAULT_NATIVE_MAX_ZERO_THRESHOLD}. + */ + public Builder nativeMaxZeroThreshold(double nativeMaxZeroThreshold) { + if (nativeMaxZeroThreshold < 0) { + throw new IllegalArgumentException( + "Illegal native max zero threshold " + nativeMaxZeroThreshold + ": must be >= 0"); + } + this.nativeMaxZeroThreshold = nativeMaxZeroThreshold; + return this; + } - /** - * Native histogram buckets get smaller and smaller the closer they get to zero. - * To avoid wasting a lot of buckets for observations fluctuating around zero, we consider all - * values in [-zeroThreshold, +zeroThreshold] to be equal to zero. - *

- * The zeroThreshold is initialized with minZeroThreshold, and will grow up to maxZeroThreshold if - * the number of native histogram buckets exceeds nativeMaxBuckets. - *

- * Default is {@link Builder#DEFAULT_NATIVE_MIN_ZERO_THRESHOLD}. - */ - public Builder nativeMinZeroThreshold(double nativeMinZeroThreshold) { - if (nativeMinZeroThreshold < 0) { - throw new IllegalArgumentException("Illegal native min zero threshold " + nativeMinZeroThreshold + ": must be >= 0"); - } - this.nativeMinZeroThreshold = nativeMinZeroThreshold; - return this; - } + /** + * Native histogram buckets get smaller and smaller the closer they get to zero. To avoid + * wasting a lot of buckets for observations fluctuating around zero, we consider all values in + * [-zeroThreshold, +zeroThreshold] to be equal to zero. + * + *

The zeroThreshold is initialized with minZeroThreshold, and will grow up to + * maxZeroThreshold if the number of native histogram buckets exceeds nativeMaxBuckets. + * + *

Default is {@link Builder#DEFAULT_NATIVE_MIN_ZERO_THRESHOLD}. + */ + public Builder nativeMinZeroThreshold(double nativeMinZeroThreshold) { + if (nativeMinZeroThreshold < 0) { + throw new IllegalArgumentException( + "Illegal native min zero threshold " + nativeMinZeroThreshold + ": must be >= 0"); + } + this.nativeMinZeroThreshold = nativeMinZeroThreshold; + return this; + } - /** - * Limit the number of native buckets. - *

- * If the number of native buckets exceeds the maximum, the {@link #nativeInitialSchema(int)} is decreased, - * i.e. the resolution of the histogram is decreased to reduce the number of buckets. - *

- * Default is {@link Builder#DEFAULT_NATIVE_MAX_NUMBER_OF_BUCKETS}. - */ - public Builder nativeMaxNumberOfBuckets(int nativeMaxBuckets) { - this.nativeMaxNumberOfBuckets = nativeMaxBuckets; - return this; - } + /** + * Limit the number of native buckets. + * + *

If the number of native buckets exceeds the maximum, the {@link #nativeInitialSchema(int)} + * is decreased, i.e. the resolution of the histogram is decreased to reduce the number of + * buckets. + * + *

Default is {@link Builder#DEFAULT_NATIVE_MAX_NUMBER_OF_BUCKETS}. + */ + public Builder nativeMaxNumberOfBuckets(int nativeMaxBuckets) { + this.nativeMaxNumberOfBuckets = nativeMaxBuckets; + return this; + } - /** - * If the histogram needed to be scaled down because {@link #nativeMaxNumberOfBuckets(int)} was exceeded, - * reset the histogram after a certain time interval to go back to the original {@link #nativeInitialSchema(int)}. - *

- * Reset means all values are set to zero. A good value might be 24h or 7d. - *

- * Default is no reset. - */ - public Builder nativeResetDuration(long duration, TimeUnit unit) { - // TODO: reset interval isn't tested yet - if (duration <= 0) { - throw new IllegalArgumentException(duration + ": value > 0 expected"); - } - nativeResetDurationSeconds = unit.toSeconds(duration); - return this; - } + /** + * If the histogram needed to be scaled down because {@link #nativeMaxNumberOfBuckets(int)} was + * exceeded, reset the histogram after a certain time interval to go back to the original {@link + * #nativeInitialSchema(int)}. + * + *

Reset means all values are set to zero. A good value might be 24h or 7d. + * + *

Default is no reset. + */ + public Builder nativeResetDuration(long duration, TimeUnit unit) { + // TODO: reset interval isn't tested yet + if (duration <= 0) { + throw new IllegalArgumentException(duration + ": value > 0 expected"); + } + nativeResetDurationSeconds = unit.toSeconds(duration); + return this; + } - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Info.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Info.java index 373e7a98b..49f8a1eb5 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Info.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Info.java @@ -4,7 +4,6 @@ import io.prometheus.metrics.model.snapshots.InfoSnapshot; import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.Unit; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -13,6 +12,7 @@ /** * Info metric. Example: + * *

{@code
  * Info info = Info.builder()
  *         .name("java_runtime_info")
@@ -30,124 +30,143 @@
  */
 public class Info extends MetricWithFixedMetadata {
 
-    private final Set labels = new CopyOnWriteArraySet<>();
-
-    private Info(Builder builder) {
-        super(builder);
+  private final Set labels = new CopyOnWriteArraySet<>();
+
+  private Info(Builder builder) {
+    super(builder);
+  }
+
+  /**
+   * Set the info label values. This will replace any previous values, i.e. the info metric will
+   * only have one data point after calling setLabelValues(). This is good for a metric like {@code
+   * target_info} where you want only one single data point.
+   */
+  public void setLabelValues(String... labelValues) {
+    if (labelValues.length != labelNames.length) {
+      throw new IllegalArgumentException(
+          getClass().getSimpleName()
+              + " "
+              + getMetadata().getName()
+              + " was created with "
+              + labelNames.length
+              + " label names, but you called setLabelValues() with "
+              + labelValues.length
+              + " label values.");
     }
-
-    /**
-     * Set the info label values. This will replace any previous values,
-     * i.e. the info metric will only have one data point after calling setLabelValues().
-     * This is good for a metric like {@code target_info} where you want only one single data point.
-     */
-    public void setLabelValues(String... labelValues) {
-        if (labelValues.length != labelNames.length) {
-            throw new IllegalArgumentException(getClass().getSimpleName() + " " + getMetadata().getName() + " was created with " + labelNames.length + " label names, but you called setLabelValues() with " + labelValues.length + " label values.");
-        }
-        Labels newLabels = Labels.of(labelNames, labelValues);
-        labels.add(newLabels);
-        labels.retainAll(Collections.singletonList(newLabels));
+    Labels newLabels = Labels.of(labelNames, labelValues);
+    labels.add(newLabels);
+    labels.retainAll(Collections.singletonList(newLabels));
+  }
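// Usage sketch (illustrative only; names and label values are hypothetical): a target_info-style
// metric that keeps exactly one data point via the setLabelValues() method added above.
import io.prometheus.metrics.core.metrics.Info;

class TargetInfoExample {
  public static void main(String[] args) {
    Info targetInfo =
        Info.builder()
            .name("target_info") // the builder strips a trailing _info; it is appended again on exposition
            .help("Target metadata")
            .labelNames("service_name", "service_version")
            .register(); // registers with PrometheusRegistry.defaultRegistry

    targetInfo.setLabelValues("checkout", "1.2.3");
    // Calling it again replaces the previous data point instead of adding a second one:
    targetInfo.setLabelValues("checkout", "1.2.4");
  }
}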
+
+  /** Create an info data point with the given label values. */
+  public void addLabelValues(String... labelValues) {
+    if (labelValues.length != labelNames.length) {
+      throw new IllegalArgumentException(
+          getClass().getSimpleName()
+              + " "
+              + getMetadata().getName()
+              + " was created with "
+              + labelNames.length
+              + " label names, but you called addLabelValues() with "
+              + labelValues.length
+              + " label values.");
+    }
+    labels.add(Labels.of(labelNames, labelValues));
+  }
+
+  /** Remove the data point with the specified label values. */
+  public void remove(String... labelValues) {
+    if (labelValues.length != labelNames.length) {
+      throw new IllegalArgumentException(
+          getClass().getSimpleName()
+              + " "
+              + getMetadata().getName()
+              + " was created with "
+              + labelNames.length
+              + " label names, but you called remove() with "
+              + labelValues.length
+              + " label values.");
     }
+    Labels toBeRemoved = Labels.of(labelNames, labelValues);
+    labels.remove(toBeRemoved);
+  }
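// Usage sketch (illustrative only; label values are hypothetical): unlike setLabelValues(),
// addLabelValues() accumulates one data point per label set, and remove() (added above) drops
// one again.
import io.prometheus.metrics.core.metrics.Info;

class RuntimeInfoExample {
  public static void main(String[] args) {
    Info runtimeInfo =
        Info.builder()
            .name("java_runtime_info")
            .help("Java runtime metadata")
            .labelNames("vendor", "version")
            .build();

    runtimeInfo.addLabelValues("Eclipse Adoptium", "17.0.9"); // first data point
    runtimeInfo.addLabelValues("Eclipse Adoptium", "21.0.1"); // second data point
    runtimeInfo.remove("Eclipse Adoptium", "17.0.9");         // back to a single data point
  }
}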
+
+  /** {@inheritDoc} */
+  @Override
+  public InfoSnapshot collect() {
+    List data = new ArrayList<>(labels.size());
+    if (labelNames.length == 0) {
+      data.add(new InfoSnapshot.InfoDataPointSnapshot(constLabels));
+    } else {
+      for (Labels label : labels) {
+        data.add(new InfoSnapshot.InfoDataPointSnapshot(label.merge(constLabels)));
+      }
+    }
+    return new InfoSnapshot(getMetadata(), data);
+  }
 
-    /**
-     * Create an info data point with the given label values.
-     */
-    public void addLabelValues(String... labelValues) {
-        if (labelValues.length != labelNames.length) {
-            throw new IllegalArgumentException(getClass().getSimpleName() + " " + getMetadata().getName() + " was created with " + labelNames.length + " label names, but you called addLabelValues() with " + labelValues.length + " label values.");
-        }
-        labels.add(Labels.of(labelNames, labelValues));
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
+
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
+
+  public static class Builder extends MetricWithFixedMetadata.Builder {
+
+    private Builder(PrometheusProperties config) {
+      super(Collections.emptyList(), config);
     }
 
     /**
-     * Remove the data point with the specified label values.
+     * The {@code _info} suffix will automatically be appended if it's missing.
+     *
+     * 
{@code
+     * Info info1 = Info.builder()
+     *     .name("runtime_info")
+     *     .build();
+     * Info info2 = Info.builder()
+     *     .name("runtime")
+     *     .build();
+     * }
+ * + * In the example above both {@code info1} and {@code info2} will be named {@code + * "runtime_info"} in Prometheus. + * + *

Throws an {@link IllegalArgumentException} if {@link + * io.prometheus.metrics.model.snapshots.PrometheusNaming#isValidMetricName(String) + * MetricMetadata.isValidMetricName(name)} is {@code false}. */ - public void remove(String... labelValues) { - if (labelValues.length != labelNames.length) { - throw new IllegalArgumentException(getClass().getSimpleName() + " " + getMetadata().getName() + " was created with " + labelNames.length + " label names, but you called remove() with " + labelValues.length + " label values."); - } - Labels toBeRemoved = Labels.of(labelNames, labelValues); - labels.remove(toBeRemoved); + @Override + public Builder name(String name) { + return super.name(stripInfoSuffix(name)); } - /** - * {@inheritDoc} - */ + /** Throws an {@link UnsupportedOperationException} because Info metrics cannot have a unit. */ @Override - public InfoSnapshot collect() { - List data = new ArrayList<>(labels.size()); - if (labelNames.length == 0) { - data.add(new InfoSnapshot.InfoDataPointSnapshot(constLabels)); - } else { - for (Labels label : labels) { - data.add(new InfoSnapshot.InfoDataPointSnapshot(label.merge(constLabels))); - } - } - return new InfoSnapshot(getMetadata(), data); + public Builder unit(Unit unit) { + if (unit != null) { + throw new UnsupportedOperationException("Info metrics cannot have a unit."); + } + return this; } - public static Builder builder() { - return new Builder(PrometheusProperties.get()); + private static String stripInfoSuffix(String name) { + if (name != null && (name.endsWith("_info") || name.endsWith(".info"))) { + name = name.substring(0, name.length() - 5); + } + return name; } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + @Override + public Info build() { + return new Info(this); } - public static class Builder extends MetricWithFixedMetadata.Builder { - - private Builder(PrometheusProperties config) { - super(Collections.emptyList(), config); - } - - /** - * The {@code _info} suffix will automatically be appended if it's missing. - *

{@code
-         * Info info1 = Info.builder()
-         *     .name("runtime_info")
-         *     .build();
-         * Info info2 = Info.builder()
-         *     .name("runtime")
-         *     .build();
-         * }
- * In the example above both {@code info1} and {@code info2} will be named {@code "runtime_info"} in Prometheus. - *

- * Throws an {@link IllegalArgumentException} if - * {@link io.prometheus.metrics.model.snapshots.PrometheusNaming#isValidMetricName(String) MetricMetadata.isValidMetricName(name)} - * is {@code false}. - */ - @Override - public Builder name(String name) { - return super.name(stripInfoSuffix(name)); - } - - /** - * Throws an {@link UnsupportedOperationException} because Info metrics cannot have a unit. - */ - @Override - public Builder unit(Unit unit) { - if (unit != null) { - throw new UnsupportedOperationException("Info metrics cannot have a unit."); - } - return this; - } - - private static String stripInfoSuffix(String name) { - if (name != null && (name.endsWith("_info") || name.endsWith(".info"))) { - name = name.substring(0, name.length() - 5); - } - return name; - } - - @Override - public Info build() { - return new Info(this); - } - - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Metric.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Metric.java index b7db83208..d3c00eca0 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Metric.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Metric.java @@ -6,63 +6,61 @@ import io.prometheus.metrics.model.snapshots.Label; import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.MetricSnapshot; - import java.util.ArrayList; import java.util.List; -/** - * Common base class for all metrics. - */ +/** Common base class for all metrics. */ public abstract class Metric implements Collector { - protected final Labels constLabels; + protected final Labels constLabels; - protected Metric(Builder builder) { - this.constLabels = builder.constLabels; - } + protected Metric(Builder builder) { + this.constLabels = builder.constLabels; + } - @Override - public abstract MetricSnapshot collect(); + @Override + public abstract MetricSnapshot collect(); - protected static abstract class Builder, M extends Metric> { + protected abstract static class Builder, M extends Metric> { - protected final List illegalLabelNames; - protected final PrometheusProperties properties; - protected Labels constLabels = Labels.EMPTY; + protected final List illegalLabelNames; + protected final PrometheusProperties properties; + protected Labels constLabels = Labels.EMPTY; - protected Builder(List illegalLabelNames, PrometheusProperties properties) { - this.illegalLabelNames = new ArrayList<>(illegalLabelNames); - this.properties = properties; - } + protected Builder(List illegalLabelNames, PrometheusProperties properties) { + this.illegalLabelNames = new ArrayList<>(illegalLabelNames); + this.properties = properties; + } - // ConstLabels are only used rarely. In particular, do not use them to - // attach the same labels to all your metrics. Those use cases are - // better covered by target labels set by the scraping Prometheus - // server, or by one specific metric (e.g. a build_info or a - // machine_role metric). 
See also - // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels - public B constLabels(Labels constLabels) { - for (Label label : constLabels) { // NPE if constLabels is null - if (illegalLabelNames.contains(label.getName())) { - throw new IllegalArgumentException(label.getName() + ": illegal label name for this metric type"); - } - } - this.constLabels = constLabels; - return self(); + // ConstLabels are only used rarely. In particular, do not use them to + // attach the same labels to all your metrics. Those use cases are + // better covered by target labels set by the scraping Prometheus + // server, or by one specific metric (e.g. a build_info or a + // machine_role metric). See also + // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels + public B constLabels(Labels constLabels) { + for (Label label : constLabels) { // NPE if constLabels is null + if (illegalLabelNames.contains(label.getName())) { + throw new IllegalArgumentException( + label.getName() + ": illegal label name for this metric type"); } + } + this.constLabels = constLabels; + return self(); + } - public M register() { - return register(PrometheusRegistry.defaultRegistry); - } + public M register() { + return register(PrometheusRegistry.defaultRegistry); + } - public M register(PrometheusRegistry registry) { - M metric = build(); - registry.register(metric); - return metric; - } + public M register(PrometheusRegistry registry) { + M metric = build(); + registry.register(metric); + return metric; + } - public abstract M build(); + public abstract M build(); - protected abstract B self(); - } + protected abstract B self(); + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/MetricWithFixedMetadata.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/MetricWithFixedMetadata.java index 5b96ea303..c121d0c56 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/MetricWithFixedMetadata.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/MetricWithFixedMetadata.java @@ -5,103 +5,105 @@ import io.prometheus.metrics.model.snapshots.MetricMetadata; import io.prometheus.metrics.model.snapshots.PrometheusNaming; import io.prometheus.metrics.model.snapshots.Unit; - import java.util.Arrays; import java.util.List; /** * Almost all metrics have fixed metadata, i.e. the metric name is known when the metric is created. - *

- * An exception would be a metric that is a bridge to a 3rd party metric library, where the metric name - * has to be retrieved from the 3rd party metric library at scrape time. + * + *

An exception would be a metric that is a bridge to a 3rd party metric library, where the + * metric name has to be retrieved from the 3rd party metric library at scrape time. */ public abstract class MetricWithFixedMetadata extends Metric { - private final MetricMetadata metadata; - protected final String[] labelNames; - - protected MetricWithFixedMetadata(Builder builder) { - super(builder); - this.metadata = new MetricMetadata(makeName(builder.name, builder.unit), builder.help, builder.unit); - this.labelNames = Arrays.copyOf(builder.labelNames, builder.labelNames.length); + private final MetricMetadata metadata; + protected final String[] labelNames; + + protected MetricWithFixedMetadata(Builder builder) { + super(builder); + this.metadata = + new MetricMetadata(makeName(builder.name, builder.unit), builder.help, builder.unit); + this.labelNames = Arrays.copyOf(builder.labelNames, builder.labelNames.length); + } + + protected MetricMetadata getMetadata() { + return metadata; + } + + private String makeName(String name, Unit unit) { + if (unit != null) { + if (!name.endsWith("_" + unit) && !name.endsWith("." + unit)) { + name += "_" + unit; + } } + return name; + } - protected MetricMetadata getMetadata() { - return metadata; - } + @Override + public String getPrometheusName() { + return metadata.getPrometheusName(); + } - private String makeName(String name, Unit unit) { - if (unit != null) { - if (!name.endsWith("_" + unit) && !name.endsWith("." + unit)) { - name += "_" + unit; - } - } - return name; - } + public abstract static class Builder, M extends MetricWithFixedMetadata> + extends Metric.Builder { - @Override - public String getPrometheusName() { - return metadata.getPrometheusName(); - } + protected String name; + private Unit unit; + private String help; + private String[] labelNames = new String[0]; - public static abstract class Builder, M extends MetricWithFixedMetadata> extends Metric.Builder { + protected Builder(List illegalLabelNames, PrometheusProperties properties) { + super(illegalLabelNames, properties); + } - protected String name; - private Unit unit; - private String help; - private String[] labelNames = new String[0]; + public B name(String name) { + if (!PrometheusNaming.isValidMetricName(name)) { + throw new IllegalArgumentException("'" + name + "': Illegal metric name."); + } + this.name = name; + return self(); + } - protected Builder(List illegalLabelNames, PrometheusProperties properties) { - super(illegalLabelNames, properties); - } + public B unit(Unit unit) { + this.unit = unit; + return self(); + } - public B name(String name) { - if (!PrometheusNaming.isValidMetricName(name)) { - throw new IllegalArgumentException("'" + name + "': Illegal metric name."); - } - this.name = name; - return self(); - } + public B help(String help) { + this.help = help; + return self(); + } - public B unit(Unit unit) { - this.unit = unit; - return self(); + public B labelNames(String... labelNames) { + for (String labelName : labelNames) { + if (!PrometheusNaming.isValidLabelName(labelName)) { + throw new IllegalArgumentException(labelName + ": illegal label name"); } - - public B help(String help) { - this.help = help; - return self(); + if (illegalLabelNames.contains(labelName)) { + throw new IllegalArgumentException( + labelName + ": illegal label name for this metric type"); } - - public B labelNames(String... 
labelNames) { - for (String labelName : labelNames) { - if (!PrometheusNaming.isValidLabelName(labelName)) { - throw new IllegalArgumentException(labelName + ": illegal label name"); - } - if (illegalLabelNames.contains(labelName)) { - throw new IllegalArgumentException(labelName + ": illegal label name for this metric type"); - } - if (constLabels.contains(labelName)) { - throw new IllegalArgumentException(labelName + ": duplicate label name"); - } - } - this.labelNames = labelNames; - return self(); + if (constLabels.contains(labelName)) { + throw new IllegalArgumentException(labelName + ": duplicate label name"); } + } + this.labelNames = labelNames; + return self(); + } - public B constLabels(Labels constLabels) { - for (String labelName : labelNames) { - if (constLabels.contains(labelName)) { // Labels.contains() treats dots like underscores - throw new IllegalArgumentException(labelName + ": duplicate label name"); - } - } - return super.constLabels(constLabels); + public B constLabels(Labels constLabels) { + for (String labelName : labelNames) { + if (constLabels.contains(labelName)) { // Labels.contains() treats dots like underscores + throw new IllegalArgumentException(labelName + ": duplicate label name"); } + } + return super.constLabels(constLabels); + } - @Override - public abstract M build(); + @Override + public abstract M build(); - @Override - protected abstract B self(); - } + @Override + protected abstract B self(); + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SlidingWindow.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SlidingWindow.java index e4e86acdd..93fa20d0e 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SlidingWindow.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SlidingWindow.java @@ -8,72 +8,77 @@ /** * Maintains a ring buffer of T to implement a sliding time window. - *

- * This is used to maintain a sliding window of {@link CKMSQuantiles} for {@link Summary} metrics. - *

- * It is implemented in a generic way so that 3rd party libraries can use it for implementing sliding windows. - *

- * TODO: The current implementation is {@code synchronized}. There is likely room for optimization. + * + *

This is used to maintain a sliding window of {@link CKMSQuantiles} for {@link Summary} + * metrics. + * + *

It is implemented in a generic way so that 3rd party libraries can use it for implementing + * sliding windows. + * + *
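// Usage sketch (illustrative only): the generic hook-in described above, using a JDK DoubleAdder
// as the window element; the 60 seconds / 3 age buckets values mirror the constructor javadoc
// further down in this file.
import io.prometheus.metrics.core.metrics.SlidingWindow;
import java.util.concurrent.atomic.DoubleAdder;

class SlidingWindowExample {
  public static void main(String[] args) {
    SlidingWindow<DoubleAdder> window =
        new SlidingWindow<>(DoubleAdder.class, DoubleAdder::new, DoubleAdder::add, 60, 3);
    window.observe(1.5);                 // recorded in every age bucket
    double sum = window.current().sum(); // read from the currently active bucket
    System.out.println("current sum: " + sum);
  }
}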

TODO: The current implementation is {@code synchronized}. There is likely room for + * optimization. */ public class SlidingWindow { - private final Supplier constructor; - private final ObjDoubleConsumer observeFunction; - private final T[] ringBuffer; - private int currentBucket; - private long lastRotateTimestampMillis; - private final long durationBetweenRotatesMillis; - LongSupplier currentTimeMillis = System::currentTimeMillis; // to be replaced in unit tests + private final Supplier constructor; + private final ObjDoubleConsumer observeFunction; + private final T[] ringBuffer; + private int currentBucket; + private long lastRotateTimestampMillis; + private final long durationBetweenRotatesMillis; + LongSupplier currentTimeMillis = System::currentTimeMillis; // to be replaced in unit tests - /** - * Example: If the {@code maxAgeSeconds} is 60 and {@code ageBuckets} is 3, then 3 instances of {@code T} - * are maintained and the sliding window moves to the next instance of T every 20 seconds. - * - * @param clazz type of T - * @param constructor for creating a new instance of T as the old one gets evicted - * @param observeFunction for observing a value (e.g. calling {@code t.observe(value)} - * @param maxAgeSeconds after this amount of time an instance of T gets evicted. - * @param ageBuckets number of age buckets. - */ - public SlidingWindow(Class clazz, Supplier constructor, ObjDoubleConsumer observeFunction, long maxAgeSeconds, int ageBuckets) { - this.constructor = constructor; - this.observeFunction = observeFunction; - this.ringBuffer = (T[]) Array.newInstance(clazz, ageBuckets); - for (int i = 0; i < ringBuffer.length; i++) { - this.ringBuffer[i] = constructor.get(); - } - this.currentBucket = 0; - this.lastRotateTimestampMillis = currentTimeMillis.getAsLong(); - this.durationBetweenRotatesMillis = TimeUnit.SECONDS.toMillis(maxAgeSeconds) / ageBuckets; + /** + * Example: If the {@code maxAgeSeconds} is 60 and {@code ageBuckets} is 3, then 3 instances of + * {@code T} are maintained and the sliding window moves to the next instance of T every 20 + * seconds. + * + * @param clazz type of T + * @param constructor for creating a new instance of T as the old one gets evicted + * @param observeFunction for observing a value (e.g. calling {@code t.observe(value)} + * @param maxAgeSeconds after this amount of time an instance of T gets evicted. + * @param ageBuckets number of age buckets. + */ + public SlidingWindow( + Class clazz, + Supplier constructor, + ObjDoubleConsumer observeFunction, + long maxAgeSeconds, + int ageBuckets) { + this.constructor = constructor; + this.observeFunction = observeFunction; + this.ringBuffer = (T[]) Array.newInstance(clazz, ageBuckets); + for (int i = 0; i < ringBuffer.length; i++) { + this.ringBuffer[i] = constructor.get(); } + this.currentBucket = 0; + this.lastRotateTimestampMillis = currentTimeMillis.getAsLong(); + this.durationBetweenRotatesMillis = TimeUnit.SECONDS.toMillis(maxAgeSeconds) / ageBuckets; + } - /** - * Get the currently active instance of {@code T}. - */ - public synchronized T current() { - return rotate(); - } + /** Get the currently active instance of {@code T}. */ + public synchronized T current() { + return rotate(); + } - /** - * Observe a value. - */ - public synchronized void observe(double value) { - rotate(); - for (T t : ringBuffer) { - observeFunction.accept(t, value); - } + /** Observe a value. 
*/ + public synchronized void observe(double value) { + rotate(); + for (T t : ringBuffer) { + observeFunction.accept(t, value); } + } - private T rotate() { - long timeSinceLastRotateMillis = currentTimeMillis.getAsLong() - lastRotateTimestampMillis; - while (timeSinceLastRotateMillis > durationBetweenRotatesMillis) { - ringBuffer[currentBucket] = constructor.get(); - if (++currentBucket >= ringBuffer.length) { - currentBucket = 0; - } - timeSinceLastRotateMillis -= durationBetweenRotatesMillis; - lastRotateTimestampMillis += durationBetweenRotatesMillis; - } - return ringBuffer[currentBucket]; + private T rotate() { + long timeSinceLastRotateMillis = currentTimeMillis.getAsLong() - lastRotateTimestampMillis; + while (timeSinceLastRotateMillis > durationBetweenRotatesMillis) { + ringBuffer[currentBucket] = constructor.get(); + if (++currentBucket >= ringBuffer.length) { + currentBucket = 0; + } + timeSinceLastRotateMillis -= durationBetweenRotatesMillis; + lastRotateTimestampMillis += durationBetweenRotatesMillis; } + return ringBuffer[currentBucket]; + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StateSet.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StateSet.java index 532cfa506..e4d9eff88 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StateSet.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StateSet.java @@ -1,20 +1,20 @@ package io.prometheus.metrics.core.metrics; +import static io.prometheus.metrics.model.snapshots.PrometheusNaming.prometheusName; + import io.prometheus.metrics.config.MetricsProperties; import io.prometheus.metrics.config.PrometheusProperties; +import io.prometheus.metrics.core.datapoints.StateSetDataPoint; import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.StateSetSnapshot; -import io.prometheus.metrics.core.datapoints.StateSetDataPoint; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.stream.Stream; -import static io.prometheus.metrics.model.snapshots.PrometheusNaming.prometheusName; - /** * StateSet metric. Example: + * *

{@code
  * public enum Feature {
  *
@@ -46,151 +46,141 @@
  *     stateSet.labelValues("dev").setTrue(FEATURE_2);
  * }
  * }
- * The example above shows how to use a StateSet with an enum. - * You don't have to use enum, you can use regular strings as well. + * + * The example above shows how to use a StateSet with an enum. You don't have to use enum, you can + * use regular strings as well. */ -public class StateSet extends StatefulMetric implements StateSetDataPoint { - - private final boolean exemplarsEnabled; - private final String[] names; - - private StateSet(Builder builder, PrometheusProperties prometheusProperties) { - super(builder); - MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); - exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); - this.names = builder.names; // builder.names is already a validated copy - for (String name : names) { - if (this.getMetadata().getPrometheusName().equals(prometheusName(name))) { - throw new IllegalArgumentException("Label name " + name + " is illegal (can't use the metric name as label name in state set metrics)"); - } - } +public class StateSet extends StatefulMetric + implements StateSetDataPoint { + + private final boolean exemplarsEnabled; + private final String[] names; + + private StateSet(Builder builder, PrometheusProperties prometheusProperties) { + super(builder); + MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); + exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); + this.names = builder.names; // builder.names is already a validated copy + for (String name : names) { + if (this.getMetadata().getPrometheusName().equals(prometheusName(name))) { + throw new IllegalArgumentException( + "Label name " + + name + + " is illegal (can't use the metric name as label name in state set metrics)"); + } } - - /** - * {@inheritDoc} - */ - @Override - public StateSetSnapshot collect() { - return (StateSetSnapshot) super.collect(); + } + + /** {@inheritDoc} */ + @Override + public StateSetSnapshot collect() { + return (StateSetSnapshot) super.collect(); + } + + /** {@inheritDoc} */ + @Override + public void setTrue(String state) { + getNoLabels().setTrue(state); + } + + /** {@inheritDoc} */ + @Override + public void setFalse(String state) { + getNoLabels().setFalse(state); + } + + @Override + protected StateSetSnapshot collect(List labels, List metricDataList) { + List data = new ArrayList<>(labels.size()); + for (int i = 0; i < labels.size(); i++) { + data.add( + new StateSetSnapshot.StateSetDataPointSnapshot( + names, metricDataList.get(i).values, labels.get(i))); } + return new StateSetSnapshot(getMetadata(), data); + } + + @Override + protected DataPoint newDataPoint() { + return new DataPoint(); + } + + @Override + protected boolean isExemplarsEnabled() { + return exemplarsEnabled; + } + + class DataPoint implements StateSetDataPoint { + + private final boolean[] values = new boolean[names.length]; - /** - * {@inheritDoc} - */ + private DataPoint() {} + + /** {@inheritDoc} */ @Override public void setTrue(String state) { - getNoLabels().setTrue(state); + set(state, true); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public void setFalse(String state) { - getNoLabels().setFalse(state); + set(state, false); } - @Override - protected StateSetSnapshot collect(List labels, List metricDataList) { - List data = new ArrayList<>(labels.size()); - for (int i = 0; i < labels.size(); i++) { - data.add(new StateSetSnapshot.StateSetDataPointSnapshot(names, metricDataList.get(i).values, labels.get(i))); + private 
void set(String name, boolean value) { + for (int i = 0; i < names.length; i++) { + if (names[i].equals(name)) { + values[i] = value; + return; } - return new StateSetSnapshot(getMetadata(), data); - } - - @Override - protected DataPoint newDataPoint() { - return new DataPoint(); + } + throw new IllegalArgumentException(name + ": unknown state"); } + } - @Override - protected boolean isExemplarsEnabled() { - return exemplarsEnabled; - } - - class DataPoint implements StateSetDataPoint { + public static Builder builder() { + return new Builder(PrometheusProperties.get()); + } - private final boolean[] values = new boolean[names.length]; + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } - private DataPoint() { - } + public static class Builder extends StatefulMetric.Builder { - /** - * {@inheritDoc} - */ - @Override - public void setTrue(String state) { - set(state, true); - } + private String[] names; - /** - * {@inheritDoc} - */ - @Override - public void setFalse(String state) { - set(state, false); - } - - private void set(String name, boolean value) { - for (int i = 0; i < names.length; i++) { - if (names[i].equals(name)) { - values[i] = value; - return; - } - } - throw new IllegalArgumentException(name + ": unknown state"); - } + private Builder(PrometheusProperties config) { + super(Collections.emptyList(), config); } - public static Builder builder() { - return new Builder(PrometheusProperties.get()); + /** Declare the states that should be represented by this StateSet. */ + public Builder states(Class> enumClass) { + return states( + Stream.of(enumClass.getEnumConstants()).map(Enum::toString).toArray(String[]::new)); } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + /** Declare the states that should be represented by this StateSet. */ + public Builder states(String... stateNames) { + if (stateNames.length == 0) { + throw new IllegalArgumentException("states cannot be empty"); + } + this.names = Stream.of(stateNames).distinct().sorted().toArray(String[]::new); + return this; } - public static class Builder extends StatefulMetric.Builder { - - private String[] names; - - private Builder(PrometheusProperties config) { - super(Collections.emptyList(), config); - } - - /** - * Declare the states that should be represented by this StateSet. - */ - public Builder states(Class> enumClass) { - return states(Stream.of(enumClass.getEnumConstants()).map(Enum::toString).toArray(String[]::new)); - } - - /** - * Declare the states that should be represented by this StateSet. - */ - public Builder states(String... 
stateNames) { - if (stateNames.length == 0) { - throw new IllegalArgumentException("states cannot be empty"); - } - this.names = Stream.of(stateNames) - .distinct() - .sorted() - .toArray(String[]::new); - return this; - } - - @Override - public StateSet build() { - if (names == null) { - throw new IllegalStateException("State names are required when building a StateSet."); - } - return new StateSet(this, properties); - } + @Override + public StateSet build() { + if (names == null) { + throw new IllegalStateException("State names are required when building a StateSet."); + } + return new StateSet(this, properties); + } - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StatefulMetric.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StatefulMetric.java index 44f6cc57a..188a0c15f 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StatefulMetric.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/StatefulMetric.java @@ -1,11 +1,13 @@ package io.prometheus.metrics.core.metrics; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; + import io.prometheus.metrics.config.MetricsProperties; import io.prometheus.metrics.config.PrometheusProperties; +import io.prometheus.metrics.core.datapoints.DataPoint; import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.MetricSnapshot; -import io.prometheus.metrics.core.datapoints.DataPoint; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -13,191 +15,191 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; -import static java.lang.Boolean.FALSE; -import static java.lang.Boolean.TRUE; - /** * There are two kinds of metrics: + * *
    - *
  • A {@code StatefulMetric} actively maintains its current values, e.g. a stateful counter actively stores its current count.
  • - *
  • A {@code CallbackMetric} gets its values on demand when it is collected, e.g. a callback gauge representing the current heap size.
  • + *
  • A {@code StatefulMetric} actively maintains its current values, e.g. a stateful counter + * actively stores its current count. + *
  • A {@code CallbackMetric} gets its values on demand when it is collected, e.g. a callback + * gauge representing the current heap size. *
- * The OpenTelemetry terminology for stateful is synchronous and the OpenTelemetry terminology for callback is asynchronous. - * We are using our own terminology here because in Java synchronous and asynchronous usually refers to multi-threading, - * but this has nothing to do with multi-threading. + * + * The OpenTelemetry terminology for stateful is synchronous and the OpenTelemetry + * terminology for callback is asynchronous. We are using our own terminology here + * because in Java synchronous and asynchronous usually refers to multi-threading, but + * this has nothing to do with multi-threading. */ abstract class StatefulMetric extends MetricWithFixedMetadata { - /** - * Map label values to data points. - */ - private final ConcurrentHashMap, T> data = new ConcurrentHashMap<>(); + /** Map label values to data points. */ + private final ConcurrentHashMap, T> data = new ConcurrentHashMap<>(); - /** - * Shortcut for data.get(Collections.emptyList()) - */ - private volatile T noLabels; + /** Shortcut for data.get(Collections.emptyList()) */ + private volatile T noLabels; - protected StatefulMetric(Builder builder) { - super(builder); - } + protected StatefulMetric(Builder builder) { + super(builder); + } - /** - * labels and metricData have the same size. labels.get(i) are the labels for metricData.get(i). - */ - protected abstract MetricSnapshot collect(List labels, List metricData); - - public MetricSnapshot collect() { - if (labelNames.length == 0 && data.size() == 0) { - // This is a metric without labels that has not been used yet. Initialize the data on the fly. - labelValues(); - } - List labels = new ArrayList<>(data.size()); - List metricData = new ArrayList<>(data.size()); - for (Map.Entry, T> entry : data.entrySet()) { - String[] labelValues = entry.getKey().toArray(new String[labelNames.length]); - labels.add(constLabels.merge(labelNames, labelValues)); - metricData.add(entry.getValue()); - } - return collect(labels, metricData); - } + /** + * labels and metricData have the same size. labels.get(i) are the labels for metricData.get(i). + */ + protected abstract MetricSnapshot collect(List labels, List metricData); - /** - * Initialize label values. - *

- * Example: Imagine you have a counter for payments as follows - *

-     * payment_transactions_total{payment_type="credit card"} 7.0
-     * payment_transactions_total{payment_type="paypal"} 3.0
-     * 
- * Now, the data points for the {@code payment_type} label values get initialized when they are - * first used, i.e. the first time you call - *
{@code
-     * counter.labelValues("paypal").inc();
-     * }
- * the data point with label {@code payment_type="paypal"} will go from non-existent to having value {@code 1.0}. - *

- * In some cases this is confusing, and you want to have data points initialized on application start - * with an initial value of {@code 0.0}: - *

-     * payment_transactions_total{payment_type="credit card"} 0.0
-     * payment_transactions_total{payment_type="paypal"} 0.0
-     * 
- * {@code initLabelValues(...)} can be used to initialize label value, so that the data points - * show up in the exposition format with an initial value of zero. - */ - public void initLabelValues(String... labelValues) { - labelValues(labelValues); + public MetricSnapshot collect() { + if (labelNames.length == 0 && data.size() == 0) { + // This is a metric without labels that has not been used yet. Initialize the data on the fly. + labelValues(); } - - public D labelValues(String... labelValues) { - if (labelValues.length != labelNames.length) { - if (labelValues.length == 0) { - throw new IllegalArgumentException(getClass().getSimpleName() + " " + getMetadata().getName() + " was created with label names, so you must call labelValues(...) when using it."); - } else { - throw new IllegalArgumentException("Expected " + labelNames.length + " label values, but got " + labelValues.length + "."); - } - } - return data.computeIfAbsent(Arrays.asList(labelValues), l -> newDataPoint()); + List labels = new ArrayList<>(data.size()); + List metricData = new ArrayList<>(data.size()); + for (Map.Entry, T> entry : data.entrySet()) { + String[] labelValues = entry.getKey().toArray(new String[labelNames.length]); + labels.add(constLabels.merge(labelNames, labelValues)); + metricData.add(entry.getValue()); } - - /** - * Remove the data point with the given label values. - * See https://prometheus.io/docs/instrumenting/writing_clientlibs/#labels. - */ - public void remove(String... labelValues) { - data.remove(Arrays.asList(labelValues)); + return collect(labels, metricData); + } + + /** + * Initialize label values. + * + *

Example: Imagine you have a counter for payments as follows + * + *

+   * payment_transactions_total{payment_type="credit card"} 7.0
+   * payment_transactions_total{payment_type="paypal"} 3.0
+   * 
+ * + * Now, the data points for the {@code payment_type} label values get initialized when they are + * first used, i.e. the first time you call + * + *
{@code
+   * counter.labelValues("paypal").inc();
+   * }
+ * + * the data point with label {@code payment_type="paypal"} will go from non-existent to having + * value {@code 1.0}. + * + *

In some cases this is confusing, and you want to have data points initialized on application + * start with an initial value of {@code 0.0}: + * + *

+   * payment_transactions_total{payment_type="credit card"} 0.0
+   * payment_transactions_total{payment_type="paypal"} 0.0
+   * 
+ * + * {@code initLabelValues(...)} can be used to initialize label value, so that the data points + * show up in the exposition format with an initial value of zero. + */ + public void initLabelValues(String... labelValues) { + labelValues(labelValues); + } + + public D labelValues(String... labelValues) { + if (labelValues.length != labelNames.length) { + if (labelValues.length == 0) { + throw new IllegalArgumentException( + getClass().getSimpleName() + + " " + + getMetadata().getName() + + " was created with label names, so you must call labelValues(...) when using it."); + } else { + throw new IllegalArgumentException( + "Expected " + labelNames.length + " label values, but got " + labelValues.length + "."); + } } - - /** - * Reset the metric (remove all data points). - */ - public void clear() { - data.clear(); - noLabels = null; + return data.computeIfAbsent(Arrays.asList(labelValues), l -> newDataPoint()); + } + + /** + * Remove the data point with the given label values. See https://prometheus.io/docs/instrumenting/writing_clientlibs/#labels. + */ + public void remove(String... labelValues) { + data.remove(Arrays.asList(labelValues)); + } + + /** Reset the metric (remove all data points). */ + public void clear() { + data.clear(); + noLabels = null; + } + + protected abstract T newDataPoint(); + + protected T getNoLabels() { + if (noLabels == null) { + // Note that this will throw an IllegalArgumentException if labelNames is not empty. + noLabels = (T) labelValues(); + } + return noLabels; + } + + protected MetricsProperties[] getMetricProperties( + Builder builder, PrometheusProperties prometheusProperties) { + String metricName = getMetadata().getName(); + if (prometheusProperties.getMetricProperties(metricName) != null) { + return new MetricsProperties[] { + prometheusProperties.getMetricProperties(metricName), // highest precedence + builder.toProperties(), // second-highest precedence + prometheusProperties.getDefaultMetricProperties(), // third-highest precedence + builder.getDefaultProperties() // fallback + }; + } else { + return new MetricsProperties[] { + builder.toProperties(), // highest precedence + prometheusProperties.getDefaultMetricProperties(), // second-highest precedence + builder.getDefaultProperties() // fallback + }; } + } + + protected T getConfigProperty( + MetricsProperties[] properties, Function getter) { + T result; + for (MetricsProperties props : properties) { + result = getter.apply(props); + if (result != null) { + return result; + } + } + throw new IllegalStateException( + "Missing default config. This is a bug in the Prometheus metrics core library."); + } + + protected abstract boolean isExemplarsEnabled(); + + abstract static class Builder, M extends StatefulMetric> + extends MetricWithFixedMetadata.Builder { - protected abstract T newDataPoint(); + protected Boolean exemplarsEnabled; + + protected Builder(List illegalLabelNames, PrometheusProperties config) { + super(illegalLabelNames, config); + } - protected T getNoLabels() { - if (noLabels == null) { - // Note that this will throw an IllegalArgumentException if labelNames is not empty. - noLabels = (T) labelValues(); - } - return noLabels; + /** Allow Exemplars for this metric. 
*/ + public B withExemplars() { + this.exemplarsEnabled = TRUE; + return self(); } - protected MetricsProperties[] getMetricProperties(Builder builder, PrometheusProperties prometheusProperties) { - String metricName = getMetadata().getName(); - if (prometheusProperties.getMetricProperties(metricName) != null) { - return new MetricsProperties[]{ - prometheusProperties.getMetricProperties(metricName), // highest precedence - builder.toProperties(), // second-highest precedence - prometheusProperties.getDefaultMetricProperties(), // third-highest precedence - builder.getDefaultProperties() // fallback - }; - } else { - return new MetricsProperties[]{ - builder.toProperties(), // highest precedence - prometheusProperties.getDefaultMetricProperties(), // second-highest precedence - builder.getDefaultProperties() // fallback - }; - } + /** Turn off Exemplars for this metric. */ + public B withoutExemplars() { + this.exemplarsEnabled = FALSE; + return self(); } - protected T getConfigProperty(MetricsProperties[] properties, Function getter) { - T result; - for (MetricsProperties props : properties) { - result = getter.apply(props); - if (result != null) { - return result; - } - } - throw new IllegalStateException("Missing default config. This is a bug in the Prometheus metrics core library."); + /** Override if there are more properties than just exemplars enabled. */ + protected MetricsProperties toProperties() { + return MetricsProperties.builder().exemplarsEnabled(exemplarsEnabled).build(); } - protected abstract boolean isExemplarsEnabled(); - - static abstract class Builder, M extends StatefulMetric> extends MetricWithFixedMetadata.Builder { - - protected Boolean exemplarsEnabled; - - protected Builder(List illegalLabelNames, PrometheusProperties config) { - super(illegalLabelNames, config); - } - - /** - * Allow Exemplars for this metric. - */ - public B withExemplars() { - this.exemplarsEnabled = TRUE; - return self(); - } - - /** - * Turn off Exemplars for this metric. - */ - public B withoutExemplars() { - this.exemplarsEnabled = FALSE; - return self(); - } - - /** - * Override if there are more properties than just exemplars enabled. - */ - protected MetricsProperties toProperties() { - return MetricsProperties.builder() - .exemplarsEnabled(exemplarsEnabled) - .build(); - } - - /** - * Override if there are more properties than just exemplars enabled. - */ - public MetricsProperties getDefaultProperties() { - return MetricsProperties.builder() - .exemplarsEnabled(true) - .build(); - } + /** Override if there are more properties than just exemplars enabled. 
*/ + public MetricsProperties getDefaultProperties() { + return MetricsProperties.builder().exemplarsEnabled(true).build(); } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Summary.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Summary.java index 090922686..126626799 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Summary.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/Summary.java @@ -2,6 +2,7 @@ import io.prometheus.metrics.config.MetricsProperties; import io.prometheus.metrics.config.PrometheusProperties; +import io.prometheus.metrics.core.datapoints.DistributionDataPoint; import io.prometheus.metrics.core.exemplars.ExemplarSampler; import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfig; import io.prometheus.metrics.model.snapshots.Exemplars; @@ -9,8 +10,6 @@ import io.prometheus.metrics.model.snapshots.Quantile; import io.prometheus.metrics.model.snapshots.Quantiles; import io.prometheus.metrics.model.snapshots.SummarySnapshot; -import io.prometheus.metrics.core.datapoints.DistributionDataPoint; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -20,6 +19,7 @@ /** * Summary metric. Example: + * *
{@code
  * Summary summary = Summary.builder()
  *         .name("http_request_duration_seconds_hi")
@@ -35,323 +35,330 @@
  * // process a request, duration will be observed
  * summary.labelValues("GET", "/", "200").observe(Unit.nanosToSeconds(System.nanoTime() - start));
  * }
+ * * See {@link Summary.Builder} for configuration options. */ -public class Summary extends StatefulMetric implements DistributionDataPoint { - - private final List quantiles; // May be empty, but cannot be null. - private final long maxAgeSeconds; - private final int ageBuckets; - private final boolean exemplarsEnabled; - private final ExemplarSamplerConfig exemplarSamplerConfig; - - private Summary(Builder builder, PrometheusProperties prometheusProperties) { - super(builder); - MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); - this.exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); - this.quantiles = Collections.unmodifiableList(makeQuantiles(properties)); - this.maxAgeSeconds = getConfigProperty(properties, MetricsProperties::getSummaryMaxAgeSeconds); - this.ageBuckets = getConfigProperty(properties, MetricsProperties::getSummaryNumberOfAgeBuckets); - this.exemplarSamplerConfig = new ExemplarSamplerConfig(prometheusProperties.getExemplarProperties(), 4); - } - - private List makeQuantiles(MetricsProperties[] properties) { - List result = new ArrayList<>(); - List quantiles = getConfigProperty(properties, MetricsProperties::getSummaryQuantiles); - List quantileErrors = getConfigProperty(properties, MetricsProperties::getSummaryQuantileErrors); - if (quantiles != null) { - for (int i = 0; i < quantiles.size(); i++) { - if (quantileErrors.size() > 0) { - result.add(new CKMSQuantiles.Quantile(quantiles.get(i), quantileErrors.get(i))); - } else { - result.add(new CKMSQuantiles.Quantile(quantiles.get(i), Builder.defaultError(quantiles.get(i)))); - } - } +public class Summary extends StatefulMetric + implements DistributionDataPoint { + + private final List quantiles; // May be empty, but cannot be null. 
+ private final long maxAgeSeconds; + private final int ageBuckets; + private final boolean exemplarsEnabled; + private final ExemplarSamplerConfig exemplarSamplerConfig; + + private Summary(Builder builder, PrometheusProperties prometheusProperties) { + super(builder); + MetricsProperties[] properties = getMetricProperties(builder, prometheusProperties); + this.exemplarsEnabled = getConfigProperty(properties, MetricsProperties::getExemplarsEnabled); + this.quantiles = Collections.unmodifiableList(makeQuantiles(properties)); + this.maxAgeSeconds = getConfigProperty(properties, MetricsProperties::getSummaryMaxAgeSeconds); + this.ageBuckets = + getConfigProperty(properties, MetricsProperties::getSummaryNumberOfAgeBuckets); + this.exemplarSamplerConfig = + new ExemplarSamplerConfig(prometheusProperties.getExemplarProperties(), 4); + } + + private List makeQuantiles(MetricsProperties[] properties) { + List result = new ArrayList<>(); + List quantiles = getConfigProperty(properties, MetricsProperties::getSummaryQuantiles); + List quantileErrors = + getConfigProperty(properties, MetricsProperties::getSummaryQuantileErrors); + if (quantiles != null) { + for (int i = 0; i < quantiles.size(); i++) { + if (quantileErrors.size() > 0) { + result.add(new CKMSQuantiles.Quantile(quantiles.get(i), quantileErrors.get(i))); + } else { + result.add( + new CKMSQuantiles.Quantile(quantiles.get(i), Builder.defaultError(quantiles.get(i)))); } - return result; + } } - - @Override - protected boolean isExemplarsEnabled() { - return exemplarsEnabled; + return result; + } + + @Override + protected boolean isExemplarsEnabled() { + return exemplarsEnabled; + } + + /** {@inheritDoc} */ + @Override + public void observe(double amount) { + getNoLabels().observe(amount); + } + + /** {@inheritDoc} */ + @Override + public void observeWithExemplar(double amount, Labels labels) { + getNoLabels().observeWithExemplar(amount, labels); + } + + /** {@inheritDoc} */ + @Override + public SummarySnapshot collect() { + return (SummarySnapshot) super.collect(); + } + + @Override + protected SummarySnapshot collect(List labels, List metricData) { + List data = new ArrayList<>(labels.size()); + for (int i = 0; i < labels.size(); i++) { + data.add(metricData.get(i).collect(labels.get(i))); + } + return new SummarySnapshot(getMetadata(), data); + } + + @Override + protected DataPoint newDataPoint() { + return new DataPoint(); + } + + public class DataPoint implements DistributionDataPoint { + + private final LongAdder count = new LongAdder(); + private final DoubleAdder sum = new DoubleAdder(); + private final SlidingWindow quantileValues; + private final Buffer buffer = new Buffer(); + private final ExemplarSampler exemplarSampler; + + private final long createdTimeMillis = System.currentTimeMillis(); + + private DataPoint() { + if (quantiles.size() > 0) { + CKMSQuantiles.Quantile[] quantilesArray = quantiles.toArray(new CKMSQuantiles.Quantile[0]); + quantileValues = + new SlidingWindow<>( + CKMSQuantiles.class, + () -> new CKMSQuantiles(quantilesArray), + CKMSQuantiles::insert, + maxAgeSeconds, + ageBuckets); + } else { + quantileValues = null; + } + if (exemplarsEnabled) { + exemplarSampler = new ExemplarSampler(exemplarSamplerConfig); + } else { + exemplarSampler = null; + } } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override - public void observe(double amount) { - getNoLabels().observe(amount); + public void observe(double value) { + if (Double.isNaN(value)) { + return; + } + if (!buffer.append(value)) { + 
doObserve(value); + } + if (isExemplarsEnabled()) { + exemplarSampler.observe(value); + } } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override - public void observeWithExemplar(double amount, Labels labels) { - getNoLabels().observeWithExemplar(amount, labels); + public void observeWithExemplar(double value, Labels labels) { + if (Double.isNaN(value)) { + return; + } + if (!buffer.append(value)) { + doObserve(value); + } + if (isExemplarsEnabled()) { + exemplarSampler.observeWithExemplar(value, labels); + } } - /** - * {@inheritDoc} - */ - @Override - public SummarySnapshot collect() { - return (SummarySnapshot) super.collect(); + private void doObserve(double amount) { + sum.add(amount); + if (quantileValues != null) { + quantileValues.observe(amount); + } + // count must be incremented last, because in collect() the count + // indicates the number of completed observations. + count.increment(); } - @Override - protected SummarySnapshot collect(List labels, List metricData) { - List data = new ArrayList<>(labels.size()); - for (int i = 0; i < labels.size(); i++) { - data.add(metricData.get(i).collect(labels.get(i))); - } - return new SummarySnapshot(getMetadata(), data); + private SummarySnapshot.SummaryDataPointSnapshot collect(Labels labels) { + return buffer.run( + expectedCount -> count.sum() == expectedCount, + // TODO Exemplars (are hard-coded as empty in the line below) + () -> + new SummarySnapshot.SummaryDataPointSnapshot( + count.sum(), + sum.sum(), + makeQuantiles(), + labels, + Exemplars.EMPTY, + createdTimeMillis), + this::doObserve); } - @Override - protected DataPoint newDataPoint() { - return new DataPoint(); + private List getQuantiles() { + return quantiles; } + private Quantiles makeQuantiles() { + Quantile[] quantiles = new Quantile[getQuantiles().size()]; + for (int i = 0; i < getQuantiles().size(); i++) { + CKMSQuantiles.Quantile quantile = getQuantiles().get(i); + quantiles[i] = + new Quantile(quantile.quantile, quantileValues.current().get(quantile.quantile)); + } + return Quantiles.of(quantiles); + } + } - public class DataPoint implements DistributionDataPoint { - - private final LongAdder count = new LongAdder(); - private final DoubleAdder sum = new DoubleAdder(); - private final SlidingWindow quantileValues; - private final Buffer buffer = new Buffer(); - private final ExemplarSampler exemplarSampler; - - private final long createdTimeMillis = System.currentTimeMillis(); - - private DataPoint() { - if (quantiles.size() > 0) { - CKMSQuantiles.Quantile[] quantilesArray = quantiles.toArray(new CKMSQuantiles.Quantile[0]); - quantileValues = new SlidingWindow<>(CKMSQuantiles.class, () -> new CKMSQuantiles(quantilesArray), CKMSQuantiles::insert, maxAgeSeconds, ageBuckets); - } else { - quantileValues = null; - } - if (exemplarsEnabled) { - exemplarSampler = new ExemplarSampler(exemplarSamplerConfig); - } else { - exemplarSampler = null; - } - } + public static Summary.Builder builder() { + return new Builder(PrometheusProperties.get()); + } - /** - * {@inheritDoc} - */ - @Override - public void observe(double value) { - if (Double.isNaN(value)) { - return; - } - if (!buffer.append(value)) { - doObserve(value); - } - if (isExemplarsEnabled()) { - exemplarSampler.observe(value); - } - } + public static Summary.Builder builder(PrometheusProperties config) { + return new Builder(config); + } - /** - * {@inheritDoc} - */ - @Override - public void observeWithExemplar(double value, Labels labels) { - if (Double.isNaN(value)) { - return; - } - if 
(!buffer.append(value)) { - doObserve(value); - } - if (isExemplarsEnabled()) { - exemplarSampler.observeWithExemplar(value, labels); - } - } + public static class Builder extends StatefulMetric.Builder { - private void doObserve(double amount) { - sum.add(amount); - if (quantileValues != null) { - quantileValues.observe(amount); - } - // count must be incremented last, because in collect() the count - // indicates the number of completed observations. - count.increment(); - } + /** 5 minutes. See {@link #maxAgeSeconds(long)}. */ + public static final long DEFAULT_MAX_AGE_SECONDS = TimeUnit.MINUTES.toSeconds(5); - private SummarySnapshot.SummaryDataPointSnapshot collect(Labels labels) { - return buffer.run( - expectedCount -> count.sum() == expectedCount, - // TODO Exemplars (are hard-coded as empty in the line below) - () -> new SummarySnapshot.SummaryDataPointSnapshot(count.sum(), sum.sum(), makeQuantiles(), labels, Exemplars.EMPTY, createdTimeMillis), - this::doObserve - ); - } + /** 5. See {@link #numberOfAgeBuckets(int)} */ + public static final int DEFAULT_NUMBER_OF_AGE_BUCKETS = 5; - private List getQuantiles() { - return quantiles; - } + private final List quantiles = new ArrayList<>(); + private Long maxAgeSeconds; + private Integer ageBuckets; - private Quantiles makeQuantiles() { - Quantile[] quantiles = new Quantile[getQuantiles().size()]; - for (int i = 0; i < getQuantiles().size(); i++) { - CKMSQuantiles.Quantile quantile = getQuantiles().get(i); - quantiles[i] = new Quantile(quantile.quantile, quantileValues.current().get(quantile.quantile)); - } - return Quantiles.of(quantiles); - } + private Builder(PrometheusProperties properties) { + super(Collections.singletonList("quantile"), properties); } - public static Summary.Builder builder() { - return new Builder(PrometheusProperties.get()); + private static double defaultError(double quantile) { + if (quantile <= 0.01 || quantile >= 0.99) { + return 0.001; + } else if (quantile <= 0.02 || quantile >= 0.98) { + return 0.005; + } else { + return 0.01; + } } - public static Summary.Builder builder(PrometheusProperties config) { - return new Builder(config); + /** + * Add a quantile. See {@link #quantile(double, double)}. + * + *

Default errors are: + * + *

    + *
  • error = 0.001 if quantile <= 0.01 or quantile >= 0.99 + *
  • error = 0.005 if quantile <= 0.02 or quantile >= 0.98 + *
  • error = 0.01 else. + *
+ */ + public Builder quantile(double quantile) { + return quantile(quantile, defaultError(quantile)); } - public static class Builder extends StatefulMetric.Builder { - - /** - * 5 minutes. See {@link #maxAgeSeconds(long)}. - */ - public static final long DEFAULT_MAX_AGE_SECONDS = TimeUnit.MINUTES.toSeconds(5); - - /** - * 5. See {@link #numberOfAgeBuckets(int)} - */ - public static final int DEFAULT_NUMBER_OF_AGE_BUCKETS = 5; - private final List quantiles = new ArrayList<>(); - private Long maxAgeSeconds; - private Integer ageBuckets; - - private Builder(PrometheusProperties properties) { - super(Collections.singletonList("quantile"), properties); - } - - private static double defaultError(double quantile) { - if (quantile <= 0.01 || quantile >= 0.99) { - return 0.001; - } else if (quantile <= 0.02 || quantile >= 0.98) { - return 0.005; - } else { - return 0.01; - } - } - - /** - * Add a quantile. See {@link #quantile(double, double)}. - *

- * Default errors are: - *

    - *
  • error = 0.001 if quantile <= 0.01 or quantile >= 0.99
  • - *
  • error = 0.005 if quantile <= 0.02 or quantile >= 0.98
  • - *
  • error = 0.01 else. - *
- */ - public Builder quantile(double quantile) { - return quantile(quantile, defaultError(quantile)); - } - - /** - * Add a quantile. Call multiple times to add multiple quantiles. - *

- * Example: The following will track the 0.95 quantile: - *

{@code
-         * .quantile(0.95, 0.001)
-         * }
- * The second argument is the acceptable error margin, i.e. with the code above the quantile - * will not be exactly the 0.95 quantile but something between 0.949 and 0.951. - *

- * There are two special cases: - *

    - *
  • {@code .quantile(0.0, 0.0)} gives you the minimum observed value
  • - *
  • {@code .quantile(1.0, 0.0)} gives you the maximum observed value
  • - *
- */ - public Builder quantile(double quantile, double error) { - if (quantile < 0.0 || quantile > 1.0) { - throw new IllegalArgumentException("Quantile " + quantile + " invalid: Expected number between 0.0 and 1.0."); - } - if (error < 0.0 || error > 1.0) { - throw new IllegalArgumentException("Error " + error + " invalid: Expected number between 0.0 and 1.0."); - } - quantiles.add(new CKMSQuantiles.Quantile(quantile, error)); - return this; - } + /** + * Add a quantile. Call multiple times to add multiple quantiles. + * + *

Example: The following will track the 0.95 quantile: + * + *

{@code
+     * .quantile(0.95, 0.001)
+     * }
+ * + * The second argument is the acceptable error margin, i.e. with the code above the quantile + * will not be exactly the 0.95 quantile but something between 0.949 and 0.951. + * + *

There are two special cases: + * + *

    + *
  • {@code .quantile(0.0, 0.0)} gives you the minimum observed value + *
  • {@code .quantile(1.0, 0.0)} gives you the maximum observed value + *
+ */ + public Builder quantile(double quantile, double error) { + if (quantile < 0.0 || quantile > 1.0) { + throw new IllegalArgumentException( + "Quantile " + quantile + " invalid: Expected number between 0.0 and 1.0."); + } + if (error < 0.0 || error > 1.0) { + throw new IllegalArgumentException( + "Error " + error + " invalid: Expected number between 0.0 and 1.0."); + } + quantiles.add(new CKMSQuantiles.Quantile(quantile, error)); + return this; + } - /** - * The quantiles are relative to a moving time window. - * {@code maxAgeSeconds} is the size of that time window. - * Default is {@link #DEFAULT_MAX_AGE_SECONDS}. - */ - public Builder maxAgeSeconds(long maxAgeSeconds) { - if (maxAgeSeconds <= 0) { - throw new IllegalArgumentException("maxAgeSeconds cannot be " + maxAgeSeconds); - } - this.maxAgeSeconds = maxAgeSeconds; - return this; - } + /** + * The quantiles are relative to a moving time window. {@code maxAgeSeconds} is the size of that + * time window. Default is {@link #DEFAULT_MAX_AGE_SECONDS}. + */ + public Builder maxAgeSeconds(long maxAgeSeconds) { + if (maxAgeSeconds <= 0) { + throw new IllegalArgumentException("maxAgeSeconds cannot be " + maxAgeSeconds); + } + this.maxAgeSeconds = maxAgeSeconds; + return this; + } - /** - * The quantiles are relative to a moving time window. - * The {@code numberOfAgeBuckets} defines how smoothly the time window moves forward. - * For example, if the time window is 5 minutes and has 5 age buckets, - * then it is moving forward every minute by one minute. - * Default is {@link #DEFAULT_NUMBER_OF_AGE_BUCKETS}. - */ - public Builder numberOfAgeBuckets(int ageBuckets) { - if (ageBuckets <= 0) { - throw new IllegalArgumentException("ageBuckets cannot be " + ageBuckets); - } - this.ageBuckets = ageBuckets; - return this; - } + /** + * The quantiles are relative to a moving time window. The {@code numberOfAgeBuckets} defines + * how smoothly the time window moves forward. For example, if the time window is 5 minutes and + * has 5 age buckets, then it is moving forward every minute by one minute. Default is {@link + * #DEFAULT_NUMBER_OF_AGE_BUCKETS}. 
+ */ + public Builder numberOfAgeBuckets(int ageBuckets) { + if (ageBuckets <= 0) { + throw new IllegalArgumentException("ageBuckets cannot be " + ageBuckets); + } + this.ageBuckets = ageBuckets; + return this; + } - @Override - protected MetricsProperties toProperties() { - double[] quantiles = null; - double[] quantileErrors = null; - if (!this.quantiles.isEmpty()) { - quantiles = new double[this.quantiles.size()]; - quantileErrors = new double[this.quantiles.size()]; - for (int i = 0; i < this.quantiles.size(); i++) { - quantiles[i] = this.quantiles.get(i).quantile; - quantileErrors[i] = this.quantiles.get(i).epsilon; - } - } - return MetricsProperties.builder() - .exemplarsEnabled(exemplarsEnabled) - .summaryQuantiles(quantiles) - .summaryQuantileErrors(quantileErrors) - .summaryNumberOfAgeBuckets(ageBuckets) - .summaryMaxAgeSeconds(maxAgeSeconds) - .build(); + @Override + protected MetricsProperties toProperties() { + double[] quantiles = null; + double[] quantileErrors = null; + if (!this.quantiles.isEmpty()) { + quantiles = new double[this.quantiles.size()]; + quantileErrors = new double[this.quantiles.size()]; + for (int i = 0; i < this.quantiles.size(); i++) { + quantiles[i] = this.quantiles.get(i).quantile; + quantileErrors[i] = this.quantiles.get(i).epsilon; } + } + return MetricsProperties.builder() + .exemplarsEnabled(exemplarsEnabled) + .summaryQuantiles(quantiles) + .summaryQuantileErrors(quantileErrors) + .summaryNumberOfAgeBuckets(ageBuckets) + .summaryMaxAgeSeconds(maxAgeSeconds) + .build(); + } - /** - * Default properties for summary metrics. - */ - @Override - public MetricsProperties getDefaultProperties() { - return MetricsProperties.builder() - .exemplarsEnabled(true) - .summaryQuantiles() - .summaryNumberOfAgeBuckets(DEFAULT_NUMBER_OF_AGE_BUCKETS) - .summaryMaxAgeSeconds(DEFAULT_MAX_AGE_SECONDS) - .build(); - } + /** Default properties for summary metrics. */ + @Override + public MetricsProperties getDefaultProperties() { + return MetricsProperties.builder() + .exemplarsEnabled(true) + .summaryQuantiles() + .summaryNumberOfAgeBuckets(DEFAULT_NUMBER_OF_AGE_BUCKETS) + .summaryMaxAgeSeconds(DEFAULT_MAX_AGE_SECONDS) + .build(); + } - @Override - public Summary build() { - return new Summary(this, properties); - } + @Override + public Summary build() { + return new Summary(this, properties); + } - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SummaryWithCallback.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SummaryWithCallback.java index c55211636..dbe61b2ce 100644 --- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SummaryWithCallback.java +++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/metrics/SummaryWithCallback.java @@ -4,7 +4,6 @@ import io.prometheus.metrics.model.snapshots.Exemplars; import io.prometheus.metrics.model.snapshots.Quantiles; import io.prometheus.metrics.model.snapshots.SummarySnapshot; - import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -12,6 +11,7 @@ /** * Example: + * *
{@code
  * double MILLISECONDS_PER_SECOND = 1E3;
  *
@@ -35,59 +35,63 @@
  */
 public class SummaryWithCallback extends CallbackMetric {
 
-    @FunctionalInterface
-    public interface Callback {
-        void call(long count, double sum, Quantiles quantiles, String... labelValues);
-    }
+  @FunctionalInterface
+  public interface Callback {
+    void call(long count, double sum, Quantiles quantiles, String... labelValues);
+  }
 
-    private final Consumer callback;
+  private final Consumer callback;
 
-    private SummaryWithCallback(Builder builder) {
-        super(builder);
-        this.callback = builder.callback;
-        if (callback == null) {
-            throw new IllegalArgumentException("callback cannot be null");
-        }
+  private SummaryWithCallback(Builder builder) {
+    super(builder);
+    this.callback = builder.callback;
+    if (callback == null) {
+      throw new IllegalArgumentException("callback cannot be null");
     }
+  }
 
-    @Override
-    public SummarySnapshot collect() {
-        List dataPoints = new ArrayList<>();
-        callback.accept((count, sum, quantiles, labelValues) -> {
-            dataPoints.add(new SummarySnapshot.SummaryDataPointSnapshot(count, sum, quantiles, makeLabels(labelValues), Exemplars.EMPTY, 0L));
+  @Override
+  public SummarySnapshot collect() {
+    List dataPoints = new ArrayList<>();
+    callback.accept(
+        (count, sum, quantiles, labelValues) -> {
+          dataPoints.add(
+              new SummarySnapshot.SummaryDataPointSnapshot(
+                  count, sum, quantiles, makeLabels(labelValues), Exemplars.EMPTY, 0L));
         });
-        return new SummarySnapshot(getMetadata(), dataPoints);
-    }
+    return new SummarySnapshot(getMetadata(), dataPoints);
+  }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
-    }
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
 
-    public static Builder builder(PrometheusProperties properties) {
-        return new Builder(properties);
-    }
+  public static Builder builder(PrometheusProperties properties) {
+    return new Builder(properties);
+  }
 
-    public static class Builder extends CallbackMetric.Builder {
+  public static class Builder
+      extends CallbackMetric.Builder {
 
-        private Consumer callback;
+    private Consumer callback;
 
-        public Builder callback(Consumer callback) {
-            this.callback = callback;
-            return self();
-        }
+    public Builder callback(Consumer callback) {
+      this.callback = callback;
+      return self();
+    }
 
-        private Builder(PrometheusProperties properties) {
-            super(Collections.singletonList("quantile"), properties);
-        }
+    private Builder(PrometheusProperties properties) {
+      super(Collections.singletonList("quantile"), properties);
+    }
 
-        @Override
-        public SummaryWithCallback build() {
-            return new SummaryWithCallback(this);
-        }
+    @Override
+    public SummaryWithCallback build() {
+      return new SummaryWithCallback(this);
+    }
 
-        @Override
-        protected Builder self() {
-            return this;
-        }
+    @Override
+    protected Builder self() {
+      return this;
     }
+  }
 }
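
For reference, a minimal usage sketch of SummaryWithCallback as reformatted above. The name/help metadata methods are assumed to be inherited from the shared builder, and the cached count, sum, and median values are hypothetical placeholders, not library API:

import io.prometheus.metrics.core.metrics.SummaryWithCallback;
import io.prometheus.metrics.model.snapshots.Quantile;
import io.prometheus.metrics.model.snapshots.Quantiles;

public class SummaryWithCallbackSketch {
  public static void main(String[] args) {
    // Hypothetical pre-aggregated values, e.g. mirrored from another monitoring system.
    long cachedCount = 42;
    double cachedSum = 3.7;
    double cachedMedian = 0.07;

    SummaryWithCallback summary =
        SummaryWithCallback.builder()
            .name("request_duration_seconds") // hypothetical metric name
            .help("Request duration in seconds.")
            .callback(
                cb ->
                    cb.call(
                        cachedCount,
                        cachedSum,
                        // explicit array to stay compatible with an array or varargs signature
                        Quantiles.of(new Quantile[] {new Quantile(0.5, cachedMedian)})))
            .build();

    // collect() produces a SummarySnapshot built from the callback's values.
    System.out.println(summary.collect());
  }
}
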
diff --git a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/util/Scheduler.java b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/util/Scheduler.java
index 22780819e..6af7fa54a 100644
--- a/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/util/Scheduler.java
+++ b/prometheus-metrics-core/src/main/java/io/prometheus/metrics/core/util/Scheduler.java
@@ -8,30 +8,30 @@
 import java.util.concurrent.TimeUnit;
 
 /**
- * Used for scheduling maintenance tasks like purging outdated Exemplars or resetting native histograms.
+ * Used for scheduling maintenance tasks like purging outdated Exemplars or resetting native
+ * histograms.
  */
 public class Scheduler {
 
-    private static class DaemonThreadFactory implements ThreadFactory {
-        public Thread newThread(Runnable runnable) {
-            Thread thread = new Thread(runnable);
-            thread.setDaemon(true);
-            return thread;
-        }
+  private static class DaemonThreadFactory implements ThreadFactory {
+    public Thread newThread(Runnable runnable) {
+      Thread thread = new Thread(runnable);
+      thread.setDaemon(true);
+      return thread;
     }
+  }
 
-    private static final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(new DaemonThreadFactory());
+  private static final ScheduledExecutorService executor =
+      Executors.newSingleThreadScheduledExecutor(new DaemonThreadFactory());
 
-    public static ScheduledFuture schedule(Runnable command, long delay, TimeUnit unit) {
-        return executor.schedule(command, delay, unit);
-    }
+  public static ScheduledFuture schedule(Runnable command, long delay, TimeUnit unit) {
+    return executor.schedule(command, delay, unit);
+  }
 
-    /**
-     * For unit test. Wait until the executor Thread is running.
-     */
-    public static void awaitInitialization() throws InterruptedException {
-        CountDownLatch latch = new CountDownLatch(1);
-        Scheduler.schedule(latch::countDown, 0, TimeUnit.MILLISECONDS);
-        latch.await();
-    }
+  /** For unit tests: wait until the executor thread is running. */
+  public static void awaitInitialization() throws InterruptedException {
+    CountDownLatch latch = new CountDownLatch(1);
+    Scheduler.schedule(latch::countDown, 0, TimeUnit.MILLISECONDS);
+    latch.await();
+  }
 }
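
A minimal sketch of how this scheduler is typically used. The scheduled task below is a placeholder standing in for maintenance work such as purging outdated Exemplars, not an actual library task:

import io.prometheus.metrics.core.util.Scheduler;
import java.util.concurrent.TimeUnit;

public class SchedulerSketch {
  public static void main(String[] args) throws InterruptedException {
    // Wait until the shared daemon thread is up (mainly useful in tests).
    Scheduler.awaitInitialization();
    // Schedule a placeholder maintenance task on the shared single-threaded daemon executor.
    Scheduler.schedule(
        () -> System.out.println("maintenance task ran"), 100, TimeUnit.MILLISECONDS);
    Thread.sleep(200); // keep the JVM alive long enough for the daemon thread to run the task
  }
}
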
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/datapoints/TimerApiTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/datapoints/TimerApiTest.java
index 4ae0cd0bb..8c015f591 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/datapoints/TimerApiTest.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/datapoints/TimerApiTest.java
@@ -2,5 +2,5 @@
 
 public class TimerApiTest {
 
-    // TODO: Port this from the simpleclient SimpleTimerTest
+  // TODO: Port this from the simpleclient SimpleTimerTest
 }
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfigTestUtil.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfigTestUtil.java
index c20482e44..e22e9d6c6 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfigTestUtil.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerConfigTestUtil.java
@@ -1,30 +1,32 @@
 package io.prometheus.metrics.core.exemplars;
 
-import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfig;
-
 import java.lang.reflect.Field;
 
 public class ExemplarSamplerConfigTestUtil {
 
-    private static ExemplarSamplerConfig getConfig(Object metric, String fieldName) throws NoSuchFieldException, IllegalAccessException {
-        Field configField = metric.getClass().getDeclaredField(fieldName);
-        configField.setAccessible(true);
-        return (ExemplarSamplerConfig) configField.get(metric);
-    }
+  private static ExemplarSamplerConfig getConfig(Object metric, String fieldName)
+      throws NoSuchFieldException, IllegalAccessException {
+    Field configField = metric.getClass().getDeclaredField(fieldName);
+    configField.setAccessible(true);
+    return (ExemplarSamplerConfig) configField.get(metric);
+  }
 
-    private static void setRetentionPeriod(ExemplarSamplerConfig config, String name, long value) throws IllegalAccessException, NoSuchFieldException {
-        Field field = config.getClass().getDeclaredField(name);
-        field.setAccessible(true);
-        field.set(config, value);
-    }
+  private static void setRetentionPeriod(ExemplarSamplerConfig config, String name, long value)
+      throws IllegalAccessException, NoSuchFieldException {
+    Field field = config.getClass().getDeclaredField(name);
+    field.setAccessible(true);
+    field.set(config, value);
+  }
 
-    public static void setMinRetentionPeriodMillis(Object metric, long value) throws NoSuchFieldException, IllegalAccessException {
-        ExemplarSamplerConfig config = getConfig(metric, "exemplarSamplerConfig");
-        setRetentionPeriod(config, "minRetentionPeriodMillis", value);
-    }
+  public static void setMinRetentionPeriodMillis(Object metric, long value)
+      throws NoSuchFieldException, IllegalAccessException {
+    ExemplarSamplerConfig config = getConfig(metric, "exemplarSamplerConfig");
+    setRetentionPeriod(config, "minRetentionPeriodMillis", value);
+  }
 
-    public static void setSampleIntervalMillis(Object metric, long value) throws NoSuchFieldException, IllegalAccessException {
-        ExemplarSamplerConfig config = getConfig(metric, "exemplarSamplerConfig");
-        setRetentionPeriod(config, "sampleIntervalMillis", value);
-    }
+  public static void setSampleIntervalMillis(Object metric, long value)
+      throws NoSuchFieldException, IllegalAccessException {
+    ExemplarSamplerConfig config = getConfig(metric, "exemplarSamplerConfig");
+    setRetentionPeriod(config, "sampleIntervalMillis", value);
+  }
 }
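
A sketch of how a test might use this reflection helper. Summary is used here because it declares the exemplarSamplerConfig field shown in the Summary.java hunk above; the metric name is hypothetical:

import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfigTestUtil;
import io.prometheus.metrics.core.metrics.Summary;

public class ExemplarTimingTestSketch {
  public static void main(String[] args) throws Exception {
    Summary summary =
        Summary.builder().name("latency_seconds").help("help").quantile(0.5).build();
    // Shrink the exemplar timing via reflection so a test does not need long sleeps.
    ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(summary, 10);
    ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(summary, 5);
    summary.observe(0.42); // exemplars recorded for this observation now age out quickly
  }
}
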
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerTest.java
index 1af5da42b..9d51ae17b 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerTest.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/ExemplarSamplerTest.java
@@ -1,10 +1,10 @@
 package io.prometheus.metrics.core.exemplars;
 
-import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
+import io.prometheus.metrics.core.util.Scheduler;
 import io.prometheus.metrics.model.snapshots.Exemplar;
 import io.prometheus.metrics.model.snapshots.Exemplars;
 import io.prometheus.metrics.model.snapshots.Label;
-import io.prometheus.metrics.core.util.Scheduler;
+import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -12,214 +12,215 @@
 
 public class ExemplarSamplerTest {
 
-    private final int tick = 10; // Time step in milliseconds. Make this larger if the test is flaky.
-    private final int sampleInterval = 10 * tick; // do not change this
-    private final int minAge = 50 * tick; // do not change this
-    private final int maxAge = 200 * tick; // do not change this
-
-    private ExemplarSamplerConfig makeConfig(double... buckets) {
-        return new ExemplarSamplerConfig(
-                minAge,
-                maxAge,
-                sampleInterval,
-                buckets.length == 0 ? 4 : buckets.length, // number of exemplars
-                buckets.length == 0 ? null : buckets
-        );
+  private final int tick = 10; // Time step in milliseconds. Make this larger if the test is flaky.
+  private final int sampleInterval = 10 * tick; // do not change this
+  private final int minAge = 50 * tick; // do not change this
+  private final int maxAge = 200 * tick; // do not change this
+
+  private ExemplarSamplerConfig makeConfig(double... buckets) {
+    return new ExemplarSamplerConfig(
+        minAge,
+        maxAge,
+        sampleInterval,
+        buckets.length == 0 ? 4 : buckets.length, // number of exemplars
+        buckets.length == 0 ? null : buckets);
+  }
+
+  private static class SpanContext implements io.prometheus.metrics.tracer.common.SpanContext {
+
+    int callCount = 0;
+    boolean isSampled = true;
+    boolean isExemplar = false;
+
+    @Override
+    public String getCurrentTraceId() {
+      return "" + (callCount++);
     }
 
-
-    private static class SpanContext implements io.prometheus.metrics.tracer.common.SpanContext {
-
-        int callCount = 0;
-        boolean isSampled = true;
-        boolean isExemplar = false;
-
-        @Override
-        public String getCurrentTraceId() {
-            return "" + (callCount++);
-        }
-
-        @Override
-        public String getCurrentSpanId() {
-            return "" + callCount;
-        }
-
-        @Override
-        public boolean isCurrentSpanSampled() {
-            return isSampled;
-        }
-
-        @Override
-        public void markCurrentSpanAsExemplar() {
-            isExemplar = true;
-        }
+    @Override
+    public String getCurrentSpanId() {
+      return "" + callCount;
     }
 
-    @Test
-    public void testCustomExemplarsBuckets() throws Exception {
-        // TODO
+    @Override
+    public boolean isCurrentSpanSampled() {
+      return isSampled;
     }
 
-    private io.prometheus.metrics.tracer.common.SpanContext origContext;
-
-    @Before
-    public void setUp() {
-        origContext = SpanContextSupplier.getSpanContext();
+    @Override
+    public void markCurrentSpanAsExemplar() {
+      isExemplar = true;
     }
-
-    @After
-    public void tearDown() {
-        SpanContextSupplier.setSpanContext(origContext);
-    }
-
-    @Test
-    public void testIsSampled() throws Exception {
-        SpanContext context = new SpanContext();
-        context.isSampled = false;
-        ExemplarSampler sampler = new ExemplarSampler(makeConfig(), context);
-        Thread.sleep(tick); // t = 1 tick
-        sampler.observe(0.3); // no sampled, because isSampled() returns false
-        assertExemplars(sampler); // empty
-    }
-
-    @Test
-    public void testDefaultConfigHasFourExemplars() throws Exception {
-        ExemplarSampler sampler = new ExemplarSampler(makeConfig(), new SpanContext());
-        Thread.sleep(tick); // t = 1 tick
-        sampler.observe(0.3);
-        Thread.sleep(sampleInterval + tick); // t = 12 tick
-        sampler.observe(0.8);
-        Thread.sleep(sampleInterval + tick); // t = 23 tick
-        sampler.observe(0.4);
-        Thread.sleep(sampleInterval + tick); // t = 34 tick
-        sampler.observe(0.6);
-        Thread.sleep(sampleInterval + tick); // t = 45 tick
-        sampler.observe(0.2); // not observed, we got 4 Exemplars already and non reached min age
-        assertExemplars(sampler, 0.3, 0.8, 0.4, 0.6);
-        print(sampler.collect());
-    }
-
-    @Test
-    public void testEmptyBuckets() throws Exception {
-        ExemplarSampler sampler = new ExemplarSampler(makeConfig(Double.POSITIVE_INFINITY), new SpanContext());
-        Thread.sleep(tick); // t = 1 tick
-        sampler.observe(0.8); // observed in the +Inf bucket
-        Thread.sleep(sampleInterval + tick); // t = 12 tick
-        sampler.observe(0.5); // not observed, because +Inf is the only bucket
-        assertExemplars(sampler, 0.8);
-        print(sampler.collect());
-    }
-
-    @Test
-    public void testDefaultExemplarsBuckets() throws Exception {
-        ExemplarSampler sampler = new ExemplarSampler(makeConfig(0.2, 0.4, 0.6, 0.8, 1.0, Double.POSITIVE_INFINITY), new SpanContext());
-        Scheduler.awaitInitialization();
-        Thread.sleep(tick); // t = 1 tick
-        sampler.observe(0.3);
-        sampler.observe(0.5); // not observed, previous observation is less than sample interval ms ago
-        assertExemplars(sampler, 0.3);
-        Thread.sleep(sampleInterval + tick); // t = 12 ticks
-        sampler.observe(0.5); // observed
-        assertExemplars(sampler, 0.3, 0.5);
-        Thread.sleep(sampleInterval + tick); // t = 23 ticks
-        sampler.observe(0.4); // not observed, because 0.3 hasn't reached min age yet
-        assertExemplars(sampler, 0.3, 0.5);
-        Thread.sleep(sampleInterval + tick); // t = 34 ticks
-        sampler.observe(1.1); // observed
-        assertExemplars(sampler, 0.3, 0.5, 1.1);
-        Thread.sleep(20 * tick); // t = 54 ticks
-        assertExemplars(sampler, 0.3, 0.5, 1.1);
-        sampler.observe(0.4); // observed
-        assertExemplars(sampler, 0.4, 0.5, 1.1);
-        Thread.sleep(159 * tick); // t = 213 ticks
-        assertExemplars(sampler, 0.4, 1.1); // 0.5 evicted because it has reached max age
-        print(sampler.collect());
-    }
-
-    @Test
-    public void testCustomExemplarsNoBuckets() throws Exception {
-        // TODO
-    }
-
-    @Test
-    public void testDefaultExemplarsNoBuckets() throws Exception {
-        ExemplarSampler sampler = new ExemplarSampler(makeConfig(), new SpanContext());
-        Scheduler.awaitInitialization();
-        Thread.sleep(tick);           // t = 1 tick
-        sampler.observe(1);    // observed
-        assertExemplars(sampler, 1);
-        sampler.observe(2);    // not observed, previous observation is less than sample interval ms ago
-        Thread.sleep(sampleInterval + tick); // t = 12 ticks
-        sampler.observe(3);    // observed
-        assertExemplars(sampler, 1, 3);
-        Thread.sleep(2 * tick);    // t = 14 ticks
-        sampler.observe(4);    // not observed, previous observation is less than sample interval ms ago
-        Thread.sleep(sampleInterval + tick); // t = 25 ticks
-        sampler.observe(5);    // observed
-        assertExemplars(sampler, 1, 3, 5);
-        Thread.sleep(sampleInterval + tick); // t = 36 ticks
-        sampler.observe(6);    // observed
-        assertExemplars(sampler, 1, 3, 5, 6);
-        Thread.sleep(sampleInterval + tick);  // t = 47 ticks
-        sampler.observe(7);    // not observed, because no Exemplar has reached the minimum age yet
-        Thread.sleep(5 * tick); // t = 52 ticks
-        sampler.observe(2); // not observed. 1 is older than min age, but kept because it's the minimum
-        assertExemplars(sampler, 1, 3, 5, 6);
-        Thread.sleep(sampleInterval + tick); // t = 63 ticks
-        sampler.observe(2); // observed
-        assertExemplars(sampler, 1, 2, 5, 6);
-        Thread.sleep(27 * tick); // t = 90 ticks
-        sampler.observe(7); // observed, replaces 6 because 7 > 6 even though 5 is older
-        assertExemplars(sampler, 1, 2, 5, 7);
-        sampler.observe(8); // not observed, sample interval not done
-        assertExemplars(sampler, 1, 2, 5, 7);
-        Thread.sleep(sampleInterval + tick); // t = 101 ticks
-        sampler.observe(8); // observed
-        assertExemplars(sampler, 1, 2, 8, 7);
-        Thread.sleep(101 * tick); // t = 202 ticks
-        sampler.observe(5); // observed, replaces 1 because 1 reached the max age
-        assertExemplars(sampler, 5, 2, 8, 7);
-        print(sampler.collect());
-    }
-
-    private void assertExemplars(ExemplarSampler sampler, double... values) {
-        Exemplars exemplars = sampler.collect();
-        Assert.assertEquals(values.length, exemplars.size());
-        for (double value : values) {
-            boolean found = false;
-            for (Exemplar exemplar : exemplars) {
-                if (exemplar.getValue() == value) {
-                    found = true;
-                    break;
-                }
-            }
-            Assert.assertTrue(value + " not found", found);
+  }
+
+  @Test
+  public void testCustomExemplarsBuckets() throws Exception {
+    // TODO
+  }
+
+  private io.prometheus.metrics.tracer.common.SpanContext origContext;
+
+  @Before
+  public void setUp() {
+    origContext = SpanContextSupplier.getSpanContext();
+  }
+
+  @After
+  public void tearDown() {
+    SpanContextSupplier.setSpanContext(origContext);
+  }
+
+  @Test
+  public void testIsSampled() throws Exception {
+    SpanContext context = new SpanContext();
+    context.isSampled = false;
+    ExemplarSampler sampler = new ExemplarSampler(makeConfig(), context);
+    Thread.sleep(tick); // t = 1 tick
+    sampler.observe(0.3); // not sampled, because isSampled() returns false
+    assertExemplars(sampler); // empty
+  }
+
+  @Test
+  public void testDefaultConfigHasFourExemplars() throws Exception {
+    ExemplarSampler sampler = new ExemplarSampler(makeConfig(), new SpanContext());
+    Thread.sleep(tick); // t = 1 tick
+    sampler.observe(0.3);
+    Thread.sleep(sampleInterval + tick); // t = 12 tick
+    sampler.observe(0.8);
+    Thread.sleep(sampleInterval + tick); // t = 23 tick
+    sampler.observe(0.4);
+    Thread.sleep(sampleInterval + tick); // t = 34 tick
+    sampler.observe(0.6);
+    Thread.sleep(sampleInterval + tick); // t = 45 tick
+    sampler.observe(0.2); // not observed, we got 4 Exemplars already and none reached min age
+    assertExemplars(sampler, 0.3, 0.8, 0.4, 0.6);
+    print(sampler.collect());
+  }
+
+  @Test
+  public void testEmptyBuckets() throws Exception {
+    ExemplarSampler sampler =
+        new ExemplarSampler(makeConfig(Double.POSITIVE_INFINITY), new SpanContext());
+    Thread.sleep(tick); // t = 1 tick
+    sampler.observe(0.8); // observed in the +Inf bucket
+    Thread.sleep(sampleInterval + tick); // t = 12 tick
+    sampler.observe(0.5); // not observed, because +Inf is the only bucket
+    assertExemplars(sampler, 0.8);
+    print(sampler.collect());
+  }
+
+  @Test
+  public void testDefaultExemplarsBuckets() throws Exception {
+    ExemplarSampler sampler =
+        new ExemplarSampler(
+            makeConfig(0.2, 0.4, 0.6, 0.8, 1.0, Double.POSITIVE_INFINITY), new SpanContext());
+    Scheduler.awaitInitialization();
+    Thread.sleep(tick); // t = 1 tick
+    sampler.observe(0.3);
+    sampler.observe(0.5); // not observed, previous observation was less than the sample interval ago
+    assertExemplars(sampler, 0.3);
+    Thread.sleep(sampleInterval + tick); // t = 12 ticks
+    sampler.observe(0.5); // observed
+    assertExemplars(sampler, 0.3, 0.5);
+    Thread.sleep(sampleInterval + tick); // t = 23 ticks
+    sampler.observe(0.4); // not observed, because 0.3 hasn't reached min age yet
+    assertExemplars(sampler, 0.3, 0.5);
+    Thread.sleep(sampleInterval + tick); // t = 34 ticks
+    sampler.observe(1.1); // observed
+    assertExemplars(sampler, 0.3, 0.5, 1.1);
+    Thread.sleep(20 * tick); // t = 54 ticks
+    assertExemplars(sampler, 0.3, 0.5, 1.1);
+    sampler.observe(0.4); // observed
+    assertExemplars(sampler, 0.4, 0.5, 1.1);
+    Thread.sleep(159 * tick); // t = 213 ticks
+    assertExemplars(sampler, 0.4, 1.1); // 0.5 evicted because it has reached max age
+    print(sampler.collect());
+  }
+
+  @Test
+  public void testCustomExemplarsNoBuckets() throws Exception {
+    // TODO
+  }
+
+  @Test
+  public void testDefaultExemplarsNoBuckets() throws Exception {
+    ExemplarSampler sampler = new ExemplarSampler(makeConfig(), new SpanContext());
+    Scheduler.awaitInitialization();
+    Thread.sleep(tick); // t = 1 tick
+    sampler.observe(1); // observed
+    assertExemplars(sampler, 1);
+    sampler.observe(2); // not observed, previous observation was less than the sample interval ago
+    Thread.sleep(sampleInterval + tick); // t = 12 ticks
+    sampler.observe(3); // observed
+    assertExemplars(sampler, 1, 3);
+    Thread.sleep(2 * tick); // t = 14 ticks
+    sampler.observe(4); // not observed, previous observation was less than the sample interval ago
+    Thread.sleep(sampleInterval + tick); // t = 25 ticks
+    sampler.observe(5); // observed
+    assertExemplars(sampler, 1, 3, 5);
+    Thread.sleep(sampleInterval + tick); // t = 36 ticks
+    sampler.observe(6); // observed
+    assertExemplars(sampler, 1, 3, 5, 6);
+    Thread.sleep(sampleInterval + tick); // t = 47 ticks
+    sampler.observe(7); // not observed, because no Exemplar has reached the minimum age yet
+    Thread.sleep(5 * tick); // t = 52 ticks
+    sampler.observe(2); // not observed. 1 is older than min age, but kept because it's the minimum
+    assertExemplars(sampler, 1, 3, 5, 6);
+    Thread.sleep(sampleInterval + tick); // t = 63 ticks
+    sampler.observe(2); // observed
+    assertExemplars(sampler, 1, 2, 5, 6);
+    Thread.sleep(27 * tick); // t = 90 ticks
+    sampler.observe(7); // observed, replaces 6 because 7 > 6 even though 5 is older
+    assertExemplars(sampler, 1, 2, 5, 7);
+    sampler.observe(8); // not observed, sample interval not done
+    assertExemplars(sampler, 1, 2, 5, 7);
+    Thread.sleep(sampleInterval + tick); // t = 101 ticks
+    sampler.observe(8); // observed
+    assertExemplars(sampler, 1, 2, 8, 7);
+    Thread.sleep(101 * tick); // t = 202 ticks
+    sampler.observe(5); // observed, replaces 1 because 1 reached the max age
+    assertExemplars(sampler, 5, 2, 8, 7);
+    print(sampler.collect());
+  }
+
+  private void assertExemplars(ExemplarSampler sampler, double... values) {
+    Exemplars exemplars = sampler.collect();
+    Assert.assertEquals(values.length, exemplars.size());
+    for (double value : values) {
+      boolean found = false;
+      for (Exemplar exemplar : exemplars) {
+        if (exemplar.getValue() == value) {
+          found = true;
+          break;
         }
+      }
+      Assert.assertTrue(value + " not found", found);
     }
-
-    private void print(Exemplars exemplars) {
-        System.out.print("[");
-        boolean farst = true;
-        for (Exemplar exemplar : exemplars) {
-            if (!farst) {
-                System.out.print(",");
-            }
-            farst = false;
-            System.out.print(exemplar.getValue() + "{");
-            boolean first = true;
-            for (Label label : exemplar.getLabels()) {
-                if (!first) {
-                    System.out.print(",");
-                }
-                System.out.print(label.getName() + "=" + label.getValue());
-                first = false;
-            }
-            if (!first) {
-                System.out.print(",");
-            }
-            System.out.print("age=" + (System.currentTimeMillis() - exemplar.getTimestampMillis()));
-            System.out.print("}");
+  }
+
+  private void print(Exemplars exemplars) {
+    System.out.print("[");
+    boolean farst = true;
+    for (Exemplar exemplar : exemplars) {
+      if (!farst) {
+        System.out.print(",");
+      }
+      farst = false;
+      System.out.print(exemplar.getValue() + "{");
+      boolean first = true;
+      for (Label label : exemplar.getLabels()) {
+        if (!first) {
+          System.out.print(",");
         }
-        System.out.println("]");
+        System.out.print(label.getName() + "=" + label.getValue());
+        first = false;
+      }
+      if (!first) {
+        System.out.print(",");
+      }
+      System.out.print("age=" + (System.currentTimeMillis() - exemplar.getTimestampMillis()));
+      System.out.print("}");
     }
+    System.out.println("]");
+  }
 }
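
To make the timing model exercised by this test easier to follow, here is a standalone sketch that wires the sampler together the same way. The timing values and ID strings are arbitrary placeholders; the injected SpanContext mirrors the test's inner class so that observations count as sampled:

import io.prometheus.metrics.core.exemplars.ExemplarSampler;
import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfig;
import io.prometheus.metrics.model.snapshots.Exemplar;
import io.prometheus.metrics.tracer.common.SpanContext;

public class ExemplarSamplerSketch {
  public static void main(String[] args) throws InterruptedException {
    // Constructor arguments as in makeConfig() above:
    // min retention ms, max retention ms, sample interval ms, number of exemplars, upper bounds.
    ExemplarSamplerConfig config = new ExemplarSamplerConfig(500, 2000, 100, 4, null);
    // Inject a SpanContext that always reports the current span as sampled.
    SpanContext alwaysSampled =
        new SpanContext() {
          @Override
          public String getCurrentTraceId() {
            return "example-trace-id"; // placeholder value
          }

          @Override
          public String getCurrentSpanId() {
            return "example-span-id"; // placeholder value
          }

          @Override
          public boolean isCurrentSpanSampled() {
            return true;
          }

          @Override
          public void markCurrentSpanAsExemplar() {}
        };
    ExemplarSampler sampler = new ExemplarSampler(config, alwaysSampled);
    sampler.observe(0.3);
    Thread.sleep(150); // longer than the sample interval, so a second exemplar can be kept
    sampler.observe(0.8);
    for (Exemplar exemplar : sampler.collect()) {
      System.out.println(exemplar.getValue() + " " + exemplar.getLabels());
    }
  }
}
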
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/SpanContextSupplierTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/SpanContextSupplierTest.java
index 033420950..b1409b142 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/SpanContextSupplierTest.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/exemplars/SpanContextSupplierTest.java
@@ -1,5 +1,7 @@
 package io.prometheus.metrics.core.exemplars;
 
+import static io.prometheus.metrics.model.snapshots.Exemplar.TRACE_ID;
+
 import io.prometheus.metrics.config.ExemplarsProperties;
 import io.prometheus.metrics.model.snapshots.Exemplar;
 import io.prometheus.metrics.model.snapshots.Exemplars;
@@ -7,97 +9,97 @@
 import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
 import org.junit.*;
 
-import static io.prometheus.metrics.model.snapshots.Exemplar.TRACE_ID;
-
 public class SpanContextSupplierTest {
 
-    public SpanContext makeSpanContext(String traceId, String spanId) {
-
-        return new SpanContext() {
-            @Override
-            public String getCurrentTraceId() {
-                return traceId;
-            }
-
-            @Override
-            public String getCurrentSpanId() {
-                return spanId;
-            }
-
-            @Override
-            public boolean isCurrentSpanSampled() {
-                return true;
-            }
-
-            @Override
-            public void markCurrentSpanAsExemplar() {
-            }
-        };
-    }
-
-    SpanContext spanContextA = makeSpanContext("A", "a");
-    SpanContext spanContextB = makeSpanContext("B", "b");
-    SpanContext origSpanContext;
-
-    ExemplarSamplerConfig config = new ExemplarSamplerConfig(
-            10, // min retention period in milliseconds
-            20, // max retention period in milliseconds
-            5, // sample interval in millisecnods
-            1, // number of exemplars
-            null // histogram upper bounds
-    );
-
-    @Before
-    public void setUp() {
-        origSpanContext = SpanContextSupplier.getSpanContext();
-    }
-
-    @After
-    public void tearDown() {
-        SpanContextSupplier.setSpanContext(origSpanContext);
-    }
-
-    /**
-     * Test: When a {@link SpanContext} is provided as a constructor argument to the {@link ExemplarSampler},
-     * then that {@link SpanContext} is used, not the one from the {@link SpanContextSupplier}.
-     */
-    @Test
-    public void testConstructorInjection() {
-        ExemplarsProperties properties = ExemplarsProperties.builder().build();
-        ExemplarSamplerConfig config = new ExemplarSamplerConfig(properties, 1);
-        ExemplarSampler exemplarSampler = new ExemplarSampler(config, spanContextA);
-
-        SpanContextSupplier.setSpanContext(spanContextB);
-        exemplarSampler.observe(1.0);
-        Exemplars exemplars = exemplarSampler.collect();
-        Assert.assertEquals(1, exemplars.size());
-        Exemplar exemplar = exemplars.get(0);
-        Assert.assertEquals("A", exemplar.getLabels().get(TRACE_ID));
-    }
-
-    /**
-     * When the global {@link SpanContext} is updated via {@link SpanContextSupplier#setSpanContext(SpanContext)},
-     * the {@link ExemplarSampler} recognizes the update (unless a {@link ExemplarSampler} was provided as
-     * constructor argument to {@link ExemplarSampler}).
-     */
-    @Test
-    public void testUpdateSpanContext() throws InterruptedException {
-        ExemplarSampler exemplarSampler = new ExemplarSampler(config);
-
-        SpanContextSupplier.setSpanContext(spanContextB);
-        exemplarSampler.observe(1.0);
-        Exemplars exemplars = exemplarSampler.collect();
-        Assert.assertEquals(1, exemplars.size());
-        Exemplar exemplar = exemplars.get(0);
-        Assert.assertEquals("B", exemplar.getLabels().get(TRACE_ID));
-
-        Thread.sleep(15); // more than the minimum retention period defined in config above.
-
-        SpanContextSupplier.setSpanContext(spanContextA);
-        exemplarSampler.observe(1.0);
-        exemplars = exemplarSampler.collect();
-        Assert.assertEquals(1, exemplars.size());
-        exemplar = exemplars.get(0);
-        Assert.assertEquals("A", exemplar.getLabels().get(TRACE_ID));
-    }
+  public SpanContext makeSpanContext(String traceId, String spanId) {
+
+    return new SpanContext() {
+      @Override
+      public String getCurrentTraceId() {
+        return traceId;
+      }
+
+      @Override
+      public String getCurrentSpanId() {
+        return spanId;
+      }
+
+      @Override
+      public boolean isCurrentSpanSampled() {
+        return true;
+      }
+
+      @Override
+      public void markCurrentSpanAsExemplar() {}
+    };
+  }
+
+  SpanContext spanContextA = makeSpanContext("A", "a");
+  SpanContext spanContextB = makeSpanContext("B", "b");
+  SpanContext origSpanContext;
+
+  ExemplarSamplerConfig config =
+      new ExemplarSamplerConfig(
+          10, // min retention period in milliseconds
+          20, // max retention period in milliseconds
+          5, // sample interval in milliseconds
+          1, // number of exemplars
+          null // histogram upper bounds
+          );
+
+  @Before
+  public void setUp() {
+    origSpanContext = SpanContextSupplier.getSpanContext();
+  }
+
+  @After
+  public void tearDown() {
+    SpanContextSupplier.setSpanContext(origSpanContext);
+  }
+
+  /**
+   * Test: When a {@link SpanContext} is provided as a constructor argument to the {@link
+   * ExemplarSampler}, then that {@link SpanContext} is used, not the one from the {@link
+   * SpanContextSupplier}.
+   */
+  @Test
+  public void testConstructorInjection() {
+    ExemplarsProperties properties = ExemplarsProperties.builder().build();
+    ExemplarSamplerConfig config = new ExemplarSamplerConfig(properties, 1);
+    ExemplarSampler exemplarSampler = new ExemplarSampler(config, spanContextA);
+
+    SpanContextSupplier.setSpanContext(spanContextB);
+    exemplarSampler.observe(1.0);
+    Exemplars exemplars = exemplarSampler.collect();
+    Assert.assertEquals(1, exemplars.size());
+    Exemplar exemplar = exemplars.get(0);
+    Assert.assertEquals("A", exemplar.getLabels().get(TRACE_ID));
+  }
+
+  /**
+   * When the global {@link SpanContext} is updated via {@link
+   * SpanContextSupplier#setSpanContext(SpanContext)}, the {@link ExemplarSampler} recognizes the
+   * update (unless a {@link SpanContext} was provided as constructor argument to the {@link
+   * ExemplarSampler}).
+   */
+  @Test
+  public void testUpdateSpanContext() throws InterruptedException {
+    ExemplarSampler exemplarSampler = new ExemplarSampler(config);
+
+    SpanContextSupplier.setSpanContext(spanContextB);
+    exemplarSampler.observe(1.0);
+    Exemplars exemplars = exemplarSampler.collect();
+    Assert.assertEquals(1, exemplars.size());
+    Exemplar exemplar = exemplars.get(0);
+    Assert.assertEquals("B", exemplar.getLabels().get(TRACE_ID));
+
+    Thread.sleep(15); // more than the minimum retention period defined in config above.
+
+    SpanContextSupplier.setSpanContext(spanContextA);
+    exemplarSampler.observe(1.0);
+    exemplars = exemplarSampler.collect();
+    Assert.assertEquals(1, exemplars.size());
+    exemplar = exemplars.get(0);
+    Assert.assertEquals("A", exemplar.getLabels().get(TRACE_ID));
+  }
 }
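The two tests above capture the contract for how the sampler resolves its SpanContext: a context passed to the constructor wins, otherwise the sampler follows whatever is currently registered with SpanContextSupplier. A minimal sketch of the two construction modes, using only calls that appear in this test; the package names for ExemplarSampler, ExemplarSamplerConfig, and Exemplars are assumed from the imports used elsewhere in this diff:

import io.prometheus.metrics.core.exemplars.ExemplarSampler;
import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfig;
import io.prometheus.metrics.model.snapshots.Exemplars;
import io.prometheus.metrics.tracer.common.SpanContext;
import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;

public class ExemplarSamplerUsageSketch {
  public static void main(String[] args) {
    // Same arguments as the test fixture above: min retention, max retention,
    // sample interval, number of exemplars, histogram upper bounds.
    ExemplarSamplerConfig config = new ExemplarSamplerConfig(10, 20, 5, 1, null);

    // Default mode: the sampler asks SpanContextSupplier for the current span on each observation.
    ExemplarSampler followsGlobalContext = new ExemplarSampler(config);

    // Constructor injection: this sampler keeps using the injected context and ignores
    // later SpanContextSupplier.setSpanContext(...) calls.
    SpanContext pinned = SpanContextSupplier.getSpanContext();
    ExemplarSampler usesPinnedContext = new ExemplarSampler(config, pinned);

    followsGlobalContext.observe(1.0);
    usesPinnedContext.observe(1.0);

    Exemplars exemplars = followsGlobalContext.collect();
    System.out.println(exemplars.size() + " exemplar(s) collected");
  }
}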
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CKMSQuantilesTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CKMSQuantilesTest.java
index ef90f85cf..e8adc42fe 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CKMSQuantilesTest.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CKMSQuantilesTest.java
@@ -1,340 +1,369 @@
 package io.prometheus.metrics.core.metrics;
 
+import static org.junit.Assert.*;
+
 import io.prometheus.metrics.core.metrics.CKMSQuantiles.Quantile;
+import java.util.*;
 import org.apache.commons.math3.distribution.NormalDistribution;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.commons.math3.random.RandomGenerator;
 import org.junit.Test;
 
-import java.util.*;
-
-import static org.junit.Assert.*;
-
 public class CKMSQuantilesTest {
 
-    private final Quantile qMin = new Quantile(0.0, 0.00);
-    private final Quantile q50 = new Quantile(0.5, 0.01);
-    private final Quantile q95 = new Quantile(0.95, 0.005);
-    private final Quantile q99 = new Quantile(0.99, 0.001);
-    private final Quantile qMax = new Quantile(1.0, 0.00);
-
-    @Test
-    public void testGetOnEmptyValues() {
-        CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, q99);
-        assertTrue(Double.isNaN(ckms.get(q95.quantile)));
-    }
-
-    @Test
-    public void testGet() {
-        Random random = new Random(0);
-        CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, q99);
-        List<Double> input = shuffledValues(100, random);
-        for (double value : input) {
-            ckms.insert(value);
-        }
-        validateResults(ckms);
+  private final Quantile qMin = new Quantile(0.0, 0.00);
+  private final Quantile q50 = new Quantile(0.5, 0.01);
+  private final Quantile q95 = new Quantile(0.95, 0.005);
+  private final Quantile q99 = new Quantile(0.99, 0.001);
+  private final Quantile qMax = new Quantile(1.0, 0.00);
+
+  @Test
+  public void testGetOnEmptyValues() {
+    CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, q99);
+    assertTrue(Double.isNaN(ckms.get(q95.quantile)));
+  }
+
+  @Test
+  public void testGet() {
+    Random random = new Random(0);
+    CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, q99);
+    List<Double> input = shuffledValues(100, random);
+    for (double value : input) {
+      ckms.insert(value);
     }
-
-    @Test
-    public void testBatchInsert() {
-        Random random = new Random(1);
-        testInsertBatch(1, 1, 100, random);
-        testInsertBatch(1, 10, 100, random);
-        testInsertBatch(2, 10, 100, random);
-        testInsertBatch(2, 110, 100, random); // compress never called, because compress interval > number of inserts
-        testInsertBatch(3, 10, 100, random);
-        testInsertBatch(10, 10, 100, random);
-        testInsertBatch(128, 128, 1, random);
-        testInsertBatch(128, 128, 1000, random);
-        testInsertBatch(128, 128, 10*1000, random);
-        testInsertBatch(128, 128, 100*1000, random);
-        testInsertBatch(128, 128, 1000*1000, random);
+    validateResults(ckms);
+  }
+
+  @Test
+  public void testBatchInsert() {
+    Random random = new Random(1);
+    testInsertBatch(1, 1, 100, random);
+    testInsertBatch(1, 10, 100, random);
+    testInsertBatch(2, 10, 100, random);
+    testInsertBatch(
+        2, 110, 100,
+        random); // compress never called, because compress interval > number of inserts
+    testInsertBatch(3, 10, 100, random);
+    testInsertBatch(10, 10, 100, random);
+    testInsertBatch(128, 128, 1, random);
+    testInsertBatch(128, 128, 1000, random);
+    testInsertBatch(128, 128, 10 * 1000, random);
+    testInsertBatch(128, 128, 100 * 1000, random);
+    testInsertBatch(128, 128, 1000 * 1000, random);
+  }
+
+  private void testInsertBatch(
+      int batchSize, int compressInterval, int totalNumber, Random random) {
+    System.out.println(
+        "testInsertBatch(batchSize="
+            + batchSize
+            + ", compressInterval="
+            + compressInterval
+            + ", totalNumber="
+            + totalNumber
+            + ")");
+    CKMSQuantiles ckms = new CKMSQuantiles(q50, q95);
+    int insertsSinceCompress = 0;
+    List<Double> input = shuffledValues(totalNumber, random);
+    for (int i = 0; i < input.size(); i += batchSize) {
+      double[] batch = new double[batchSize];
+      int j;
+      for (j = 0; j < batchSize && i + j < input.size(); j++) {
+        batch[j] = input.get(i + j);
+      }
+      Arrays.sort(batch, 0, j);
+      ckms.insertBatch(batch, j);
+      validateSamples(ckms); // after each insert the samples should still be valid
+      insertsSinceCompress += j;
+      if (insertsSinceCompress >= compressInterval) {
+        ckms.compress();
+        validateSamples(ckms); // after each compress the samples should still be valid
+        insertsSinceCompress = 0;
+      }
     }
-
-    private void testInsertBatch(int batchSize, int compressInterval, int totalNumber, Random random) {
-        System.out.println("testInsertBatch(batchSize=" + batchSize + ", compressInterval=" + compressInterval + ", totalNumber=" + totalNumber + ")");
-        CKMSQuantiles ckms = new CKMSQuantiles(q50, q95);
-        int insertsSinceCompress = 0;
-        List<Double> input = shuffledValues(totalNumber, random);
-        for (int i=0; i<input.size(); i+=batchSize) {
-            double[] batch = new double[batchSize];
-            int j;
-            for (j=0; j<batchSize && i+j<input.size(); j++) {
-                batch[j] = input.get(i+j);
-            }
-            Arrays.sort(batch, 0, j);
-            ckms.insertBatch(batch, j);
-            validateSamples(ckms); // after each insert the samples should still be valid
-            insertsSinceCompress += j;
-            if (insertsSinceCompress >= compressInterval) {
-                ckms.compress();
-                validateSamples(ckms); // after each compress the samples should still be valid
-                insertsSinceCompress=0;
-            }
-        }
-        validateResults(ckms);
+    validateResults(ckms);
+  }
+
+  @Test
+  public void testGetWithAMillionElements() {
+    Random random = new Random(2);
+    List<Double> input = shuffledValues(1000 * 1000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, q99);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testGetWithAMillionElements() {
-        Random random = new Random(2);
-        List<Double> input = shuffledValues(1000*1000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, q99);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        assertTrue("sample size should be way below 1_000_000", ckms.samples.size() < 1000);
+    validateResults(ckms);
+    assertTrue("sample size should be way below 1_000_000", ckms.samples.size() < 1000);
+  }
+
+  @Test
+  public void testMin() {
+    Random random = new Random(3);
+    List<Double> input = shuffledValues(1000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(qMin);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testMin() {
-        Random random = new Random(3);
-        List<Double> input = shuffledValues(1000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(qMin);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        ckms.compress();
-        assertEquals(2, ckms.samples.size());
+    validateResults(ckms);
+    ckms.compress();
+    assertEquals(2, ckms.samples.size());
+  }
+
+  @Test
+  public void testMax() {
+    Random random = new Random(4);
+    List<Double> input = shuffledValues(1000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(qMax);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testMax() {
-        Random random = new Random(4);
-        List<Double> input = shuffledValues(1000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(qMax);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        ckms.compress();
-        assertEquals(2, ckms.samples.size());
+    validateResults(ckms);
+    ckms.compress();
+    assertEquals(2, ckms.samples.size());
+  }
+
+  @Test
+  public void testMinMax() {
+    Random random = new Random(5);
+    List<Double> input = shuffledValues(1000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(qMin, qMax);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testMinMax() {
-        Random random = new Random(5);
-        List<Double> input = shuffledValues(1000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(qMin, qMax);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        ckms.compress();
-        assertEquals(2, ckms.samples.size());
+    validateResults(ckms);
+    ckms.compress();
+    assertEquals(2, ckms.samples.size());
+  }
+
+  @Test
+  public void testMinAndOthers() {
+    Random random = new Random(6);
+    List<Double> input = shuffledValues(1000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(q95, qMin);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testMinAndOthers() {
-        Random random = new Random(6);
-        List<Double> input = shuffledValues(1000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(q95, qMin);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        assertTrue(ckms.samples.size() < 200); // should be a lot less than input.size()
+    validateResults(ckms);
+    assertTrue(ckms.samples.size() < 200); // should be a lot less than input.size()
+  }
+
+  @Test
+  public void testMaxAndOthers() {
+    Random random = new Random(7);
+    List<Double> input = shuffledValues(10000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, qMax);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testMaxAndOthers() {
-        Random random = new Random(7);
-        List<Double> input = shuffledValues(10000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(q50, q95, qMax);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        assertTrue(ckms.samples.size() < 200); // should be a lot less than input.size()
+    validateResults(ckms);
+    assertTrue(ckms.samples.size() < 200); // should be a lot less than input.size()
+  }
+
+  @Test
+  public void testMinMaxAndOthers() {
+    Random random = new Random(8);
+    List<Double> input = shuffledValues(10000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(qMin, q50, q95, q99, qMax);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testMinMaxAndOthers() {
-        Random random = new Random(8);
-        List<Double> input = shuffledValues(10000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(qMin, q50, q95, q99, qMax);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        assertTrue(ckms.samples.size() < 200); // should be a lot less than input.size()
+    validateResults(ckms);
+    assertTrue(ckms.samples.size() < 200); // should be a lot less than input.size()
+  }
+
+  @Test
+  public void testExactQuantile() {
+    Random random = new Random(9);
+    List<Double> input = shuffledValues(10000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(new Quantile(0.95, 0));
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testExactQuantile() {
-        Random random = new Random(9);
-        List<Double> input = shuffledValues(10000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(new Quantile(0.95, 0));
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        // With epsilon == 0 we need to keep all inputs in samples.
-        assertEquals(input.size(), ckms.samples.size());
+    validateResults(ckms);
+    // With epsilon == 0 we need to keep all inputs in samples.
+    assertEquals(input.size(), ckms.samples.size());
+  }
+
+  @Test
+  public void testExactAndOthers() {
+    Random random = new Random(10);
+    List<Double> input = shuffledValues(10000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(q50, new Quantile(0.95, 0), q99);
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testExactAndOthers() {
-        Random random = new Random(10);
-        List<Double> input = shuffledValues(10000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(q50, new Quantile(0.95, 0), q99);
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        // With epsilon == 0 we need to keep all inputs in samples.
-        assertEquals(input.size(), ckms.samples.size());
+    validateResults(ckms);
+    // With epsilon == 0 we need to keep all inputs in samples.
+    assertEquals(input.size(), ckms.samples.size());
+  }
+
+  @Test
+  public void testExactAndMin() {
+    Random random = new Random(11);
+    List<Double> input = shuffledValues(10000, random);
+    CKMSQuantiles ckms = new CKMSQuantiles(qMin, q50, new Quantile(0.95, 0));
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testExactAndMin() {
-        Random random = new Random(11);
-        List<Double> input = shuffledValues(10000, random);
-        CKMSQuantiles ckms = new CKMSQuantiles(qMin, q50, new Quantile(0.95, 0));
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
-        // With epsilon == 0 we need to keep all inputs in samples.
-        assertEquals(input.size(), ckms.samples.size());
+    validateResults(ckms);
+    // With epsilon == 0 we need to keep all inputs in samples.
+    assertEquals(input.size(), ckms.samples.size());
+  }
+
+  @Test
+  public void testMaxEpsilon() {
+    Random random = new Random(12);
+    List<Double> input = shuffledValues(10000, random);
+    // epsilon == 1 basically gives you random results, but it should still not throw an exception.
+    CKMSQuantiles ckms = new CKMSQuantiles(new Quantile(0.95, 1));
+    for (double v : input) {
+      ckms.insert(v);
     }
-
-    @Test
-    public void testMaxEpsilon() {
-        Random random = new Random(12);
-        List<Double> input = shuffledValues(10000, random);
-        // epsilon == 1 basically gives you random results, but it should still not throw an exception.
-        CKMSQuantiles ckms = new CKMSQuantiles(new Quantile(0.95, 1));
-        for (double v : input) {
-            ckms.insert(v);
-        }
-        validateResults(ckms);
+    validateResults(ckms);
+  }
+
+  @Test
+  public void testGetGaussian() {
+    RandomGenerator rand = new JDKRandomGenerator();
+    rand.setSeed(0);
+
+    double mean = 0.0;
+    double stddev = 1.0;
+    NormalDistribution normalDistribution =
+        new NormalDistribution(
+            rand, mean, stddev, NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
+
+    List<Quantile> quantiles = new ArrayList<>();
+    quantiles.add(new Quantile(0.10, 0.001));
+    quantiles.add(new Quantile(0.50, 0.01));
+    quantiles.add(new Quantile(0.90, 0.001));
+    quantiles.add(new Quantile(0.95, 0.001));
+    quantiles.add(new Quantile(0.99, 0.001));
+
+    CKMSQuantiles ckms = new CKMSQuantiles(quantiles.toArray(new Quantile[] {}));
+
+    final int elemCount = 1000 * 1000;
+    double[] shuffle = normalDistribution.sample(elemCount);
+
+    // insert a million samples
+    for (double v : shuffle) {
+      ckms.insert(v);
     }
 
-    @Test
-    public void testGetGaussian() {
-        RandomGenerator rand = new JDKRandomGenerator();
-        rand.setSeed(0);
-
-        double mean = 0.0;
-        double stddev = 1.0;
-        NormalDistribution normalDistribution = new NormalDistribution(rand, mean, stddev, NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
-
-        List<Quantile> quantiles = new ArrayList<>();
-        quantiles.add(new Quantile(0.10, 0.001));
-        quantiles.add(new Quantile(0.50, 0.01));
-        quantiles.add(new Quantile(0.90, 0.001));
-        quantiles.add(new Quantile(0.95, 0.001));
-        quantiles.add(new Quantile(0.99, 0.001));
-
-        CKMSQuantiles ckms = new CKMSQuantiles(quantiles.toArray(new Quantile[]{}));
-
-        final int elemCount = 1000*1000;
-        double[] shuffle = normalDistribution.sample(elemCount);
-
-        // insert a million samples
-        for (double v : shuffle) {
-            ckms.insert(v);
-        }
-
-        // give the actual values for the quantiles we test
-        double p10 = normalDistribution.inverseCumulativeProbability(0.1);
-        double p90 = normalDistribution.inverseCumulativeProbability(0.9);
-        double p95 = normalDistribution.inverseCumulativeProbability(0.95);
-        double p99 = normalDistribution.inverseCumulativeProbability(0.99);
-
-        //ε-approximate quantiles relaxes the requirement
-        //to finding an item with rank between (φ−ε)n and (φ+ε)n.
-        assertEquals(p10, ckms.get(0.1), errorBoundsNormalDistribution(0.1, 0.001, normalDistribution));
-        assertEquals(mean, ckms.get(0.5), errorBoundsNormalDistribution(0.5, 0.01, normalDistribution));
-        assertEquals(p90, ckms.get(0.9), errorBoundsNormalDistribution(0.9, 0.001, normalDistribution));
-        assertEquals(p95, ckms.get(0.95), errorBoundsNormalDistribution(0.95, 0.001, normalDistribution));
-        assertEquals(p99, ckms.get(0.99), errorBoundsNormalDistribution(0.99, 0.001, normalDistribution));
-
-        assertTrue("sample size should be below 1000", ckms.samples.size() < 1000);
+    // give the actual values for the quantiles we test
+    double p10 = normalDistribution.inverseCumulativeProbability(0.1);
+    double p90 = normalDistribution.inverseCumulativeProbability(0.9);
+    double p95 = normalDistribution.inverseCumulativeProbability(0.95);
+    double p99 = normalDistribution.inverseCumulativeProbability(0.99);
+
+    // ε-approximate quantiles relaxes the requirement
+    // to finding an item with rank between (φ−ε)n and (φ+ε)n.
+    assertEquals(p10, ckms.get(0.1), errorBoundsNormalDistribution(0.1, 0.001, normalDistribution));
+    assertEquals(mean, ckms.get(0.5), errorBoundsNormalDistribution(0.5, 0.01, normalDistribution));
+    assertEquals(p90, ckms.get(0.9), errorBoundsNormalDistribution(0.9, 0.001, normalDistribution));
+    assertEquals(
+        p95, ckms.get(0.95), errorBoundsNormalDistribution(0.95, 0.001, normalDistribution));
+    assertEquals(
+        p99, ckms.get(0.99), errorBoundsNormalDistribution(0.99, 0.001, normalDistribution));
+
+    assertTrue("sample size should be below 1000", ckms.samples.size() < 1000);
+  }
+
+  double errorBoundsNormalDistribution(double p, double epsilon, NormalDistribution nd) {
+    // (φ+ε)n
+    double upperBound = nd.inverseCumulativeProbability(p + epsilon);
+    // (φ−ε)n
+    double lowerBound = nd.inverseCumulativeProbability(p - epsilon);
+    // subtract and divide by 2, assuming that the increase is linear in this small epsilon.
+    return Math.abs(upperBound - lowerBound) / 2;
+  }
+
+  @Test
+  public void testIllegalArgumentException() {
+    try {
+      new Quantile(-1, 0);
+    } catch (IllegalArgumentException e) {
+      assertEquals("Quantile must be between 0 and 1", e.getMessage());
+    } catch (Exception e) {
+      fail("Wrong exception thrown" + e);
     }
-
-    double errorBoundsNormalDistribution(double p, double epsilon, NormalDistribution nd) {
-        //(φ+ε)n
-        double upperBound = nd.inverseCumulativeProbability(p + epsilon);
-        //(φ−ε)n
-        double lowerBound = nd.inverseCumulativeProbability(p - epsilon);
-        // subtract and divide by 2, assuming that the increase is linear in this small epsilon.
-        return Math.abs(upperBound - lowerBound) / 2;
+    try {
+      new Quantile(0.95, 2);
+    } catch (IllegalArgumentException e) {
+      assertEquals("Epsilon must be between 0 and 1", e.getMessage());
+    } catch (Exception e) {
+      fail("Wrong exception thrown" + e);
     }
+  }
 
-    @Test
-    public void testIllegalArgumentException() {
-        try {
-            new Quantile(-1, 0);
-        } catch (IllegalArgumentException e) {
-            assertEquals("Quantile must be between 0 and 1", e.getMessage());
-        } catch (Exception e) {
-            fail("Wrong exception thrown" + e);
-        }
-        try {
-            new Quantile(0.95, 2);
-        } catch (IllegalArgumentException e) {
-            assertEquals("Epsilon must be between 0 and 1", e.getMessage());
-        } catch (Exception e) {
-            fail("Wrong exception thrown" + e);
-        }
+  private List<Double> shuffledValues(int n, Random random) {
+    List<Double> result = new ArrayList<>(n);
+    for (int i = 0; i < n; i++) {
+      result.add(i + 1.0);
     }
-
-    private List<Double> shuffledValues(int n, Random random) {
-        List<Double> result = new ArrayList<>(n);
-        for (int i=0; i<n; i++) {
-            result.add(i + 1.0);
-        }
-        Collections.shuffle(result, random);
-        return result;
-    }
-
-    private void validateSamples(CKMSQuantiles ckms) {
+    Collections.shuffle(result, random);
+    return result;
+  }
+
+  /**
+   * The values that we insert in these tests are always the numbers from 1 to n, in random order.
+   * So we can trivially calculate the range of acceptable results for each quantile. We check if
+   * the value returned by get() is within the range of acceptable results.
+   */
+  private void validateResults(CKMSQuantiles ckms) {
+    for (Quantile q : ckms.quantiles) {
+      double actual = ckms.get(q.quantile);
+      double lowerBound, upperBound;
+      if (q.quantile == 0) {
+        lowerBound = 1;
+        upperBound = 1;
+      } else if (q.quantile == 1) {
+        lowerBound = ckms.n;
+        upperBound = ckms.n;
+      } else {
+        lowerBound = Math.floor(ckms.n * (q.quantile - 2 * q.epsilon));
+        upperBound = Math.ceil(ckms.n * (q.quantile + 2 * q.epsilon));
+      }
+      boolean ok = actual >= lowerBound && actual <= upperBound;
+      if (!ok) {
         for (CKMSQuantiles.Sample sample : ckms.samples) {
-            String msg = "invalid sample " + sample + ": count=" + ckms.n + " r=" + r + " f(r)=" + ckms.f(r);
-            assertTrue(msg, sample.g + sample.delta <= ckms.f(r));
-            assertTrue("Samples not ordered. Keep in mind that insertBatch() takes a sorted array as parameter.", prev <= sample.value);
-            prev = sample.value;
-            r += sample.g;
-        }
-        assertEquals("the sum of all g's must be the total number of observations", r, ckms.n);
-    }
-
-    /**
-     * The values that we insert in these tests are always the numbers from 1 to n, in random order.
-     * So we can trivially calculate the range of acceptable results for each quantile.
-     * We check if the value returned by get() is within the range of acceptable results.
-     */
-    private void validateResults(CKMSQuantiles ckms) {
-        for (Quantile q : ckms.quantiles) {
-            double actual = ckms.get(q.quantile);
-            double lowerBound, upperBound;
-            if (q.quantile == 0) {
-                lowerBound = 1;
-                upperBound = 1;
-            } else if (q.quantile == 1) {
-                lowerBound = ckms.n;
-                upperBound = ckms.n;
-            } else {
-                lowerBound = Math.floor(ckms.n * (q.quantile - 2 * q.epsilon));
-                upperBound = Math.ceil(ckms.n * (q.quantile + 2 * q.epsilon));
-            }
-            boolean ok = actual >= lowerBound && actual <= upperBound;
-            if (!ok) {
-                for (CKMSQuantiles.Sample sample : ckms.samples) {
-                    System.err.println(sample);
-                }
-            }
-            String errorMessage = q + ": " + actual + " not in [" + lowerBound + ", " + upperBound + "], n=" + ckms.n + ", " +  q.quantile + "*" + ckms.n + "=" + (q.quantile*ckms.n);
-            assertTrue(errorMessage, ok);
+          System.err.println(sample);
         }
+      }
+      String errorMessage =
+          q
+              + ": "
+              + actual
+              + " not in ["
+              + lowerBound
+              + ", "
+              + upperBound
+              + "], n="
+              + ckms.n
+              + ", "
+              + q.quantile
+              + "*"
+              + ckms.n
+              + "="
+              + (q.quantile * ckms.n);
+      assertTrue(errorMessage, ok);
     }
+  }
 }
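The validateResults() helper above accepts any estimate whose rank lies within 2ε·n of the target rank q·n, which is easy to compute by hand because the inputs are always the numbers 1..n. A tiny standalone sketch of that acceptance window (the class name is made up for illustration):

public class QuantileBoundsSketch {
  public static void main(String[] args) {
    int n = 10_000; // matches the larger tests above
    double q = 0.95;
    double epsilon = 0.005;
    // Same formula as validateResults(): floor(n * (q - 2ε)) .. ceil(n * (q + 2ε)).
    double lowerBound = Math.floor(n * (q - 2 * epsilon));
    double upperBound = Math.ceil(n * (q + 2 * epsilon));
    // Prints [9400.0, 9600.0]: any estimate whose rank lies in this window passes the check.
    System.out.println("[" + lowerBound + ", " + upperBound + "]");
  }
}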
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CounterTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CounterTest.java
index 7be2a3869..435c5b8fe 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CounterTest.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/CounterTest.java
@@ -1,15 +1,20 @@
 package io.prometheus.metrics.core.metrics;
 
-import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat;
+import static io.prometheus.metrics.core.metrics.TestUtil.assertExemplarEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.rules.ExpectedException.none;
+
+import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfigTestUtil;
 import io.prometheus.metrics.expositionformats.PrometheusProtobufWriter;
 import io.prometheus.metrics.expositionformats.generated.com_google_protobuf_3_25_3.Metrics;
-import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfigTestUtil;
-import io.prometheus.metrics.tracer.common.SpanContext;
-import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
 import io.prometheus.metrics.model.snapshots.CounterSnapshot;
 import io.prometheus.metrics.model.snapshots.Exemplar;
 import io.prometheus.metrics.model.snapshots.Labels;
 import io.prometheus.metrics.model.snapshots.Unit;
+import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat;
+import io.prometheus.metrics.tracer.common.SpanContext;
+import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
+import java.util.Iterator;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -17,302 +22,293 @@
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
-import java.util.Iterator;
-
-import static io.prometheus.metrics.core.metrics.TestUtil.assertExemplarEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.rules.ExpectedException.none;
-
 public class CounterTest {
 
-    Counter noLabels;
-    Counter labels;
-    private static final long exemplarSampleIntervalMillis = 10;
-    private static final long exemplarMinAgeMillis = 100;
-    SpanContext origSpanContext;
-
-    @Rule
-    public final ExpectedException thrown = none();
-
-    @Before
-    public void setUp() throws NoSuchFieldException, IllegalAccessException {
-        noLabels = Counter.builder().name("nolabels").build();
-        labels = Counter.builder().name("labels")
-                .help("help")
-                .unit(Unit.SECONDS)
-                .labelNames("l")
-                .build();
-        origSpanContext = SpanContextSupplier.getSpanContext();
-        ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(noLabels, exemplarSampleIntervalMillis);
-        ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(noLabels, exemplarMinAgeMillis);
-        ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(labels, exemplarSampleIntervalMillis);
-        ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(labels, exemplarMinAgeMillis);
-    }
-
-    @After
-    public void tearDown() {
-        SpanContextSupplier.setSpanContext(origSpanContext);
-    }
-
-    private CounterSnapshot.CounterDataPointSnapshot getData(Counter counter, String... labels) {
-        return counter.collect().getDataPoints().stream()
-                .filter(d -> d.getLabels().equals(Labels.of(labels)))
-                .findAny()
-                .orElseThrow(() -> new RuntimeException("counter with labels " + labels + " not found"));
-    }
-
-    private double getValue(Counter counter, String... labels) {
-        return getData(counter, labels).getValue();
-    }
-
-
-    private int getNumberOfLabels(Counter counter) {
-        return ((CounterSnapshot) counter.collect()).getDataPoints().size();
-    }
-
-    @Test
-    public void testIncrement() {
-        noLabels.inc();
-        assertEquals(1.0, getValue(noLabels), .001);
-        noLabels.inc(2);
-        assertEquals(3.0, getValue(noLabels), .001);
-        noLabels.labelValues().inc(4);
-        assertEquals(7.0, getValue(noLabels), .001);
-        noLabels.labelValues().inc();
-        assertEquals(8.0, getValue(noLabels), .001);
-    }
-
-    @Test
-    public void testNegativeIncrementFails() {
-        thrown.expect(IllegalArgumentException.class);
-        thrown.expectMessage("Negative increment -1 is illegal for Counter metrics.");
-        noLabels.inc(-1);
-    }
-
-    @Test
-    public void testEmptyCountersHaveNoLabels() {
-        assertEquals(1, getNumberOfLabels(noLabels));
-        assertEquals(0, getNumberOfLabels(labels));
-    }
-
-    @Test
-    public void testLabels() {
-        assertEquals(0, getNumberOfLabels(labels));
-        labels.labelValues("a").inc();
-        assertEquals(1, getNumberOfLabels(labels));
-        assertEquals(1.0, getValue(labels, "l", "a"), .001);
-        labels.labelValues("b").inc(3);
-        assertEquals(2, getNumberOfLabels(labels));
-        assertEquals(1.0, getValue(labels, "l", "a"), .001);
-        assertEquals(3.0, getValue(labels, "l", "b"), .001);
+  Counter noLabels;
+  Counter labels;
+  private static final long exemplarSampleIntervalMillis = 10;
+  private static final long exemplarMinAgeMillis = 100;
+  SpanContext origSpanContext;
+
+  @Rule public final ExpectedException thrown = none();
+
+  @Before
+  public void setUp() throws NoSuchFieldException, IllegalAccessException {
+    noLabels = Counter.builder().name("nolabels").build();
+    labels =
+        Counter.builder().name("labels").help("help").unit(Unit.SECONDS).labelNames("l").build();
+    origSpanContext = SpanContextSupplier.getSpanContext();
+    ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(noLabels, exemplarSampleIntervalMillis);
+    ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(noLabels, exemplarMinAgeMillis);
+    ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(labels, exemplarSampleIntervalMillis);
+    ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(labels, exemplarMinAgeMillis);
+  }
+
+  @After
+  public void tearDown() {
+    SpanContextSupplier.setSpanContext(origSpanContext);
+  }
+
+  private CounterSnapshot.CounterDataPointSnapshot getData(Counter counter, String... labels) {
+    return counter.collect().getDataPoints().stream()
+        .filter(d -> d.getLabels().equals(Labels.of(labels)))
+        .findAny()
+        .orElseThrow(() -> new RuntimeException("counter with labels " + labels + " not found"));
+  }
+
+  private double getValue(Counter counter, String... labels) {
+    return getData(counter, labels).getValue();
+  }
+
+  private int getNumberOfLabels(Counter counter) {
+    return ((CounterSnapshot) counter.collect()).getDataPoints().size();
+  }
+
+  @Test
+  public void testIncrement() {
+    noLabels.inc();
+    assertEquals(1.0, getValue(noLabels), .001);
+    noLabels.inc(2);
+    assertEquals(3.0, getValue(noLabels), .001);
+    noLabels.labelValues().inc(4);
+    assertEquals(7.0, getValue(noLabels), .001);
+    noLabels.labelValues().inc();
+    assertEquals(8.0, getValue(noLabels), .001);
+  }
+
+  @Test
+  public void testNegativeIncrementFails() {
+    thrown.expect(IllegalArgumentException.class);
+    thrown.expectMessage("Negative increment -1 is illegal for Counter metrics.");
+    noLabels.inc(-1);
+  }
+
+  @Test
+  public void testEmptyCountersHaveNoLabels() {
+    assertEquals(1, getNumberOfLabels(noLabels));
+    assertEquals(0, getNumberOfLabels(labels));
+  }
+
+  @Test
+  public void testLabels() {
+    assertEquals(0, getNumberOfLabels(labels));
+    labels.labelValues("a").inc();
+    assertEquals(1, getNumberOfLabels(labels));
+    assertEquals(1.0, getValue(labels, "l", "a"), .001);
+    labels.labelValues("b").inc(3);
+    assertEquals(2, getNumberOfLabels(labels));
+    assertEquals(1.0, getValue(labels, "l", "a"), .001);
+    assertEquals(3.0, getValue(labels, "l", "b"), .001);
+  }
+
+  @Test
+  public void testTotalStrippedFromName() {
+    for (String name :
+        new String[] {
+          "my_counter_total", "my.counter.total",
+          "my_counter_seconds_total", "my.counter.seconds.total",
+          "my_counter", "my.counter",
+          "my_counter_seconds", "my.counter.seconds"
+        }) {
+      Counter counter = Counter.builder().name(name).unit(Unit.SECONDS).build();
+      Metrics.MetricFamily protobufData = new PrometheusProtobufWriter().convert(counter.collect());
+      assertEquals(
+          "name: \"my_counter_seconds_total\" type: COUNTER metric { counter { value: 0.0 } }",
+          TextFormat.printer().shortDebugString(protobufData));
     }
+  }
 
-    @Test
-    public void testTotalStrippedFromName() {
-        for (String name : new String[]{
-                "my_counter_total", "my.counter.total",
-                "my_counter_seconds_total", "my.counter.seconds.total",
-                "my_counter", "my.counter",
-                "my_counter_seconds", "my.counter.seconds"}) {
-            Counter counter = Counter.builder()
-                    .name(name)
-                    .unit(Unit.SECONDS)
-                    .build();
-            Metrics.MetricFamily protobufData = new PrometheusProtobufWriter().convert(counter.collect());
-            assertEquals("name: \"my_counter_seconds_total\" type: COUNTER metric { counter { value: 0.0 } }", TextFormat.printer().shortDebugString(protobufData));
-        }
-    }
-
-    @Test
-    public void testSnapshotComplete() {
-        long before = System.currentTimeMillis();
-        Counter counter = Counter.builder()
-                .name("test_seconds_total")
-                .unit(Unit.SECONDS)
-                .help("help message")
-                .constLabels(Labels.of("const1name", "const1value", "const2name", "const2value"))
-                .labelNames("path", "status")
-                .build();
-        counter.labelValues("/", "200").inc(2);
-        counter.labelValues("/", "500").inc();
-        CounterSnapshot snapshot = (CounterSnapshot) counter.collect();
-        Assert.assertEquals("test_seconds", snapshot.getMetadata().getName());
-        Assert.assertEquals("seconds", snapshot.getMetadata().getUnit().toString());
-        Assert.assertEquals("help message", snapshot.getMetadata().getHelp());
-        Assert.assertEquals(2, snapshot.getDataPoints().size());
-        Iterator<CounterSnapshot.CounterDataPointSnapshot> iter = snapshot.getDataPoints().iterator();
-        // data is ordered by labels, so 200 comes before 500
-        CounterSnapshot.CounterDataPointSnapshot data = iter.next();
-        Assert.assertEquals(Labels.of("const1name", "const1value", "const2name", "const2value", "path", "/", "status", "200"), data.getLabels());
-        Assert.assertEquals(2, data.getValue(), 0.0001);
-        Assert.assertTrue(data.getCreatedTimestampMillis() >= before);
-        Assert.assertTrue(data.getCreatedTimestampMillis() <= System.currentTimeMillis());
-        // 500
-        data = iter.next();
-        Assert.assertEquals(Labels.of("const1name", "const1value", "const2name", "const2value", "path", "/", "status", "500"), data.getLabels());
-        Assert.assertEquals(1, data.getValue(), 0.0001);
-        Assert.assertTrue(data.getCreatedTimestampMillis() >= before);
-        Assert.assertTrue(data.getCreatedTimestampMillis() <= System.currentTimeMillis());
-    }
-
-    @Test
-    public void testIncWithExemplar() throws Exception {
-        noLabels.incWithExemplar(Labels.of("key", "value"));
-        assertExemplar(noLabels, 1.0, "key", "value");
-
-        Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
-
-        noLabels.incWithExemplar(Labels.EMPTY);
-        assertExemplar(noLabels, 1.0);
-
-        Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
-
-        noLabels.incWithExemplar(3, Labels.of("key1", "value1", "key2", "value2"));
-        assertExemplar(noLabels, 3, "key1", "value1", "key2", "value2");
-    }
-
-    private void assertExemplar(Counter counter, double value, String... labels) {
-        Exemplar exemplar = getData(counter).getExemplar();
-        Assert.assertEquals(value, exemplar.getValue(), 0.0001);
-        assertEquals(Labels.of(labels), exemplar.getLabels());
-    }
-
-    @Test
-    public void testExemplarSampler() throws Exception {
-        final Exemplar exemplar1 = Exemplar.builder()
-                .value(2.0)
-                .traceId("abc")
-                .spanId("123")
-                .build();
-        final Exemplar exemplar2 = Exemplar.builder()
-                .value(1.0)
-                .traceId("def")
-                .spanId("456")
-                .build();
-        final Exemplar exemplar3 = Exemplar.builder()
-                .value(1.0)
-                .traceId("123")
-                .spanId("abc")
-                .build();
-        final Exemplar customExemplar = Exemplar.builder()
-                .value(1.0)
-                .traceId("bab")
-                .spanId("cdc")
-                .labels(Labels.of("test", "test"))
-                .build();
-        SpanContext spanContext = new SpanContext() {
-            private int callNumber = 0;
-
-            @Override
-            public String getCurrentTraceId() {
-                switch (callNumber) {
-                    case 1:
-                        return "abc";
-                    case 3:
-                        return "def";
-                    case 4:
-                        return "123";
-                    case 5:
-                        return "bab";
-                    default:
-                        throw new RuntimeException("unexpected call");
-                }
+  @Test
+  public void testSnapshotComplete() {
+    long before = System.currentTimeMillis();
+    Counter counter =
+        Counter.builder()
+            .name("test_seconds_total")
+            .unit(Unit.SECONDS)
+            .help("help message")
+            .constLabels(Labels.of("const1name", "const1value", "const2name", "const2value"))
+            .labelNames("path", "status")
+            .build();
+    counter.labelValues("/", "200").inc(2);
+    counter.labelValues("/", "500").inc();
+    CounterSnapshot snapshot = (CounterSnapshot) counter.collect();
+    Assert.assertEquals("test_seconds", snapshot.getMetadata().getName());
+    Assert.assertEquals("seconds", snapshot.getMetadata().getUnit().toString());
+    Assert.assertEquals("help message", snapshot.getMetadata().getHelp());
+    Assert.assertEquals(2, snapshot.getDataPoints().size());
+    Iterator<CounterSnapshot.CounterDataPointSnapshot> iter = snapshot.getDataPoints().iterator();
+    // data is ordered by labels, so 200 comes before 500
+    CounterSnapshot.CounterDataPointSnapshot data = iter.next();
+    Assert.assertEquals(
+        Labels.of(
+            "const1name", "const1value", "const2name", "const2value", "path", "/", "status", "200"),
+        data.getLabels());
+    Assert.assertEquals(2, data.getValue(), 0.0001);
+    Assert.assertTrue(data.getCreatedTimestampMillis() >= before);
+    Assert.assertTrue(data.getCreatedTimestampMillis() <= System.currentTimeMillis());
+    // 500
+    data = iter.next();
+    Assert.assertEquals(
+        Labels.of(
+            "const1name", "const1value", "const2name", "const2value", "path", "/", "status", "500"),
+        data.getLabels());
+    Assert.assertEquals(1, data.getValue(), 0.0001);
+    Assert.assertTrue(data.getCreatedTimestampMillis() >= before);
+    Assert.assertTrue(data.getCreatedTimestampMillis() <= System.currentTimeMillis());
+  }
+
+  @Test
+  public void testIncWithExemplar() throws Exception {
+    noLabels.incWithExemplar(Labels.of("key", "value"));
+    assertExemplar(noLabels, 1.0, "key", "value");
+
+    Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
+
+    noLabels.incWithExemplar(Labels.EMPTY);
+    assertExemplar(noLabels, 1.0);
+
+    Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
+
+    noLabels.incWithExemplar(3, Labels.of("key1", "value1", "key2", "value2"));
+    assertExemplar(noLabels, 3, "key1", "value1", "key2", "value2");
+  }
+
+  private void assertExemplar(Counter counter, double value, String... labels) {
+    Exemplar exemplar = getData(counter).getExemplar();
+    Assert.assertEquals(value, exemplar.getValue(), 0.0001);
+    assertEquals(Labels.of(labels), exemplar.getLabels());
+  }
+
+  @Test
+  public void testExemplarSampler() throws Exception {
+    final Exemplar exemplar1 = Exemplar.builder().value(2.0).traceId("abc").spanId("123").build();
+    final Exemplar exemplar2 = Exemplar.builder().value(1.0).traceId("def").spanId("456").build();
+    final Exemplar exemplar3 = Exemplar.builder().value(1.0).traceId("123").spanId("abc").build();
+    final Exemplar customExemplar =
+        Exemplar.builder()
+            .value(1.0)
+            .traceId("bab")
+            .spanId("cdc")
+            .labels(Labels.of("test", "test"))
+            .build();
+    SpanContext spanContext =
+        new SpanContext() {
+          private int callNumber = 0;
+
+          @Override
+          public String getCurrentTraceId() {
+            switch (callNumber) {
+              case 1:
+                return "abc";
+              case 3:
+                return "def";
+              case 4:
+                return "123";
+              case 5:
+                return "bab";
+              default:
+                throw new RuntimeException("unexpected call");
             }
-
-            @Override
-            public String getCurrentSpanId() {
-                switch (callNumber) {
-                    case 1:
-                        return "123";
-                    case 3:
-                        return "456";
-                    case 4:
-                        return "abc";
-                    case 5:
-                        return "cdc";
-                    default:
-                        throw new RuntimeException("unexpected call");
-                }
+          }
+
+          @Override
+          public String getCurrentSpanId() {
+            switch (callNumber) {
+              case 1:
+                return "123";
+              case 3:
+                return "456";
+              case 4:
+                return "abc";
+              case 5:
+                return "cdc";
+              default:
+                throw new RuntimeException("unexpected call");
             }
+          }
 
-            @Override
-            public boolean isCurrentSpanSampled() {
-                callNumber++;
-                if (callNumber == 2) {
-                    return false;
-                }
-                return true;
+          @Override
+          public boolean isCurrentSpanSampled() {
+            callNumber++;
+            if (callNumber == 2) {
+              return false;
             }
+            return true;
+          }
 
-            @Override
-            public void markCurrentSpanAsExemplar() {
-            }
+          @Override
+          public void markCurrentSpanAsExemplar() {}
         };
-        Counter counter = Counter.builder()
-                .name("count_total")
-                .build();
+    Counter counter = Counter.builder().name("count_total").build();
 
-        SpanContextSupplier.setSpanContext(spanContext);
-        ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(counter, exemplarMinAgeMillis);
-        ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(counter, exemplarSampleIntervalMillis);
+    SpanContextSupplier.setSpanContext(spanContext);
+    ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(counter, exemplarMinAgeMillis);
+    ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(counter, exemplarSampleIntervalMillis);
 
-        counter.inc(2.0);
-        assertExemplarEquals(exemplar1, getData(counter).getExemplar());
+    counter.inc(2.0);
+    assertExemplarEquals(exemplar1, getData(counter).getExemplar());
 
-        Thread.sleep(2 * exemplarSampleIntervalMillis);
+    Thread.sleep(2 * exemplarSampleIntervalMillis);
 
-        counter.inc(3.0); // min age not reached -> keep the previous exemplar, exemplar sampler not called
-        assertExemplarEquals(exemplar1, getData(counter).getExemplar());
+    counter.inc(
+        3.0); // min age not reached -> keep the previous exemplar, exemplar sampler not called
+    assertExemplarEquals(exemplar1, getData(counter).getExemplar());
 
-        Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
+    Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
 
-        counter.inc(2.0); // 2nd call: isSampled() returns false -> not sampled
-        assertExemplarEquals(exemplar1, getData(counter).getExemplar());
+    counter.inc(2.0); // 2nd call: isSampled() returns false -> not sampled
+    assertExemplarEquals(exemplar1, getData(counter).getExemplar());
 
-        Thread.sleep(2 * exemplarSampleIntervalMillis);
+    Thread.sleep(2 * exemplarSampleIntervalMillis);
 
-        counter.inc(1.0); // sampled
-        assertExemplarEquals(exemplar2, getData(counter).getExemplar());
+    counter.inc(1.0); // sampled
+    assertExemplarEquals(exemplar2, getData(counter).getExemplar());
 
-        Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
+    Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
 
-        counter.inc(1.0); // sampled
-        assertExemplarEquals(exemplar3, getData(counter).getExemplar());
+    counter.inc(1.0); // sampled
+    assertExemplarEquals(exemplar3, getData(counter).getExemplar());
 
-        Thread.sleep(2 * exemplarSampleIntervalMillis);
+    Thread.sleep(2 * exemplarSampleIntervalMillis);
 
-        counter.incWithExemplar(Labels.of("test", "test")); // custom exemplar sampled even though the automatic exemplar hasn't reached min age yet
-        assertExemplarEquals(customExemplar, getData(counter).getExemplar());
-    }
-
-    @Test
-    public void testExemplarSamplerDisabled() {
-        Counter counter = Counter.builder()
-                //.withExemplarSampler((inc, prev) -> {throw new RuntimeException("unexpected call to exemplar sampler");})
-                .name("count_total")
-                .withoutExemplars()
-                .build();
-        counter.incWithExemplar(3.0, Labels.of("a", "b"));
-        Assert.assertNull(getData(counter).getExemplar());
-        counter.inc(2.0);
-        Assert.assertNull(getData(counter).getExemplar());
-    }
+    counter.incWithExemplar(
+        Labels.of(
+            "test",
+            "test")); // custom exemplar sampled even though the automatic exemplar hasn't reached
+    // min age yet
+    assertExemplarEquals(customExemplar, getData(counter).getExemplar());
+  }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testConstLabelsFirst() {
+  @Test
+  public void testExemplarSamplerDisabled() {
+    Counter counter =
         Counter.builder()
-                .name("test_total")
-                .constLabels(Labels.of("const_a", "const_b"))
-                .labelNames("const.a")
-                .build();
-    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testConstLabelsSecond() {
-        Counter.builder()
-                .name("test_total")
-                .labelNames("const.a")
-                .constLabels(Labels.of("const_a", "const_b"))
-                .build();
-    }
+            // .withExemplarSampler((inc, prev) -> {throw new RuntimeException("unexpected call to
+            // exemplar sampler");})
+            .name("count_total")
+            .withoutExemplars()
+            .build();
+    counter.incWithExemplar(3.0, Labels.of("a", "b"));
+    Assert.assertNull(getData(counter).getExemplar());
+    counter.inc(2.0);
+    Assert.assertNull(getData(counter).getExemplar());
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testConstLabelsFirst() {
+    Counter.builder()
+        .name("test_total")
+        .constLabels(Labels.of("const_a", "const_b"))
+        .labelNames("const.a")
+        .build();
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testConstLabelsSecond() {
+    Counter.builder()
+        .name("test_total")
+        .labelNames("const.a")
+        .constLabels(Labels.of("const_a", "const_b"))
+        .build();
+  }
 }
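A minimal usage sketch of the exemplar behavior exercised by testIncWithExemplar and testExemplarSamplerDisabled above, using only classes imported in this file (class and method names are illustrative, taken from the test code, not from library documentation):

import io.prometheus.metrics.core.metrics.Counter;
import io.prometheus.metrics.model.snapshots.CounterSnapshot;
import io.prometheus.metrics.model.snapshots.Labels;

public class CounterExemplarSketch {
  public static void main(String[] args) {
    Counter requests = Counter.builder().name("requests_total").build();

    // Explicitly attached exemplars are stored right away, as testIncWithExemplar shows.
    requests.incWithExemplar(Labels.of("key", "value"));
    CounterSnapshot snapshot = (CounterSnapshot) requests.collect();
    System.out.println(snapshot.getDataPoints().iterator().next().getExemplar());

    // With withoutExemplars() the data point never carries an exemplar; getExemplar() stays null.
    Counter noExemplars = Counter.builder().name("no_exemplars_total").withoutExemplars().build();
    noExemplars.incWithExemplar(3.0, Labels.of("a", "b"));
    noExemplars.inc(2.0);
  }
}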
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/GaugeTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/GaugeTest.java
index 002a44545..678158cbd 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/GaugeTest.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/GaugeTest.java
@@ -1,222 +1,213 @@
 package io.prometheus.metrics.core.metrics;
 
+import static io.prometheus.metrics.core.metrics.TestUtil.assertExemplarEquals;
+import static org.junit.Assert.assertEquals;
+
+import io.prometheus.metrics.core.datapoints.Timer;
 import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfigTestUtil;
-import io.prometheus.metrics.tracer.common.SpanContext;
-import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
 import io.prometheus.metrics.model.snapshots.Exemplar;
 import io.prometheus.metrics.model.snapshots.GaugeSnapshot;
 import io.prometheus.metrics.model.snapshots.Labels;
-import io.prometheus.metrics.core.datapoints.Timer;
+import io.prometheus.metrics.tracer.common.SpanContext;
+import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import static io.prometheus.metrics.core.metrics.TestUtil.assertExemplarEquals;
-import static org.junit.Assert.assertEquals;
-
 public class GaugeTest {
 
-    private static final long exemplarSampleIntervalMillis = 10;
-    private static final long exemplarMinAgeMillis = 100;
-
-    private Gauge noLabels, labels;
-
-    private SpanContext origSpanContext;
-
-    @Before
-    public void setUp() {
-        noLabels = Gauge.builder().name("nolabels").build();
-        labels = Gauge.builder().name("labels").labelNames("l").build();
-        origSpanContext = SpanContextSupplier.getSpanContext();
-    }
-
-    @After
-    public void tearDown() {
-        SpanContextSupplier.setSpanContext(origSpanContext);
-    }
-
-    private GaugeSnapshot.GaugeDataPointSnapshot getData(Gauge gauge, String... labels) {
-        return ((GaugeSnapshot) gauge.collect()).getDataPoints().stream()
-                .filter(data -> data.getLabels().equals(Labels.of(labels)))
-                .findAny()
-                .orElseThrow(RuntimeException::new);
+  private static final long exemplarSampleIntervalMillis = 10;
+  private static final long exemplarMinAgeMillis = 100;
+
+  private Gauge noLabels, labels;
+
+  private SpanContext origSpanContext;
+
+  @Before
+  public void setUp() {
+    noLabels = Gauge.builder().name("nolabels").build();
+    labels = Gauge.builder().name("labels").labelNames("l").build();
+    origSpanContext = SpanContextSupplier.getSpanContext();
+  }
+
+  @After
+  public void tearDown() {
+    SpanContextSupplier.setSpanContext(origSpanContext);
+  }
+
+  private GaugeSnapshot.GaugeDataPointSnapshot getData(Gauge gauge, String... labels) {
+    return ((GaugeSnapshot) gauge.collect())
+        .getDataPoints().stream()
+            .filter(data -> data.getLabels().equals(Labels.of(labels)))
+            .findAny()
+            .orElseThrow(RuntimeException::new);
+  }
+
+  private double getValue(Gauge gauge, String... labels) {
+    return getData(gauge, labels).getValue();
+  }
+
+  @Test
+  public void testIncrement() {
+    noLabels.inc();
+    assertEquals(1.0, getValue(noLabels), .001);
+    noLabels.inc(2);
+    assertEquals(3.0, getValue(noLabels), .001);
+    noLabels.inc(4);
+    assertEquals(7.0, getValue(noLabels), .001);
+    noLabels.inc();
+    assertEquals(8.0, getValue(noLabels), .001);
+  }
+
+  @Test
+  public void testDecrement() {
+    noLabels.dec();
+    assertEquals(-1.0, getValue(noLabels), .001);
+    noLabels.dec(2);
+    assertEquals(-3.0, getValue(noLabels), .001);
+    noLabels.dec(4);
+    assertEquals(-7.0, getValue(noLabels), .001);
+    noLabels.dec();
+    assertEquals(-8.0, getValue(noLabels), .001);
+  }
+
+  @Test
+  public void testSet() {
+    noLabels.set(42);
+    assertEquals(42, getValue(noLabels), .001);
+    noLabels.set(7);
+    assertEquals(7.0, getValue(noLabels), .001);
+  }
+
+  @Test
+  public void testTimer() throws InterruptedException {
+    try (Timer timer = noLabels.startTimer()) {
+      Thread.sleep(12);
     }
-
-    private double getValue(Gauge gauge, String... labels) {
-        return getData(gauge, labels).getValue();
-    }
-
-    @Test
-    public void testIncrement() {
-        noLabels.inc();
-        assertEquals(1.0, getValue(noLabels), .001);
-        noLabels.inc(2);
-        assertEquals(3.0, getValue(noLabels), .001);
-        noLabels.inc(4);
-        assertEquals(7.0, getValue(noLabels), .001);
-        noLabels.inc();
-        assertEquals(8.0, getValue(noLabels), .001);
-    }
-
-    @Test
-    public void testDecrement() {
-        noLabels.dec();
-        assertEquals(-1.0, getValue(noLabels), .001);
-        noLabels.dec(2);
-        assertEquals(-3.0, getValue(noLabels), .001);
-        noLabels.dec(4);
-        assertEquals(-7.0, getValue(noLabels), .001);
-        noLabels.dec();
-        assertEquals(-8.0, getValue(noLabels), .001);
-    }
-
-    @Test
-    public void testSet() {
-        noLabels.set(42);
-        assertEquals(42, getValue(noLabels), .001);
-        noLabels.set(7);
-        assertEquals(7.0, getValue(noLabels), .001);
-    }
-
-    @Test
-    public void testTimer() throws InterruptedException {
-        try (Timer timer = noLabels.startTimer()) {
-            Thread.sleep(12);
-        }
-        assertEquals(0.012, getValue(noLabels), 0.005); // 5ms delta should be enough so this isn't flaky
-    }
-
-    @Test
-    public void noLabelsDefaultZeroValue() {
-        assertEquals(0.0, getValue(noLabels), .001);
-    }
-
-    @Test
-    public void testLabels() {
-        labels.labelValues("a").inc();
-        labels.labelValues("b").inc(3);
-        assertEquals(1.0, getValue(labels, "l", "a"), .001);
-        assertEquals(3.0, getValue(labels, "l", "b"), .001);
-    }
-
-    @Test
-    public void testExemplarSampler() throws Exception {
-        final Exemplar exemplar1 = Exemplar.builder()
-                .value(2.0)
-                .traceId("abc")
-                .spanId("123")
-                .build();
-        final Exemplar exemplar2 = Exemplar.builder()
-                .value(6.5)
-                .traceId("def")
-                .spanId("456")
-                .build();
-        final Exemplar exemplar3 = Exemplar.builder()
-                .value(7.0)
-                .traceId("123")
-                .spanId("abc")
-                .build();
-        final Exemplar customExemplar = Exemplar.builder()
-                .value(8.0)
-                .traceId("bab")
-                .spanId("cdc")
-                .labels(Labels.of("test", "test"))
-                .build();
-        SpanContext spanContext = new SpanContext() {
-            private int callNumber = 0;
-
-            @Override
-            public String getCurrentTraceId() {
-                switch (callNumber) {
-                    case 1:
-                        return "abc";
-                    case 3:
-                        return "def";
-                    case 4:
-                        return "123";
-                    case 5:
-                        return "bab";
-                    default:
-                        throw new RuntimeException("unexpected call");
-                }
+    assertEquals(
+        0.012, getValue(noLabels), 0.005); // 5ms delta should be enough so this isn't flaky
+  }
+
+  @Test
+  public void noLabelsDefaultZeroValue() {
+    assertEquals(0.0, getValue(noLabels), .001);
+  }
+
+  @Test
+  public void testLabels() {
+    labels.labelValues("a").inc();
+    labels.labelValues("b").inc(3);
+    assertEquals(1.0, getValue(labels, "l", "a"), .001);
+    assertEquals(3.0, getValue(labels, "l", "b"), .001);
+  }
+
+  @Test
+  public void testExemplarSampler() throws Exception {
+    final Exemplar exemplar1 = Exemplar.builder().value(2.0).traceId("abc").spanId("123").build();
+    final Exemplar exemplar2 = Exemplar.builder().value(6.5).traceId("def").spanId("456").build();
+    final Exemplar exemplar3 = Exemplar.builder().value(7.0).traceId("123").spanId("abc").build();
+    final Exemplar customExemplar =
+        Exemplar.builder()
+            .value(8.0)
+            .traceId("bab")
+            .spanId("cdc")
+            .labels(Labels.of("test", "test"))
+            .build();
+    SpanContext spanContext =
+        new SpanContext() {
+          private int callNumber = 0;
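+          // callNumber is incremented on each isCurrentSpanSampled() call; call number 2
+          // simulates an unsampled span, so the trace/span id getters are never queried for it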
+
+          @Override
+          public String getCurrentTraceId() {
+            switch (callNumber) {
+              case 1:
+                return "abc";
+              case 3:
+                return "def";
+              case 4:
+                return "123";
+              case 5:
+                return "bab";
+              default:
+                throw new RuntimeException("unexpected call");
             }
-
-            @Override
-            public String getCurrentSpanId() {
-                switch (callNumber) {
-                    case 1:
-                        return "123";
-                    case 3:
-                        return "456";
-                    case 4:
-                        return "abc";
-                    case 5:
-                        return "cdc";
-                    default:
-                        throw new RuntimeException("unexpected call");
-                }
+          }
+
+          @Override
+          public String getCurrentSpanId() {
+            switch (callNumber) {
+              case 1:
+                return "123";
+              case 3:
+                return "456";
+              case 4:
+                return "abc";
+              case 5:
+                return "cdc";
+              default:
+                throw new RuntimeException("unexpected call");
             }
+          }
 
-            @Override
-            public boolean isCurrentSpanSampled() {
-                callNumber++;
-                if (callNumber == 2) {
-                    return false;
-                }
-                return true;
+          @Override
+          public boolean isCurrentSpanSampled() {
+            callNumber++;
+            if (callNumber == 2) {
+              return false;
             }
+            return true;
+          }
 
-            @Override
-            public void markCurrentSpanAsExemplar() {
-            }
+          @Override
+          public void markCurrentSpanAsExemplar() {}
         };
-        Gauge gauge = Gauge.builder()
-                .name("my_gauge")
-                .build();
+    Gauge gauge = Gauge.builder().name("my_gauge").build();
 
-        ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(gauge, exemplarMinAgeMillis);
-        ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(gauge, exemplarSampleIntervalMillis);
-        SpanContextSupplier.setSpanContext(spanContext);
+    ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(gauge, exemplarMinAgeMillis);
+    ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(gauge, exemplarSampleIntervalMillis);
+    SpanContextSupplier.setSpanContext(spanContext);
 
-        gauge.inc(2.0);
-        assertExemplarEquals(exemplar1, getData(gauge).getExemplar());
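+    // 1st span context call: sampled -> exemplar1 (trace "abc", span "123") is recorded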
+    gauge.inc(2.0);
+    assertExemplarEquals(exemplar1, getData(gauge).getExemplar());
 
-        Thread.sleep(2 * exemplarSampleIntervalMillis);
+    Thread.sleep(2 * exemplarSampleIntervalMillis);
 
-        gauge.inc(3.0); // min age not reached -> keep the previous exemplar, exemplar sampler not called
-        assertExemplarEquals(exemplar1, getData(gauge).getExemplar());
+    // min age not reached -> keep the previous exemplar, exemplar sampler not called
+    gauge.inc(3.0);
+    assertExemplarEquals(exemplar1, getData(gauge).getExemplar());
 
-        Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
+    Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
 
-        gauge.inc(2.0); // 2nd call: isSampled() returns false -> not sampled
-        assertExemplarEquals(exemplar1, getData(gauge).getExemplar());
+    gauge.inc(2.0); // 2nd call: isCurrentSpanSampled() returns false -> not sampled
+    assertExemplarEquals(exemplar1, getData(gauge).getExemplar());
 
-        Thread.sleep(2 * exemplarSampleIntervalMillis);
+    Thread.sleep(2 * exemplarSampleIntervalMillis);
 
-        gauge.dec(0.5); // sampled
-        assertExemplarEquals(exemplar2, getData(gauge).getExemplar());
+    gauge.dec(0.5); // sampled
+    assertExemplarEquals(exemplar2, getData(gauge).getExemplar());
 
-        Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
+    Thread.sleep(exemplarMinAgeMillis + 2 * exemplarSampleIntervalMillis);
 
-        gauge.set(7.0); // sampled
-        assertExemplarEquals(exemplar3, getData(gauge).getExemplar());
+    gauge.set(7.0); // sampled
+    assertExemplarEquals(exemplar3, getData(gauge).getExemplar());
 
-        Thread.sleep(2 * exemplarSampleIntervalMillis);
+    Thread.sleep(2 * exemplarSampleIntervalMillis);
 
-        gauge.incWithExemplar(Labels.of("test", "test")); // custom exemplar sampled even though the automatic exemplar hasn't reached min age yet
-        assertExemplarEquals(customExemplar, getData(gauge).getExemplar());
-    }
+    // custom exemplar sampled even though the automatic exemplar hasn't reached min age yet
+    gauge.incWithExemplar(Labels.of("test", "test"));
+    assertExemplarEquals(customExemplar, getData(gauge).getExemplar());
+  }
 
-    @Test
-    public void testExemplarSamplerDisabled() {
-        Gauge gauge = Gauge.builder()
-                .name("test")
-                .withoutExemplars()
-                .build();
-        gauge.setWithExemplar(3.0, Labels.of("a", "b"));
-        Assert.assertNull(getData(gauge).getExemplar());
-        gauge.inc(2.0);
-        Assert.assertNull(getData(gauge).getExemplar());
-    }
+  @Test
+  public void testExemplarSamplerDisabled() {
+    Gauge gauge = Gauge.builder().name("test").withoutExemplars().build();
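+    // with exemplars disabled, neither explicit nor sampled exemplars are recorded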
+    gauge.setWithExemplar(3.0, Labels.of("a", "b"));
+    Assert.assertNull(getData(gauge).getExemplar());
+    gauge.inc(2.0);
+    Assert.assertNull(getData(gauge).getExemplar());
+  }
 }
diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/HistogramTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/HistogramTest.java
index 8df44b43e..da7da5a89 100644
--- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/HistogramTest.java
+++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/HistogramTest.java
@@ -1,6 +1,9 @@
 package io.prometheus.metrics.core.metrics;
 
-import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat;
+import static io.prometheus.metrics.core.metrics.TestUtil.assertExemplarEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
 import io.prometheus.metrics.core.datapoints.DistributionDataPoint;
 import io.prometheus.metrics.core.exemplars.ExemplarSamplerConfigTestUtil;
 import io.prometheus.metrics.expositionformats.OpenMetricsTextFormatWriter;
@@ -12,13 +15,9 @@
 import io.prometheus.metrics.model.snapshots.HistogramSnapshot;
 import io.prometheus.metrics.model.snapshots.Labels;
 import io.prometheus.metrics.model.snapshots.MetricSnapshots;
+import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat;
 import io.prometheus.metrics.tracer.common.SpanContext;
 import io.prometheus.metrics.tracer.initializer.SpanContextSupplier;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Field;
@@ -40,1270 +39,1464 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
-
-import static io.prometheus.metrics.core.metrics.TestUtil.assertExemplarEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 
 public class HistogramTest {
 
-    private static final double RESET_DURATION_REACHED = -123.456; // just a random value indicating that we should simulate that the reset duration has been reached
-
-    private SpanContext origSpanContext;
-
-    @Before
-    public void setUp() {
-        origSpanContext = SpanContextSupplier.getSpanContext();
-    }
-
-    @After
-    public void tearDown() {
-        SpanContextSupplier.setSpanContext(origSpanContext);
+  // a sentinel value indicating that we should simulate that the reset duration has been reached
+  private static final double RESET_DURATION_REACHED = -123.456;
+
+  private SpanContext origSpanContext;
+
+  @Before
+  public void setUp() {
+    origSpanContext = SpanContextSupplier.getSpanContext();
+  }
+
+  @After
+  public void tearDown() {
+    SpanContextSupplier.setSpanContext(origSpanContext);
+  }
+
+  /** Mimic the tests in client_golang. */
+  private static class GolangTestCase {
+    final String name;
+    final String expected;
+    final Histogram histogram;
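+    // observations may contain the RESET_DURATION_REACHED sentinel (see run() below)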
+    final double[] observations;
+
+    private GolangTestCase(
+        String name, String expected, Histogram histogram, double... observations) {
+      this.name = name;
+      this.expected = expected;
+      this.histogram = histogram;
+      this.observations = observations;
     }
 
-    /**
-     * Mimic the tests in client_golang.
-     */
-    private static class GolangTestCase {
-        final String name;
-        final String expected;
-        final Histogram histogram;
-        final double[] observations;
-
-        private GolangTestCase(String name, String expected, Histogram histogram, double... observations) {
-            this.name = name;
-            this.expected = expected;
-            this.histogram = histogram;
-            this.observations = observations;
-        }
-
-        private void run() throws NoSuchFieldException, IllegalAccessException {
-            System.out.println("Running " + name + "...");
-            for (double observation : observations) {
-                if (observation == RESET_DURATION_REACHED) {
-                    Field resetAllowed = Histogram.DataPoint.class.getDeclaredField("resetDurationExpired");
-                    resetAllowed.setAccessible(true);
-                    resetAllowed.set(histogram.getNoLabels(), true);
-                } else {
-                    histogram.observe(observation);
-                }
-            }
-            Metrics.MetricFamily protobufData = new PrometheusProtobufWriter().convert(histogram.collect());
-            String expectedWithMetadata = "name: \"test\" type: HISTOGRAM metric { histogram { " + expected + " } }";
-            assertEquals("test \"" + name + "\" failed", expectedWithMetadata, TextFormat.printer().shortDebugString(protobufData));
+    private void run() throws NoSuchFieldException, IllegalAccessException {
+      System.out.println("Running " + name + "...");
+      for (double observation : observations) {
+        if (observation == RESET_DURATION_REACHED) {
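+          // simulate an expired reset duration by flipping the private flag via reflection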
+          Field resetAllowed = Histogram.DataPoint.class.getDeclaredField("resetDurationExpired");
+          resetAllowed.setAccessible(true);
+          resetAllowed.set(histogram.getNoLabels(), true);
+        } else {
+          histogram.observe(observation);
         }
+      }
+      Metrics.MetricFamily protobufData =
+          new PrometheusProtobufWriter().convert(histogram.collect());
+      String expectedWithMetadata =
+          "name: \"test\" type: HISTOGRAM metric { histogram { " + expected + " } }";
+      assertEquals(
+          "test \"" + name + "\" failed",
+          expectedWithMetadata,
+          TextFormat.printer().shortDebugString(protobufData));
     }
-
-    /**
-     * Test cases copied from histogram_test.go in client_golang.
-     */
-    @Test
-    public void testGolangTests() throws NoSuchFieldException, IllegalAccessException {
-        GolangTestCase[] testCases = new GolangTestCase[]{
-                new GolangTestCase("'no sparse buckets' from client_golang",
-                        "sample_count: 3 " +
-                                "sample_sum: 6.0 " +
-                                "bucket { cumulative_count: 0 upper_bound: 0.005 } " +
-                                "bucket { cumulative_count: 0 upper_bound: 0.01 } " +
-                                "bucket { cumulative_count: 0 upper_bound: 0.025 } " +
-                                "bucket { cumulative_count: 0 upper_bound: 0.05 } " +
-                                "bucket { cumulative_count: 0 upper_bound: 0.1 } " +
-                                "bucket { cumulative_count: 0 upper_bound: 0.25 } " +
-                                "bucket { cumulative_count: 0 upper_bound: 0.5 } " +
-                                "bucket { cumulative_count: 1 upper_bound: 1.0 } " +
-                                "bucket { cumulative_count: 2 upper_bound: 2.5 } " +
-                                "bucket { cumulative_count: 3 upper_bound: 5.0 } " +
-                                "bucket { cumulative_count: 3 upper_bound: 10.0 } " +
-                                "bucket { cumulative_count: 3 upper_bound: Infinity }",
-                        Histogram.builder()
-                                .name("test")
-                                .classicOnly()
-                                .build(),
-                        1.0, 2.0, 3.0),
-                new GolangTestCase("'factor 1.1 results in schema 3' from client_golang",
-                        "sample_count: 4 " +
-                                "sample_sum: 6.0 " +
-                                "schema: 3 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "positive_span { offset: 0 length: 1 } " +
-                                "positive_span { offset: 7 length: 1 } " +
-                                "positive_span { offset: 4 length: 1 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 0 " +
-                                "positive_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(3)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0.0, 1.0, 2.0, 3.0),
-                new GolangTestCase("'factor 1.2 results in schema 2' from client_golang",
-                        "sample_count: 6 " +
-                                "sample_sum: 7.4 " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "positive_span { offset: 0 length: 5 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0, 1, 1.2, 1.4, 1.8, 2),
-                new GolangTestCase("'factor 4 results in schema -1' from client_golang",
-                        "sample_count: 14 " +
-                                "sample_sum: 63.2581251 " +
-                                "schema: -1 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 0 " +
-                                "positive_span { offset: -2 length: 6 } " +
-                                "positive_delta: 2 " +
-                                "positive_delta: 0 " +
-                                "positive_delta: 0 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: -2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(-1)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0.0156251, 0.0625, // Bucket -2: (0.015625, 0.0625)
-                        0.1, 0.25, // Bucket -1: (0.0625, 0.25]
-                        0.5, 1, // Bucket 0: (0.25, 1]
-                        1.5, 2, 3, 3.5, // Bucket 1: (1, 4]
-                        5, 6, 7, // Bucket 2: (4, 16]
-                        33.33 // Bucket 3: (16, 64]
-                ),
-                new GolangTestCase("'factor 17 results in schema -2' from client_golang",
-                        "sample_count: 14 " +
-                                "sample_sum: 63.2581251 " +
-                                "schema: -2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 0 " +
-                                "positive_span { offset: -1 length: 4 } " +
-                                "positive_delta: 2 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: 3 " +
-                                "positive_delta: -6",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(-2)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0.0156251, 0.0625, // Bucket -1: (0.015625, 0.0625]
-                        0.1, 0.25, 0.5, 1, // Bucket 0: (0.0625, 1]
-                        1.5, 2, 3, 3.5, 5, 6, 7, // Bucket 1: (1, 16]
-                        33.33 // Bucket 2: (16, 256]
-                ),
-                new GolangTestCase("'negative buckets' from client_golang",
-                        "sample_count: 6 " +
-                                "sample_sum: -7.4 " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "negative_span { offset: 0 length: 5 } " +
-                                "negative_delta: 1 " +
-                                "negative_delta: -1 " +
-                                "negative_delta: 2 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0, -1, -1.2, -1.4, -1.8, -2
-                ),
-                new GolangTestCase("'negative and positive buckets' from client_golang",
-                        "sample_count: 11 " +
-                                "sample_sum: 0.0 " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "negative_span { offset: 0 length: 5 } " +
-                                "negative_delta: 1 " +
-                                "negative_delta: -1 " +
-                                "negative_delta: 2 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 2 " +
-                                "positive_span { offset: 0 length: 5 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0, -1, -1.2, -1.4, -1.8, -2, 1, 1.2, 1.4, 1.8, 2
-                ),
-                new GolangTestCase("'wide zero bucket' from client_golang",
-                        "sample_count: 11 " +
-                                "sample_sum: 0.0 " +
-                                "schema: 2 " +
-                                "zero_threshold: 1.4 " +
-                                "zero_count: 7 " +
-                                "negative_span { offset: 4 length: 1 } " +
-                                "negative_delta: 2 " +
-                                "positive_span { offset: 4 length: 1 } " +
-                                "positive_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMinZeroThreshold(1.4)
-                                .build(),
-                        0, -1, -1.2, -1.4, -1.8, -2, 1, 1.2, 1.4, 1.8, 2
-                ),
-                /*
-                // See https://github.com/prometheus/client_golang/issues/1275
-                new TestCase("'NaN observation' from client_golang",
-                        "sample_count: 7 " +
-                                "sample_sum: NaN " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "positive_span { offset: 0 length: 5 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeHistogram()
-                                .nativeSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0, 1, 1.2, 1.4, 1.8, 2, Double.NaN
-                ),
-                */
-                new GolangTestCase("'+Inf observation' from client_golang",
-                        "sample_count: 7 " +
-                                "sample_sum: Infinity " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "positive_span { offset: 0 length: 5 } " +
-                                "positive_span { offset: 4092 length: 1 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -1",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0, 1, 1.2, 1.4, 1.8, 2, Double.POSITIVE_INFINITY
-                ),
-                new GolangTestCase("'-Inf observation' from client_golang",
-                        "sample_count: 7 " +
-                                "sample_sum: -Infinity " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "negative_span { offset: 4097 length: 1 } " +
-                                "negative_delta: 1 " +
-                                "positive_span { offset: 0 length: 5 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0, 1, 1.2, 1.4, 1.8, 2, Double.NEGATIVE_INFINITY
-                ),
-                new GolangTestCase("'limited buckets but nothing triggered' from client_golang",
-                        "sample_count: 6 " +
-                                "sample_sum: 7.4 " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "positive_span { offset: 0 length: 5 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, 1, 1.2, 1.4, 1.8, 2
-                ),
-                new GolangTestCase("'buckets limited by halving resolution' from client_golang",
-                        "sample_count: 8 " +
-                                "sample_sum: 11.5 " +
-                                "schema: 1 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "positive_span { offset: 0 length: 5 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -1 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 1",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, 1, 1.1, 1.2, 1.4, 1.8, 2, 3
-                ),
-                new GolangTestCase("'buckets limited by widening the zero bucket' from client_golang",
-                        "sample_count: 8 " +
-                                "sample_sum: 11.5 " +
-                                "schema: 2 " +
-                                "zero_threshold: 1.0 " +
-                                "zero_count: 2 " +
-                                "positive_span { offset: 1 length: 7 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 1 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 0 " +
-                                "positive_delta: 1",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(1.2)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, 1, 1.1, 1.2, 1.4, 1.8, 2, 3
-                ),
-                new GolangTestCase("'buckets limited by widening the zero bucket twice' from client_golang",
-                        "sample_count: 9 " +
-                                "sample_sum: 15.5 " +
-                                "schema: 2 " +
-                                "zero_threshold: 1.189207115002721 " +
-                                "zero_count: 3 " +
-                                "positive_span { offset: 2 length: 7 } " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 2 " +
-                                "positive_delta: -2 " +
-                                "positive_delta: 0 " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(1.2)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, 1, 1.1, 1.2, 1.4, 1.8, 2, 3, 4),
-                new GolangTestCase("'buckets limited by reset' from client_golang",
-                        "sample_count: 2 " +
-                                "sample_sum: 7.0 " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 0 " +
-                                "positive_span { offset: 7 length: 2 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(1.2)
-                                .nativeMinZeroThreshold(0)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, 1, 1.1, 1.2, 1.4, 1.8, 2, RESET_DURATION_REACHED, 3, 4),
-                new GolangTestCase("'limited buckets but nothing triggered, negative observations' from client_golang",
-                        "sample_count: 6 " +
-                                "sample_sum: -7.4 " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "negative_span { offset: 0 length: 5 } " +
-                                "negative_delta: 1 " +
-                                "negative_delta: -1 " +
-                                "negative_delta: 2 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 2",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, -1, -1.2, -1.4, -1.8, -2),
-                new GolangTestCase("'buckets limited by halving resolution, negative observations' from client_golang",
-                        "sample_count: 8 " +
-                                "sample_sum: -11.5 " +
-                                "schema: 1 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "negative_span { offset: 0 length: 5 } " +
-                                "negative_delta: 1 " +
-                                "negative_delta: 2 " +
-                                "negative_delta: -1 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 1",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, -1, -1.1, -1.2, -1.4, -1.8, -2, -3),
-                new GolangTestCase("'buckets limited by widening the zero bucket, negative observations' from client_golang",
-                        "sample_count: 8 " +
-                                "sample_sum: -11.5 " +
-                                "schema: 2 " +
-                                "zero_threshold: 1.0 " +
-                                "zero_count: 2 " +
-                                "negative_span { offset: 1 length: 7 } " +
-                                "negative_delta: 1 " +
-                                "negative_delta: 1 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 2 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 0 " +
-                                "negative_delta: 1",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(1.2)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, -1, -1.1, -1.2, -1.4, -1.8, -2, -3),
-                new GolangTestCase("'buckets limited by widening the zero bucket twice, negative observations' from client_golang",
-                        "sample_count: 9 " +
-                                "sample_sum: -15.5 " +
-                                "schema: 2 " +
-                                "zero_threshold: 1.189207115002721 " +
-                                "zero_count: 3 " +
-                                "negative_span { offset: 2 length: 7 } " +
-                                "negative_delta: 2 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 2 " +
-                                "negative_delta: -2 " +
-                                "negative_delta: 0 " +
-                                "negative_delta: 1 " +
-                                "negative_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(1.2)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, -1, -1.1, -1.2, -1.4, -1.8, -2, -3, -4),
-                new GolangTestCase("'buckets limited by reset, negative observations' from client_golang",
-                        "sample_count: 2 " +
-                                "sample_sum: -7.0 " +
-                                "schema: 2 " +
-                                "zero_threshold: " + Math.pow(2.0, -128.0) + " " +
-                                "zero_count: 0 " +
-                                "negative_span { offset: 7 length: 2 } " +
-                                "negative_delta: 1 " +
-                                "negative_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(1.2)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, -1, -1.1, -1.2, -1.4, -1.8, -2, RESET_DURATION_REACHED, -3, -4),
-                new GolangTestCase("'buckets limited by halving resolution, then reset' from client_golang",
-                        "sample_count: 2 " +
-                                "sample_sum: 7.0 " +
-                                "schema: 2 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 0 " +
-                                "positive_span { offset: 7 length: 2 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(0)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, 1, 1.1, 1.2, 1.4, 1.8, 2, 5, 5.1, RESET_DURATION_REACHED, 3, 4),
-                new GolangTestCase("'buckets limited by widening the zero bucket, then reset' from client_golang",
-                        "sample_count: 2 " +
-                                "sample_sum: 7.0 " +
-                                "schema: 2 " +
-                                "zero_threshold: " + Math.pow(2.0, -128.0) + " " +
-                                "zero_count: 0 " +
-                                "positive_span { offset: 7 length: 2 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(2)
-                                .nativeMaxZeroThreshold(1.2)
-                                .nativeMaxNumberOfBuckets(4)
-                                .build(),
-                        0, 1, 1.1, 1.2, 1.4, 1.8, 2, 5, 5.1, RESET_DURATION_REACHED, 3, 4)
+  }
+
+  /** Test cases copied from histogram_test.go in client_golang. */
+  @Test
+  public void testGolangTests() throws NoSuchFieldException, IllegalAccessException {
+    GolangTestCase[] testCases =
+        new GolangTestCase[] {
+          new GolangTestCase(
+              "'no sparse buckets' from client_golang",
+              "sample_count: 3 "
+                  + "sample_sum: 6.0 "
+                  + "bucket { cumulative_count: 0 upper_bound: 0.005 } "
+                  + "bucket { cumulative_count: 0 upper_bound: 0.01 } "
+                  + "bucket { cumulative_count: 0 upper_bound: 0.025 } "
+                  + "bucket { cumulative_count: 0 upper_bound: 0.05 } "
+                  + "bucket { cumulative_count: 0 upper_bound: 0.1 } "
+                  + "bucket { cumulative_count: 0 upper_bound: 0.25 } "
+                  + "bucket { cumulative_count: 0 upper_bound: 0.5 } "
+                  + "bucket { cumulative_count: 1 upper_bound: 1.0 } "
+                  + "bucket { cumulative_count: 2 upper_bound: 2.5 } "
+                  + "bucket { cumulative_count: 3 upper_bound: 5.0 } "
+                  + "bucket { cumulative_count: 3 upper_bound: 10.0 } "
+                  + "bucket { cumulative_count: 3 upper_bound: Infinity }",
+              Histogram.builder().name("test").classicOnly().build(),
+              1.0,
+              2.0,
+              3.0),
+          new GolangTestCase(
+              "'factor 1.1 results in schema 3' from client_golang",
+              "sample_count: 4 "
+                  + "sample_sum: 6.0 "
+                  + "schema: 3 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "positive_span { offset: 0 length: 1 } "
+                  + "positive_span { offset: 7 length: 1 } "
+                  + "positive_span { offset: 4 length: 1 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 0 "
+                  + "positive_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(3)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0.0,
+              1.0,
+              2.0,
+              3.0),
+          new GolangTestCase(
+              "'factor 1.2 results in schema 2' from client_golang",
+              "sample_count: 6 "
+                  + "sample_sum: 7.4 "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "positive_span { offset: 0 length: 5 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: -1 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0,
+              1,
+              1.2,
+              1.4,
+              1.8,
+              2),
+          new GolangTestCase(
+              "'factor 4 results in schema -1' from client_golang",
+              "sample_count: 14 "
+                  + "sample_sum: 63.2581251 "
+                  + "schema: -1 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 0 "
+                  + "positive_span { offset: -2 length: 6 } "
+                  + "positive_delta: 2 "
+                  + "positive_delta: 0 "
+                  + "positive_delta: 0 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -1 "
+                  + "positive_delta: -2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(-1)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0.0156251,
+              0.0625, // Bucket -2: (0.015625, 0.0625)
+              0.1,
+              0.25, // Bucket -1: (0.0625, 0.25]
+              0.5,
+              1, // Bucket 0: (0.25, 1]
+              1.5,
+              2,
+              3,
+              3.5, // Bucket 1: (1, 4]
+              5,
+              6,
+              7, // Bucket 2: (4, 16]
+              33.33 // Bucket 3: (16, 64]
+              ),
+          new GolangTestCase(
+              "'factor 17 results in schema -2' from client_golang",
+              "sample_count: 14 "
+                  + "sample_sum: 63.2581251 "
+                  + "schema: -2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 0 "
+                  + "positive_span { offset: -1 length: 4 } "
+                  + "positive_delta: 2 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: 3 "
+                  + "positive_delta: -6",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(-2)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0.0156251,
+              0.0625, // Bucket -1: (0.015625, 0.0625]
+              0.1,
+              0.25,
+              0.5,
+              1, // Bucket 0: (0.0625, 1]
+              1.5,
+              2,
+              3,
+              3.5,
+              5,
+              6,
+              7, // Bucket 1: (1, 16]
+              33.33 // Bucket 2: (16, 256]
+              ),
+          new GolangTestCase(
+              "'negative buckets' from client_golang",
+              "sample_count: 6 "
+                  + "sample_sum: -7.4 "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "negative_span { offset: 0 length: 5 } "
+                  + "negative_delta: 1 "
+                  + "negative_delta: -1 "
+                  + "negative_delta: 2 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0,
+              -1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2),
+          new GolangTestCase(
+              "'negative and positive buckets' from client_golang",
+              "sample_count: 11 "
+                  + "sample_sum: 0.0 "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "negative_span { offset: 0 length: 5 } "
+                  + "negative_delta: 1 "
+                  + "negative_delta: -1 "
+                  + "negative_delta: 2 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 2 "
+                  + "positive_span { offset: 0 length: 5 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: -1 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0,
+              -1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2,
+              1,
+              1.2,
+              1.4,
+              1.8,
+              2),
+          new GolangTestCase(
+              "'wide zero bucket' from client_golang",
+              "sample_count: 11 "
+                  + "sample_sum: 0.0 "
+                  + "schema: 2 "
+                  + "zero_threshold: 1.4 "
+                  + "zero_count: 7 "
+                  + "negative_span { offset: 4 length: 1 } "
+                  + "negative_delta: 2 "
+                  + "positive_span { offset: 4 length: 1 } "
+                  + "positive_delta: 2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMinZeroThreshold(1.4)
+                  .build(),
+              0,
+              -1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2,
+              1,
+              1.2,
+              1.4,
+              1.8,
+              2),
+          /*
+          // See https://github.com/prometheus/client_golang/issues/1275
+          new TestCase("'NaN observation' from client_golang",
+                  "sample_count: 7 " +
+                          "sample_sum: NaN " +
+                          "schema: 2 " +
+                          "zero_threshold: 0.0 " +
+                          "zero_count: 1 " +
+                          "positive_span { offset: 0 length: 5 } " +
+                          "positive_delta: 1 " +
+                          "positive_delta: -1 " +
+                          "positive_delta: 2 " +
+                          "positive_delta: -2 " +
+                          "positive_delta: 2",
+                  Histogram.builder()
+                          .name("test")
+                          .nativeHistogram()
+                          .nativeSchema(2)
+                          .nativeMaxZeroThreshold(0)
+                          .build(),
+                  0, 1, 1.2, 1.4, 1.8, 2, Double.NaN
+          ),
+          */
+          new GolangTestCase(
+              "'+Inf observation' from client_golang",
+              "sample_count: 7 "
+                  + "sample_sum: Infinity "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "positive_span { offset: 0 length: 5 } "
+                  + "positive_span { offset: 4092 length: 1 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: -1 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -1",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0,
+              1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              Double.POSITIVE_INFINITY),
+          new GolangTestCase(
+              "'-Inf observation' from client_golang",
+              "sample_count: 7 "
+                  + "sample_sum: -Infinity "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "negative_span { offset: 4097 length: 1 } "
+                  + "negative_delta: 1 "
+                  + "positive_span { offset: 0 length: 5 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: -1 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0,
+              1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              Double.NEGATIVE_INFINITY),
+          new GolangTestCase(
+              "'limited buckets but nothing triggered' from client_golang",
+              "sample_count: 6 "
+                  + "sample_sum: 7.4 "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "positive_span { offset: 0 length: 5 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: -1 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              1,
+              1.2,
+              1.4,
+              1.8,
+              2),
+          new GolangTestCase(
+              "'buckets limited by halving resolution' from client_golang",
+              "sample_count: 8 "
+                  + "sample_sum: 11.5 "
+                  + "schema: 1 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "positive_span { offset: 0 length: 5 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -1 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 1",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              1,
+              1.1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              3),
+          new GolangTestCase(
+              "'buckets limited by widening the zero bucket' from client_golang",
+              "sample_count: 8 "
+                  + "sample_sum: 11.5 "
+                  + "schema: 2 "
+                  + "zero_threshold: 1.0 "
+                  + "zero_count: 2 "
+                  + "positive_span { offset: 1 length: 7 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 1 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 0 "
+                  + "positive_delta: 1",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(1.2)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              1,
+              1.1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              3),
+          new GolangTestCase(
+              "'buckets limited by widening the zero bucket twice' from client_golang",
+              "sample_count: 9 "
+                  + "sample_sum: 15.5 "
+                  + "schema: 2 "
+                  + "zero_threshold: 1.189207115002721 "
+                  + "zero_count: 3 "
+                  + "positive_span { offset: 2 length: 7 } "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 2 "
+                  + "positive_delta: -2 "
+                  + "positive_delta: 0 "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(1.2)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              1,
+              1.1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              3,
+              4),
+          new GolangTestCase(
+              "'buckets limited by reset' from client_golang",
+              "sample_count: 2 "
+                  + "sample_sum: 7.0 "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 0 "
+                  + "positive_span { offset: 7 length: 2 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(1.2)
+                  .nativeMinZeroThreshold(0)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              1,
+              1.1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              RESET_DURATION_REACHED,
+              3,
+              4),
+          new GolangTestCase(
+              "'limited buckets but nothing triggered, negative observations' from client_golang",
+              "sample_count: 6 "
+                  + "sample_sum: -7.4 "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "negative_span { offset: 0 length: 5 } "
+                  + "negative_delta: 1 "
+                  + "negative_delta: -1 "
+                  + "negative_delta: 2 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 2",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              -1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2),
+          new GolangTestCase(
+              "'buckets limited by halving resolution, negative observations' from client_golang",
+              "sample_count: 8 "
+                  + "sample_sum: -11.5 "
+                  + "schema: 1 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "negative_span { offset: 0 length: 5 } "
+                  + "negative_delta: 1 "
+                  + "negative_delta: 2 "
+                  + "negative_delta: -1 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 1",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              -1,
+              -1.1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2,
+              -3),
+          new GolangTestCase(
+              "'buckets limited by widening the zero bucket, negative observations' from client_golang",
+              "sample_count: 8 "
+                  + "sample_sum: -11.5 "
+                  + "schema: 2 "
+                  + "zero_threshold: 1.0 "
+                  + "zero_count: 2 "
+                  + "negative_span { offset: 1 length: 7 } "
+                  + "negative_delta: 1 "
+                  + "negative_delta: 1 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 2 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 0 "
+                  + "negative_delta: 1",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(1.2)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              -1,
+              -1.1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2,
+              -3),
+          new GolangTestCase(
+              "'buckets limited by widening the zero bucket twice, negative observations' from client_golang",
+              "sample_count: 9 "
+                  + "sample_sum: -15.5 "
+                  + "schema: 2 "
+                  + "zero_threshold: 1.189207115002721 "
+                  + "zero_count: 3 "
+                  + "negative_span { offset: 2 length: 7 } "
+                  + "negative_delta: 2 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 2 "
+                  + "negative_delta: -2 "
+                  + "negative_delta: 0 "
+                  + "negative_delta: 1 "
+                  + "negative_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(1.2)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              -1,
+              -1.1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2,
+              -3,
+              -4),
+          new GolangTestCase(
+              "'buckets limited by reset, negative observations' from client_golang",
+              "sample_count: 2 "
+                  + "sample_sum: -7.0 "
+                  + "schema: 2 "
+                  + "zero_threshold: "
+                  + Math.pow(2.0, -128.0)
+                  + " "
+                  + "zero_count: 0 "
+                  + "negative_span { offset: 7 length: 2 } "
+                  + "negative_delta: 1 "
+                  + "negative_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(1.2)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              -1,
+              -1.1,
+              -1.2,
+              -1.4,
+              -1.8,
+              -2,
+              RESET_DURATION_REACHED,
+              -3,
+              -4),
+          new GolangTestCase(
+              "'buckets limited by halving resolution, then reset' from client_golang",
+              "sample_count: 2 "
+                  + "sample_sum: 7.0 "
+                  + "schema: 2 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 0 "
+                  + "positive_span { offset: 7 length: 2 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(0)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              1,
+              1.1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              5,
+              5.1,
+              RESET_DURATION_REACHED,
+              3,
+              4),
+          new GolangTestCase(
+              "'buckets limited by widening the zero bucket, then reset' from client_golang",
+              "sample_count: 2 "
+                  + "sample_sum: 7.0 "
+                  + "schema: 2 "
+                  + "zero_threshold: "
+                  + Math.pow(2.0, -128.0)
+                  + " "
+                  + "zero_count: 0 "
+                  + "positive_span { offset: 7 length: 2 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(2)
+                  .nativeMaxZeroThreshold(1.2)
+                  .nativeMaxNumberOfBuckets(4)
+                  .build(),
+              0,
+              1,
+              1.1,
+              1.2,
+              1.4,
+              1.8,
+              2,
+              5,
+              5.1,
+              RESET_DURATION_REACHED,
+              3,
+              4)
         };
-        for (GolangTestCase testCase : testCases) {
-            testCase.run();
-        }
+    for (GolangTestCase testCase : testCases) {
+      testCase.run();
     }
-
-    /**
-     * Additional tests that are not part of client_golang's test suite.
-     */
-    @Test
-    public void testAdditional() throws NoSuchFieldException, IllegalAccessException {
-        GolangTestCase[] testCases = new GolangTestCase[]{
-                new GolangTestCase("observed values are exactly at bucket boundaries",
-                        "sample_count: 3 " +
-                                "sample_sum: 1.5 " +
-                                "schema: 0 " +
-                                "zero_threshold: 0.0 " +
-                                "zero_count: 1 " +
-                                "positive_span { offset: -1 length: 2 } " +
-                                "positive_delta: 1 " +
-                                "positive_delta: 0",
-                        Histogram.builder()
-                                .name("test")
-                                .nativeOnly()
-                                .nativeInitialSchema(0)
-                                .nativeMaxZeroThreshold(0)
-                                .build(),
-                        0.0, 0.5, 1.0)
+  }
+
+  /** Additional tests that are not part of client_golang's test suite. */
+  @Test
+  public void testAdditional() throws NoSuchFieldException, IllegalAccessException {
+    GolangTestCase[] testCases =
+        new GolangTestCase[] {
+          new GolangTestCase(
+              "observed values are exactly at bucket boundaries",
+              "sample_count: 3 "
+                  + "sample_sum: 1.5 "
+                  + "schema: 0 "
+                  + "zero_threshold: 0.0 "
+                  + "zero_count: 1 "
+                  + "positive_span { offset: -1 length: 2 } "
+                  + "positive_delta: 1 "
+                  + "positive_delta: 0",
+              Histogram.builder()
+                  .name("test")
+                  .nativeOnly()
+                  .nativeInitialSchema(0)
+                  .nativeMaxZeroThreshold(0)
+                  .build(),
+              0.0,
+              0.5,
+              1.0)
         };
-        for (GolangTestCase testCase : testCases) {
-            testCase.run();
-        }
+    for (GolangTestCase testCase : testCases) {
+      testCase.run();
     }
-
-    /**
-     * Tests HistogramData.nativeBucketIndexToUpperBound(int, int).
-     * <p>
-     * This test is ported from client_golang's TestGetLe().
-     */
-    @Test
-    public void testNativeBucketIndexToUpperBound() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
-        int[] indexes = new int[]{-1, 0, 1, 512, 513, -1, 0, 1, 1024, 1025, -1, 0, 1, 4096, 4097};
-        int[] schemas = new int[]{-1, -1, -1, -1, -1, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2};
-        double[] expectedUpperBounds = new double[]{0.25, 1, 4, Double.MAX_VALUE, Double.POSITIVE_INFINITY,
-                0.5, 1, 2, Double.MAX_VALUE, Double.POSITIVE_INFINITY,
-                0.8408964152537144, 1, 1.189207115002721, Double.MAX_VALUE, Double.POSITIVE_INFINITY};
-        Method method = Histogram.DataPoint.class.getDeclaredMethod("nativeBucketIndexToUpperBound", int.class, int.class);
-        method.setAccessible(true);
-        for (int i = 0; i < indexes.length; i++) {
-            Histogram histogram = Histogram.builder()
-                    .name("test")
-                    .nativeInitialSchema(schemas[i])
-                    .build();
-            Histogram.DataPoint histogramData = histogram.newDataPoint();
-            double result = (double) method.invoke(histogramData, schemas[i], indexes[i]);
-            Assert.assertEquals("index=" + indexes[i] + ", schema=" + schemas[i], expectedUpperBounds[i], result, 0.0000000000001);
-        }
+  }
+
+  /**
+   * Tests HistogramData.nativeBucketIndexToUpperBound(int, int).
+   *
+   * <p>This test is ported from client_golang's TestGetLe().
+   */
+  @Test
+  public void testNativeBucketIndexToUpperBound()
+      throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+    int[] indexes = new int[] {-1, 0, 1, 512, 513, -1, 0, 1, 1024, 1025, -1, 0, 1, 4096, 4097};
+    int[] schemas = new int[] {-1, -1, -1, -1, -1, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2};
+    double[] expectedUpperBounds =
+        new double[] {
+          0.25,
+          1,
+          4,
+          Double.MAX_VALUE,
+          Double.POSITIVE_INFINITY,
+          0.5,
+          1,
+          2,
+          Double.MAX_VALUE,
+          Double.POSITIVE_INFINITY,
+          0.8408964152537144,
+          1,
+          1.189207115002721,
+          Double.MAX_VALUE,
+          Double.POSITIVE_INFINITY
+        };
+    Method method =
+        Histogram.DataPoint.class.getDeclaredMethod(
+            "nativeBucketIndexToUpperBound", int.class, int.class);
+    method.setAccessible(true);
+    for (int i = 0; i < indexes.length; i++) {
+      Histogram histogram =
+          Histogram.builder().name("test").nativeInitialSchema(schemas[i]).build();
+      Histogram.DataPoint histogramData = histogram.newDataPoint();
+      double result = (double) method.invoke(histogramData, schemas[i], indexes[i]);
+      Assert.assertEquals(
+          "index=" + indexes[i] + ", schema=" + schemas[i],
+          expectedUpperBounds[i],
+          result,
+          0.0000000000001);
     }
-
-    /**
-     * Test if lowerBound < value <= upperBound is true for the bucket index returned by findBucketIndex()
-     */
-    @Test
-    public void testFindBucketIndex() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
-        Random rand = new Random();
-        Method findBucketIndex = Histogram.DataPoint.class.getDeclaredMethod("findBucketIndex", double.class);
-        Method nativeBucketIndexToUpperBound = Histogram.DataPoint.class.getDeclaredMethod("nativeBucketIndexToUpperBound", int.class, int.class);
-        findBucketIndex.setAccessible(true);
-        nativeBucketIndexToUpperBound.setAccessible(true);
-        for (int schema = -4; schema <= 8; schema++) {
-            Histogram histogram = Histogram.builder()
-                    .nativeOnly()
-                    .name("test")
-                    .nativeInitialSchema(schema)
-                    .build();
-            for (int i = 0; i < 10_000; i++) {
-                for (int zeros = -5; zeros <= 10; zeros++) {
-                    double value = rand.nextDouble() * Math.pow(10, zeros);
-                    int bucketIndex = (int) findBucketIndex.invoke(histogram.getNoLabels(), value);
-                    double lowerBound = (double) nativeBucketIndexToUpperBound.invoke(histogram.getNoLabels(), schema, bucketIndex - 1);
-                    double upperBound = (double) nativeBucketIndexToUpperBound.invoke(histogram.getNoLabels(), schema, bucketIndex);
Value " + value + " is outside of that range.", lowerBound < value && upperBound >= value); - } - } + } + + /** + * Test if lowerBound < value <= upperBound is true for the bucket index returned by + * findBucketIndex() + */ + @Test + public void testFindBucketIndex() + throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { + Random rand = new Random(); + Method findBucketIndex = + Histogram.DataPoint.class.getDeclaredMethod("findBucketIndex", double.class); + Method nativeBucketIndexToUpperBound = + Histogram.DataPoint.class.getDeclaredMethod( + "nativeBucketIndexToUpperBound", int.class, int.class); + findBucketIndex.setAccessible(true); + nativeBucketIndexToUpperBound.setAccessible(true); + for (int schema = -4; schema <= 8; schema++) { + Histogram histogram = + Histogram.builder().nativeOnly().name("test").nativeInitialSchema(schema).build(); + for (int i = 0; i < 10_000; i++) { + for (int zeros = -5; zeros <= 10; zeros++) { + double value = rand.nextDouble() * Math.pow(10, zeros); + int bucketIndex = (int) findBucketIndex.invoke(histogram.getNoLabels(), value); + double lowerBound = + (double) + nativeBucketIndexToUpperBound.invoke( + histogram.getNoLabels(), schema, bucketIndex - 1); + double upperBound = + (double) + nativeBucketIndexToUpperBound.invoke( + histogram.getNoLabels(), schema, bucketIndex); + Assert.assertTrue( + "Bucket index " + + bucketIndex + + " with schema " + + schema + + " has range [" + + lowerBound + + ", " + + upperBound + + "]. Value " + + value + + " is outside of that range.", + lowerBound < value && upperBound >= value); } + } } - - @Test - public void testDefaults() throws IOException { - Histogram histogram = Histogram.builder().name("test").build(); - histogram.observe(0.5); - HistogramSnapshot snapshot = histogram.collect(); - String expectedProtobuf = "" + - "name: \"test\" " + - "type: HISTOGRAM " + - "metric { " + - "histogram { " + - "sample_count: 1 " + - "sample_sum: 0.5 " + - // default has both, native and classic buckets - "bucket { cumulative_count: 0 upper_bound: 0.005 } " + - "bucket { cumulative_count: 0 upper_bound: 0.01 } " + - "bucket { cumulative_count: 0 upper_bound: 0.025 } " + - "bucket { cumulative_count: 0 upper_bound: 0.05 } " + - "bucket { cumulative_count: 0 upper_bound: 0.1 } " + - "bucket { cumulative_count: 0 upper_bound: 0.25 } " + - "bucket { cumulative_count: 1 upper_bound: 0.5 } " + - "bucket { cumulative_count: 1 upper_bound: 1.0 } " + - "bucket { cumulative_count: 1 upper_bound: 2.5 } " + - "bucket { cumulative_count: 1 upper_bound: 5.0 } " + - "bucket { cumulative_count: 1 upper_bound: 10.0 } " + - "bucket { cumulative_count: 1 upper_bound: Infinity } " + - // default native schema is 5 - "schema: 5 " + - // default zero threshold is 2^-128 - "zero_threshold: " + Math.pow(2.0, -128.0) + " " + - "zero_count: 0 " + - "positive_span { offset: -32 length: 1 } " + - "positive_delta: 1 " + - "} }"; - String expectedTextFormat = "" + - // default classic buckets - "# TYPE test histogram\n" + - "test_bucket{le=\"0.005\"} 0\n" + - "test_bucket{le=\"0.01\"} 0\n" + - "test_bucket{le=\"0.025\"} 0\n" + - "test_bucket{le=\"0.05\"} 0\n" + - "test_bucket{le=\"0.1\"} 0\n" + - "test_bucket{le=\"0.25\"} 0\n" + - "test_bucket{le=\"0.5\"} 1\n" + - "test_bucket{le=\"1.0\"} 1\n" + - "test_bucket{le=\"2.5\"} 1\n" + - "test_bucket{le=\"5.0\"} 1\n" + - "test_bucket{le=\"10.0\"} 1\n" + - "test_bucket{le=\"+Inf\"} 1\n" + - "test_count 1\n" + - "test_sum 0.5\n" + - "# EOF\n"; - - // protobuf - Metrics.MetricFamily 
protobufData = new PrometheusProtobufWriter().convert(snapshot); - Assert.assertEquals(expectedProtobuf, TextFormat.printer().shortDebugString(protobufData)); - - // text - ByteArrayOutputStream out = new ByteArrayOutputStream(); - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(false, true); - writer.write(out, MetricSnapshots.of(snapshot)); - Assert.assertEquals(expectedTextFormat, out.toString()); - } - - @Test - public void testExemplarsClassicHistogram() throws Exception { - SpanContext spanContext = new SpanContext() { - int callCount = 0; - - @Override - public String getCurrentTraceId() { - return "traceId-" + callCount; - } - - @Override - public String getCurrentSpanId() { - return "spanId-" + callCount; - } - - @Override - public boolean isCurrentSpanSampled() { - callCount++; - return true; - } - - @Override - public void markCurrentSpanAsExemplar() { - } + } + + @Test + public void testDefaults() throws IOException { + Histogram histogram = Histogram.builder().name("test").build(); + histogram.observe(0.5); + HistogramSnapshot snapshot = histogram.collect(); + String expectedProtobuf = + "" + + "name: \"test\" " + + "type: HISTOGRAM " + + "metric { " + + "histogram { " + + "sample_count: 1 " + + "sample_sum: 0.5 " + + + // default has both, native and classic buckets + "bucket { cumulative_count: 0 upper_bound: 0.005 } " + + "bucket { cumulative_count: 0 upper_bound: 0.01 } " + + "bucket { cumulative_count: 0 upper_bound: 0.025 } " + + "bucket { cumulative_count: 0 upper_bound: 0.05 } " + + "bucket { cumulative_count: 0 upper_bound: 0.1 } " + + "bucket { cumulative_count: 0 upper_bound: 0.25 } " + + "bucket { cumulative_count: 1 upper_bound: 0.5 } " + + "bucket { cumulative_count: 1 upper_bound: 1.0 } " + + "bucket { cumulative_count: 1 upper_bound: 2.5 } " + + "bucket { cumulative_count: 1 upper_bound: 5.0 } " + + "bucket { cumulative_count: 1 upper_bound: 10.0 } " + + "bucket { cumulative_count: 1 upper_bound: Infinity } " + + + // default native schema is 5 + "schema: 5 " + + + // default zero threshold is 2^-128 + "zero_threshold: " + + Math.pow(2.0, -128.0) + + " " + + "zero_count: 0 " + + "positive_span { offset: -32 length: 1 } " + + "positive_delta: 1 " + + "} }"; + String expectedTextFormat = + "" + + + // default classic buckets + "# TYPE test histogram\n" + + "test_bucket{le=\"0.005\"} 0\n" + + "test_bucket{le=\"0.01\"} 0\n" + + "test_bucket{le=\"0.025\"} 0\n" + + "test_bucket{le=\"0.05\"} 0\n" + + "test_bucket{le=\"0.1\"} 0\n" + + "test_bucket{le=\"0.25\"} 0\n" + + "test_bucket{le=\"0.5\"} 1\n" + + "test_bucket{le=\"1.0\"} 1\n" + + "test_bucket{le=\"2.5\"} 1\n" + + "test_bucket{le=\"5.0\"} 1\n" + + "test_bucket{le=\"10.0\"} 1\n" + + "test_bucket{le=\"+Inf\"} 1\n" + + "test_count 1\n" + + "test_sum 0.5\n" + + "# EOF\n"; + + // protobuf + Metrics.MetricFamily protobufData = new PrometheusProtobufWriter().convert(snapshot); + Assert.assertEquals(expectedProtobuf, TextFormat.printer().shortDebugString(protobufData)); + + // text + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(false, true); + writer.write(out, MetricSnapshots.of(snapshot)); + Assert.assertEquals(expectedTextFormat, out.toString()); + } + + @Test + public void testExemplarsClassicHistogram() throws Exception { + SpanContext spanContext = + new SpanContext() { + int callCount = 0; + + @Override + public String getCurrentTraceId() { + return "traceId-" + callCount; + } + + @Override + public String 
getCurrentSpanId() { + return "spanId-" + callCount; + } + + @Override + public boolean isCurrentSpanSampled() { + callCount++; + return true; + } + + @Override + public void markCurrentSpanAsExemplar() {} }; - Histogram histogram = Histogram.builder() - .name("test") - // The default number of Exemplars is 4. - // Use 5 buckets to verify that the exemplar sample is configured with the buckets. - .classicUpperBounds(1.0, 2.0, 3.0, 4.0, Double.POSITIVE_INFINITY) - .labelNames("path") - .build(); - - long sampleIntervalMillis = 10; - ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(histogram, sampleIntervalMillis); - SpanContextSupplier.setSpanContext(spanContext); - - Exemplar ex1a = Exemplar.builder() - .value(0.5) - .spanId("spanId-1") - .traceId("traceId-1") - .build(); - Exemplar ex1b = Exemplar.builder() - .value(0.5) - .spanId("spanId-2") - .traceId("traceId-2") - .build(); - Exemplar ex2a = Exemplar.builder() - .value(4.5) - .spanId("spanId-3") - .traceId("traceId-3") - .build(); - Exemplar ex2b = Exemplar.builder() - .value(4.5) - .spanId("spanId-4") - .traceId("traceId-4") - .build(); - Exemplar ex3a = Exemplar.builder() - .value(1.5) - .spanId("spanId-5") - .traceId("traceId-5") - .build(); - Exemplar ex3b = Exemplar.builder() - .value(1.5) - .spanId("spanId-6") - .traceId("traceId-6") - .build(); - Exemplar ex4a = Exemplar.builder() - .value(2.5) - .spanId("spanId-7") - .traceId("traceId-7") - .build(); - Exemplar ex4b = Exemplar.builder() - .value(2.5) - .spanId("spanId-8") - .traceId("traceId-8") - .build(); - Exemplar ex5a = Exemplar.builder() - .value(3.5) - .spanId("spanId-9") - .traceId("traceId-9") - .build(); - Exemplar ex5b = Exemplar.builder() - .value(3.5) - .spanId("spanId-10") - .traceId("traceId-10") - .build(); - histogram.labelValues("/hello").observe(0.5); - histogram.labelValues("/world").observe(0.5); // different labels are tracked independently, i.e. 
we don't need to wait for sampleIntervalMillis - - HistogramSnapshot snapshot = histogram.collect(); - assertExemplarEquals(ex1a, getExemplar(snapshot, 1.0, "path", "/hello")); - assertExemplarEquals(ex1b, getExemplar(snapshot, 1.0, "path", "/world")); - assertNull(getExemplar(snapshot, 2.0, "path", "/hello")); - assertNull(getExemplar(snapshot, 2.0, "path", "/world")); - assertNull(getExemplar(snapshot, 3.0, "path", "/hello")); - assertNull(getExemplar(snapshot, 3.0, "path", "/world")); - assertNull(getExemplar(snapshot, 4.0, "path", "/hello")); - assertNull(getExemplar(snapshot, 4.0, "path", "/world")); - assertNull(getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/hello")); - assertNull(getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/world")); - - Thread.sleep(sampleIntervalMillis + 1); - histogram.labelValues("/hello").observe(4.5); - histogram.labelValues("/world").observe(4.5); - - snapshot = histogram.collect(); - assertExemplarEquals(ex1a, getExemplar(snapshot, 1.0, "path", "/hello")); - assertExemplarEquals(ex1b, getExemplar(snapshot, 1.0, "path", "/world")); - assertNull(getExemplar(snapshot, 2.0, "path", "/hello")); - assertNull(getExemplar(snapshot, 2.0, "path", "/world")); - assertNull(getExemplar(snapshot, 3.0, "path", "/hello")); - assertNull(getExemplar(snapshot, 3.0, "path", "/world")); - assertNull(getExemplar(snapshot, 4.0, "path", "/hello")); - assertNull(getExemplar(snapshot, 4.0, "path", "/world")); - assertExemplarEquals(ex2a, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/hello")); - assertExemplarEquals(ex2b, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/world")); - - Thread.sleep(sampleIntervalMillis + 1); - histogram.labelValues("/hello").observe(1.5); - histogram.labelValues("/world").observe(1.5); - Thread.sleep(sampleIntervalMillis + 1); - histogram.labelValues("/hello").observe(2.5); - histogram.labelValues("/world").observe(2.5); - Thread.sleep(sampleIntervalMillis + 1); - histogram.labelValues("/hello").observe(3.5); - histogram.labelValues("/world").observe(3.5); - - snapshot = histogram.collect(); - assertExemplarEquals(ex1a, getExemplar(snapshot, 1.0, "path", "/hello")); - assertExemplarEquals(ex1b, getExemplar(snapshot, 1.0, "path", "/world")); - assertExemplarEquals(ex3a, getExemplar(snapshot, 2.0, "path", "/hello")); - assertExemplarEquals(ex3b, getExemplar(snapshot, 2.0, "path", "/world")); - assertExemplarEquals(ex4a, getExemplar(snapshot, 3.0, "path", "/hello")); - assertExemplarEquals(ex4b, getExemplar(snapshot, 3.0, "path", "/world")); - assertExemplarEquals(ex5a, getExemplar(snapshot, 4.0, "path", "/hello")); - assertExemplarEquals(ex5b, getExemplar(snapshot, 4.0, "path", "/world")); - assertExemplarEquals(ex2a, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/hello")); - assertExemplarEquals(ex2b, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/world")); - - Exemplar custom = Exemplar.builder() - .value(3.4) - .labels(Labels.of("key2", "value2", "key1", "value1", "trace_id", "traceId-11", "span_id", "spanId-11")) - .build(); - Thread.sleep(sampleIntervalMillis + 1); - histogram.labelValues("/hello").observeWithExemplar(3.4, Labels.of("key1", "value1", "key2", "value2")); - snapshot = histogram.collect(); - // custom exemplars have preference, so the automatic exemplar is replaced - assertExemplarEquals(custom, getExemplar(snapshot, 4.0, "path", "/hello")); + Histogram histogram = + Histogram.builder() + .name("test") + // The default number of Exemplars is 4. 
+ // Use 5 buckets to verify that the exemplar sample is configured with the buckets. + .classicUpperBounds(1.0, 2.0, 3.0, 4.0, Double.POSITIVE_INFINITY) + .labelNames("path") + .build(); + + long sampleIntervalMillis = 10; + ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(histogram, sampleIntervalMillis); + SpanContextSupplier.setSpanContext(spanContext); + + Exemplar ex1a = Exemplar.builder().value(0.5).spanId("spanId-1").traceId("traceId-1").build(); + Exemplar ex1b = Exemplar.builder().value(0.5).spanId("spanId-2").traceId("traceId-2").build(); + Exemplar ex2a = Exemplar.builder().value(4.5).spanId("spanId-3").traceId("traceId-3").build(); + Exemplar ex2b = Exemplar.builder().value(4.5).spanId("spanId-4").traceId("traceId-4").build(); + Exemplar ex3a = Exemplar.builder().value(1.5).spanId("spanId-5").traceId("traceId-5").build(); + Exemplar ex3b = Exemplar.builder().value(1.5).spanId("spanId-6").traceId("traceId-6").build(); + Exemplar ex4a = Exemplar.builder().value(2.5).spanId("spanId-7").traceId("traceId-7").build(); + Exemplar ex4b = Exemplar.builder().value(2.5).spanId("spanId-8").traceId("traceId-8").build(); + Exemplar ex5a = Exemplar.builder().value(3.5).spanId("spanId-9").traceId("traceId-9").build(); + Exemplar ex5b = Exemplar.builder().value(3.5).spanId("spanId-10").traceId("traceId-10").build(); + histogram.labelValues("/hello").observe(0.5); + histogram + .labelValues("/world") + .observe(0.5); // different labels are tracked independently, i.e. we don't need to wait for + // sampleIntervalMillis + + HistogramSnapshot snapshot = histogram.collect(); + assertExemplarEquals(ex1a, getExemplar(snapshot, 1.0, "path", "/hello")); + assertExemplarEquals(ex1b, getExemplar(snapshot, 1.0, "path", "/world")); + assertNull(getExemplar(snapshot, 2.0, "path", "/hello")); + assertNull(getExemplar(snapshot, 2.0, "path", "/world")); + assertNull(getExemplar(snapshot, 3.0, "path", "/hello")); + assertNull(getExemplar(snapshot, 3.0, "path", "/world")); + assertNull(getExemplar(snapshot, 4.0, "path", "/hello")); + assertNull(getExemplar(snapshot, 4.0, "path", "/world")); + assertNull(getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/hello")); + assertNull(getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/world")); + + Thread.sleep(sampleIntervalMillis + 1); + histogram.labelValues("/hello").observe(4.5); + histogram.labelValues("/world").observe(4.5); + + snapshot = histogram.collect(); + assertExemplarEquals(ex1a, getExemplar(snapshot, 1.0, "path", "/hello")); + assertExemplarEquals(ex1b, getExemplar(snapshot, 1.0, "path", "/world")); + assertNull(getExemplar(snapshot, 2.0, "path", "/hello")); + assertNull(getExemplar(snapshot, 2.0, "path", "/world")); + assertNull(getExemplar(snapshot, 3.0, "path", "/hello")); + assertNull(getExemplar(snapshot, 3.0, "path", "/world")); + assertNull(getExemplar(snapshot, 4.0, "path", "/hello")); + assertNull(getExemplar(snapshot, 4.0, "path", "/world")); + assertExemplarEquals(ex2a, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/hello")); + assertExemplarEquals(ex2b, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/world")); + + Thread.sleep(sampleIntervalMillis + 1); + histogram.labelValues("/hello").observe(1.5); + histogram.labelValues("/world").observe(1.5); + Thread.sleep(sampleIntervalMillis + 1); + histogram.labelValues("/hello").observe(2.5); + histogram.labelValues("/world").observe(2.5); + Thread.sleep(sampleIntervalMillis + 1); + histogram.labelValues("/hello").observe(3.5); + 
histogram.labelValues("/world").observe(3.5); + + snapshot = histogram.collect(); + assertExemplarEquals(ex1a, getExemplar(snapshot, 1.0, "path", "/hello")); + assertExemplarEquals(ex1b, getExemplar(snapshot, 1.0, "path", "/world")); + assertExemplarEquals(ex3a, getExemplar(snapshot, 2.0, "path", "/hello")); + assertExemplarEquals(ex3b, getExemplar(snapshot, 2.0, "path", "/world")); + assertExemplarEquals(ex4a, getExemplar(snapshot, 3.0, "path", "/hello")); + assertExemplarEquals(ex4b, getExemplar(snapshot, 3.0, "path", "/world")); + assertExemplarEquals(ex5a, getExemplar(snapshot, 4.0, "path", "/hello")); + assertExemplarEquals(ex5b, getExemplar(snapshot, 4.0, "path", "/world")); + assertExemplarEquals(ex2a, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/hello")); + assertExemplarEquals(ex2b, getExemplar(snapshot, Double.POSITIVE_INFINITY, "path", "/world")); + + Exemplar custom = + Exemplar.builder() + .value(3.4) + .labels( + Labels.of( + "key2", + "value2", + "key1", + "value1", + "trace_id", + "traceId-11", + "span_id", + "spanId-11")) + .build(); + Thread.sleep(sampleIntervalMillis + 1); + histogram + .labelValues("/hello") + .observeWithExemplar(3.4, Labels.of("key1", "value1", "key2", "value2")); + snapshot = histogram.collect(); + // custom exemplars have preference, so the automatic exemplar is replaced + assertExemplarEquals(custom, getExemplar(snapshot, 4.0, "path", "/hello")); + } + + private Exemplar getExemplar(HistogramSnapshot snapshot, double le, String... labels) { + HistogramSnapshot.HistogramDataPointSnapshot data = + snapshot.getDataPoints().stream() + .filter(d -> d.getLabels().equals(Labels.of(labels))) + .findFirst() + .orElseThrow(() -> new RuntimeException("Labels not found")); + double lowerBound = Double.NEGATIVE_INFINITY; + for (ClassicHistogramBucket bucket : data.getClassicBuckets()) { + if (bucket.getUpperBound() == le) { + break; + } else { + lowerBound = bucket.getUpperBound(); + } } - - private Exemplar getExemplar(HistogramSnapshot snapshot, double le, String... labels) { - HistogramSnapshot.HistogramDataPointSnapshot data = snapshot.getDataPoints().stream() - .filter(d -> d.getLabels().equals(Labels.of(labels))) - .findFirst() - .orElseThrow(() -> new RuntimeException("Labels not found")); - double lowerBound = Double.NEGATIVE_INFINITY; - for (ClassicHistogramBucket bucket : data.getClassicBuckets()) { - if (bucket.getUpperBound() == le) { - break; - } else { - lowerBound = bucket.getUpperBound(); - } - } - return data.getExemplars().get(lowerBound, le); + return data.getExemplars().get(lowerBound, le); + } + + @Test + public void testCustomExemplarsClassicHistogram() + throws InterruptedException, NoSuchFieldException, IllegalAccessException { + + // TODO: This was copied from the old simpleclient, can probably be refactored. 
+
+    Histogram histogram = Histogram.builder().name("test").withExemplars().build();
+
+    long sampleIntervalMillis = 10;
+    ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(histogram, sampleIntervalMillis);
+    ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(histogram, 3 * sampleIntervalMillis);
+
+    Labels labels = Labels.of("mapKey1", "mapValue1", "mapKey2", "mapValue2");
+
+    histogram.observeWithExemplar(0.5, Labels.of("key", "value"));
+    assertExemplar(histogram, 0.5, "key", "value");
+
+    Thread.sleep(sampleIntervalMillis * 3 + 1);
+    histogram.observeWithExemplar(0.5, Labels.EMPTY);
+    assertExemplar(histogram, 0.5);
+
+    Thread.sleep(sampleIntervalMillis * 3 + 1);
+    histogram.observeWithExemplar(0.5, labels);
+    assertExemplar(histogram, 0.5, "mapKey1", "mapValue1", "mapKey2", "mapValue2");
+
+    // default buckets are {.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10}
+    Thread.sleep(sampleIntervalMillis * 3 + 1);
+    histogram.observeWithExemplar(2.0, Labels.of("key1", "value1", "key2", "value2"));
+    assertExemplar(histogram, 2.0, "key1", "value1", "key2", "value2");
+    assertExemplar(histogram, 0.5, "mapKey1", "mapValue1", "mapKey2", "mapValue2");
+
+    Thread.sleep(sampleIntervalMillis * 3 + 1);
+    histogram.observeWithExemplar(0.4, Labels.EMPTY); // same bucket as 0.5
+    assertExemplar(histogram, 0.4);
+    assertExemplar(histogram, 2.0, "key1", "value1", "key2", "value2");
+  }
+
+  private void assertExemplar(Histogram histogram, double value, String... labels) {
+    double lowerBound = Double.NEGATIVE_INFINITY;
+    double upperBound = Double.POSITIVE_INFINITY;
+    HistogramSnapshot snapshot = histogram.collect();
+    HistogramSnapshot.HistogramDataPointSnapshot data =
+        snapshot.getDataPoints().stream()
+            .filter(d -> d.getLabels().isEmpty())
+            .findFirst()
+            .orElseThrow(() -> new RuntimeException("No data without labels found"));
+    for (ClassicHistogramBucket bucket : data.getClassicBuckets()) {
+      if (bucket.getUpperBound() >= value) {
+        upperBound = bucket.getUpperBound();
+        break;
+      } else {
+        lowerBound = bucket.getUpperBound();
+      }
    }
-
-    @Test
-    public void testCustomExemplarsClassicHistogram() throws InterruptedException, NoSuchFieldException, IllegalAccessException {
-
-        // TODO: This was copied from the old simpleclient, can probably be refactored.
-
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .withExemplars()
-                .build();
-
-        long sampleIntervalMillis = 10;
-        ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(histogram, sampleIntervalMillis);
-        ExemplarSamplerConfigTestUtil.setMinRetentionPeriodMillis(histogram, 3 * sampleIntervalMillis);
-
-        Labels labels = Labels.of("mapKey1", "mapValue1", "mapKey2", "mapValue2");
-
-        histogram.observeWithExemplar(0.5, Labels.of("key", "value"));
-        assertExemplar(histogram, 0.5, "key", "value");
-
-        Thread.sleep(sampleIntervalMillis * 3 + 1);
-        histogram.observeWithExemplar(0.5, Labels.EMPTY);
-        assertExemplar(histogram, 0.5);
-
-        Thread.sleep(sampleIntervalMillis * 3 + 1);
-        histogram.observeWithExemplar(0.5, labels);
-        assertExemplar(histogram, 0.5, "mapKey1", "mapValue1", "mapKey2", "mapValue2");
-
-        // default buckets are {.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10}
-        Thread.sleep(sampleIntervalMillis * 3 + 1);
-        histogram.observeWithExemplar(2.0, Labels.of("key1", "value1", "key2", "value2"));
-        assertExemplar(histogram, 2.0, "key1", "value1", "key2", "value2");
-        assertExemplar(histogram, 0.5, "mapKey1", "mapValue1", "mapKey2", "mapValue2");
-
-        Thread.sleep(sampleIntervalMillis * 3 + 1);
-        histogram.observeWithExemplar(0.4, Labels.EMPTY); // same bucket as 0.5
-        assertExemplar(histogram, 0.4);
-        assertExemplar(histogram, 2.0, "key1", "value1", "key2", "value2");
-    }
-
-    private void assertExemplar(Histogram histogram, double value, String... labels) {
-        double lowerBound = Double.NEGATIVE_INFINITY;
-        double upperBound = Double.POSITIVE_INFINITY;
-        HistogramSnapshot snapshot = histogram.collect();
-        HistogramSnapshot.HistogramDataPointSnapshot data = snapshot.getDataPoints().stream()
-                .filter(d -> d.getLabels().isEmpty())
-                .findFirst()
-                .orElseThrow(() -> new RuntimeException("No data without labels found"));
-        for (ClassicHistogramBucket bucket : data.getClassicBuckets()) {
-            if (bucket.getUpperBound() >= value) {
-                upperBound = bucket.getUpperBound();
-                break;
-            } else {
-                lowerBound = bucket.getUpperBound();
-            }
-        }
-        Exemplar exemplar = data.getExemplars().get(lowerBound, upperBound);
-        Assert.assertNotNull("No exemplar found in bucket [" + lowerBound + ", " + upperBound + "]", exemplar);
-        Assert.assertEquals(value, exemplar.getValue(), 0.0);
-        Assert.assertEquals("" + exemplar.getLabels(), labels.length / 2, exemplar.getLabels().size());
-        for (int i = 0; i < labels.length; i += 2) {
-            Assert.assertEquals(labels[i], exemplar.getLabels().getName(i / 2));
-            Assert.assertEquals(labels[i + 1], exemplar.getLabels().getValue(i / 2));
-        }
+    Exemplar exemplar = data.getExemplars().get(lowerBound, upperBound);
+    Assert.assertNotNull(
+        "No exemplar found in bucket [" + lowerBound + ", " + upperBound + "]", exemplar);
+    Assert.assertEquals(value, exemplar.getValue(), 0.0);
+    Assert.assertEquals("" + exemplar.getLabels(), labels.length / 2, exemplar.getLabels().size());
+    for (int i = 0; i < labels.length; i += 2) {
+      Assert.assertEquals(labels[i], exemplar.getLabels().getName(i / 2));
+      Assert.assertEquals(labels[i + 1], exemplar.getLabels().getValue(i / 2));
    }
+  }

+  @Test
+  public void testExemplarsNativeHistogram() throws NoSuchFieldException, IllegalAccessException {
-    @Test
-    public void testExemplarsNativeHistogram() throws NoSuchFieldException, IllegalAccessException {
+    SpanContext spanContext =
+        new SpanContext() {
+          int callCount = 0;
-        SpanContext spanContext = new SpanContext() {
-            int callCount = 0;
+          @Override
+          public String getCurrentTraceId() {
+            return "traceId-" + callCount;
+          }
-            @Override
-            public String getCurrentTraceId() {
-                return "traceId-" + callCount;
-            }
+          @Override
+          public String getCurrentSpanId() {
+            return "spanId-" + callCount;
+          }
-            @Override
-            public String getCurrentSpanId() {
-                return "spanId-" + callCount;
-            }
+          @Override
+          public boolean isCurrentSpanSampled() {
+            callCount++;
+            return true;
+          }
-            @Override
-            public boolean isCurrentSpanSampled() {
-                callCount++;
-                return true;
-            }
-
-            @Override
-            public void markCurrentSpanAsExemplar() {
-            }
+          @Override
+          public void markCurrentSpanAsExemplar() {}
        };
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .nativeOnly()
-                .labelNames("path")
-                .build();
-
-        long sampleIntervalMillis = 10;
-        ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(histogram, sampleIntervalMillis);
-        SpanContextSupplier.setSpanContext(spanContext);
-
-        Exemplar ex1 = Exemplar.builder()
-                .value(3.11)
-                .spanId("spanId-1")
-                .traceId("traceId-1")
-                .build();
-        Exemplar ex2 = Exemplar.builder()
-                .value(3.12)
-                .spanId("spanId-2")
-                .traceId("traceId-2")
-                .build();
-        Exemplar ex3 = Exemplar.builder()
-                .value(3.13)
-                .spanId("spanId-3")
-                .traceId("traceId-3")
-                .labels(Labels.of("key1", "value1", "key2", "value2"))
-                .build();
-
-        histogram.labelValues("/hello").observe(3.11);
-        histogram.labelValues("/world").observe(3.12);
-        assertEquals(1, getData(histogram, "path", "/hello").getExemplars().size());
-        assertExemplarEquals(ex1, getData(histogram, "path", "/hello").getExemplars().iterator().next());
-        assertEquals(1, getData(histogram, "path", "/world").getExemplars().size());
-        assertExemplarEquals(ex2, getData(histogram, "path", "/world").getExemplars().iterator().next());
-
-        histogram.labelValues("/world").observeWithExemplar(3.13, Labels.of("key1", "value1", "key2", "value2"));
-        assertEquals(1, getData(histogram, "path", "/hello").getExemplars().size());
-        assertExemplarEquals(ex1, getData(histogram, "path", "/hello").getExemplars().iterator().next());
-        assertEquals(2, getData(histogram, "path", "/world").getExemplars().size());
-        Exemplars exemplars = getData(histogram, "path", "/world").getExemplars();
-        List<Exemplar> exemplarList = new ArrayList<>(exemplars.size());
-        for (Exemplar exemplar : exemplars) {
-            exemplarList.add(exemplar);
-        }
-        exemplarList.sort(Comparator.comparingDouble(Exemplar::getValue));
-        assertEquals(2, exemplars.size());
-        assertExemplarEquals(ex2, exemplarList.get(0));
-        assertExemplarEquals(ex3, exemplarList.get(1));
+    Histogram histogram = Histogram.builder().name("test").nativeOnly().labelNames("path").build();
+
+    long sampleIntervalMillis = 10;
+    ExemplarSamplerConfigTestUtil.setSampleIntervalMillis(histogram, sampleIntervalMillis);
+    SpanContextSupplier.setSpanContext(spanContext);
+
+    Exemplar ex1 = Exemplar.builder().value(3.11).spanId("spanId-1").traceId("traceId-1").build();
+    Exemplar ex2 = Exemplar.builder().value(3.12).spanId("spanId-2").traceId("traceId-2").build();
+    Exemplar ex3 =
+        Exemplar.builder()
+            .value(3.13)
+            .spanId("spanId-3")
+            .traceId("traceId-3")
+            .labels(Labels.of("key1", "value1", "key2", "value2"))
+            .build();
+
+    histogram.labelValues("/hello").observe(3.11);
+    histogram.labelValues("/world").observe(3.12);
+    assertEquals(1, getData(histogram, "path", "/hello").getExemplars().size());
+    assertExemplarEquals(
+        ex1, getData(histogram, "path", "/hello").getExemplars().iterator().next());
+    assertEquals(1, getData(histogram, "path", "/world").getExemplars().size());
+    assertExemplarEquals(
+        ex2, getData(histogram, "path", "/world").getExemplars().iterator().next());
+
+    histogram
+        .labelValues("/world")
+        .observeWithExemplar(3.13, Labels.of("key1", "value1", "key2", "value2"));
+    assertEquals(1, getData(histogram, "path", "/hello").getExemplars().size());
+    assertExemplarEquals(
+        ex1, getData(histogram, "path", "/hello").getExemplars().iterator().next());
+    assertEquals(2, getData(histogram, "path", "/world").getExemplars().size());
+    Exemplars exemplars = getData(histogram, "path", "/world").getExemplars();
+    List<Exemplar> exemplarList = new ArrayList<>(exemplars.size());
+    for (Exemplar exemplar : exemplars) {
+      exemplarList.add(exemplar);
    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLabelName() {
+    exemplarList.sort(Comparator.comparingDouble(Exemplar::getValue));
+    assertEquals(2, exemplars.size());
+    assertExemplarEquals(ex2, exemplarList.get(0));
+    assertExemplarEquals(ex3, exemplarList.get(1));
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalLabelName() {
+    Histogram.builder().name("test").labelNames("label", "le");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalLabelNameConstLabels() {
+    Histogram.builder().name("test").constLabels(Labels.of("label1", "value1", "le", "0.3"));
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalLabelNamePrefix() {
+    Histogram.builder().name("test").labelNames("__hello");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalName() {
+    Histogram.builder().name("my_namespace/server.durations");
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testNoName() {
+    Histogram.builder().build();
+  }
+
+  @Test(expected = NullPointerException.class)
+  public void testNullName() {
+    Histogram.builder().name(null);
+  }
+
+  @Test
+  public void testDuplicateClassicBuckets() {
+    Histogram histogram =
+        Histogram.builder().name("test").classicUpperBounds(0, 3, 17, 3, 21).build();
+    List<Double> upperBounds =
+        getData(histogram).getClassicBuckets().stream()
+            .map(ClassicHistogramBucket::getUpperBound)
+            .collect(Collectors.toList());
+    Assert.assertEquals(Arrays.asList(0.0, 3.0, 17.0, 21.0, Double.POSITIVE_INFINITY), upperBounds);
+  }
+
+  @Test
+  public void testUnsortedBuckets() {
+    Histogram histogram = Histogram.builder().name("test").classicUpperBounds(0.2, 0.1).build();
+    List<Double> upperBounds =
+        getData(histogram).getClassicBuckets().stream()
+            .map(ClassicHistogramBucket::getUpperBound)
+            .collect(Collectors.toList());
+    Assert.assertEquals(Arrays.asList(0.1, 0.2, Double.POSITIVE_INFINITY), upperBounds);
+  }
+
+  @Test
+  public void testEmptyBuckets() {
+    Histogram histogram = Histogram.builder().name("test").classicUpperBounds().build();
+    List<Double> upperBounds =
+        getData(histogram).getClassicBuckets().stream()
+            .map(ClassicHistogramBucket::getUpperBound)
+            .collect(Collectors.toList());
+    Assert.assertEquals(Collections.singletonList(Double.POSITIVE_INFINITY), upperBounds);
+  }
+
+  @Test
+  public void testBucketsIncludePositiveInfinity() {
+    Histogram histogram =
+        Histogram.builder()
-                .name("test")
-                .labelNames("label", "le");
-    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLabelNameConstLabels() {
+            .name("test")
+            .classicUpperBounds(0.01, 0.1, 1.0, Double.POSITIVE_INFINITY)
+            .build();
+    List<Double> upperBounds =
+        getData(histogram).getClassicBuckets().stream()
+            .map(ClassicHistogramBucket::getUpperBound)
+            .collect(Collectors.toList());
+    Assert.assertEquals(Arrays.asList(0.01, 0.1, 1.0, Double.POSITIVE_INFINITY), upperBounds);
+  }
+
+  @Test
+  public void testLinearBuckets() {
+    Histogram histogram =
+        Histogram.builder().name("test").classicLinearUpperBounds(0.1, 0.1, 10).build();
+    List<Double> upperBounds =
+        getData(histogram).getClassicBuckets().stream()
+            .map(ClassicHistogramBucket::getUpperBound)
+            .collect(Collectors.toList());
+    Assert.assertEquals(
+        Arrays.asList(0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, Double.POSITIVE_INFINITY),
+        upperBounds);
+  }
+
+  @Test
+  public void testExponentialBuckets() {
+    Histogram histogram =
+        Histogram.builder().classicExponentialUpperBounds(2, 2.5, 3).name("test").build();
+    List<Double> upperBounds =
+        getData(histogram).getClassicBuckets().stream()
+            .map(ClassicHistogramBucket::getUpperBound)
+            .collect(Collectors.toList());
+    assertEquals(Arrays.asList(2.0, 5.0, 12.5, Double.POSITIVE_INFINITY), upperBounds);
+  }
+
+  @Test(expected = RuntimeException.class)
+  public void testBucketsIncludeNaN() {
+    Histogram.builder().name("test").classicUpperBounds(0.01, 0.1, 1.0, Double.NaN);
+  }
+
+  @Test
+  public void testNoLabelsDefaultZeroValue() {
+    Histogram noLabels = Histogram.builder().name("test").build();
+    assertEquals(0.0, getBucket(noLabels, 0.005).getCount(), 0.0);
+    assertEquals(0, getData(noLabels).getCount());
+    assertEquals(0.0, getData(noLabels).getSum(), 0.0);
+  }
+
+  private ClassicHistogramBucket getBucket(Histogram histogram, double le, String... labels) {
+    return getData(histogram, labels).getClassicBuckets().stream()
+        .filter(b -> b.getUpperBound() == le)
+        .findAny()
+        .orElseThrow(() -> new RuntimeException("bucket with le=" + le + " not found."));
+  }
+
+  @Test
+  public void testObserve() {
+    Histogram noLabels = Histogram.builder().name("test").build();
+    noLabels.observe(2);
+    assertEquals(1, getData(noLabels).getCount());
+    assertEquals(2.0, getData(noLabels).getSum(), .0);
+    assertEquals(0.0, getBucket(noLabels, 1).getCount(), .0);
+    assertEquals(1.0, getBucket(noLabels, 2.5).getCount(), .0);
+    noLabels.observe(4);
+    assertEquals(2.0, getData(noLabels).getCount(), .0);
+    assertEquals(6.0, getData(noLabels).getSum(), .0);
+    assertEquals(0.0, getBucket(noLabels, 1).getCount(), .0);
+    assertEquals(1.0, getBucket(noLabels, 2.5).getCount(), .0);
+    assertEquals(1.0, getBucket(noLabels, 5).getCount(), .0);
+    assertEquals(0.0, getBucket(noLabels, 10).getCount(), .0);
+    assertEquals(0.0, getBucket(noLabels, Double.POSITIVE_INFINITY).getCount(), .0);
+  }
+
+  @Test
+  // See https://github.com/prometheus/client_java/issues/646
+  public void testNegativeAmount() {
+    Histogram histogram =
+        Histogram.builder()
-                .name("test")
-                .constLabels(Labels.of("label1", "value1", "le", "0.3"));
-    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLabelNamePrefix() {
-        Histogram.builder()
-                .name("test")
-                .labelNames("__hello");
-    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalName() {
-        Histogram.builder().name("my_namespace/server.durations");
-    }
-
-    @Test(expected = IllegalArgumentException.class)
-    public void testNoName() {
-        Histogram.builder().build();
-    }
-
-    @Test(expected = NullPointerException.class)
-    public void testNullName() {
-        Histogram.builder()
-                .name(null);
-    }
-
-    @Test
-    public void testDuplicateClassicBuckets() {
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .classicUpperBounds(0, 3, 17, 3, 21)
-                .build();
-        List<Double> upperBounds = getData(histogram).getClassicBuckets().stream()
-                .map(ClassicHistogramBucket::getUpperBound)
-                .collect(Collectors.toList());
-        Assert.assertEquals(Arrays.asList(0.0, 3.0, 17.0, 21.0, Double.POSITIVE_INFINITY), upperBounds);
-    }
-
-    @Test
-    public void testUnsortedBuckets() {
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .classicUpperBounds(0.2, 0.1)
-                .build();
-        List<Double> upperBounds = getData(histogram).getClassicBuckets().stream()
-                .map(ClassicHistogramBucket::getUpperBound)
-                .collect(Collectors.toList());
-        Assert.assertEquals(Arrays.asList(0.1, 0.2, Double.POSITIVE_INFINITY), upperBounds);
+            .name("histogram")
+            .help("test histogram for negative values")
+            .classicUpperBounds(-10, -5, 0, 5, 10)
+            .build();
+    double expectedCount = 0;
+    double expectedSum = 0;
+    for (int i = 10; i >= -11; i--) {
+      histogram.observe(i);
+      expectedCount++;
+      expectedSum += i;
+      assertEquals(expectedSum, getData(histogram).getSum(), .001);
+      assertEquals(expectedCount, getData(histogram).getCount(), .001);
    }
-
-    @Test
-    public void testEmptyBuckets() {
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .classicUpperBounds()
-                .build();
-        List<Double> upperBounds = getData(histogram).getClassicBuckets().stream()
-                .map(ClassicHistogramBucket::getUpperBound)
-                .collect(Collectors.toList());
-        Assert.assertEquals(Collections.singletonList(Double.POSITIVE_INFINITY), upperBounds);
-    }
-
-    @Test
-    public void testBucketsIncludePositiveInfinity() {
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .classicUpperBounds(0.01, 0.1, 1.0, Double.POSITIVE_INFINITY)
-                .build();
-        List<Double> upperBounds = getData(histogram).getClassicBuckets().stream()
-                .map(ClassicHistogramBucket::getUpperBound)
-                .collect(Collectors.toList());
-        Assert.assertEquals(Arrays.asList(0.01, 0.1, 1.0, Double.POSITIVE_INFINITY), upperBounds);
-    }
-
-    @Test
-    public void testLinearBuckets() {
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .classicLinearUpperBounds(0.1, 0.1, 10)
-                .build();
-        List<Double> upperBounds = getData(histogram).getClassicBuckets().stream()
-                .map(ClassicHistogramBucket::getUpperBound)
-                .collect(Collectors.toList());
-        Assert.assertEquals(Arrays.asList(0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, Double.POSITIVE_INFINITY), upperBounds);
-    }
-
-    @Test
-    public void testExponentialBuckets() {
-        Histogram histogram = Histogram.builder()
-                .classicExponentialUpperBounds(2, 2.5, 3)
-                .name("test")
-                .build();
-        List<Double> upperBounds = getData(histogram).getClassicBuckets().stream()
-                .map(ClassicHistogramBucket::getUpperBound)
-                .collect(Collectors.toList());
-        assertEquals(Arrays.asList(2.0, 5.0, 12.5, Double.POSITIVE_INFINITY), upperBounds);
-    }
-
-    @Test(expected = RuntimeException.class)
-    public void testBucketsIncludeNaN() {
+    List<Long> expectedBucketCounts =
+        Arrays.asList(2L, 5L, 5L, 5L, 5L, 0L); // buckets -10, -5, 0, 5, 10, +Inf
+    List<Long> actualBucketCounts =
+        getData(histogram).getClassicBuckets().stream()
+            .map(ClassicHistogramBucket::getCount)
+            .collect(Collectors.toList());
+    assertEquals(expectedBucketCounts, actualBucketCounts);
+  }
+
+  @Test
+  public void testBoundaryConditions() {
+    Histogram histogram = Histogram.builder().name("test").build();
+    histogram.observe(2.5);
+    assertEquals(0, getBucket(histogram, 1).getCount());
+    assertEquals(1, getBucket(histogram, 2.5).getCount());
+
+    histogram.observe(Double.POSITIVE_INFINITY);
+    assertEquals(0, getBucket(histogram, 1).getCount());
+    assertEquals(1, getBucket(histogram, 2.5).getCount());
+    assertEquals(0, getBucket(histogram, 5).getCount());
+    assertEquals(0, getBucket(histogram, 10).getCount());
+    assertEquals(1, getBucket(histogram, Double.POSITIVE_INFINITY).getCount());
+  }
+
+  @Test
+  public void testObserveWithLabels() {
+    Histogram histogram =
+        Histogram.builder()
-                .name("test")
-                .classicUpperBounds(0.01, 0.1, 1.0, Double.NaN);
-    }
-
-    @Test
-    public void testNoLabelsDefaultZeroValue() {
-        Histogram noLabels = Histogram.builder().name("test").build();
-        assertEquals(0.0, getBucket(noLabels, 0.005).getCount(), 0.0);
-        assertEquals(0, getData(noLabels).getCount());
-        assertEquals(0.0, getData(noLabels).getSum(), 0.0);
-    }
-
-    private ClassicHistogramBucket getBucket(Histogram histogram, double le, String... labels) {
-        return getData(histogram, labels).getClassicBuckets().stream()
-                .filter(b -> b.getUpperBound() == le)
-                .findAny()
-                .orElseThrow(() -> new RuntimeException("bucket with le=" + le + " not found."));
-    }
-
-    @Test
-    public void testObserve() {
-        Histogram noLabels = Histogram.builder()
-                .name("test")
-                .build();
-        noLabels.observe(2);
-        assertEquals(1, getData(noLabels).getCount());
-        assertEquals(2.0, getData(noLabels).getSum(), .0);
-        assertEquals(0.0, getBucket(noLabels, 1).getCount(), .0);
-        assertEquals(1.0, getBucket(noLabels, 2.5).getCount(), .0);
-        noLabels.observe(4);
-        assertEquals(2.0, getData(noLabels).getCount(), .0);
-        assertEquals(6.0, getData(noLabels).getSum(), .0);
-        assertEquals(0.0, getBucket(noLabels, 1).getCount(), .0);
-        assertEquals(1.0, getBucket(noLabels, 2.5).getCount(), .0);
-        assertEquals(1.0, getBucket(noLabels, 5).getCount(), .0);
-        assertEquals(0.0, getBucket(noLabels, 10).getCount(), .0);
-        assertEquals(0.0, getBucket(noLabels, Double.POSITIVE_INFINITY).getCount(), .0);
-    }
-
-    @Test
-    // See https://github.com/prometheus/client_java/issues/646
-    public void testNegativeAmount() {
-        Histogram histogram = Histogram.builder()
-                .name("histogram")
-                .help("test histogram for negative values")
-                .classicUpperBounds(-10, -5, 0, 5, 10)
-                .build();
-        double expectedCount = 0;
-        double expectedSum = 0;
-        for (int i = 10; i >= -11; i--) {
-            histogram.observe(i);
-            expectedCount++;
-            expectedSum += i;
-            assertEquals(expectedSum, getData(histogram).getSum(), .001);
-            assertEquals(expectedCount, getData(histogram).getCount(), .001);
-        }
-        List<Long> expectedBucketCounts = Arrays.asList(2L, 5L, 5L, 5L, 5L, 0L); // buckets -10, -5, 0, 5, 10, +Inf
-        List<Long> actualBucketCounts = getData(histogram).getClassicBuckets().stream()
-                .map(ClassicHistogramBucket::getCount)
-                .collect(Collectors.toList());
-        assertEquals(expectedBucketCounts, actualBucketCounts);
-    }
-
-    @Test
-    public void testBoundaryConditions() {
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .build();
-        histogram.observe(2.5);
-        assertEquals(0, getBucket(histogram, 1).getCount());
-        assertEquals(1, getBucket(histogram, 2.5).getCount());
-
-        histogram.observe(Double.POSITIVE_INFINITY);
-        assertEquals(0, getBucket(histogram, 1).getCount());
-        assertEquals(1, getBucket(histogram, 2.5).getCount());
-        assertEquals(0, getBucket(histogram, 5).getCount());
-        assertEquals(0, getBucket(histogram, 10).getCount());
-        assertEquals(1, getBucket(histogram, Double.POSITIVE_INFINITY).getCount());
-    }
-
-    @Test
-    public void testObserveWithLabels() {
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .constLabels(Labels.of("env", "prod"))
-                .labelNames("path", "status")
-                .build();
-        histogram.labelValues("/hello", "200").observe(0.11);
-        histogram.labelValues("/hello", "200").observe(0.2);
-        histogram.labelValues("/hello", "500").observe(0.19);
-        HistogramSnapshot.HistogramDataPointSnapshot data200 = getData(histogram, "env", "prod", "path", "/hello", "status", "200");
-        HistogramSnapshot.HistogramDataPointSnapshot data500 = getData(histogram, "env", "prod", "path", "/hello", "status", "500");
-        assertEquals(2, data200.getCount());
-        assertEquals(0.31, data200.getSum(), 0.0000001);
-        assertEquals(1, data500.getCount());
-        assertEquals(0.19, data500.getSum(), 0.0000001);
-        histogram.labelValues("/hello", "200").observe(0.13);
-        data200 = getData(histogram, "env", "prod", "path", "/hello", "status", "200");
-        data500 = getData(histogram, "env", "prod", "path", "/hello", "status", "500");
-        assertEquals(3, data200.getCount());
-        assertEquals(0.44, data200.getSum(), 0.0000001);
-        assertEquals(1, data500.getCount());
-        assertEquals(0.19, data500.getSum(), 0.0000001);
-    }
-
-    @Test
-    public void testObserveMultithreaded() throws InterruptedException, ExecutionException, TimeoutException {
-        // Hard to test concurrency, but let's run a couple of observations in parallel and assert none gets lost.
-        Histogram histogram = Histogram.builder()
-                .name("test")
-                .labelNames("status")
-                .build();
-        int nThreads = 8;
-        DistributionDataPoint obs = histogram.labelValues("200");
-        ExecutorService executor = Executors.newFixedThreadPool(nThreads);
-        CompletionService<List<HistogramSnapshot>> completionService = new ExecutorCompletionService<>(executor);
-        CountDownLatch startSignal = new CountDownLatch(nThreads);
-        for (int t = 0; t < nThreads; t++) {
-            completionService.submit(() -> {
-                List<HistogramSnapshot> snapshots = new ArrayList<>();
-                startSignal.countDown();
-                startSignal.await();
-                for (int i = 0; i < 10; i++) {
-                    for (int j = 0; j < 1000; j++) {
-                        obs.observe(1.1);
-                    }
-                    snapshots.add(histogram.collect());
-                }
-                return snapshots;
-            });
-        }
-        long maxCount = 0;
-        for (int i = 0; i < nThreads; i++) {
-            Future<List<HistogramSnapshot>> future = completionService.take();
-            List<HistogramSnapshot> snapshots = future.get(5, TimeUnit.SECONDS);
-            long count = 0;
-            for (HistogramSnapshot snapshot : snapshots) {
-                Assert.assertEquals(1, snapshot.getDataPoints().size());
-                HistogramSnapshot.HistogramDataPointSnapshot data = snapshot.getDataPoints().stream().findFirst().orElseThrow(RuntimeException::new);
-                Assert.assertTrue(data.getCount() >= (count + 1000)); // 1000 own observations plus the ones from other threads
-                count = data.getCount();
-            }
-            if (count > maxCount) {
-                maxCount = count;
-            }
-        }
-        Assert.assertEquals(nThreads * 10_000, maxCount); // the last collect() has seen all observations
-        Assert.assertEquals(getBucket(histogram, 2.5, "status", "200").getCount(), nThreads * 10_000);
-        executor.shutdown();
-        Assert.assertTrue(executor.awaitTermination(5, TimeUnit.SECONDS));
-    }
-
-
-    private HistogramSnapshot.HistogramDataPointSnapshot getData(Histogram histogram, String... labels) {
-        return histogram.collect().getDataPoints().stream()
-                .filter(d -> d.getLabels().equals(Labels.of(labels)))
-                .findAny()
-                .orElseThrow(() -> new RuntimeException("histogram with labels " + labels + " not found"));
-    }
+            .name("test")
+            .constLabels(Labels.of("env", "prod"))
+            .labelNames("path", "status")
+            .build();
+    histogram.labelValues("/hello", "200").observe(0.11);
+    histogram.labelValues("/hello", "200").observe(0.2);
+    histogram.labelValues("/hello", "500").observe(0.19);
+    HistogramSnapshot.HistogramDataPointSnapshot data200 =
+        getData(histogram, "env", "prod", "path", "/hello", "status", "200");
+    HistogramSnapshot.HistogramDataPointSnapshot data500 =
+        getData(histogram, "env", "prod", "path", "/hello", "status", "500");
+    assertEquals(2, data200.getCount());
+    assertEquals(0.31, data200.getSum(), 0.0000001);
+    assertEquals(1, data500.getCount());
+    assertEquals(0.19, data500.getSum(), 0.0000001);
+    histogram.labelValues("/hello", "200").observe(0.13);
+    data200 = getData(histogram, "env", "prod", "path", "/hello", "status", "200");
+    data500 = getData(histogram, "env", "prod", "path", "/hello", "status", "500");
+    assertEquals(3, data200.getCount());
+    assertEquals(0.44, data200.getSum(), 0.0000001);
+    assertEquals(1, data500.getCount());
+    assertEquals(0.19, data500.getSum(), 0.0000001);
+  }
+
+  @Test
+  public void testObserveMultithreaded()
+      throws InterruptedException, ExecutionException, TimeoutException {
+    // Hard to test concurrency, but let's run a couple of observations in parallel and assert none
+    // gets lost.
+    Histogram histogram = Histogram.builder().name("test").labelNames("status").build();
+    int nThreads = 8;
+    DistributionDataPoint obs = histogram.labelValues("200");
+    ExecutorService executor = Executors.newFixedThreadPool(nThreads);
+    CompletionService<List<HistogramSnapshot>> completionService =
+        new ExecutorCompletionService<>(executor);
+    CountDownLatch startSignal = new CountDownLatch(nThreads);
+    for (int t = 0; t < nThreads; t++) {
+      completionService.submit(
+          () -> {
+            List<HistogramSnapshot> snapshots = new ArrayList<>();
+            startSignal.countDown();
+            startSignal.await();
+            for (int i = 0; i < 10; i++) {
+              for (int j = 0; j < 1000; j++) {
+                obs.observe(1.1);
+              }
+              snapshots.add(histogram.collect());
+            }
+            return snapshots;
+          });
+    }
+    long maxCount = 0;
+    for (int i = 0; i < nThreads; i++) {
+      Future<List<HistogramSnapshot>> future = completionService.take();
+      List<HistogramSnapshot> snapshots = future.get(5, TimeUnit.SECONDS);
+      long count = 0;
+      for (HistogramSnapshot snapshot : snapshots) {
+        Assert.assertEquals(1, snapshot.getDataPoints().size());
+        HistogramSnapshot.HistogramDataPointSnapshot data =
+            snapshot.getDataPoints().stream().findFirst().orElseThrow(RuntimeException::new);
+        Assert.assertTrue(
+            data.getCount()
+                >= (count + 1000)); // 1000 own observations plus the ones from other threads
+        count = data.getCount();
+      }
+      if (count > maxCount) {
+        maxCount = count;
+      }
+    }
+    Assert.assertEquals(
+        nThreads * 10_000, maxCount); // the last collect() has seen all observations
+    Assert.assertEquals(getBucket(histogram, 2.5, "status", "200").getCount(), nThreads * 10_000);
+    executor.shutdown();
+    Assert.assertTrue(executor.awaitTermination(5, TimeUnit.SECONDS));
+  }
+
+  private HistogramSnapshot.HistogramDataPointSnapshot getData(
labels) { + return histogram.collect().getDataPoints().stream() + .filter(d -> d.getLabels().equals(Labels.of(labels))) + .findAny() + .orElseThrow(() -> new RuntimeException("histogram with labels " + labels + " not found")); + } } diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/InfoTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/InfoTest.java index f2ddbc456..4175ae733 100644 --- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/InfoTest.java +++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/InfoTest.java @@ -1,107 +1,102 @@ package io.prometheus.metrics.core.metrics; +import static org.junit.Assert.assertEquals; + import io.prometheus.metrics.expositionformats.OpenMetricsTextFormatWriter; +import io.prometheus.metrics.expositionformats.PrometheusProtobufWriter; +import io.prometheus.metrics.expositionformats.generated.com_google_protobuf_3_25_3.Metrics; import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.MetricSnapshots; import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat; -import io.prometheus.metrics.expositionformats.PrometheusProtobufWriter; -import io.prometheus.metrics.expositionformats.generated.com_google_protobuf_3_25_3.Metrics; -import org.junit.Assert; -import org.junit.Test; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; - -import static org.junit.Assert.assertEquals; +import org.junit.Assert; +import org.junit.Test; public class InfoTest { - @Test - public void testInfoStrippedFromName() { - for (String name : new String[]{ - "jvm.runtime", "jvm_runtime", - "jvm.runtime.info", "jvm_runtime_info"}) { - for (String labelName : new String[]{"my.key", "my_key"}) { - Info info = Info.builder() - .name(name) - .labelNames(labelName) - .build(); - info.addLabelValues("value"); - Metrics.MetricFamily protobufData = new PrometheusProtobufWriter().convert(info.collect()); - assertEquals("name: \"jvm_runtime_info\" type: GAUGE metric { label { name: \"my_key\" value: \"value\" } gauge { value: 1.0 } }", TextFormat.printer().shortDebugString(protobufData)); - } - } - } - - @Test - public void testAddAndRemove() throws IOException { - Info info = Info.builder() - .name("test_info") - .labelNames("a", "b") - .build(); - Assert.assertEquals(0, info.collect().getDataPoints().size()); - info.addLabelValues("val1", "val2"); - Assert.assertEquals(1, info.collect().getDataPoints().size()); - info.addLabelValues("val1", "val2"); // already exist, so no change - Assert.assertEquals(1, info.collect().getDataPoints().size()); - info.addLabelValues("val2", "val2"); - Assert.assertEquals(2, info.collect().getDataPoints().size()); - info.remove("val1", "val3"); // does not exist, so no change - Assert.assertEquals(2, info.collect().getDataPoints().size()); - info.remove("val1", "val2"); - Assert.assertEquals(1, info.collect().getDataPoints().size()); - info.remove("val2", "val2"); - Assert.assertEquals(0, info.collect().getDataPoints().size()); + @Test + public void testInfoStrippedFromName() { + for (String name : + new String[] { + "jvm.runtime", "jvm_runtime", + "jvm.runtime.info", "jvm_runtime_info" + }) { + for (String labelName : new String[] {"my.key", "my_key"}) { + Info info = Info.builder().name(name).labelNames(labelName).build(); + info.addLabelValues("value"); + Metrics.MetricFamily protobufData = new 
PrometheusProtobufWriter().convert(info.collect()); + assertEquals( + "name: \"jvm_runtime_info\" type: GAUGE metric { label { name: \"my_key\" value: \"value\" } gauge { value: 1.0 } }", + TextFormat.printer().shortDebugString(protobufData)); + } } + } - @Test - public void testSet() throws IOException { - Info info = Info.builder() - .name("target_info") - .constLabels(Labels.of("service.name", "test", "service.instance.id", "123")) - .labelNames("service.version") - .build(); - info.setLabelValues("1.0.0"); - Assert.assertEquals(1, info.collect().getDataPoints().size()); - info.setLabelValues("2.0.0"); - Assert.assertEquals(1, info.collect().getDataPoints().size()); - assertTextFormat("target_info{service_instance_id=\"123\",service_name=\"test\",service_version=\"2.0.0\"} 1\n", info); - } + @Test + public void testAddAndRemove() throws IOException { + Info info = Info.builder().name("test_info").labelNames("a", "b").build(); + Assert.assertEquals(0, info.collect().getDataPoints().size()); + info.addLabelValues("val1", "val2"); + Assert.assertEquals(1, info.collect().getDataPoints().size()); + info.addLabelValues("val1", "val2"); // already exist, so no change + Assert.assertEquals(1, info.collect().getDataPoints().size()); + info.addLabelValues("val2", "val2"); + Assert.assertEquals(2, info.collect().getDataPoints().size()); + info.remove("val1", "val3"); // does not exist, so no change + Assert.assertEquals(2, info.collect().getDataPoints().size()); + info.remove("val1", "val2"); + Assert.assertEquals(1, info.collect().getDataPoints().size()); + info.remove("val2", "val2"); + Assert.assertEquals(0, info.collect().getDataPoints().size()); + } - @Test - public void testConstLabelsOnly() throws IOException { - Info info = Info.builder() - .name("target_info") - .constLabels(Labels.of("service.name", "test", "service.instance.id", "123")) - .build(); - Assert.assertEquals(1, info.collect().getDataPoints().size()); - assertTextFormat("target_info{service_instance_id=\"123\",service_name=\"test\"} 1\n", info); - } - - @Test(expected = IllegalArgumentException.class) - public void testConstLabelsDuplicate1() { + @Test + public void testSet() throws IOException { + Info info = Info.builder() - .constLabels(Labels.of("a_1", "val1")) - .labelNames("a.1") - .build(); - } + .name("target_info") + .constLabels(Labels.of("service.name", "test", "service.instance.id", "123")) + .labelNames("service.version") + .build(); + info.setLabelValues("1.0.0"); + Assert.assertEquals(1, info.collect().getDataPoints().size()); + info.setLabelValues("2.0.0"); + Assert.assertEquals(1, info.collect().getDataPoints().size()); + assertTextFormat( + "target_info{service_instance_id=\"123\",service_name=\"test\",service_version=\"2.0.0\"} 1\n", + info); + } - @Test(expected = IllegalArgumentException.class) - public void testConstLabelsDuplicate2() { + @Test + public void testConstLabelsOnly() throws IOException { + Info info = Info.builder() - .labelNames("a_1") - .constLabels(Labels.of("a.1", "val1")) - .build(); - } + .name("target_info") + .constLabels(Labels.of("service.name", "test", "service.instance.id", "123")) + .build(); + Assert.assertEquals(1, info.collect().getDataPoints().size()); + assertTextFormat("target_info{service_instance_id=\"123\",service_name=\"test\"} 1\n", info); + } + + @Test(expected = IllegalArgumentException.class) + public void testConstLabelsDuplicate1() { + Info.builder().constLabels(Labels.of("a_1", "val1")).labelNames("a.1").build(); + } + + @Test(expected = 
IllegalArgumentException.class) + public void testConstLabelsDuplicate2() { + Info.builder().labelNames("a_1").constLabels(Labels.of("a.1", "val1")).build(); + } - private void assertTextFormat(String expected, Info info) throws IOException { - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - writer.write(outputStream, MetricSnapshots.of(info.collect())); - String result = outputStream.toString(StandardCharsets.UTF_8.name()); - if (!result.contains(expected)) { - throw new AssertionError(expected + " is not contained in the following output:\n" + result); - } + private void assertTextFormat(String expected, Info info) throws IOException { + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + writer.write(outputStream, MetricSnapshots.of(info.collect())); + String result = outputStream.toString(StandardCharsets.UTF_8.name()); + if (!result.contains(expected)) { + throw new AssertionError(expected + " is not contained in the following output:\n" + result); } + } } diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SlidingWindowTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SlidingWindowTest.java index 3461d959e..ce43b141c 100644 --- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SlidingWindowTest.java +++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SlidingWindowTest.java @@ -1,79 +1,89 @@ package io.prometheus.metrics.core.metrics; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicLong; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; public class SlidingWindowTest { - class Observer { + class Observer { - final List values = new ArrayList<>(); + final List values = new ArrayList<>(); - public void observe(double value) { - values.add(value); - } + public void observe(double value) { + values.add(value); + } - void assertValues(double... expectedValues) { - ArrayList expectedList = new ArrayList<>(); - for (double expectedValue : expectedValues) { - expectedList.add(expectedValue); - } - Assert.assertEquals("Start time: " + startTime + ", current time: " + currentTimeMillis.get() + ", elapsed time: " + (currentTimeMillis.get() - startTime), expectedList, values); - } + void assertValues(double... 
expectedValues) { + ArrayList expectedList = new ArrayList<>(); + for (double expectedValue : expectedValues) { + expectedList.add(expectedValue); + } + Assert.assertEquals( + "Start time: " + + startTime + + ", current time: " + + currentTimeMillis.get() + + ", elapsed time: " + + (currentTimeMillis.get() - startTime), + expectedList, + values); } + } - private long startTime; - private final AtomicLong currentTimeMillis = new AtomicLong(); - private SlidingWindow ringBuffer; - private final long maxAgeSeconds = 30; - private final int ageBuckets = 5; - private final long timeBetweenRotateMillis = maxAgeSeconds * 1000 / ageBuckets + 2; + private long startTime; + private final AtomicLong currentTimeMillis = new AtomicLong(); + private SlidingWindow ringBuffer; + private final long maxAgeSeconds = 30; + private final int ageBuckets = 5; + private final long timeBetweenRotateMillis = maxAgeSeconds * 1000 / ageBuckets + 2; - @Before - public void setUp() { - startTime = System.currentTimeMillis(); - currentTimeMillis.set(startTime); - ringBuffer = new SlidingWindow<>(Observer.class, Observer::new, Observer::observe, maxAgeSeconds, ageBuckets); - ringBuffer.currentTimeMillis = currentTimeMillis::get; - } + @Before + public void setUp() { + startTime = System.currentTimeMillis(); + currentTimeMillis.set(startTime); + ringBuffer = + new SlidingWindow<>( + Observer.class, Observer::new, Observer::observe, maxAgeSeconds, ageBuckets); + ringBuffer.currentTimeMillis = currentTimeMillis::get; + } - @Test - public void testRotate() { - for (int i=0; i first observation evicted - ringBuffer.current().assertValues(2.0); - ringBuffer.observe(3.0); - ringBuffer.current().assertValues(2.0, 3.0); - currentTimeMillis.addAndGet(2 * timeBetweenRotateMillis); // 7/5 of max age - ringBuffer.current().assertValues(3.0); - currentTimeMillis.addAndGet(3 * timeBetweenRotateMillis); // 10/5 of max age - ringBuffer.current().assertValues(); // empty - } + @Test + public void testMultiRotate() { + ringBuffer.observe(1.0); + currentTimeMillis.addAndGet(2 * timeBetweenRotateMillis); // 2/5 of max aqe + ringBuffer.observe(2.0); + ringBuffer.current().assertValues(1.0, 2.0); + currentTimeMillis.addAndGet( + 3 * timeBetweenRotateMillis); // 5/5 of max age -> first observation evicted + ringBuffer.current().assertValues(2.0); + ringBuffer.observe(3.0); + ringBuffer.current().assertValues(2.0, 3.0); + currentTimeMillis.addAndGet(2 * timeBetweenRotateMillis); // 7/5 of max age + ringBuffer.current().assertValues(3.0); + currentTimeMillis.addAndGet(3 * timeBetweenRotateMillis); // 10/5 of max age + ringBuffer.current().assertValues(); // empty + } } diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StateSetTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StateSetTest.java index 5a82cdb48..776aeacde 100644 --- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StateSetTest.java +++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StateSetTest.java @@ -7,69 +7,68 @@ public class StateSetTest { - enum MyFeatureFlag { - EXPERIMENTAL_FEATURE_1 { - @Override - public String toString() { - return "feature1"; - } - }, + enum MyFeatureFlag { + EXPERIMENTAL_FEATURE_1 { + @Override + public String toString() { + return "feature1"; + } + }, - EXPERIMENTAL_FEATURE_2 { - @Override - public String toString() { - return "feature2"; - } - } + EXPERIMENTAL_FEATURE_2 { + @Override + public String toString() { + return 
"feature2"; + } } + } - @Test - public void testEnumStateSet() { - StateSet stateSet = StateSet.builder() - .name("feature_flags") - .labelNames("environment") - .states(MyFeatureFlag.class) - .build(); - stateSet.labelValues("dev").setTrue(MyFeatureFlag.EXPERIMENTAL_FEATURE_2); - stateSet.labelValues("prod").setFalse(MyFeatureFlag.EXPERIMENTAL_FEATURE_2); - StateSetSnapshot snapshot = stateSet.collect(); - Assert.assertEquals(2, snapshot.getDataPoints().size()); - Assert.assertEquals(2, getData(stateSet, "environment", "dev").size()); - Assert.assertEquals("feature1", getData(stateSet, "environment", "dev").getName(0)); - Assert.assertFalse(getData(stateSet, "environment", "dev").isTrue(0)); - Assert.assertEquals("feature2", getData(stateSet, "environment", "dev").getName(1)); - Assert.assertTrue(getData(stateSet, "environment", "dev").isTrue(1)); - Assert.assertEquals(2, getData(stateSet, "environment", "prod").size()); - Assert.assertEquals("feature1", getData(stateSet, "environment", "prod").getName(0)); - Assert.assertFalse(getData(stateSet, "environment", "prod").isTrue(0)); - Assert.assertEquals("feature2", getData(stateSet, "environment", "prod").getName(1)); - Assert.assertFalse(getData(stateSet, "environment", "prod").isTrue(1)); - } + @Test + public void testEnumStateSet() { + StateSet stateSet = + StateSet.builder() + .name("feature_flags") + .labelNames("environment") + .states(MyFeatureFlag.class) + .build(); + stateSet.labelValues("dev").setTrue(MyFeatureFlag.EXPERIMENTAL_FEATURE_2); + stateSet.labelValues("prod").setFalse(MyFeatureFlag.EXPERIMENTAL_FEATURE_2); + StateSetSnapshot snapshot = stateSet.collect(); + Assert.assertEquals(2, snapshot.getDataPoints().size()); + Assert.assertEquals(2, getData(stateSet, "environment", "dev").size()); + Assert.assertEquals("feature1", getData(stateSet, "environment", "dev").getName(0)); + Assert.assertFalse(getData(stateSet, "environment", "dev").isTrue(0)); + Assert.assertEquals("feature2", getData(stateSet, "environment", "dev").getName(1)); + Assert.assertTrue(getData(stateSet, "environment", "dev").isTrue(1)); + Assert.assertEquals(2, getData(stateSet, "environment", "prod").size()); + Assert.assertEquals("feature1", getData(stateSet, "environment", "prod").getName(0)); + Assert.assertFalse(getData(stateSet, "environment", "prod").isTrue(0)); + Assert.assertEquals("feature2", getData(stateSet, "environment", "prod").getName(1)); + Assert.assertFalse(getData(stateSet, "environment", "prod").isTrue(1)); + } - @Test - public void testDefaultFalse() { - StateSet stateSet = StateSet.builder() - .name("test") - .states("state1", "state2", "state3") - .build(); - Assert.assertEquals(3, getData(stateSet).size()); - Assert.assertEquals("state1", getData(stateSet).getName(0)); - Assert.assertFalse(getData(stateSet).isTrue(0)); - Assert.assertEquals("state2", getData(stateSet).getName(1)); - Assert.assertFalse(getData(stateSet).isTrue(1)); - Assert.assertEquals("state3", getData(stateSet).getName(2)); - Assert.assertFalse(getData(stateSet).isTrue(2)); - } + @Test + public void testDefaultFalse() { + StateSet stateSet = + StateSet.builder().name("test").states("state1", "state2", "state3").build(); + Assert.assertEquals(3, getData(stateSet).size()); + Assert.assertEquals("state1", getData(stateSet).getName(0)); + Assert.assertFalse(getData(stateSet).isTrue(0)); + Assert.assertEquals("state2", getData(stateSet).getName(1)); + Assert.assertFalse(getData(stateSet).isTrue(1)); + Assert.assertEquals("state3", getData(stateSet).getName(2)); + 
Assert.assertFalse(getData(stateSet).isTrue(2)); + } - private StateSetSnapshot.StateSetDataPointSnapshot getData(StateSet stateSet, String... labels) { - return stateSet.collect().getDataPoints().stream() - .filter(d -> d.getLabels().equals(Labels.of(labels))) - .findAny() - .orElseThrow(() -> new RuntimeException("stateset with labels " + labels + " not found")); - } + private StateSetSnapshot.StateSetDataPointSnapshot getData(StateSet stateSet, String... labels) { + return stateSet.collect().getDataPoints().stream() + .filter(d -> d.getLabels().equals(Labels.of(labels))) + .findAny() + .orElseThrow(() -> new RuntimeException("stateset with labels " + labels + " not found")); + } - @Test(expected = IllegalStateException.class) - public void testStatesCannotBeEmpty() { - StateSet.builder().name("invalid").build(); - } + @Test(expected = IllegalStateException.class) + public void testStatesCannotBeEmpty() { + StateSet.builder().name("invalid").build(); + } } diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StatefulMetricTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StatefulMetricTest.java index dd2c4355a..d77ec6e83 100644 --- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StatefulMetricTest.java +++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/StatefulMetricTest.java @@ -1,69 +1,69 @@ package io.prometheus.metrics.core.metrics; -import org.junit.Assert; -import org.junit.Test; - import java.lang.reflect.Field; import java.util.Map; +import org.junit.Assert; +import org.junit.Test; public class StatefulMetricTest { - @Test - public void testLabelRemoveWhileCollecting() throws Exception { - Counter counter = Counter.builder().name("test").labelNames("label1", "label2").build(); - Field data = counter.getClass().getSuperclass().getDeclaredField("data"); - data.setAccessible(true); + @Test + public void testLabelRemoveWhileCollecting() throws Exception { + Counter counter = Counter.builder().name("test").labelNames("label1", "label2").build(); + Field data = counter.getClass().getSuperclass().getDeclaredField("data"); + data.setAccessible(true); - counter.labelValues("a", "b").inc(1.0); - counter.labelValues("c", "d").inc(3.0); - counter.labelValues("e", "f").inc(7.0); + counter.labelValues("a", "b").inc(1.0); + counter.labelValues("c", "d").inc(3.0); + counter.labelValues("e", "f").inc(7.0); - // collect() iterates over data.entrySet(). - // remove() removes entries from data. - // Make sure iterating does not yield null while removing. + // collect() iterates over data.entrySet(). + // remove() removes entries from data. + // Make sure iterating does not yield null while removing. 
- int i = 0; - for (Map.Entry entry : ((Map) data.get(counter)).entrySet()) { - i++; - if (i == 2) { - counter.remove("c", "d"); - counter.remove("e", "f"); - } - Assert.assertNotNull(entry.getKey()); - Assert.assertNotNull(entry.getValue()); - } + int i = 0; + for (Map.Entry entry : ((Map) data.get(counter)).entrySet()) { + i++; + if (i == 2) { + counter.remove("c", "d"); + counter.remove("e", "f"); + } + Assert.assertNotNull(entry.getKey()); + Assert.assertNotNull(entry.getValue()); } + } - @Test - public void testClear() { - Counter counter = Counter.builder().name("test").labelNames("label1", "label2").build(); - counter.labelValues("a", "b").inc(3.0); - counter.labelValues("c", "d").inc(3.0); - counter.labelValues("a", "b").inc(); - Assert.assertEquals(2, counter.collect().getDataPoints().size()); + @Test + public void testClear() { + Counter counter = Counter.builder().name("test").labelNames("label1", "label2").build(); + counter.labelValues("a", "b").inc(3.0); + counter.labelValues("c", "d").inc(3.0); + counter.labelValues("a", "b").inc(); + Assert.assertEquals(2, counter.collect().getDataPoints().size()); - counter.clear(); - Assert.assertEquals(0, counter.collect().getDataPoints().size()); + counter.clear(); + Assert.assertEquals(0, counter.collect().getDataPoints().size()); - counter.labelValues("a", "b").inc(); - Assert.assertEquals(1, counter.collect().getDataPoints().size()); - } + counter.labelValues("a", "b").inc(); + Assert.assertEquals(1, counter.collect().getDataPoints().size()); + } - @Test - public void testClearNoLabels() { - Counter counter = Counter.builder().name("test").build(); - counter.inc(); - Assert.assertEquals(1, counter.collect().getDataPoints().size()); - Assert.assertEquals(1.0, counter.collect().getDataPoints().get(0).getValue(), 0.0); + @Test + public void testClearNoLabels() { + Counter counter = Counter.builder().name("test").build(); + counter.inc(); + Assert.assertEquals(1, counter.collect().getDataPoints().size()); + Assert.assertEquals(1.0, counter.collect().getDataPoints().get(0).getValue(), 0.0); - counter.clear(); - // No labels is always present, but as no value has been observed after clear() the value should be 0.0 - Assert.assertEquals(1, counter.collect().getDataPoints().size()); - Assert.assertEquals(0.0, counter.collect().getDataPoints().get(0).getValue(), 0.0); + counter.clear(); + // No labels is always present, but as no value has been observed after clear() the value should + // be 0.0 + Assert.assertEquals(1, counter.collect().getDataPoints().size()); + Assert.assertEquals(0.0, counter.collect().getDataPoints().get(0).getValue(), 0.0); - // Making inc() works correctly after clear() - counter.inc(); - Assert.assertEquals(1, counter.collect().getDataPoints().size()); - Assert.assertEquals(1.0, counter.collect().getDataPoints().get(0).getValue(), 0.0); - } + // Making inc() works correctly after clear() + counter.inc(); + Assert.assertEquals(1, counter.collect().getDataPoints().size()); + Assert.assertEquals(1.0, counter.collect().getDataPoints().get(0).getValue(), 0.0); + } } diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SummaryWithCallbackTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SummaryWithCallbackTest.java index cbd9ef53f..8231d86f2 100644 --- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SummaryWithCallbackTest.java +++ 
b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/SummaryWithCallbackTest.java @@ -2,5 +2,5 @@ public class SummaryWithCallbackTest { - // TODO :). Anyway, callbacks are implicitly covered by the JVM metrics tests as well. + // TODO :). Anyway, callbacks are implicitly covered by the JVM metrics tests as well. } diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TestUtil.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TestUtil.java index 489bcac33..95a8b90fb 100644 --- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TestUtil.java +++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TestUtil.java @@ -5,9 +5,9 @@ public class TestUtil { - public static void assertExemplarEquals(Exemplar expected, Exemplar actual) { - // ignore timestamp - Assert.assertEquals(expected.getValue(), actual.getValue(), 0.00001); - Assert.assertEquals(expected.getLabels(), actual.getLabels()); - } + public static void assertExemplarEquals(Exemplar expected, Exemplar actual) { + // ignore timestamp + Assert.assertEquals(expected.getValue(), actual.getValue(), 0.00001); + Assert.assertEquals(expected.getLabels(), actual.getLabels()); + } } diff --git a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TodoTest.java b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TodoTest.java index 996e199db..be6d17784 100644 --- a/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TodoTest.java +++ b/prometheus-metrics-core/src/test/java/io/prometheus/metrics/core/metrics/TodoTest.java @@ -2,16 +2,17 @@ public class TodoTest { - // if a metric with labels is created but never used it has no data. - // The registry's collect() method should skip those metrics to avoid illegal protobuf or text format. + // if a metric with labels is created but never used it has no data. + // The registry's collect() method should skip those metrics to avoid illegal protobuf or text + // format. 
- // callback versions of metrics + // callback versions of metrics - // build() called with name == null + // build() called with name == null - // call inc() without labels, but the metric was created with labels + // call inc() without labels, but the metric was created with labels - // call inc() with labels, but the metric was created without labels + // call inc() with labels, but the metric was created without labels - // for performance: Use return value of withLabels() directly + // for performance: Use return value of withLabels() directly } diff --git a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpExchange.java b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpExchange.java index 7a9f5846b..b7ac63b28 100644 --- a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpExchange.java +++ b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpExchange.java @@ -3,11 +3,14 @@ import java.io.IOException; public interface PrometheusHttpExchange extends AutoCloseable { - PrometheusHttpRequest getRequest(); - PrometheusHttpResponse getResponse(); - void handleException(IOException e) throws IOException; - void handleException(RuntimeException e); + PrometheusHttpRequest getRequest(); - @Override - void close(); + PrometheusHttpResponse getResponse(); + + void handleException(IOException e) throws IOException; + + void handleException(RuntimeException e); + + @Override + void close(); } diff --git a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpRequest.java b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpRequest.java index f7b5346a5..954facfef 100644 --- a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpRequest.java +++ b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpRequest.java @@ -1,78 +1,65 @@ package io.prometheus.metrics.exporter.common; +import io.prometheus.metrics.model.registry.PrometheusScrapeRequest; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Enumeration; -import io.prometheus.metrics.model.registry.PrometheusScrapeRequest; - public interface PrometheusHttpRequest extends PrometheusScrapeRequest { - /** - * See {@code jakarta.servlet.http.HttpServletRequest.getQueryString()} - */ - String getQueryString(); + /** See {@code jakarta.servlet.http.HttpServletRequest.getQueryString()} */ + String getQueryString(); - /** - * See {@code jakarta.servlet.http.HttpServletRequest.getHeaders(String)} - */ - Enumeration getHeaders(String name); + /** See {@code jakarta.servlet.http.HttpServletRequest.getHeaders(String)} */ + Enumeration getHeaders(String name); - /** - * See {@code jakarta.servlet.http.HttpServletRequest.getMethod()} - */ - String getMethod(); + /** See {@code jakarta.servlet.http.HttpServletRequest.getMethod()} */ + String getMethod(); - /** - * See {@code jakarta.servlet.http.HttpServletRequest.getHeader(String)} - */ - default String getHeader(String name) { - Enumeration headers = getHeaders(name); - if (headers == null || !headers.hasMoreElements()) { - return null; - } else { - return headers.nextElement(); - } + /** See {@code 
jakarta.servlet.http.HttpServletRequest.getHeader(String)} */ + default String getHeader(String name) { + Enumeration headers = getHeaders(name); + if (headers == null || !headers.hasMoreElements()) { + return null; + } else { + return headers.nextElement(); } + } - /** - * See {@code jakarta.servlet.ServletRequest.getParameter(String)} - */ - default String getParameter(String name) { - String[] values = getParameterValues(name); - if (values == null || values.length == 0) { - return null; - } else { - return values[0]; - } + /** See {@code jakarta.servlet.ServletRequest.getParameter(String)} */ + default String getParameter(String name) { + String[] values = getParameterValues(name); + if (values == null || values.length == 0) { + return null; + } else { + return values[0]; } + } - /** - * See {@code jakarta.servlet.ServletRequest.getParameterValues(String)} - */ - default String[] getParameterValues(String name) { - try { - ArrayList result = new ArrayList<>(); - String queryString = getQueryString(); - if (queryString != null) { - String[] pairs = queryString.split("&"); - for (String pair : pairs) { - int idx = pair.indexOf("="); - if (idx != -1 && URLDecoder.decode(pair.substring(0, idx), "UTF-8").equals(name)) { - result.add(URLDecoder.decode(pair.substring(idx + 1), "UTF-8")); - } - } - } - if (result.isEmpty()) { - // Servlet API: getParameterValues() returns null if the parameter does not exist. - return null; - } else { - return result.toArray(new String[0]); - } - } catch (UnsupportedEncodingException e) { - // UTF-8 encoding not supported. - throw new RuntimeException(e); + /** See {@code jakarta.servlet.ServletRequest.getParameterValues(String)} */ + default String[] getParameterValues(String name) { + try { + ArrayList result = new ArrayList<>(); + String queryString = getQueryString(); + if (queryString != null) { + String[] pairs = queryString.split("&"); + for (String pair : pairs) { + int idx = pair.indexOf("="); + if (idx != -1 && URLDecoder.decode(pair.substring(0, idx), "UTF-8").equals(name)) { + result.add(URLDecoder.decode(pair.substring(idx + 1), "UTF-8")); + } } + } + if (result.isEmpty()) { + // Servlet API: getParameterValues() returns null if the parameter does not exist. + return null; + } else { + return result.toArray(new String[0]); + } + } catch (UnsupportedEncodingException e) { + // UTF-8 encoding not supported. + throw new RuntimeException(e); } + } } diff --git a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpResponse.java b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpResponse.java index 4e3db6955..b3dd4e2fb 100644 --- a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpResponse.java +++ b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusHttpResponse.java @@ -5,14 +5,13 @@ public interface PrometheusHttpResponse { - /** - * See {@code jakarta.servlet.http.HttpServletResponse.setHeader(String, String)} - */ - void setHeader(String name, String value); + /** See {@code jakarta.servlet.http.HttpServletResponse.setHeader(String, String)} */ + void setHeader(String name, String value); - /** - * This is equivalent to calling {@link com.sun.net.httpserver.HttpExchange#sendResponseHeaders(int, long)} - * followed by {@link com.sun.net.httpserver.HttpExchange#getResponseBody()}. 
- */ - OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) throws IOException; + /** + * This is equivalent to calling {@link + * com.sun.net.httpserver.HttpExchange#sendResponseHeaders(int, long)} followed by {@link + * com.sun.net.httpserver.HttpExchange#getResponseBody()}. + */ + OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) throws IOException; } diff --git a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusScrapeHandler.java b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusScrapeHandler.java index 5155457df..dee7098fb 100644 --- a/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusScrapeHandler.java +++ b/prometheus-metrics-exporter-common/src/main/java/io/prometheus/metrics/exporter/common/PrometheusScrapeHandler.java @@ -7,7 +7,6 @@ import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; @@ -18,157 +17,168 @@ import java.util.function.Predicate; import java.util.zip.GZIPOutputStream; -/** - * Prometheus scrape endpoint. - */ +/** Prometheus scrape endpoint. */ public class PrometheusScrapeHandler { - private final PrometheusRegistry registry; - private final ExpositionFormats expositionFormats; - private final Predicate nameFilter; - private AtomicInteger lastResponseSize = new AtomicInteger(2 << 9); // 0.5 MB - - public PrometheusScrapeHandler() { - this(PrometheusProperties.get(), PrometheusRegistry.defaultRegistry); - } - - public PrometheusScrapeHandler(PrometheusRegistry registry) { - this(PrometheusProperties.get(), registry); - } - - public PrometheusScrapeHandler(PrometheusProperties config) { - this(config, PrometheusRegistry.defaultRegistry); - } - - public PrometheusScrapeHandler(PrometheusProperties config, PrometheusRegistry registry) { - this.expositionFormats = ExpositionFormats.init(config.getExporterProperties()); - this.registry = registry; - this.nameFilter = makeNameFilter(config.getExporterFilterProperties()); - } - - public void handleRequest(PrometheusHttpExchange exchange) throws IOException { - try { - PrometheusHttpRequest request = exchange.getRequest(); - PrometheusHttpResponse response = exchange.getResponse(); - MetricSnapshots snapshots = scrape(request); - if (writeDebugResponse(snapshots, exchange)) { - return; - } - ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream(lastResponseSize.get() + 1024); - String acceptHeader = request.getHeader("Accept"); - ExpositionFormatWriter writer = expositionFormats.findWriter(acceptHeader); - writer.write(responseBuffer, snapshots); - lastResponseSize.set(responseBuffer.size()); - response.setHeader("Content-Type", writer.getContentType()); - - if (shouldUseCompression(request)) { - response.setHeader("Content-Encoding", "gzip"); - try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(response.sendHeadersAndGetBody(200, 0))) { - responseBuffer.writeTo(gzipOutputStream); - } - } else { - int contentLength = responseBuffer.size(); - if (contentLength > 0) { - response.setHeader("Content-Length", String.valueOf(contentLength)); - } - if (request.getMethod().equals("HEAD")) { - // The HTTPServer implementation will throw an Exception if we close the output stream - // without sending a 
response body, so let's not close the output stream in case of a HEAD response. - response.sendHeadersAndGetBody(200, -1); - } else { - try (OutputStream outputStream = response.sendHeadersAndGetBody(200, contentLength)) { - responseBuffer.writeTo(outputStream); - } - } - } - } catch (IOException e) { - exchange.handleException(e); - } catch (RuntimeException e) { - exchange.handleException(e); - } finally { - exchange.close(); + private final PrometheusRegistry registry; + private final ExpositionFormats expositionFormats; + private final Predicate nameFilter; + private AtomicInteger lastResponseSize = new AtomicInteger(2 << 9); // 0.5 MB + + public PrometheusScrapeHandler() { + this(PrometheusProperties.get(), PrometheusRegistry.defaultRegistry); + } + + public PrometheusScrapeHandler(PrometheusRegistry registry) { + this(PrometheusProperties.get(), registry); + } + + public PrometheusScrapeHandler(PrometheusProperties config) { + this(config, PrometheusRegistry.defaultRegistry); + } + + public PrometheusScrapeHandler(PrometheusProperties config, PrometheusRegistry registry) { + this.expositionFormats = ExpositionFormats.init(config.getExporterProperties()); + this.registry = registry; + this.nameFilter = makeNameFilter(config.getExporterFilterProperties()); + } + + public void handleRequest(PrometheusHttpExchange exchange) throws IOException { + try { + PrometheusHttpRequest request = exchange.getRequest(); + PrometheusHttpResponse response = exchange.getResponse(); + MetricSnapshots snapshots = scrape(request); + if (writeDebugResponse(snapshots, exchange)) { + return; + } + ByteArrayOutputStream responseBuffer = + new ByteArrayOutputStream(lastResponseSize.get() + 1024); + String acceptHeader = request.getHeader("Accept"); + ExpositionFormatWriter writer = expositionFormats.findWriter(acceptHeader); + writer.write(responseBuffer, snapshots); + lastResponseSize.set(responseBuffer.size()); + response.setHeader("Content-Type", writer.getContentType()); + + if (shouldUseCompression(request)) { + response.setHeader("Content-Encoding", "gzip"); + try (GZIPOutputStream gzipOutputStream = + new GZIPOutputStream(response.sendHeadersAndGetBody(200, 0))) { + responseBuffer.writeTo(gzipOutputStream); } - } - - private Predicate makeNameFilter(ExporterFilterProperties props) { - if (props.getAllowedMetricNames() == null && props.getExcludedMetricNames() == null && props.getAllowedMetricNamePrefixes() == null && props.getExcludedMetricNamePrefixes() == null) { - return null; + } else { + int contentLength = responseBuffer.size(); + if (contentLength > 0) { + response.setHeader("Content-Length", String.valueOf(contentLength)); + } + if (request.getMethod().equals("HEAD")) { + // The HTTPServer implementation will throw an Exception if we close the output stream + // without sending a response body, so let's not close the output stream in case of a HEAD + // response. 
+ response.sendHeadersAndGetBody(200, -1); } else { - return MetricNameFilter.builder() - .nameMustBeEqualTo(props.getAllowedMetricNames()) - .nameMustNotBeEqualTo(props.getExcludedMetricNames()) - .nameMustStartWith(props.getAllowedMetricNamePrefixes()) - .nameMustNotStartWith(props.getExcludedMetricNamePrefixes()) - .build(); + try (OutputStream outputStream = response.sendHeadersAndGetBody(200, contentLength)) { + responseBuffer.writeTo(outputStream); + } } + } + } catch (IOException e) { + exchange.handleException(e); + } catch (RuntimeException e) { + exchange.handleException(e); + } finally { + exchange.close(); + } + } + + private Predicate makeNameFilter(ExporterFilterProperties props) { + if (props.getAllowedMetricNames() == null + && props.getExcludedMetricNames() == null + && props.getAllowedMetricNamePrefixes() == null + && props.getExcludedMetricNamePrefixes() == null) { + return null; + } else { + return MetricNameFilter.builder() + .nameMustBeEqualTo(props.getAllowedMetricNames()) + .nameMustNotBeEqualTo(props.getExcludedMetricNames()) + .nameMustStartWith(props.getAllowedMetricNamePrefixes()) + .nameMustNotStartWith(props.getExcludedMetricNamePrefixes()) + .build(); } + } - private MetricSnapshots scrape(PrometheusHttpRequest request) { + private MetricSnapshots scrape(PrometheusHttpRequest request) { - Predicate filter = makeNameFilter(request.getParameterValues("name[]")); - if (filter != null) { - return registry.scrape(filter, request); - } else { - return registry.scrape(request); - } + Predicate filter = makeNameFilter(request.getParameterValues("name[]")); + if (filter != null) { + return registry.scrape(filter, request); + } else { + return registry.scrape(request); } + } - private Predicate makeNameFilter(String[] includedNames) { - Predicate result = null; - if (includedNames != null && includedNames.length > 0) { - result = MetricNameFilter.builder().nameMustBeEqualTo(includedNames).build(); - } - if (result != null && nameFilter != null) { - result = result.and(nameFilter); - } else if (nameFilter != null) { - result = nameFilter; - } - return result; + private Predicate makeNameFilter(String[] includedNames) { + Predicate result = null; + if (includedNames != null && includedNames.length > 0) { + result = MetricNameFilter.builder().nameMustBeEqualTo(includedNames).build(); } - - private boolean writeDebugResponse(MetricSnapshots snapshots, PrometheusHttpExchange exchange) throws IOException { - String debugParam = exchange.getRequest().getParameter("debug"); - PrometheusHttpResponse response = exchange.getResponse(); - if (debugParam == null) { - return false; - } else { - response.setHeader("Content-Type", "text/plain; charset=utf-8"); - boolean supportedFormat = Arrays.asList("openmetrics", "text", "prometheus-protobuf").contains(debugParam); - int responseStatus = supportedFormat ? 200 : 500; - OutputStream body = response.sendHeadersAndGetBody(responseStatus, 0); - switch (debugParam) { - case "openmetrics": - expositionFormats.getOpenMetricsTextFormatWriter().write(body, snapshots); - break; - case "text": - expositionFormats.getPrometheusTextFormatWriter().write(body, snapshots); - break; - case "prometheus-protobuf": - String debugString = expositionFormats.getPrometheusProtobufWriter().toDebugString(snapshots); - body.write(debugString.getBytes(StandardCharsets.UTF_8)); - break; - default: - body.write(("debug=" + debugParam + ": Unsupported query parameter. 
Valid values are 'openmetrics', 'text', and 'prometheus-protobuf'.").getBytes(StandardCharsets.UTF_8)); - break; - } - return true; - } + if (result != null && nameFilter != null) { + result = result.and(nameFilter); + } else if (nameFilter != null) { + result = nameFilter; } + return result; + } + + private boolean writeDebugResponse(MetricSnapshots snapshots, PrometheusHttpExchange exchange) + throws IOException { + String debugParam = exchange.getRequest().getParameter("debug"); + PrometheusHttpResponse response = exchange.getResponse(); + if (debugParam == null) { + return false; + } else { + response.setHeader("Content-Type", "text/plain; charset=utf-8"); + boolean supportedFormat = + Arrays.asList("openmetrics", "text", "prometheus-protobuf").contains(debugParam); + int responseStatus = supportedFormat ? 200 : 500; + OutputStream body = response.sendHeadersAndGetBody(responseStatus, 0); + switch (debugParam) { + case "openmetrics": + expositionFormats.getOpenMetricsTextFormatWriter().write(body, snapshots); + break; + case "text": + expositionFormats.getPrometheusTextFormatWriter().write(body, snapshots); + break; + case "prometheus-protobuf": + String debugString = + expositionFormats.getPrometheusProtobufWriter().toDebugString(snapshots); + body.write(debugString.getBytes(StandardCharsets.UTF_8)); + break; + default: + body.write( + ("debug=" + + debugParam + + ": Unsupported query parameter. Valid values are 'openmetrics', 'text', and 'prometheus-protobuf'.") + .getBytes(StandardCharsets.UTF_8)); + break; + } + return true; + } + } - private boolean shouldUseCompression(PrometheusHttpRequest request) { - Enumeration encodingHeaders = request.getHeaders("Accept-Encoding"); - if (encodingHeaders == null) { - return false; - } - while (encodingHeaders.hasMoreElements()) { - String encodingHeader = encodingHeaders.nextElement(); - String[] encodings = encodingHeader.split(","); - for (String encoding : encodings) { - if (encoding.trim().equalsIgnoreCase("gzip")) { - return true; - } - } + private boolean shouldUseCompression(PrometheusHttpRequest request) { + Enumeration encodingHeaders = request.getHeaders("Accept-Encoding"); + if (encodingHeaders == null) { + return false; + } + while (encodingHeaders.hasMoreElements()) { + String encodingHeader = encodingHeaders.nextElement(); + String[] encodings = encodingHeader.split(","); + for (String encoding : encodings) { + if (encoding.trim().equalsIgnoreCase("gzip")) { + return true; } - return false; + } } + return false; + } } diff --git a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/DefaultHandler.java b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/DefaultHandler.java index 0bf3b5cc2..eeb2f70f6 100644 --- a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/DefaultHandler.java +++ b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/DefaultHandler.java @@ -2,69 +2,67 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; - import java.io.IOException; import java.nio.charset.StandardCharsets; -/** - * Handler for the / endpoint - */ +/** Handler for the / endpoint */ public class DefaultHandler implements HttpHandler { - private final byte[] responseBytes; - private final String contentType; + private final byte[] responseBytes; + private final String contentType; - public DefaultHandler() { - String responseString = 
"" + - "\n" + - "Prometheus Java Client\n" + - "\n" + - "

-                "Prometheus Java Client\n" +
-                "Metrics Path\n" +
-                "The metrics path is /metrics.\n" +
-                "Name Filter\n" +
-                "If you want to scrape only specific metrics, use the name[] parameter like this:\n" +
-                "\n" +
-                "You can also use multiple name[] parameters to query multiple metrics:\n" +
-                "\n" +
-                "The name[] parameter can be used by the Prometheus server for scraping. Add the following snippet to your scrape job configuration in prometheus.yaml:\n" +
-                "\n" +
-                "params:\n" +
-                "    name[]:\n" +
-                "        - my_metric_a\n" +
-                "        - my_metric_b\n" +
-                "\n" +
-                "Debug Parameter\n" +
-                "The Prometheus Java metrics library supports multiple exposition formats.\n" +
-                "The Prometheus server sends the Accept header to indicate which format it accepts.\n" +
-                "By default, the Prometheus server accepts OpenMetrics text format, unless the Prometheus server is started with feature flag --enable-feature=native-histograms,\n" +
-                "in which case the default is Prometheus protobuf.\n" +
-                "The Prometheus Java metrics library supports a debug query parameter for viewing the different formats in a Web browser:\n" +
-                "\n" +
-                "Note that the debug parameter is only for viewing different formats in a Web browser, it should not be used by the Prometheus server for scraping. The Prometheus server uses the Accept header for indicating which format it accepts.\n" +
-                "\n" +
-                "\n";
-        this.responseBytes = responseString.getBytes(StandardCharsets.UTF_8);
-        this.contentType = "text/html; charset=utf-8";
-    }
+  public DefaultHandler() {
+    String responseString =
+        ""
+            + "\n"
+            + "Prometheus Java Client\n"
+            + "\n"
+            + "Prometheus Java Client\n"
+            + "Metrics Path\n"
+            + "The metrics path is /metrics.\n"
+            + "Name Filter\n"
+            + "If you want to scrape only specific metrics, use the name[] parameter like this:\n"
+            + "\n"
+            + "You can also use multiple name[] parameters to query multiple metrics:\n"
+            + "\n"
+            + "The name[] parameter can be used by the Prometheus server for scraping. Add the following snippet to your scrape job configuration in prometheus.yaml:\n"
+            + "\n"
+            + "params:\n"
+            + "    name[]:\n"
+            + "        - my_metric_a\n"
+            + "        - my_metric_b\n"
+            + "\n"
+            + "Debug Parameter
\n" + + "The Prometheus Java metrics library supports multiple exposition formats.\n" + + "The Prometheus server sends the Accept header to indicate which format it accepts.\n" + + "By default, the Prometheus server accepts OpenMetrics text format, unless the Prometheus server is started with feature flag --enable-feature=native-histograms,\n" + + "in which case the default is Prometheus protobuf.\n" + + "The Prometheus Java metrics library supports a debug query parameter for viewing the different formats in a Web browser:\n" + + "\n" + + "Note that the debug parameter is only for viewing different formats in a Web browser, it should not be used by the Prometheus server for scraping. The Prometheus server uses the Accept header for indicating which format it accepts.\n" + + "\n" + + "\n"; + this.responseBytes = responseString.getBytes(StandardCharsets.UTF_8); + this.contentType = "text/html; charset=utf-8"; + } - @Override - public void handle(HttpExchange exchange) throws IOException { - try { - exchange.getResponseHeaders().set("Content-Type", contentType); - exchange.getResponseHeaders().set("Content-Length", Integer.toString(responseBytes.length)); - exchange.sendResponseHeaders(200, responseBytes.length); - exchange.getResponseBody().write(responseBytes); - } finally { - exchange.close(); - } + @Override + public void handle(HttpExchange exchange) throws IOException { + try { + exchange.getResponseHeaders().set("Content-Type", contentType); + exchange.getResponseHeaders().set("Content-Length", Integer.toString(responseBytes.length)); + exchange.sendResponseHeaders(200, responseBytes.length); + exchange.getResponseBody().write(responseBytes); + } finally { + exchange.close(); } + } } diff --git a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HTTPServer.java b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HTTPServer.java index de572ba9f..ad13d70af 100644 --- a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HTTPServer.java +++ b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HTTPServer.java @@ -8,7 +8,6 @@ import com.sun.net.httpserver.HttpsServer; import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.model.registry.PrometheusRegistry; - import java.io.Closeable; import java.io.IOException; import java.net.InetAddress; @@ -22,251 +21,245 @@ /** * Expose Prometheus metrics using a plain Java HttpServer. - *

- * Example Usage:
- * <pre>
- * {@code
+ *
+ * <p>Example Usage:
+ *
+ * <pre>{@code
  * HTTPServer server = HTTPServer.builder()
  *     .port(9090)
  *     .buildAndStart();
  * }
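The Javadoc example above only covers starting the server. As a reading aid (this sketch is not part of the diff), here is a slightly fuller lifecycle assembled from APIs that appear in this patch; the metric name is invented, and the builder's register() call attaching the counter to PrometheusRegistry.defaultRegistry is an assumption rather than something shown here.

import io.prometheus.metrics.core.metrics.Counter;
import io.prometheus.metrics.exporter.httpserver.HTTPServer;

public class ExampleExporter {
  public static void main(String[] args) throws Exception {
    // Hypothetical counter; Counter.builder()/name()/help() appear in this patch,
    // register() is assumed to attach the metric to the default registry.
    Counter requests =
        Counter.builder().name("requests_total").help("Total requests").register();
    requests.inc();

    // HTTPServer.builder(), port(), buildAndStart(), getPort() and stop()
    // are all part of the HTTPServer class reformatted in this diff.
    HTTPServer server = HTTPServer.builder().port(9090).buildAndStart();
    System.out.println("Metrics at http://localhost:" + server.getPort() + "/metrics");

    Thread.sleep(60_000); // keep the process alive long enough to scrape once
    server.stop(); // same as close()
  }
}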
- * */ + */ public class HTTPServer implements Closeable { - static { - if (!System.getProperties().containsKey("sun.net.httpserver.maxReqTime")) { - System.setProperty("sun.net.httpserver.maxReqTime", "60"); - } + static { + if (!System.getProperties().containsKey("sun.net.httpserver.maxReqTime")) { + System.setProperty("sun.net.httpserver.maxReqTime", "60"); + } - if (!System.getProperties().containsKey("sun.net.httpserver.maxRspTime")) { - System.setProperty("sun.net.httpserver.maxRspTime", "600"); - } + if (!System.getProperties().containsKey("sun.net.httpserver.maxRspTime")) { + System.setProperty("sun.net.httpserver.maxRspTime", "600"); } + } - protected final HttpServer server; - protected final ExecutorService executorService; + protected final HttpServer server; + protected final ExecutorService executorService; - private HTTPServer(PrometheusProperties config, ExecutorService executorService, HttpServer httpServer, PrometheusRegistry registry, Authenticator authenticator, HttpHandler defaultHandler) { - if (httpServer.getAddress() == null) { - throw new IllegalArgumentException("HttpServer hasn't been bound to an address"); - } - this.server = httpServer; - this.executorService = executorService; - registerHandler("/", defaultHandler == null ? new DefaultHandler() : defaultHandler, authenticator); - registerHandler("/metrics", new MetricsHandler(config, registry), authenticator); - registerHandler("/-/healthy", new HealthyHandler(), authenticator); - try { - // HttpServer.start() starts the HttpServer in a new background thread. - // If we call HttpServer.start() from a thread of the executorService, - // the background thread will inherit the "daemon" property, - // i.e. the server will run as a Daemon thread. - // See https://github.com/prometheus/client_java/pull/955 - this.executorService.submit(this.server::start).get(); - // calling .get() on the Future here to avoid silently discarding errors - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } + private HTTPServer( + PrometheusProperties config, + ExecutorService executorService, + HttpServer httpServer, + PrometheusRegistry registry, + Authenticator authenticator, + HttpHandler defaultHandler) { + if (httpServer.getAddress() == null) { + throw new IllegalArgumentException("HttpServer hasn't been bound to an address"); } + this.server = httpServer; + this.executorService = executorService; + registerHandler( + "/", defaultHandler == null ? new DefaultHandler() : defaultHandler, authenticator); + registerHandler("/metrics", new MetricsHandler(config, registry), authenticator); + registerHandler("/-/healthy", new HealthyHandler(), authenticator); + try { + // HttpServer.start() starts the HttpServer in a new background thread. + // If we call HttpServer.start() from a thread of the executorService, + // the background thread will inherit the "daemon" property, + // i.e. the server will run as a Daemon thread. 
+ // See https://github.com/prometheus/client_java/pull/955 + this.executorService.submit(this.server::start).get(); + // calling .get() on the Future here to avoid silently discarding errors + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + } - private void registerHandler(String path, HttpHandler handler, Authenticator authenticator) { - HttpContext context = server.createContext(path, handler); - if (authenticator != null) { - context.setAuthenticator(authenticator); - } + private void registerHandler(String path, HttpHandler handler, Authenticator authenticator) { + HttpContext context = server.createContext(path, handler); + if (authenticator != null) { + context.setAuthenticator(authenticator); + } + } + + /** Stop the HTTP server. Same as {@link #close()}. */ + public void stop() { + close(); + } + + /** Stop the HTTPServer. Same as {@link #stop()}. */ + @Override + public void close() { + server.stop(0); + executorService.shutdown(); // Free any (parked/idle) threads in pool + } + + /** + * Gets the port number. This is useful if you did not specify a port and the server picked a free + * port automatically. + */ + public int getPort() { + return server.getAddress().getPort(); + } + + public static Builder builder() { + return new Builder(PrometheusProperties.get()); + } + + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } + + public static class Builder { + + private final PrometheusProperties config; + private Integer port = null; + private String hostname = null; + private InetAddress inetAddress = null; + private ExecutorService executorService = null; + private PrometheusRegistry registry = null; + private Authenticator authenticator = null; + private HttpsConfigurator httpsConfigurator = null; + private HttpHandler defaultHandler = null; + + private Builder(PrometheusProperties config) { + this.config = config; } /** - * Stop the HTTP server. Same as {@link #close()}. + * Port to bind to. Default is 0, indicating that a random port will be selected. You can learn + * the randomly selected port by calling {@link HTTPServer#getPort()}. */ - public void stop() { - close(); + public Builder port(int port) { + this.port = port; + return this; } /** - * Stop the HTTPServer. Same as {@link #stop()}. + * Use this hostname to resolve the IP address to bind to. Must not be called together with + * {@link #inetAddress(InetAddress)}. Default is empty, indicating that the HTTPServer binds to + * the wildcard address. */ - @Override - public void close() { - server.stop(0); - executorService.shutdown(); // Free any (parked/idle) threads in pool + public Builder hostname(String hostname) { + this.hostname = hostname; + return this; } /** - * Gets the port number. - * This is useful if you did not specify a port and the server picked a free port automatically. + * Bind to this IP address. Must not be called together with {@link #hostname(String)}. Default + * is empty, indicating that the HTTPServer binds to the wildcard address. */ - public int getPort() { - return server.getAddress().getPort(); + public Builder inetAddress(InetAddress address) { + this.inetAddress = address; + return this; } - public static Builder builder() { - return new Builder(PrometheusProperties.get()); + /** Optional: ExecutorService used by the {@code httpServer}. 
*/ + public Builder executorService(ExecutorService executorService) { + this.executorService = executorService; + return this; } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + /** Optional: Default is {@link PrometheusRegistry#defaultRegistry}. */ + public Builder registry(PrometheusRegistry registry) { + this.registry = registry; + return this; } - public static class Builder { - - private final PrometheusProperties config; - private Integer port = null; - private String hostname = null; - private InetAddress inetAddress = null; - private ExecutorService executorService = null; - private PrometheusRegistry registry = null; - private Authenticator authenticator = null; - private HttpsConfigurator httpsConfigurator = null; - private HttpHandler defaultHandler = null; - - private Builder(PrometheusProperties config) { - this.config = config; - } - - /** - * Port to bind to. Default is 0, indicating that a random port will be selected. - * You can learn the randomly selected port by calling {@link HTTPServer#getPort()}. - */ - public Builder port(int port) { - this.port = port; - return this; - } - - /** - * Use this hostname to resolve the IP address to bind to. - * Must not be called together with {@link #inetAddress(InetAddress)}. - * Default is empty, indicating that the HTTPServer binds to the wildcard address. - */ - public Builder hostname(String hostname) { - this.hostname = hostname; - return this; - } - - /** - * Bind to this IP address. - * Must not be called together with {@link #hostname(String)}. - * Default is empty, indicating that the HTTPServer binds to the wildcard address. - */ - public Builder inetAddress(InetAddress address) { - this.inetAddress = address; - return this; - } - - /** - * Optional: ExecutorService used by the {@code httpServer}. - */ - public Builder executorService(ExecutorService executorService) { - this.executorService = executorService; - return this; - } - - /** - * Optional: Default is {@link PrometheusRegistry#defaultRegistry}. - */ - public Builder registry(PrometheusRegistry registry) { - this.registry = registry; - return this; - } - - /** - * Optional: {@link Authenticator} for authentication. - */ - public Builder authenticator(Authenticator authenticator) { - this.authenticator = authenticator; - return this; - } + /** Optional: {@link Authenticator} for authentication. */ + public Builder authenticator(Authenticator authenticator) { + this.authenticator = authenticator; + return this; + } - /** - * Optional: {@link HttpsConfigurator} for TLS/SSL - */ - public Builder httpsConfigurator(HttpsConfigurator configurator) { - this.httpsConfigurator = configurator; - return this; - } + /** Optional: {@link HttpsConfigurator} for TLS/SSL */ + public Builder httpsConfigurator(HttpsConfigurator configurator) { + this.httpsConfigurator = configurator; + return this; + } - /** - * Optional: Override default handler, i.e. the handler that will be registered for the / endpoint. - */ - public Builder defaultHandler(HttpHandler defaultHandler) { - this.defaultHandler = defaultHandler; - return this; - } + /** + * Optional: Override default handler, i.e. the handler that will be registered for the / + * endpoint. + */ + public Builder defaultHandler(HttpHandler defaultHandler) { + this.defaultHandler = defaultHandler; + return this; + } - /** - * Build and start the HTTPServer. 
- */ - public HTTPServer buildAndStart() throws IOException { - if (registry == null) { - registry = PrometheusRegistry.defaultRegistry; - } - HttpServer httpServer; - if (httpsConfigurator != null) { - httpServer = HttpsServer.create(makeInetSocketAddress(), 3); - ((HttpsServer)httpServer).setHttpsConfigurator(httpsConfigurator); - } else { - httpServer = HttpServer.create(makeInetSocketAddress(), 3); - } - ExecutorService executorService = makeExecutorService(); - httpServer.setExecutor(executorService); - return new HTTPServer(config, executorService, httpServer, registry, authenticator, defaultHandler); - } + /** Build and start the HTTPServer. */ + public HTTPServer buildAndStart() throws IOException { + if (registry == null) { + registry = PrometheusRegistry.defaultRegistry; + } + HttpServer httpServer; + if (httpsConfigurator != null) { + httpServer = HttpsServer.create(makeInetSocketAddress(), 3); + ((HttpsServer) httpServer).setHttpsConfigurator(httpsConfigurator); + } else { + httpServer = HttpServer.create(makeInetSocketAddress(), 3); + } + ExecutorService executorService = makeExecutorService(); + httpServer.setExecutor(executorService); + return new HTTPServer( + config, executorService, httpServer, registry, authenticator, defaultHandler); + } - private InetSocketAddress makeInetSocketAddress() { - if (inetAddress != null) { - assertNull(hostname, "cannot configure 'inetAddress' and 'hostname' at the same time"); - return new InetSocketAddress(inetAddress, findPort()); - } else if (hostname != null) { - return new InetSocketAddress(hostname, findPort()); - } else { - return new InetSocketAddress(findPort()); - } - } + private InetSocketAddress makeInetSocketAddress() { + if (inetAddress != null) { + assertNull(hostname, "cannot configure 'inetAddress' and 'hostname' at the same time"); + return new InetSocketAddress(inetAddress, findPort()); + } else if (hostname != null) { + return new InetSocketAddress(hostname, findPort()); + } else { + return new InetSocketAddress(findPort()); + } + } - private ExecutorService makeExecutorService() { - if (executorService != null) { - return executorService; - } else { - return new ThreadPoolExecutor( - 1, - 10, - 120, - TimeUnit.SECONDS, - new SynchronousQueue<>(true), - NamedDaemonThreadFactory.defaultThreadFactory(true), - new BlockingRejectedExecutionHandler()); - } - } + private ExecutorService makeExecutorService() { + if (executorService != null) { + return executorService; + } else { + return new ThreadPoolExecutor( + 1, + 10, + 120, + TimeUnit.SECONDS, + new SynchronousQueue<>(true), + NamedDaemonThreadFactory.defaultThreadFactory(true), + new BlockingRejectedExecutionHandler()); + } + } - private int findPort() { - if (config != null && config.getExporterHttpServerProperties() != null) { - Integer port = config.getExporterHttpServerProperties().getPort(); - if (port != null) { - return port; - } - } - if (port != null) { - return port; - } - return 0; // random port will be selected + private int findPort() { + if (config != null && config.getExporterHttpServerProperties() != null) { + Integer port = config.getExporterHttpServerProperties().getPort(); + if (port != null) { + return port; } + } + if (port != null) { + return port; + } + return 0; // random port will be selected + } - private void assertNull(Object o, String msg) { - if (o != null) { - throw new IllegalStateException(msg); - } - } + private void assertNull(Object o, String msg) { + if (o != null) { + throw new IllegalStateException(msg); + } } + } - private static 
class BlockingRejectedExecutionHandler implements RejectedExecutionHandler { + private static class BlockingRejectedExecutionHandler implements RejectedExecutionHandler { - @Override - public void rejectedExecution(Runnable runnable, ThreadPoolExecutor threadPoolExecutor) { - if (!threadPoolExecutor.isShutdown()) { - try { - threadPoolExecutor.getQueue().put(runnable); - } catch (InterruptedException ignored) { - } - } + @Override + public void rejectedExecution(Runnable runnable, ThreadPoolExecutor threadPoolExecutor) { + if (!threadPoolExecutor.isShutdown()) { + try { + threadPoolExecutor.getQueue().put(runnable); + } catch (InterruptedException ignored) { } + } } + } } diff --git a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HealthyHandler.java b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HealthyHandler.java index 4fbdb9426..806b47553 100644 --- a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HealthyHandler.java +++ b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HealthyHandler.java @@ -2,33 +2,30 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; - import java.io.IOException; import java.nio.charset.StandardCharsets; -/** - * Handler for the /-/healthy endpoint - */ +/** Handler for the /-/healthy endpoint */ public class HealthyHandler implements HttpHandler { - private final byte[] responseBytes; - private final String contentType; + private final byte[] responseBytes; + private final String contentType; - public HealthyHandler() { - String responseString = "Exporter is healthy.\n"; - this.responseBytes = responseString.getBytes(StandardCharsets.UTF_8); - this.contentType = "text/plain; charset=utf-8"; - } + public HealthyHandler() { + String responseString = "Exporter is healthy.\n"; + this.responseBytes = responseString.getBytes(StandardCharsets.UTF_8); + this.contentType = "text/plain; charset=utf-8"; + } - @Override - public void handle(HttpExchange exchange) throws IOException { - try { - exchange.getResponseHeaders().set("Content-Type", contentType); - exchange.getResponseHeaders().set("Content-Length", Integer.toString(responseBytes.length)); - exchange.sendResponseHeaders(200, responseBytes.length); - exchange.getResponseBody().write(responseBytes); - } finally { - exchange.close(); - } + @Override + public void handle(HttpExchange exchange) throws IOException { + try { + exchange.getResponseHeaders().set("Content-Type", contentType); + exchange.getResponseHeaders().set("Content-Length", Integer.toString(responseBytes.length)); + exchange.sendResponseHeaders(200, responseBytes.length); + exchange.getResponseBody().write(responseBytes); + } finally { + exchange.close(); } + } } diff --git a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HttpExchangeAdapter.java b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HttpExchangeAdapter.java index 3636acedf..e7d33d310 100644 --- a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HttpExchangeAdapter.java +++ b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/HttpExchangeAdapter.java @@ -4,7 +4,6 @@ import io.prometheus.metrics.exporter.common.PrometheusHttpExchange; import 
io.prometheus.metrics.exporter.common.PrometheusHttpRequest; import io.prometheus.metrics.exporter.common.PrometheusHttpResponse; - import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; @@ -19,113 +18,129 @@ public class HttpExchangeAdapter implements PrometheusHttpExchange { - private final HttpExchange httpExchange; - private final HttpRequest request = new HttpRequest(); - private final HttpResponse response = new HttpResponse(); - private volatile boolean responseSent = false; + private final HttpExchange httpExchange; + private final HttpRequest request = new HttpRequest(); + private final HttpResponse response = new HttpResponse(); + private volatile boolean responseSent = false; - public HttpExchangeAdapter(HttpExchange httpExchange) { - this.httpExchange = httpExchange; - } + public HttpExchangeAdapter(HttpExchange httpExchange) { + this.httpExchange = httpExchange; + } - public class HttpRequest implements PrometheusHttpRequest { - - @Override - public String getQueryString() { - return httpExchange.getRequestURI().getRawQuery(); - } - - @Override - public Enumeration getHeaders(String name) { - List headers = httpExchange.getRequestHeaders().get(name); - if (headers == null) { - return Collections.emptyEnumeration(); - } else { - return Collections.enumeration(headers); - } - } - - @Override - public String getMethod() { - return httpExchange.getRequestMethod(); - } - - @Override - public String getRequestPath() { - URI requestURI = httpExchange.getRequestURI(); - String uri = requestURI.toString(); - int qx = uri.indexOf('?'); - if (qx != -1) { - uri = uri.substring(0, qx); - } - return uri; - } - } - - public class HttpResponse implements PrometheusHttpResponse { - - @Override - public void setHeader(String name, String value) { - httpExchange.getResponseHeaders().set(name, value); - } - - @Override - public OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) throws IOException { - if (responseSent) { - throw new IOException("Cannot send multiple HTTP responses for a single HTTP exchange."); - } - responseSent = true; - httpExchange.sendResponseHeaders(statusCode, contentLength); - return httpExchange.getResponseBody(); - } - } + public class HttpRequest implements PrometheusHttpRequest { @Override - public HttpRequest getRequest() { - return request; + public String getQueryString() { + return httpExchange.getRequestURI().getRawQuery(); } @Override - public HttpResponse getResponse() { - return response; + public Enumeration getHeaders(String name) { + List headers = httpExchange.getRequestHeaders().get(name); + if (headers == null) { + return Collections.emptyEnumeration(); + } else { + return Collections.enumeration(headers); + } } @Override - public void handleException(IOException e) throws IOException { - sendErrorResponseWithStackTrace(e); + public String getMethod() { + return httpExchange.getRequestMethod(); } @Override - public void handleException(RuntimeException e) { - sendErrorResponseWithStackTrace(e); + public String getRequestPath() { + URI requestURI = httpExchange.getRequestURI(); + String uri = requestURI.toString(); + int qx = uri.indexOf('?'); + if (qx != -1) { + uri = uri.substring(0, qx); + } + return uri; } + } - private void sendErrorResponseWithStackTrace(Exception requestHandlerException) { - if (!responseSent) { - responseSent = true; - try { - StringWriter stringWriter = new StringWriter(); - PrintWriter printWriter = new PrintWriter(stringWriter); - printWriter.write("An Exception occurred while 
scraping metrics: "); - requestHandlerException.printStackTrace(new PrintWriter(printWriter)); - byte[] stackTrace = stringWriter.toString().getBytes(StandardCharsets.UTF_8); - httpExchange.getResponseHeaders().set("Content-Type", "text/plain; charset=utf-8"); - httpExchange.sendResponseHeaders(500, stackTrace.length); - httpExchange.getResponseBody().write(stackTrace); - } catch (Exception errorWriterException) { - // We want to avoid logging so that we don't mess with application logs when the HTTPServer is used in a Java agent. - // However, if we can't even send an error response to the client there's nothing we can do but logging a message. - Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "The Prometheus metrics HTTPServer caught an Exception during scrape and failed to send an error response to the client.", errorWriterException); - Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "Original Exception that caused the Prometheus scrape error:", requestHandlerException); - } - } else { - // If the exception occurs after response headers have been sent, it's too late to respond with HTTP 500. - Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "The Prometheus metrics HTTPServer caught an Exception while trying to send the metrics response.", requestHandlerException); - } + public class HttpResponse implements PrometheusHttpResponse { + + @Override + public void setHeader(String name, String value) { + httpExchange.getResponseHeaders().set(name, value); } @Override - public void close() { - httpExchange.close(); + public OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) + throws IOException { + if (responseSent) { + throw new IOException("Cannot send multiple HTTP responses for a single HTTP exchange."); + } + responseSent = true; + httpExchange.sendResponseHeaders(statusCode, contentLength); + return httpExchange.getResponseBody(); } + } + + @Override + public HttpRequest getRequest() { + return request; + } + + @Override + public HttpResponse getResponse() { + return response; + } + + @Override + public void handleException(IOException e) throws IOException { + sendErrorResponseWithStackTrace(e); + } + + @Override + public void handleException(RuntimeException e) { + sendErrorResponseWithStackTrace(e); + } + + private void sendErrorResponseWithStackTrace(Exception requestHandlerException) { + if (!responseSent) { + responseSent = true; + try { + StringWriter stringWriter = new StringWriter(); + PrintWriter printWriter = new PrintWriter(stringWriter); + printWriter.write("An Exception occurred while scraping metrics: "); + requestHandlerException.printStackTrace(new PrintWriter(printWriter)); + byte[] stackTrace = stringWriter.toString().getBytes(StandardCharsets.UTF_8); + httpExchange.getResponseHeaders().set("Content-Type", "text/plain; charset=utf-8"); + httpExchange.sendResponseHeaders(500, stackTrace.length); + httpExchange.getResponseBody().write(stackTrace); + } catch (Exception errorWriterException) { + // We want to avoid logging so that we don't mess with application logs when the HTTPServer + // is used in a Java agent. + // However, if we can't even send an error response to the client there's nothing we can do + // but logging a message. 
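        // Editor's aside (illustrative sketch, not part of this patch): applications that want
        // these rare SEVERE records routed somewhere specific can attach their own
        // java.util.logging handler to the adapter's logger. The logger name below assumes it
        // matches this class's fully qualified name, as getClass().getName() above implies; the
        // handler body and class name are invented for illustration.
        //
        //   import java.util.logging.Handler;
        //   import java.util.logging.LogRecord;
        //   import java.util.logging.Logger;
        //
        //   class ScrapeErrorLogRouting {
        //     public static void main(String[] args) {
        //       Logger adapterLogger =
        //           Logger.getLogger("io.prometheus.metrics.exporter.httpserver.HttpExchangeAdapter");
        //       adapterLogger.setUseParentHandlers(false); // keep the records out of the root handlers
        //       adapterLogger.addHandler(
        //           new Handler() {
        //             @Override
        //             public void publish(LogRecord record) {
        //               // Forward scrape-failure reports to the application's own sink.
        //               System.err.println("[prometheus-exporter] " + record.getMessage());
        //             }
        //
        //             @Override
        //             public void flush() {}
        //
        //             @Override
        //             public void close() {}
        //           });
        //     }
        //   }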
+ Logger.getLogger(this.getClass().getName()) + .log( + Level.SEVERE, + "The Prometheus metrics HTTPServer caught an Exception during scrape and failed to send an error response to the client.", + errorWriterException); + Logger.getLogger(this.getClass().getName()) + .log( + Level.SEVERE, + "Original Exception that caused the Prometheus scrape error:", + requestHandlerException); + } + } else { + // If the exception occurs after response headers have been sent, it's too late to respond + // with HTTP 500. + Logger.getLogger(this.getClass().getName()) + .log( + Level.SEVERE, + "The Prometheus metrics HTTPServer caught an Exception while trying to send the metrics response.", + requestHandlerException); + } + } + + @Override + public void close() { + httpExchange.close(); + } } diff --git a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/MetricsHandler.java b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/MetricsHandler.java index 3506ddd4b..4ac4b80d7 100644 --- a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/MetricsHandler.java +++ b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/MetricsHandler.java @@ -5,41 +5,31 @@ import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.exporter.common.PrometheusScrapeHandler; import io.prometheus.metrics.model.registry.PrometheusRegistry; - -import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.PrintStream; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.nio.charset.StandardCharsets; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * Handler for the /metrics endpoint - */ + +/** Handler for the /metrics endpoint */ public class MetricsHandler implements HttpHandler { - private final PrometheusScrapeHandler prometheusScrapeHandler; + private final PrometheusScrapeHandler prometheusScrapeHandler; - public MetricsHandler() { - prometheusScrapeHandler = new PrometheusScrapeHandler(); - } + public MetricsHandler() { + prometheusScrapeHandler = new PrometheusScrapeHandler(); + } - public MetricsHandler(PrometheusRegistry registry) { - prometheusScrapeHandler = new PrometheusScrapeHandler(registry); - } + public MetricsHandler(PrometheusRegistry registry) { + prometheusScrapeHandler = new PrometheusScrapeHandler(registry); + } - public MetricsHandler(PrometheusProperties config) { - prometheusScrapeHandler = new PrometheusScrapeHandler(config); - } + public MetricsHandler(PrometheusProperties config) { + prometheusScrapeHandler = new PrometheusScrapeHandler(config); + } - public MetricsHandler(PrometheusProperties config, PrometheusRegistry registry) { - prometheusScrapeHandler = new PrometheusScrapeHandler(config, registry); - } + public MetricsHandler(PrometheusProperties config, PrometheusRegistry registry) { + prometheusScrapeHandler = new PrometheusScrapeHandler(config, registry); + } - @Override - public void handle(HttpExchange t) throws IOException { - prometheusScrapeHandler.handleRequest(new HttpExchangeAdapter(t)); - } + @Override + public void handle(HttpExchange t) throws IOException { + prometheusScrapeHandler.handleRequest(new HttpExchangeAdapter(t)); + } } diff --git a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/NamedDaemonThreadFactory.java 
b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/NamedDaemonThreadFactory.java index 378ac6ddc..b5d2415f7 100644 --- a/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/NamedDaemonThreadFactory.java +++ b/prometheus-metrics-exporter-httpserver/src/main/java/io/prometheus/metrics/exporter/httpserver/NamedDaemonThreadFactory.java @@ -6,26 +6,26 @@ class NamedDaemonThreadFactory implements ThreadFactory { - private static final AtomicInteger POOL_NUMBER = new AtomicInteger(1); - private final int poolNumber = POOL_NUMBER.getAndIncrement(); - private final AtomicInteger threadNumber = new AtomicInteger(1); - private final ThreadFactory delegate; - private final boolean daemon; + private static final AtomicInteger POOL_NUMBER = new AtomicInteger(1); + private final int poolNumber = POOL_NUMBER.getAndIncrement(); + private final AtomicInteger threadNumber = new AtomicInteger(1); + private final ThreadFactory delegate; + private final boolean daemon; - NamedDaemonThreadFactory(ThreadFactory delegate, boolean daemon) { - this.delegate = delegate; - this.daemon = daemon; - } + NamedDaemonThreadFactory(ThreadFactory delegate, boolean daemon) { + this.delegate = delegate; + this.daemon = daemon; + } - @Override - public Thread newThread(Runnable r) { - Thread t = delegate.newThread(r); - t.setName(String.format("prometheus-http-%d-%d", poolNumber, threadNumber.getAndIncrement())); - t.setDaemon(daemon); - return t; - } + @Override + public Thread newThread(Runnable r) { + Thread t = delegate.newThread(r); + t.setName(String.format("prometheus-http-%d-%d", poolNumber, threadNumber.getAndIncrement())); + t.setDaemon(daemon); + return t; + } - static ThreadFactory defaultThreadFactory(boolean daemon) { - return new NamedDaemonThreadFactory(Executors.defaultThreadFactory(), daemon); - } + static ThreadFactory defaultThreadFactory(boolean daemon) { + return new NamedDaemonThreadFactory(Executors.defaultThreadFactory(), daemon); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/OpenTelemetryExporter.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/OpenTelemetryExporter.java index 97719d6c3..fb674e351 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/OpenTelemetryExporter.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/OpenTelemetryExporter.java @@ -12,444 +12,492 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.export.PeriodicMetricReader; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.resources.Resource; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.resources.ResourceBuilder; - import java.time.Duration; import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; public class OpenTelemetryExporter implements AutoCloseable { - private final PeriodicMetricReader reader; - - private OpenTelemetryExporter(Builder builder, PrometheusProperties config, PrometheusRegistry registry) { - InstrumentationScopeInfo instrumentationScopeInfo = PrometheusInstrumentationScope.loadInstrumentationScopeInfo(); - ExporterOpenTelemetryProperties properties = config.getExporterOpenTelemetryProperties(); - Resource resource = initResourceAttributes(builder, properties, instrumentationScopeInfo); - 
MetricExporter exporter; - if (ConfigHelper.getProtocol(builder, properties).equals("grpc")) { - OtlpGrpcMetricExporterBuilder exporterBuilder = OtlpGrpcMetricExporter.builder() - .setTimeout(Duration.ofSeconds(ConfigHelper.getTimeoutSeconds(builder, properties))) - .setEndpoint(ConfigHelper.getEndpoint(builder, properties)); - for (Map.Entry header : ConfigHelper.getHeaders(builder, properties).entrySet()) { - exporterBuilder.addHeader(header.getKey(), header.getValue()); - } - exporter = exporterBuilder.build(); - } else { - OtlpHttpMetricExporterBuilder exporterBuilder = OtlpHttpMetricExporter.builder() - .setTimeout(Duration.ofSeconds(ConfigHelper.getTimeoutSeconds(builder, properties))) - .setEndpoint(ConfigHelper.getEndpoint(builder, properties)); - for (Map.Entry header : ConfigHelper.getHeaders(builder, properties).entrySet()) { - exporterBuilder.addHeader(header.getKey(), header.getValue()); - } - exporter = exporterBuilder.build(); - } - reader = PeriodicMetricReader.builder(exporter) - .setInterval(Duration.ofSeconds(ConfigHelper.getIntervalSeconds(builder, properties))) - .build(); - - PrometheusMetricProducer prometheusMetricProducer = new PrometheusMetricProducer(registry, instrumentationScopeInfo, resource); - reader.register(prometheusMetricProducer); + private final PeriodicMetricReader reader; + + private OpenTelemetryExporter( + Builder builder, PrometheusProperties config, PrometheusRegistry registry) { + InstrumentationScopeInfo instrumentationScopeInfo = + PrometheusInstrumentationScope.loadInstrumentationScopeInfo(); + ExporterOpenTelemetryProperties properties = config.getExporterOpenTelemetryProperties(); + Resource resource = initResourceAttributes(builder, properties, instrumentationScopeInfo); + MetricExporter exporter; + if (ConfigHelper.getProtocol(builder, properties).equals("grpc")) { + OtlpGrpcMetricExporterBuilder exporterBuilder = + OtlpGrpcMetricExporter.builder() + .setTimeout(Duration.ofSeconds(ConfigHelper.getTimeoutSeconds(builder, properties))) + .setEndpoint(ConfigHelper.getEndpoint(builder, properties)); + for (Map.Entry header : + ConfigHelper.getHeaders(builder, properties).entrySet()) { + exporterBuilder.addHeader(header.getKey(), header.getValue()); + } + exporter = exporterBuilder.build(); + } else { + OtlpHttpMetricExporterBuilder exporterBuilder = + OtlpHttpMetricExporter.builder() + .setTimeout(Duration.ofSeconds(ConfigHelper.getTimeoutSeconds(builder, properties))) + .setEndpoint(ConfigHelper.getEndpoint(builder, properties)); + for (Map.Entry header : + ConfigHelper.getHeaders(builder, properties).entrySet()) { + exporterBuilder.addHeader(header.getKey(), header.getValue()); + } + exporter = exporterBuilder.build(); } - - public void close() { - reader.shutdown(); + reader = + PeriodicMetricReader.builder(exporter) + .setInterval(Duration.ofSeconds(ConfigHelper.getIntervalSeconds(builder, properties))) + .build(); + + PrometheusMetricProducer prometheusMetricProducer = + new PrometheusMetricProducer(registry, instrumentationScopeInfo, resource); + reader.register(prometheusMetricProducer); + } + + public void close() { + reader.shutdown(); + } + + private Resource initResourceAttributes( + Builder builder, + ExporterOpenTelemetryProperties properties, + InstrumentationScopeInfo instrumentationScopeInfo) { + String serviceName = ConfigHelper.getServiceName(builder, properties); + String serviceNamespace = ConfigHelper.getServiceNamespace(builder, properties); + String serviceInstanceId = ConfigHelper.getServiceInstanceId(builder, 
properties); + String serviceVersion = ConfigHelper.getServiceVersion(builder, properties); + Map resourceAttributes = + ResourceAttributes.get( + instrumentationScopeInfo.getName(), + serviceName, + serviceNamespace, + serviceInstanceId, + serviceVersion, + ConfigHelper.getResourceAttributes(builder, properties)); + ResourceBuilder resourceBuilder = Resource.builder(); + for (Map.Entry entry : resourceAttributes.entrySet()) { + resourceBuilder.put(entry.getKey(), entry.getValue()); } - - private Resource initResourceAttributes(Builder builder, ExporterOpenTelemetryProperties properties, InstrumentationScopeInfo instrumentationScopeInfo) { - String serviceName = ConfigHelper.getServiceName(builder, properties); - String serviceNamespace = ConfigHelper.getServiceNamespace(builder, properties); - String serviceInstanceId = ConfigHelper.getServiceInstanceId(builder, properties); - String serviceVersion = ConfigHelper.getServiceVersion(builder, properties); - Map resourceAttributes = ResourceAttributes.get(instrumentationScopeInfo.getName(), serviceName, serviceNamespace, serviceInstanceId, serviceVersion, ConfigHelper.getResourceAttributes(builder, properties)); - ResourceBuilder resourceBuilder = Resource.builder(); - for (Map.Entry entry : resourceAttributes.entrySet()) { - resourceBuilder.put(entry.getKey(), entry.getValue()); - } - return resourceBuilder.build(); + return resourceBuilder.build(); + } + + public static Builder builder() { + return new Builder(PrometheusProperties.get()); + } + + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } + + public static class Builder { + + private final PrometheusProperties config; + private PrometheusRegistry registry = null; + private String protocol; + private String endpoint; + private final Map headers = new HashMap<>(); + private Integer intervalSeconds; + private Integer timeoutSeconds; + private String serviceName; + private String serviceNamespace; + private String serviceInstanceId; + private String serviceVersion; + private final Map resourceAttributes = new HashMap<>(); + + private Builder(PrometheusProperties config) { + this.config = config; } - public static Builder builder() { - return new Builder(PrometheusProperties.get()); + public Builder registry(PrometheusRegistry registry) { + this.registry = registry; + return this; } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + /** + * Specifies the OTLP transport protocol to be used when exporting metrics. + * + *

+     * <p>Supported values are {@code "grpc"} and {@code "http/protobuf"}. Default is {@code
+     * "grpc"}.
+     *
See OpenTelemetry's OTEL_EXPORTER_OTLP_PROTOCOL. + */ + public Builder protocol(String protocol) { + if (!protocol.equals("grpc") && !protocol.equals("http/protobuf")) { + throw new IllegalArgumentException( + protocol + ": Unsupported protocol. Expecting grpc or http/protobuf"); + } + this.protocol = protocol; + return this; } - public static class Builder { - - private final PrometheusProperties config; - private PrometheusRegistry registry = null; - private String protocol; - private String endpoint; - private final Map headers = new HashMap<>(); - private Integer intervalSeconds; - private Integer timeoutSeconds; - private String serviceName; - private String serviceNamespace; - private String serviceInstanceId; - private String serviceVersion; - private final Map resourceAttributes = new HashMap<>(); - - private Builder(PrometheusProperties config) { - this.config = config; - } - - public Builder registry(PrometheusRegistry registry) { - this.registry = registry; - return this; - } - - /** - * Specifies the OTLP transport protocol to be used when exporting metrics. - *

-     * Supported values are {@code "grpc"} and {@code "http/protobuf"}. Default is {@code "grpc"}.
-     *
- * See OpenTelemetry's OTEL_EXPORTER_OTLP_PROTOCOL. - */ - public Builder protocol(String protocol) { - if (!protocol.equals("grpc") && !protocol.equals("http/protobuf")) { - throw new IllegalArgumentException(protocol + ": Unsupported protocol. Expecting grpc or http/protobuf"); - } - this.protocol = protocol; - return this; - } - - /** - * The OTLP endpoint to send metric data to. - *

-     * The default depends on the protocol:
-     * <ul>
-     *   <li>{@code "grpc"}: {@code "http://localhost:4317"}</li>
-     *   <li>{@code "http/protobuf"}: {@code "http://localhost:4318/v1/metrics"}</li>
-     * </ul>
-     * If the protocol is {@code "http/protobuf"} and the endpoint does not have the {@code "/v1/metrics"} suffix,
-     * the {@code "/v1/metrics"} suffix will automatically be appended.
-     *

- * See OpenTelemetry's OTEL_EXPORTER_OTLP_METRICS_ENDPOINT. - */ - public Builder endpoint(String endpoint) { - this.endpoint = endpoint; - return this; - } + /** + * The OTLP endpoint to send metric data to. + * + *

+     * <p>The default depends on the protocol:
+     *
+     * <ul>
+     *   <li>{@code "grpc"}: {@code "http://localhost:4317"}
+     *   <li>{@code "http/protobuf"}: {@code "http://localhost:4318/v1/metrics"}
+     * </ul>
+     *
+     * If the protocol is {@code "http/protobuf"} and the endpoint does not have the {@code
+     * "/v1/metrics"} suffix, the {@code "/v1/metrics"} suffix will automatically be appended.
+     *

See OpenTelemetry's OTEL_EXPORTER_OTLP_METRICS_ENDPOINT. + */ + public Builder endpoint(String endpoint) { + this.endpoint = endpoint; + return this; + } - /** - * Add an HTTP header to be applied to outgoing requests. - * Call multiple times to add multiple headers. - *

- * See OpenTelemetry's OTEL_EXPORTER_OTLP_HEADERS. - */ - public Builder header(String name, String value) { - this.headers.put(name, value); - return this; - } + /** + * Add an HTTP header to be applied to outgoing requests. Call multiple times to add multiple + * headers. + * + *

See OpenTelemetry's OTEL_EXPORTER_OTLP_HEADERS. + */ + public Builder header(String name, String value) { + this.headers.put(name, value); + return this; + } - /** - * The interval between the start of two export attempts. Default is 60000. - *

- * Like OpenTelemetry's OTEL_METRIC_EXPORT_INTERVAL, - * but in seconds rather than milliseconds. - */ - public Builder intervalSeconds(int intervalSeconds) { - if (intervalSeconds <= 0) { - throw new IllegalStateException(intervalSeconds + ": expecting a push interval > 0s"); - } - this.intervalSeconds = intervalSeconds; - return this; - } + /** + * The interval between the start of two export attempts. Default is 60000. + * + *

Like OpenTelemetry's OTEL_METRIC_EXPORT_INTERVAL, + * but in seconds rather than milliseconds. + */ + public Builder intervalSeconds(int intervalSeconds) { + if (intervalSeconds <= 0) { + throw new IllegalStateException(intervalSeconds + ": expecting a push interval > 0s"); + } + this.intervalSeconds = intervalSeconds; + return this; + } - /** - * The timeout for outgoing requests. Default is 10. - *

- * Like OpenTelemetry's OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, - * but in seconds rather than milliseconds. - */ - public Builder timeoutSeconds(int timeoutSeconds) { - if (timeoutSeconds <= 0) { - throw new IllegalStateException(timeoutSeconds + ": expecting a push interval > 0s"); - } - this.timeoutSeconds = timeoutSeconds; - return this; - } + /** + * The timeout for outgoing requests. Default is 10. + * + *

Like OpenTelemetry's OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, + * but in seconds rather than milliseconds. + */ + public Builder timeoutSeconds(int timeoutSeconds) { + if (timeoutSeconds <= 0) { + throw new IllegalStateException(timeoutSeconds + ": expecting a push interval > 0s"); + } + this.timeoutSeconds = timeoutSeconds; + return this; + } - /** - * The {@code service.name} resource attribute. - *

- * If not explicitly specified, {@code client_java} will try to initialize it with a reasonable default, like the JAR file name. - *

- * See {@code service.name} in OpenTelemetry's Resource Semantic Conventions. - */ - public Builder serviceName(String serviceName) { - this.serviceName = serviceName; - return this; - } + /** + * The {@code service.name} resource attribute. + * + *

+     * <p>If not explicitly specified, {@code client_java} will try to initialize it with a
+     * reasonable default, like the JAR file name.
+     *

See {@code service.name} in OpenTelemetry's Resource + * Semantic Conventions. + */ + public Builder serviceName(String serviceName) { + this.serviceName = serviceName; + return this; + } - /** - * The {@code service.namespace} resource attribute. - *

- * See {@code service.namespace} in OpenTelemetry's Resource Semantic Conventions. - */ - public Builder serviceNamespace(String serviceNamespace) { - this.serviceNamespace = serviceNamespace; - return this; - } + /** + * The {@code service.namespace} resource attribute. + * + *

See {@code service.namespace} in OpenTelemetry's Resource + * Semantic Conventions. + */ + public Builder serviceNamespace(String serviceNamespace) { + this.serviceNamespace = serviceNamespace; + return this; + } - /** - * The {@code service.instance.id} resource attribute. - *

- * See {@code service.instance.id} in OpenTelemetry's Resource Semantic Conventions. - */ - public Builder serviceInstanceId(String serviceInstanceId) { - this.serviceInstanceId = serviceInstanceId; - return this; - } + /** + * The {@code service.instance.id} resource attribute. + * + *

See {@code service.instance.id} in OpenTelemetry's Resource + * Semantic Conventions. + */ + public Builder serviceInstanceId(String serviceInstanceId) { + this.serviceInstanceId = serviceInstanceId; + return this; + } - /** - * The {@code service.version} resource attribute. - *

- * See {@code service.version} in OpenTelemetry's Resource Semantic Conventions. - */ - public Builder serviceVersion(String serviceVersion) { - this.serviceVersion = serviceVersion; - return this; - } + /** + * The {@code service.version} resource attribute. + * + *

See {@code service.version} in OpenTelemetry's Resource + * Semantic Conventions. + */ + public Builder serviceVersion(String serviceVersion) { + this.serviceVersion = serviceVersion; + return this; + } - /** - * Add a resource attribute. Call multiple times to add multiple resource attributes. - *

- * See OpenTelemetry's OTEL_RESOURCE_ATTRIBUTES. - */ - public Builder resourceAttribute(String name, String value) { - this.resourceAttributes.put(name, value); - return this; - } + /** + * Add a resource attribute. Call multiple times to add multiple resource attributes. + * + *

See OpenTelemetry's OTEL_RESOURCE_ATTRIBUTES. + */ + public Builder resourceAttribute(String name, String value) { + this.resourceAttributes.put(name, value); + return this; + } - public OpenTelemetryExporter buildAndStart() { - if (registry == null) { - registry = PrometheusRegistry.defaultRegistry; - } - return new OpenTelemetryExporter(this, config, registry); + public OpenTelemetryExporter buildAndStart() { + if (registry == null) { + registry = PrometheusRegistry.defaultRegistry; + } + return new OpenTelemetryExporter(this, config, registry); + } + } + + private static class ConfigHelper { + + private static String getProtocol( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + String protocol = config.getProtocol(); + if (protocol != null) { + return protocol; + } + protocol = getString("otel.exporter.otlp.protocol"); + if (protocol != null) { + if (!protocol.equals("grpc") && !protocol.equals("http/protobuf")) { + throw new IllegalStateException( + protocol + + ": Unsupported OpenTelemetry exporter protocol. Expecting grpc or http/protobuf."); } + return protocol; + } + if (builder.protocol != null) { + return builder.protocol; + } + return "grpc"; } - private static class ConfigHelper { - - private static String getProtocol(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - String protocol = config.getProtocol(); - if (protocol != null) { - return protocol; - } - protocol = getString("otel.exporter.otlp.protocol"); - if (protocol != null) { - if (!protocol.equals("grpc") && !protocol.equals("http/protobuf")) { - throw new IllegalStateException(protocol + ": Unsupported OpenTelemetry exporter protocol. Expecting grpc or http/protobuf."); - } - return protocol; - } - if (builder.protocol != null) { - return builder.protocol; - } - return "grpc"; + private static String getEndpoint( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + String endpoint = config.getEndpoint(); + if (endpoint == null) { + endpoint = getString("otel.exporter.otlp.metrics.endpoint"); + } + if (endpoint == null) { + endpoint = getString("otel.exporter.otlp.endpoint"); + } + if (endpoint == null) { + endpoint = builder.endpoint; + } + if (endpoint == null) { + if (getProtocol(builder, config).equals("grpc")) { + endpoint = "http://localhost:4317"; + } else { // http/protobuf + endpoint = "http://localhost:4318/v1/metrics"; } - - private static String getEndpoint(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - String endpoint = config.getEndpoint(); - if (endpoint == null) { - endpoint = getString("otel.exporter.otlp.metrics.endpoint"); - } - if (endpoint == null) { - endpoint = getString("otel.exporter.otlp.endpoint"); - } - if (endpoint == null) { - endpoint = builder.endpoint; - } - if (endpoint == null) { - if (getProtocol(builder, config).equals("grpc")) { - endpoint = "http://localhost:4317"; - } else { // http/protobuf - endpoint = "http://localhost:4318/v1/metrics"; - } - } - if (getProtocol(builder, config).equals("grpc")) { - return endpoint; - } else { // http/protobuf - if (!endpoint.endsWith("v1/metrics")) { - if (!endpoint.endsWith("/")) { - return endpoint + "/v1/metrics"; - } else { - return endpoint + "v1/metrics"; - } - } else { - return endpoint; - } - } + } + if (getProtocol(builder, config).equals("grpc")) { + return endpoint; + } else { // http/protobuf + if (!endpoint.endsWith("v1/metrics")) { + if (!endpoint.endsWith("/")) { + return endpoint + 
"/v1/metrics"; + } else { + return endpoint + "v1/metrics"; + } + } else { + return endpoint; } + } + } - private static Map getHeaders(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - Map headers = config.getHeaders(); - if (!headers.isEmpty()) { - return headers; - } - headers = getMap("otel.exporter.otlp.headers"); - if (!headers.isEmpty()) { - return headers; - } - if (!builder.headers.isEmpty()) { - return builder.headers; - } - return new HashMap<>(); - } + private static Map getHeaders( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + Map headers = config.getHeaders(); + if (!headers.isEmpty()) { + return headers; + } + headers = getMap("otel.exporter.otlp.headers"); + if (!headers.isEmpty()) { + return headers; + } + if (!builder.headers.isEmpty()) { + return builder.headers; + } + return new HashMap<>(); + } - private static int getIntervalSeconds(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - Integer intervalSeconds = config.getIntervalSeconds(); - if (intervalSeconds != null) { - return intervalSeconds; - } - intervalSeconds = getPositiveInteger("otel.metric.export.interval"); - if (intervalSeconds != null) { - return (int) TimeUnit.MILLISECONDS.toSeconds(intervalSeconds); - } - if (builder.intervalSeconds != null) { - return builder.intervalSeconds; - } - return 60; - } + private static int getIntervalSeconds( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + Integer intervalSeconds = config.getIntervalSeconds(); + if (intervalSeconds != null) { + return intervalSeconds; + } + intervalSeconds = getPositiveInteger("otel.metric.export.interval"); + if (intervalSeconds != null) { + return (int) TimeUnit.MILLISECONDS.toSeconds(intervalSeconds); + } + if (builder.intervalSeconds != null) { + return builder.intervalSeconds; + } + return 60; + } - private static int getTimeoutSeconds(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - Integer timeoutSeconds = config.getTimeoutSeconds(); - if (timeoutSeconds != null) { - return timeoutSeconds; - } - Integer timeoutMilliseconds = getPositiveInteger("otel.exporter.otlp.metrics.timeout"); - if (timeoutMilliseconds == null) { - timeoutMilliseconds = getPositiveInteger("otel.exporter.otlp.timeout"); - } - if (timeoutMilliseconds != null) { - return (int) TimeUnit.MILLISECONDS.toSeconds(timeoutMilliseconds); - } - if (builder.timeoutSeconds != null) { - return builder.timeoutSeconds; - } - return 10; - } + private static int getTimeoutSeconds( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + Integer timeoutSeconds = config.getTimeoutSeconds(); + if (timeoutSeconds != null) { + return timeoutSeconds; + } + Integer timeoutMilliseconds = getPositiveInteger("otel.exporter.otlp.metrics.timeout"); + if (timeoutMilliseconds == null) { + timeoutMilliseconds = getPositiveInteger("otel.exporter.otlp.timeout"); + } + if (timeoutMilliseconds != null) { + return (int) TimeUnit.MILLISECONDS.toSeconds(timeoutMilliseconds); + } + if (builder.timeoutSeconds != null) { + return builder.timeoutSeconds; + } + return 10; + } - private static String getServiceName(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - String serviceName = config.getServiceName(); - if (serviceName != null) { - return serviceName; - } - serviceName = getString("otel.service.name"); - if (serviceName != null) { - return serviceName; - } - if 
(builder.serviceName != null) { - return builder.serviceName; - } - return null; - } + private static String getServiceName( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + String serviceName = config.getServiceName(); + if (serviceName != null) { + return serviceName; + } + serviceName = getString("otel.service.name"); + if (serviceName != null) { + return serviceName; + } + if (builder.serviceName != null) { + return builder.serviceName; + } + return null; + } - private static String getServiceNamespace(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - String serviceNamespace = config.getServiceNamespace(); - if (serviceNamespace != null) { - return serviceNamespace; - } - if (builder.serviceNamespace != null) { - return builder.serviceNamespace; - } - return null; - } + private static String getServiceNamespace( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + String serviceNamespace = config.getServiceNamespace(); + if (serviceNamespace != null) { + return serviceNamespace; + } + if (builder.serviceNamespace != null) { + return builder.serviceNamespace; + } + return null; + } - private static String getServiceInstanceId(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - String serviceInstanceId = config.getServiceInstanceId(); - if (serviceInstanceId != null) { - return serviceInstanceId; - } - if (builder.serviceInstanceId != null) { - return builder.serviceInstanceId; - } - return null; - } + private static String getServiceInstanceId( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + String serviceInstanceId = config.getServiceInstanceId(); + if (serviceInstanceId != null) { + return serviceInstanceId; + } + if (builder.serviceInstanceId != null) { + return builder.serviceInstanceId; + } + return null; + } - private static String getServiceVersion(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - String serviceVersion = config.getServiceVersion(); - if (serviceVersion != null) { - return serviceVersion; - } - if (builder.serviceVersion != null) { - return builder.serviceVersion; - } - return null; - } + private static String getServiceVersion( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + String serviceVersion = config.getServiceVersion(); + if (serviceVersion != null) { + return serviceVersion; + } + if (builder.serviceVersion != null) { + return builder.serviceVersion; + } + return null; + } - private static Map getResourceAttributes(OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { - Map resourceAttributes = config.getResourceAttributes(); - if (!resourceAttributes.isEmpty()) { - return resourceAttributes; - } - resourceAttributes = getMap("otel.resource.attributes"); - if (!resourceAttributes.isEmpty()) { - return resourceAttributes; - } - if (!builder.resourceAttributes.isEmpty()) { - return builder.resourceAttributes; - } - return new HashMap<>(); - } + private static Map getResourceAttributes( + OpenTelemetryExporter.Builder builder, ExporterOpenTelemetryProperties config) { + Map resourceAttributes = config.getResourceAttributes(); + if (!resourceAttributes.isEmpty()) { + return resourceAttributes; + } + resourceAttributes = getMap("otel.resource.attributes"); + if (!resourceAttributes.isEmpty()) { + return resourceAttributes; + } + if (!builder.resourceAttributes.isEmpty()) { + return 
builder.resourceAttributes; + } + return new HashMap<>(); + } - private static String getString(String otelPropertyName) { - String otelEnvVarName = otelPropertyName.replace(".", "_").replace("-", "_").toUpperCase(); - if (System.getenv(otelEnvVarName) != null) { - return System.getenv(otelEnvVarName); - } - if (System.getProperty(otelPropertyName) != null) { - return System.getProperty(otelPropertyName); - } - return null; - } + private static String getString(String otelPropertyName) { + String otelEnvVarName = otelPropertyName.replace(".", "_").replace("-", "_").toUpperCase(); + if (System.getenv(otelEnvVarName) != null) { + return System.getenv(otelEnvVarName); + } + if (System.getProperty(otelPropertyName) != null) { + return System.getProperty(otelPropertyName); + } + return null; + } - private static Integer getInteger(String otelPropertyName) { - String result = getString(otelPropertyName); - if (result == null) { - return null; - } else { - try { - return Integer.parseInt(result); - } catch (NumberFormatException e) { - throw new IllegalStateException(otelPropertyName + "=" + result + " - illegal value."); - } - } + private static Integer getInteger(String otelPropertyName) { + String result = getString(otelPropertyName); + if (result == null) { + return null; + } else { + try { + return Integer.parseInt(result); + } catch (NumberFormatException e) { + throw new IllegalStateException(otelPropertyName + "=" + result + " - illegal value."); } + } + } - private static Integer getPositiveInteger(String otelPropertyName) { - Integer result = getInteger(otelPropertyName); - if (result == null) { - return null; - } - if (result <= 0) { - throw new IllegalStateException(otelPropertyName + "=" + result + ": Expecting value > 0."); - } - return result; - } + private static Integer getPositiveInteger(String otelPropertyName) { + Integer result = getInteger(otelPropertyName); + if (result == null) { + return null; + } + if (result <= 0) { + throw new IllegalStateException(otelPropertyName + "=" + result + ": Expecting value > 0."); + } + return result; + } - private static Map getMap(String otelPropertyName) { - Map result = new HashMap<>(); - String property = getString(otelPropertyName); - if (property != null) { - String[] pairs = property.split(","); - for (String pair : pairs) { - if (pair.contains("=")) { - String[] keyValue = pair.split("=", 1); - if (keyValue.length == 2) { - String key = keyValue[0].trim(); - String value = keyValue[1].trim(); - if (key.length() > 0 && value.length() > 0) { - result.putIfAbsent(key, value); - } - } - } - } - } - return result; + private static Map getMap(String otelPropertyName) { + Map result = new HashMap<>(); + String property = getString(otelPropertyName); + if (property != null) { + String[] pairs = property.split(","); + for (String pair : pairs) { + if (pair.contains("=")) { + String[] keyValue = pair.split("=", 1); + if (keyValue.length == 2) { + String key = keyValue[0].trim(); + String value = keyValue[1].trim(); + if (key.length() > 0 && value.length() > 0) { + result.putIfAbsent(key, value); + } + } + } } + } + return result; } + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusInstrumentationScope.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusInstrumentationScope.java index de14def11..fae9a2984 100644 --- 
a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusInstrumentationScope.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusInstrumentationScope.java @@ -1,32 +1,49 @@ package io.prometheus.metrics.exporter.opentelemetry; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.common.InstrumentationScopeInfo; - import java.util.Properties; class PrometheusInstrumentationScope { - private static final String instrumentationScopePropertiesFile = "instrumentationScope.properties"; - private static final String instrumentationScopeNameKey = "instrumentationScope.name"; - private static final String instrumentationScopeVersionKey = "instrumentationScope.version"; + private static final String instrumentationScopePropertiesFile = + "instrumentationScope.properties"; + private static final String instrumentationScopeNameKey = "instrumentationScope.name"; + private static final String instrumentationScopeVersionKey = "instrumentationScope.version"; - public static InstrumentationScopeInfo loadInstrumentationScopeInfo() { - try { - Properties properties = new Properties(); - properties.load(PrometheusInstrumentationScope.class.getClassLoader().getResourceAsStream(instrumentationScopePropertiesFile)); - String instrumentationScopeName = properties.getProperty(instrumentationScopeNameKey); - if (instrumentationScopeName == null) { - throw new IllegalStateException("Prometheus metrics library initialization error: " + instrumentationScopeNameKey + " not found in " + instrumentationScopePropertiesFile + " in classpath."); - } - String instrumentationScopeVersion = properties.getProperty(instrumentationScopeVersionKey); - if (instrumentationScopeVersion == null) { - throw new IllegalStateException("Prometheus metrics library initialization error: " + instrumentationScopeVersionKey + " not found in " + instrumentationScopePropertiesFile + " in classpath."); - } - return InstrumentationScopeInfo.builder(instrumentationScopeName) - .setVersion(instrumentationScopeVersion) - .build(); - } catch (Exception e) { - throw new IllegalStateException("Prometheus metrics library initialization error: Failed to read " + instrumentationScopePropertiesFile + " from classpath.", e); - } + public static InstrumentationScopeInfo loadInstrumentationScopeInfo() { + try { + Properties properties = new Properties(); + properties.load( + PrometheusInstrumentationScope.class + .getClassLoader() + .getResourceAsStream(instrumentationScopePropertiesFile)); + String instrumentationScopeName = properties.getProperty(instrumentationScopeNameKey); + if (instrumentationScopeName == null) { + throw new IllegalStateException( + "Prometheus metrics library initialization error: " + + instrumentationScopeNameKey + + " not found in " + + instrumentationScopePropertiesFile + + " in classpath."); + } + String instrumentationScopeVersion = properties.getProperty(instrumentationScopeVersionKey); + if (instrumentationScopeVersion == null) { + throw new IllegalStateException( + "Prometheus metrics library initialization error: " + + instrumentationScopeVersionKey + + " not found in " + + instrumentationScopePropertiesFile + + " in classpath."); + } + return InstrumentationScopeInfo.builder(instrumentationScopeName) + .setVersion(instrumentationScopeVersion) + .build(); + } catch (Exception e) { + throw new IllegalStateException( + "Prometheus metrics library initialization error: Failed to read " + + 
instrumentationScopePropertiesFile + + " from classpath.", + e); } + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusMetricProducer.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusMetricProducer.java index 37ee4d1de..8d3fb2469 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusMetricProducer.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/PrometheusMetricProducer.java @@ -19,106 +19,115 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.export.CollectionRegistration; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.resources.Resource; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.resources.ResourceBuilder; - import java.util.ArrayList; import java.util.Collection; import java.util.List; class PrometheusMetricProducer implements CollectionRegistration { - private final PrometheusRegistry registry; - private final Resource resource; - private final InstrumentationScopeInfo instrumentationScopeInfo; + private final PrometheusRegistry registry; + private final Resource resource; + private final InstrumentationScopeInfo instrumentationScopeInfo; - public PrometheusMetricProducer(PrometheusRegistry registry, InstrumentationScopeInfo instrumentationScopeInfo, Resource resource) { - this.registry = registry; - this.instrumentationScopeInfo = instrumentationScopeInfo; - this.resource = resource; - } + public PrometheusMetricProducer( + PrometheusRegistry registry, + InstrumentationScopeInfo instrumentationScopeInfo, + Resource resource) { + this.registry = registry; + this.instrumentationScopeInfo = instrumentationScopeInfo; + this.resource = resource; + } - @Override - public Collection collectAllMetrics() { - // TODO: We could add a filter configuration for the OpenTelemetry exporter and call registry.scrape(filter) if a filter is configured, like in the Servlet exporter. - MetricSnapshots snapshots = registry.scrape(); - Resource resourceWithTargetInfo = resource.merge(resourceFromTargetInfo(snapshots)); - InstrumentationScopeInfo scopeFromInfo = instrumentationScopeFromOTelScopeInfo(snapshots); - List result = new ArrayList<>(snapshots.size()); - MetricDataFactory factory = new MetricDataFactory(resourceWithTargetInfo, scopeFromInfo != null ? 
scopeFromInfo : instrumentationScopeInfo, System.currentTimeMillis()); - for (MetricSnapshot snapshot : snapshots) { - if (snapshot instanceof CounterSnapshot) { - addUnlessNull(result, factory.create((CounterSnapshot) snapshot)); - } else if (snapshot instanceof GaugeSnapshot) { - addUnlessNull(result, factory.create((GaugeSnapshot) snapshot)); - } else if (snapshot instanceof HistogramSnapshot) { - if (!((HistogramSnapshot) snapshot).isGaugeHistogram()) { - addUnlessNull(result, factory.create((HistogramSnapshot) snapshot)); - } - } else if (snapshot instanceof SummarySnapshot) { - addUnlessNull(result, factory.create((SummarySnapshot) snapshot)); - } else if (snapshot instanceof InfoSnapshot) { - String name = snapshot.getMetadata().getPrometheusName(); - if (!name.equals("target") && !name.equals("otel_scope")) { - addUnlessNull(result, factory.create((InfoSnapshot) snapshot)); - } - } else if (snapshot instanceof StateSetSnapshot) { - addUnlessNull(result, factory.create((StateSetSnapshot) snapshot)); - } else if (snapshot instanceof UnknownSnapshot) { - addUnlessNull(result, factory.create((UnknownSnapshot) snapshot)); - } + @Override + public Collection collectAllMetrics() { + // TODO: We could add a filter configuration for the OpenTelemetry exporter and call + // registry.scrape(filter) if a filter is configured, like in the Servlet exporter. + MetricSnapshots snapshots = registry.scrape(); + Resource resourceWithTargetInfo = resource.merge(resourceFromTargetInfo(snapshots)); + InstrumentationScopeInfo scopeFromInfo = instrumentationScopeFromOTelScopeInfo(snapshots); + List result = new ArrayList<>(snapshots.size()); + MetricDataFactory factory = + new MetricDataFactory( + resourceWithTargetInfo, + scopeFromInfo != null ? scopeFromInfo : instrumentationScopeInfo, + System.currentTimeMillis()); + for (MetricSnapshot snapshot : snapshots) { + if (snapshot instanceof CounterSnapshot) { + addUnlessNull(result, factory.create((CounterSnapshot) snapshot)); + } else if (snapshot instanceof GaugeSnapshot) { + addUnlessNull(result, factory.create((GaugeSnapshot) snapshot)); + } else if (snapshot instanceof HistogramSnapshot) { + if (!((HistogramSnapshot) snapshot).isGaugeHistogram()) { + addUnlessNull(result, factory.create((HistogramSnapshot) snapshot)); } - return result; + } else if (snapshot instanceof SummarySnapshot) { + addUnlessNull(result, factory.create((SummarySnapshot) snapshot)); + } else if (snapshot instanceof InfoSnapshot) { + String name = snapshot.getMetadata().getPrometheusName(); + if (!name.equals("target") && !name.equals("otel_scope")) { + addUnlessNull(result, factory.create((InfoSnapshot) snapshot)); + } + } else if (snapshot instanceof StateSetSnapshot) { + addUnlessNull(result, factory.create((StateSetSnapshot) snapshot)); + } else if (snapshot instanceof UnknownSnapshot) { + addUnlessNull(result, factory.create((UnknownSnapshot) snapshot)); + } } + return result; + } - private Resource resourceFromTargetInfo(MetricSnapshots snapshots) { - ResourceBuilder result = Resource.builder(); - for (MetricSnapshot snapshot : snapshots) { - if (snapshot.getMetadata().getName().equals("target") && snapshot instanceof InfoSnapshot) { - InfoSnapshot targetInfo = (InfoSnapshot) snapshot; - if (targetInfo.getDataPoints().size() > 0) { - InfoSnapshot.InfoDataPointSnapshot data = targetInfo.getDataPoints().get(0); - Labels labels = data.getLabels(); - for (int i = 0; i < labels.size(); i++) { - result.put(labels.getName(i), labels.getValue(i)); - } - } - } + private Resource 
resourceFromTargetInfo(MetricSnapshots snapshots) { + ResourceBuilder result = Resource.builder(); + for (MetricSnapshot snapshot : snapshots) { + if (snapshot.getMetadata().getName().equals("target") && snapshot instanceof InfoSnapshot) { + InfoSnapshot targetInfo = (InfoSnapshot) snapshot; + if (targetInfo.getDataPoints().size() > 0) { + InfoSnapshot.InfoDataPointSnapshot data = targetInfo.getDataPoints().get(0); + Labels labels = data.getLabels(); + for (int i = 0; i < labels.size(); i++) { + result.put(labels.getName(i), labels.getValue(i)); + } } - return result.build(); + } } + return result.build(); + } - private InstrumentationScopeInfo instrumentationScopeFromOTelScopeInfo(MetricSnapshots snapshots) { - for (MetricSnapshot snapshot : snapshots) { - if (snapshot.getMetadata().getPrometheusName().equals("otel_scope") && snapshot instanceof InfoSnapshot) { - InfoSnapshot scopeInfo = (InfoSnapshot) snapshot; - if (scopeInfo.getDataPoints().size() > 0) { - Labels labels = scopeInfo.getDataPoints().get(0).getLabels(); - String name = null; - String version = null; - AttributesBuilder attributesBuilder = Attributes.builder(); - for (int i = 0; i < labels.size(); i++) { - if (labels.getPrometheusName(i).equals("otel_scope_name")) { - name = labels.getValue(i); - } else if (labels.getPrometheusName(i).equals("otel_scope_version")) { - version = labels.getValue(i); - } else { - attributesBuilder.put(labels.getName(i), labels.getValue(i)); - } - } - if (name != null) { - return InstrumentationScopeInfo.builder(name) - .setVersion(version) - .setAttributes(attributesBuilder.build()) - .build(); - } - } + private InstrumentationScopeInfo instrumentationScopeFromOTelScopeInfo( + MetricSnapshots snapshots) { + for (MetricSnapshot snapshot : snapshots) { + if (snapshot.getMetadata().getPrometheusName().equals("otel_scope") + && snapshot instanceof InfoSnapshot) { + InfoSnapshot scopeInfo = (InfoSnapshot) snapshot; + if (scopeInfo.getDataPoints().size() > 0) { + Labels labels = scopeInfo.getDataPoints().get(0).getLabels(); + String name = null; + String version = null; + AttributesBuilder attributesBuilder = Attributes.builder(); + for (int i = 0; i < labels.size(); i++) { + if (labels.getPrometheusName(i).equals("otel_scope_name")) { + name = labels.getValue(i); + } else if (labels.getPrometheusName(i).equals("otel_scope_version")) { + version = labels.getValue(i); + } else { + attributesBuilder.put(labels.getName(i), labels.getValue(i)); } + } + if (name != null) { + return InstrumentationScopeInfo.builder(name) + .setVersion(version) + .setAttributes(attributesBuilder.build()) + .build(); + } } - return null; + } } + return null; + } - private void addUnlessNull(List result, MetricData data) { - if (data != null) { - result.add(data); - } + private void addUnlessNull(List result, MetricData data) { + if (data != null) { + result.add(data); } + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributes.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributes.java index c4319b7fc..2c88badfc 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributes.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributes.java @@ -5,30 +5,32 @@ public class ResourceAttributes { - // TODO: The OTel Java instrumentation also 
has a SpringBootServiceNameDetector, we should port this over. - public static Map get(String instrumentationScopeName, - String serviceName, - String serviceNamespace, - String serviceInstanceId, - String serviceVersion, - Map configuredResourceAttributes) { - Map result = new HashMap<>(); - ResourceAttributesFromOtelAgent.addIfAbsent(result, instrumentationScopeName); - putIfAbsent(result, "service.name", serviceName); - putIfAbsent(result, "service.namespace", serviceNamespace); - putIfAbsent(result, "service.instance.id", serviceInstanceId); - putIfAbsent(result, "service.version", serviceVersion); - for (Map.Entry attribute : configuredResourceAttributes.entrySet()) { - putIfAbsent(result, attribute.getKey(), attribute.getValue()); - } - ResourceAttributesFromJarFileName.addIfAbsent(result); - ResourceAttributesDefaults.addIfAbsent(result); - return result; + // TODO: The OTel Java instrumentation also has a SpringBootServiceNameDetector, we should port + // this over. + public static Map get( + String instrumentationScopeName, + String serviceName, + String serviceNamespace, + String serviceInstanceId, + String serviceVersion, + Map configuredResourceAttributes) { + Map result = new HashMap<>(); + ResourceAttributesFromOtelAgent.addIfAbsent(result, instrumentationScopeName); + putIfAbsent(result, "service.name", serviceName); + putIfAbsent(result, "service.namespace", serviceNamespace); + putIfAbsent(result, "service.instance.id", serviceInstanceId); + putIfAbsent(result, "service.version", serviceVersion); + for (Map.Entry attribute : configuredResourceAttributes.entrySet()) { + putIfAbsent(result, attribute.getKey(), attribute.getValue()); } + ResourceAttributesFromJarFileName.addIfAbsent(result); + ResourceAttributesDefaults.addIfAbsent(result); + return result; + } - private static void putIfAbsent(Map result, String key, String value) { - if (value != null) { - result.putIfAbsent(key, value); - } + private static void putIfAbsent(Map result, String key, String value) { + if (value != null) { + result.putIfAbsent(key, value); } + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesDefaults.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesDefaults.java index 0e63f4ccb..19328fd73 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesDefaults.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesDefaults.java @@ -5,10 +5,10 @@ public class ResourceAttributesDefaults { - private static final String instanceId = UUID.randomUUID().toString(); + private static final String instanceId = UUID.randomUUID().toString(); - public static void addIfAbsent(Map result) { - result.putIfAbsent("service.instance.id", instanceId); - result.putIfAbsent("service.name", "unknown_service:java"); - } + public static void addIfAbsent(Map result) { + result.putIfAbsent("service.instance.id", instanceId); + result.putIfAbsent("service.name", "unknown_service:java"); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromJarFileName.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromJarFileName.java index 093b3b44d..7cf7a51aa 100644 --- 
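
Note on the lookup order in ResourceAttributes.get() above: every layer uses Map.putIfAbsent, so whichever source runs first wins. Attributes detected from an attached OTel Java agent take precedence, then the explicitly configured service.* values and resource attributes, then the service name derived from the jar file name, and finally the hard-coded defaults from ResourceAttributesDefaults (a random service.instance.id and "unknown_service:java"). A minimal, self-contained sketch of that layering; the attribute values are made up for illustration:

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

// Illustration only: earlier putIfAbsent calls win, later ones just fill the gaps.
public class PutIfAbsentLayeringSketch {
  public static void main(String[] args) {
    Map<String, String> attributes = new HashMap<>();

    // 1. Values discovered from the OTel Java agent (if one is attached) go in first and win.
    attributes.putIfAbsent("service.name", "checkout-from-agent");

    // 2. Explicitly configured values only land on keys that are still missing.
    attributes.putIfAbsent("service.name", "checkout-from-config"); // ignored, already present
    attributes.putIfAbsent("service.version", "1.2.3"); // added

    // 3. Jar-file-name detection and the hard-coded defaults fill whatever is left.
    attributes.putIfAbsent("service.instance.id", UUID.randomUUID().toString());
    attributes.putIfAbsent("service.name", "unknown_service:java"); // ignored, already present

    // Prints the agent-supplied service.name, the configured version, and a random instance id.
    System.out.println(attributes);
  }
}
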
a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromJarFileName.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromJarFileName.java @@ -9,52 +9,52 @@ // See io.opentelemetry.instrumentation.resources.JarServiceNameDetector public class ResourceAttributesFromJarFileName { - public static void addIfAbsent(Map result) { - if (result.containsKey("service.name")) { - return; - } - Path jarPath = getJarPathFromSunCommandLine(); - if (jarPath == null) { - return; - } - String serviceName = getServiceName(jarPath); - result.putIfAbsent("service.name", serviceName); + public static void addIfAbsent(Map result) { + if (result.containsKey("service.name")) { + return; } - - private static Path getJarPathFromSunCommandLine() { - String programArguments = System.getProperty("sun.java.command"); - if (programArguments == null) { - return null; - } - // Take the path until the first space. If the path doesn't exist extend it up to the next - // space. Repeat until a path that exists is found or input runs out. - int next = 0; - while (true) { - int nextSpace = programArguments.indexOf(' ', next); - if (nextSpace == -1) { - return pathIfExists(programArguments); - } - Path path = pathIfExists(programArguments.substring(0, nextSpace)); - next = nextSpace + 1; - if (path != null) { - return path; - } - } + Path jarPath = getJarPathFromSunCommandLine(); + if (jarPath == null) { + return; } + String serviceName = getServiceName(jarPath); + result.putIfAbsent("service.name", serviceName); + } - private static Path pathIfExists(String programArguments) { - Path candidate; - try { - candidate = Paths.get(programArguments); - } catch (InvalidPathException e) { - return null; - } - return Files.isRegularFile(candidate) ? candidate : null; + private static Path getJarPathFromSunCommandLine() { + String programArguments = System.getProperty("sun.java.command"); + if (programArguments == null) { + return null; + } + // Take the path until the first space. If the path doesn't exist extend it up to the next + // space. Repeat until a path that exists is found or input runs out. + int next = 0; + while (true) { + int nextSpace = programArguments.indexOf(' ', next); + if (nextSpace == -1) { + return pathIfExists(programArguments); + } + Path path = pathIfExists(programArguments.substring(0, nextSpace)); + next = nextSpace + 1; + if (path != null) { + return path; + } } + } - private static String getServiceName(Path jarPath) { - String jarName = jarPath.getFileName().toString(); - int dotIndex = jarName.lastIndexOf("."); - return dotIndex == -1 ? jarName : jarName.substring(0, dotIndex); + private static Path pathIfExists(String programArguments) { + Path candidate; + try { + candidate = Paths.get(programArguments); + } catch (InvalidPathException e) { + return null; } + return Files.isRegularFile(candidate) ? candidate : null; + } + + private static String getServiceName(Path jarPath) { + String jarName = jarPath.getFileName().toString(); + int dotIndex = jarName.lastIndexOf("."); + return dotIndex == -1 ? 
jarName : jarName.substring(0, dotIndex); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromOtelAgent.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromOtelAgent.java index a18897fe5..698db602f 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromOtelAgent.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/ResourceAttributesFromOtelAgent.java @@ -1,5 +1,7 @@ package io.prometheus.metrics.exporter.opentelemetry; +import static java.nio.file.Files.createTempDirectory; + import java.io.File; import java.io.InputStream; import java.lang.reflect.Field; @@ -11,91 +13,98 @@ import java.nio.file.StandardCopyOption; import java.util.Map; -import static java.nio.file.Files.createTempDirectory; - public class ResourceAttributesFromOtelAgent { - private static final String[] OTEL_JARS = new String[]{"opentelemetry-api-1.29.0.jar", "opentelemetry-context-1.29.0.jar"}; + private static final String[] OTEL_JARS = + new String[] {"opentelemetry-api-1.29.0.jar", "opentelemetry-context-1.29.0.jar"}; - /** - * This grabs resource attributes like {@code service.name} and {@code service.instance.id} from - * the OTel Java agent (if present) and adds them to {@code result}. - *
<p>
- * The way this works is as follows: If the OTel Java agent is attached, it modifies the - * {@code GlobalOpenTelemetry.get()} method to return an agent-specific object. - * From that agent-specific object we can get the resource attributes via reflection. - *
<p>
- * So we load the {@code GlobalOpenTelemetry} class (in a separate class loader from the JAR files - * that are bundled with this module), call {@code .get()}, and inspect the returned object. - *
<p>
- * After that we discard the class loader so that all OTel specific classes are unloaded. - * No runtime dependency on any OTel version remains. - */ - public static void addIfAbsent(Map result, String instrumentationScopeName) { - try { - Path tmpDir = createTempDirectory(instrumentationScopeName + "-"); - try { - URL[] otelJars = copyOtelJarsToTempDir(tmpDir, instrumentationScopeName); + /** + * This grabs resource attributes like {@code service.name} and {@code service.instance.id} from + * the OTel Java agent (if present) and adds them to {@code result}. + * + *
<p>
The way this works is as follows: If the OTel Java agent is attached, it modifies the {@code + * GlobalOpenTelemetry.get()} method to return an agent-specific object. From that agent-specific + * object we can get the resource attributes via reflection. + * + *
<p>
So we load the {@code GlobalOpenTelemetry} class (in a separate class loader from the JAR + * files that are bundled with this module), call {@code .get()}, and inspect the returned object. + * + *
<p>
After that we discard the class loader so that all OTel specific classes are unloaded. No + * runtime dependency on any OTel version remains. + */ + public static void addIfAbsent(Map result, String instrumentationScopeName) { + try { + Path tmpDir = createTempDirectory(instrumentationScopeName + "-"); + try { + URL[] otelJars = copyOtelJarsToTempDir(tmpDir, instrumentationScopeName); - try (URLClassLoader classLoader = new URLClassLoader(otelJars)) { - Class globalOpenTelemetryClass = classLoader.loadClass("io.opentelemetry.api.GlobalOpenTelemetry"); - Object globalOpenTelemetry = globalOpenTelemetryClass.getMethod("get").invoke(null); - if (globalOpenTelemetry.getClass().getSimpleName().contains("ApplicationOpenTelemetry")) { - // GlobalOpenTelemetry is injected by the OTel Java aqent - Object applicationMeterProvider = callMethod("getMeterProvider", globalOpenTelemetry); - Object agentMeterProvider = getField("agentMeterProvider", applicationMeterProvider); - Object sdkMeterProvider = getField("delegate", agentMeterProvider); - Object sharedState = getField("sharedState", sdkMeterProvider); - Object resource = callMethod("getResource", sharedState); - Object attributes = callMethod("getAttributes", resource); - Map attributeMap = (Map) callMethod("asMap", attributes); + try (URLClassLoader classLoader = new URLClassLoader(otelJars)) { + Class globalOpenTelemetryClass = + classLoader.loadClass("io.opentelemetry.api.GlobalOpenTelemetry"); + Object globalOpenTelemetry = globalOpenTelemetryClass.getMethod("get").invoke(null); + if (globalOpenTelemetry.getClass().getSimpleName().contains("ApplicationOpenTelemetry")) { + // GlobalOpenTelemetry is injected by the OTel Java aqent + Object applicationMeterProvider = callMethod("getMeterProvider", globalOpenTelemetry); + Object agentMeterProvider = getField("agentMeterProvider", applicationMeterProvider); + Object sdkMeterProvider = getField("delegate", agentMeterProvider); + Object sharedState = getField("sharedState", sdkMeterProvider); + Object resource = callMethod("getResource", sharedState); + Object attributes = callMethod("getAttributes", resource); + Map attributeMap = (Map) callMethod("asMap", attributes); - for (Map.Entry entry : attributeMap.entrySet()) { - if (entry.getKey() != null && entry.getValue() != null) { - result.putIfAbsent(entry.getKey().toString(), entry.getValue().toString()); - } - } - } - } - } finally { - deleteTempDir(tmpDir.toFile()); + for (Map.Entry entry : attributeMap.entrySet()) { + if (entry.getKey() != null && entry.getValue() != null) { + result.putIfAbsent(entry.getKey().toString(), entry.getValue().toString()); + } } - } catch (Exception ignored) { + } } + } finally { + deleteTempDir(tmpDir.toFile()); + } + } catch (Exception ignored) { } + } - private static Object getField(String name, Object obj) throws Exception { - Field field = obj.getClass().getDeclaredField(name); - field.setAccessible(true); - return field.get(obj); - } + private static Object getField(String name, Object obj) throws Exception { + Field field = obj.getClass().getDeclaredField(name); + field.setAccessible(true); + return field.get(obj); + } - private static Object callMethod(String name, Object obj) throws Exception { - Method method = obj.getClass().getMethod(name); - method.setAccessible(true); - return method.invoke(obj); - } + private static Object callMethod(String name, Object obj) throws Exception { + Method method = obj.getClass().getMethod(name); + method.setAccessible(true); + return method.invoke(obj); + } - private 
static URL[] copyOtelJarsToTempDir(Path tmpDir, String instrumentationScopeName) throws Exception { - URL[] result = new URL[OTEL_JARS.length]; - for (int i = 0; i < OTEL_JARS.length; i++) { - InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("lib/" + OTEL_JARS[i]); - if (inputStream == null) { - throw new IllegalStateException("Error initializing " + instrumentationScopeName + ": lib/" + OTEL_JARS[i] + " not found in classpath."); - } - File outputFile = tmpDir.resolve(OTEL_JARS[i]).toFile(); - Files.copy(inputStream, outputFile.toPath(), StandardCopyOption.REPLACE_EXISTING); - inputStream.close(); - result[i] = outputFile.toURI().toURL(); - } - return result; + private static URL[] copyOtelJarsToTempDir(Path tmpDir, String instrumentationScopeName) + throws Exception { + URL[] result = new URL[OTEL_JARS.length]; + for (int i = 0; i < OTEL_JARS.length; i++) { + InputStream inputStream = + Thread.currentThread().getContextClassLoader().getResourceAsStream("lib/" + OTEL_JARS[i]); + if (inputStream == null) { + throw new IllegalStateException( + "Error initializing " + + instrumentationScopeName + + ": lib/" + + OTEL_JARS[i] + + " not found in classpath."); + } + File outputFile = tmpDir.resolve(OTEL_JARS[i]).toFile(); + Files.copy(inputStream, outputFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + inputStream.close(); + result[i] = outputFile.toURI().toURL(); } + return result; + } - private static void deleteTempDir(File tmpDir) { - // We don't have subdirectories, so this simple implementation should work. - for (File file : tmpDir.listFiles()) { - file.delete(); - } - tmpDir.delete(); + private static void deleteTempDir(File tmpDir) { + // We don't have subdirectories, so this simple implementation should work. 
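
The isolated-class-loader trick documented in the Javadoc above (load a class through a throwaway URLClassLoader, call its static accessor, walk the returned object with reflection, then close the loader so nothing stays loaded) can be sketched roughly as follows. The sketch uses java.lang.Runtime as a stand-in for the shaded GlobalOpenTelemetry so that it runs without any OTel JARs on the classpath; the class and method names are illustrative, not the exporter's actual lookup chain:

import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;

// Illustration only: load a class via a short-lived class loader, call a static
// accessor, inspect the result reflectively, then let the loader go away.
public class IsolatedReflectionSketch {
  public static void main(String[] args) throws Exception {
    try (URLClassLoader loader = new URLClassLoader(new URL[0])) {
      Class<?> clazz = loader.loadClass("java.lang.Runtime"); // stand-in for GlobalOpenTelemetry
      Object instance = clazz.getMethod("getRuntime").invoke(null); // like GlobalOpenTelemetry.get()
      Method accessor = instance.getClass().getMethod("availableProcessors");
      System.out.println(accessor.invoke(instance));
    } // loader is closed here; nothing loaded through it is referenced afterwards
  }
}

In the real exporter the loader is built from the copied opentelemetry-api JARs, and the reflective walk follows the agent's internal object graph (getMeterProvider, agentMeterProvider, delegate, sharedState, getResource, getAttributes) as shown in the hunk above.
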
+ for (File file : tmpDir.listFiles()) { + file.delete(); } + tmpDir.delete(); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/DoublePointDataImpl.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/DoublePointDataImpl.java index 8f4a627ff..12b7c7acd 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/DoublePointDataImpl.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/DoublePointDataImpl.java @@ -3,20 +3,24 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.api.common.Attributes; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoubleExemplarData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoublePointData; - import java.util.List; class DoublePointDataImpl extends PointDataImpl implements DoublePointData { - private final double value; + private final double value; - public DoublePointDataImpl(double value, long startEpochNanos, long epochNanos, Attributes attributes, List exemplars) { - super(startEpochNanos, epochNanos, attributes, exemplars); - this.value = value; - } + public DoublePointDataImpl( + double value, + long startEpochNanos, + long epochNanos, + Attributes attributes, + List exemplars) { + super(startEpochNanos, epochNanos, attributes, exemplars); + this.value = value; + } - @Override - public double getValue() { - return value; - } + @Override + public double getValue() { + return value; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramBucketsImpl.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramBucketsImpl.java index 5acacc457..798e9b5df 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramBucketsImpl.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramBucketsImpl.java @@ -1,46 +1,45 @@ package io.prometheus.metrics.exporter.opentelemetry.otelmodel; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.ExponentialHistogramBuckets; - import java.util.ArrayList; import java.util.List; class ExponentialHistogramBucketsImpl implements ExponentialHistogramBuckets { - private final int scale; - private final int offset; - private final List bucketCounts = new ArrayList<>(); - - ExponentialHistogramBucketsImpl(int scale, int offset) { - this.scale = scale; - this.offset = offset; - } - - void addCount(long count) { - bucketCounts.add(count); - } - - @Override - public int getScale() { - return scale; - } - - @Override - public int getOffset() { - return offset; - } - - @Override - public List getBucketCounts() { - return bucketCounts; - } - - @Override - public long getTotalCount() { - long result = 0; - for (Long count : bucketCounts) { - result += count; - } - return result; + private final int scale; + private final int offset; + private final List bucketCounts = new ArrayList<>(); + + ExponentialHistogramBucketsImpl(int scale, int offset) { + this.scale = scale; + this.offset = offset; + } + + void addCount(long count) { + 
bucketCounts.add(count); + } + + @Override + public int getScale() { + return scale; + } + + @Override + public int getOffset() { + return offset; + } + + @Override + public List getBucketCounts() { + return bucketCounts; + } + + @Override + public long getTotalCount() { + long result = 0; + for (Long count : bucketCounts) { + result += count; } + return result; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramPointDataImpl.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramPointDataImpl.java index 799e44712..3c5a893e2 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramPointDataImpl.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ExponentialHistogramPointDataImpl.java @@ -4,82 +4,92 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoubleExemplarData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.ExponentialHistogramBuckets; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.ExponentialHistogramPointData; - import java.util.List; -public class ExponentialHistogramPointDataImpl extends PointDataImpl implements ExponentialHistogramPointData { - - private final int scale; - private final double sum; - private final long count; - private final long zeroCount; - private final double min; - private final double max; - - private final ExponentialHistogramBuckets positiveBuckets; - private final ExponentialHistogramBuckets negativeBuckets; - - ExponentialHistogramPointDataImpl(int scale, double sum, long count, long zeroCount, double min, double max, - ExponentialHistogramBuckets positiveBuckets, ExponentialHistogramBuckets negativeBuckets, - long startEpochNanos, long epochNanos, Attributes attributes, List exemplars) { - super(startEpochNanos, epochNanos, attributes, exemplars); - this.scale = scale; - this.sum = sum; - this.count = count; - this.zeroCount = zeroCount; - this.min = min; - this.max = max; - this.positiveBuckets = positiveBuckets; - this.negativeBuckets = negativeBuckets; - } - - @Override - public int getScale() { - return scale; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public long getCount() { - return count; - } - - @Override - public long getZeroCount() { - return zeroCount; - } - - @Override - public boolean hasMin() { - return !Double.isNaN(min); - } - - @Override - public double getMin() { - return min; - } - - @Override - public boolean hasMax() { - return !Double.isNaN(max); - } - - @Override - public double getMax() { - return max; - } - - @Override - public ExponentialHistogramBuckets getPositiveBuckets() { - return positiveBuckets; - } - - @Override - public ExponentialHistogramBuckets getNegativeBuckets() { - return negativeBuckets; - } +public class ExponentialHistogramPointDataImpl extends PointDataImpl + implements ExponentialHistogramPointData { + + private final int scale; + private final double sum; + private final long count; + private final long zeroCount; + private final double min; + private final double max; + + private final ExponentialHistogramBuckets positiveBuckets; + private final ExponentialHistogramBuckets negativeBuckets; + + ExponentialHistogramPointDataImpl( + int scale, + 
double sum, + long count, + long zeroCount, + double min, + double max, + ExponentialHistogramBuckets positiveBuckets, + ExponentialHistogramBuckets negativeBuckets, + long startEpochNanos, + long epochNanos, + Attributes attributes, + List exemplars) { + super(startEpochNanos, epochNanos, attributes, exemplars); + this.scale = scale; + this.sum = sum; + this.count = count; + this.zeroCount = zeroCount; + this.min = min; + this.max = max; + this.positiveBuckets = positiveBuckets; + this.negativeBuckets = negativeBuckets; + } + + @Override + public int getScale() { + return scale; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public long getCount() { + return count; + } + + @Override + public long getZeroCount() { + return zeroCount; + } + + @Override + public boolean hasMin() { + return !Double.isNaN(min); + } + + @Override + public double getMin() { + return min; + } + + @Override + public boolean hasMax() { + return !Double.isNaN(max); + } + + @Override + public double getMax() { + return max; + } + + @Override + public ExponentialHistogramBuckets getPositiveBuckets() { + return positiveBuckets; + } + + @Override + public ExponentialHistogramBuckets getNegativeBuckets() { + return negativeBuckets; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/HistogramPointDataImpl.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/HistogramPointDataImpl.java index af9c1519d..19b141b04 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/HistogramPointDataImpl.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/HistogramPointDataImpl.java @@ -3,66 +3,74 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.api.common.Attributes; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoubleExemplarData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.HistogramPointData; - import java.util.List; public class HistogramPointDataImpl extends PointDataImpl implements HistogramPointData { - private final double sum; - private final long count; - private final double min; - private final double max; - private final List boundaries; - private final List counts; + private final double sum; + private final long count; + private final double min; + private final double max; + private final List boundaries; + private final List counts; - public HistogramPointDataImpl(double sum, long count, double min, double max, List boundaries, List counts, - long startEpochNanos, long epochNanos, Attributes attributes, List exemplars) { - super(startEpochNanos, epochNanos, attributes, exemplars); - this.sum = sum; - this.count = count; - this.min = min; - this.max = max; - this.boundaries = boundaries; - this.counts = counts; - } + public HistogramPointDataImpl( + double sum, + long count, + double min, + double max, + List boundaries, + List counts, + long startEpochNanos, + long epochNanos, + Attributes attributes, + List exemplars) { + super(startEpochNanos, epochNanos, attributes, exemplars); + this.sum = sum; + this.count = count; + this.min = min; + this.max = max; + this.boundaries = boundaries; + this.counts = counts; + } - @Override - public double getSum() { - return sum; - } + @Override + public double getSum() { + return sum; + } - 
@Override - public long getCount() { - return count; - } + @Override + public long getCount() { + return count; + } - @Override - public boolean hasMin() { - return !Double.isNaN(min); - } + @Override + public boolean hasMin() { + return !Double.isNaN(min); + } - @Override - public double getMin() { - return min; - } + @Override + public double getMin() { + return min; + } - @Override - public boolean hasMax() { - return !Double.isNaN(max); - } + @Override + public boolean hasMax() { + return !Double.isNaN(max); + } - @Override - public double getMax() { - return max; - } + @Override + public double getMax() { + return max; + } - @Override - public List getBoundaries() { - return boundaries; - } + @Override + public List getBoundaries() { + return boundaries; + } - @Override - public List getCounts() { - return counts; - } + @Override + public List getCounts() { + return counts; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/MetricDataFactory.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/MetricDataFactory.java index 6e723e20a..95cb59546 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/MetricDataFactory.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/MetricDataFactory.java @@ -13,49 +13,84 @@ public class MetricDataFactory { - private final Resource resource; - private final InstrumentationScopeInfo instrumentationScopeInfo; - private final long currentTimeMillis; - - public MetricDataFactory(Resource resource, InstrumentationScopeInfo instrumentationScopeInfo, long currentTimeMillis) { - this.resource = resource; - this.instrumentationScopeInfo = instrumentationScopeInfo; - this.currentTimeMillis = currentTimeMillis; - } + private final Resource resource; + private final InstrumentationScopeInfo instrumentationScopeInfo; + private final long currentTimeMillis; - public MetricData create(CounterSnapshot snapshot) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusCounter(snapshot, currentTimeMillis), instrumentationScopeInfo, resource); - } + public MetricDataFactory( + Resource resource, + InstrumentationScopeInfo instrumentationScopeInfo, + long currentTimeMillis) { + this.resource = resource; + this.instrumentationScopeInfo = instrumentationScopeInfo; + this.currentTimeMillis = currentTimeMillis; + } - public MetricData create(GaugeSnapshot snapshot) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusGauge(snapshot, currentTimeMillis), instrumentationScopeInfo, resource); - } + public MetricData create(CounterSnapshot snapshot) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusCounter(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } - public MetricData create(HistogramSnapshot snapshot) { - if (!snapshot.getDataPoints().isEmpty()) { - HistogramSnapshot.HistogramDataPointSnapshot firstDataPoint = snapshot.getDataPoints().get(0); - if (firstDataPoint.hasNativeHistogramData()) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusNativeHistogram(snapshot, currentTimeMillis), instrumentationScopeInfo, resource); - } else if (firstDataPoint.hasClassicHistogramData()) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusClassicHistogram(snapshot, 
currentTimeMillis), instrumentationScopeInfo, resource); - } - } - return null; - } + public MetricData create(GaugeSnapshot snapshot) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusGauge(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } - public MetricData create(SummarySnapshot snapshot) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusSummary(snapshot, currentTimeMillis), instrumentationScopeInfo, resource); + public MetricData create(HistogramSnapshot snapshot) { + if (!snapshot.getDataPoints().isEmpty()) { + HistogramSnapshot.HistogramDataPointSnapshot firstDataPoint = snapshot.getDataPoints().get(0); + if (firstDataPoint.hasNativeHistogramData()) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusNativeHistogram(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } else if (firstDataPoint.hasClassicHistogramData()) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusClassicHistogram(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } } + return null; + } - public MetricData create(InfoSnapshot snapshot) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusInfo(snapshot, currentTimeMillis), instrumentationScopeInfo, resource); - } + public MetricData create(SummarySnapshot snapshot) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusSummary(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } - public MetricData create(StateSetSnapshot snapshot) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusStateSet(snapshot, currentTimeMillis), instrumentationScopeInfo, resource); - } + public MetricData create(InfoSnapshot snapshot) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusInfo(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } - public MetricData create(UnknownSnapshot snapshot) { - return new PrometheusMetricData<>(snapshot.getMetadata(), new PrometheusUnknown(snapshot, currentTimeMillis), instrumentationScopeInfo, resource); - } + public MetricData create(StateSetSnapshot snapshot) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusStateSet(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } + + public MetricData create(UnknownSnapshot snapshot) { + return new PrometheusMetricData<>( + snapshot.getMetadata(), + new PrometheusUnknown(snapshot, currentTimeMillis), + instrumentationScopeInfo, + resource); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PointDataImpl.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PointDataImpl.java index b3795e5dd..d1ba082e5 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PointDataImpl.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PointDataImpl.java @@ -3,40 +3,43 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.api.common.Attributes; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoubleExemplarData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.PointData; - import java.util.List; abstract 
class PointDataImpl implements PointData { - private final long startEpochNanos; - private final long epochNanos; - private final Attributes attributes; - private final List exemplars; - - PointDataImpl(long startEpochNanos, long epochNanos, Attributes attributes, List exemplars) { - this.startEpochNanos = startEpochNanos; - this.epochNanos = epochNanos; - this.attributes = attributes; - this.exemplars = exemplars; - } - - @Override - public long getStartEpochNanos() { - return startEpochNanos; - } - - @Override - public long getEpochNanos() { - return epochNanos; - } - - @Override - public Attributes getAttributes() { - return attributes; - } - - @Override - public List getExemplars() { - return exemplars; - } + private final long startEpochNanos; + private final long epochNanos; + private final Attributes attributes; + private final List exemplars; + + PointDataImpl( + long startEpochNanos, + long epochNanos, + Attributes attributes, + List exemplars) { + this.startEpochNanos = startEpochNanos; + this.epochNanos = epochNanos; + this.attributes = attributes; + this.exemplars = exemplars; + } + + @Override + public long getStartEpochNanos() { + return startEpochNanos; + } + + @Override + public long getEpochNanos() { + return epochNanos; + } + + @Override + public Attributes getAttributes() { + return attributes; + } + + @Override + public List getExemplars() { + return exemplars; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusClassicHistogram.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusClassicHistogram.java index 0040c7d17..3434dc467 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusClassicHistogram.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusClassicHistogram.java @@ -6,75 +6,78 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.HistogramData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.HistogramPointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; - import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; -class PrometheusClassicHistogram extends PrometheusData implements HistogramData { +class PrometheusClassicHistogram extends PrometheusData + implements HistogramData { - private final List points; + private final List points; - PrometheusClassicHistogram(HistogramSnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.HISTOGRAM); - this.points = snapshot.getDataPoints().stream() - .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } + PrometheusClassicHistogram(HistogramSnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.HISTOGRAM); + this.points = + snapshot.getDataPoints().stream() + .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } - @Override - public AggregationTemporality getAggregationTemporality() { - return AggregationTemporality.CUMULATIVE; - } + @Override + public AggregationTemporality getAggregationTemporality() { + return 
AggregationTemporality.CUMULATIVE; + } - @Override - public Collection getPoints() { - return points; - } + @Override + public Collection getPoints() { + return points; + } - private HistogramPointData toOtelDataPoint(HistogramSnapshot.HistogramDataPointSnapshot dataPoint, long currentTimeMillis) { - if (!dataPoint.hasClassicHistogramData()) { - return null; - } else { - return new HistogramPointDataImpl( - dataPoint.hasSum() ? dataPoint.getSum() : Double.NaN, - dataPoint.hasCount() ? dataPoint.getCount() : calculateCount(dataPoint.getClassicBuckets()), - Double.NaN, - Double.NaN, - makeBoundaries(dataPoint.getClassicBuckets()), - makeCounts(dataPoint.getClassicBuckets()), - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels()), - convertExemplars(dataPoint.getExemplars()) - ); - } + private HistogramPointData toOtelDataPoint( + HistogramSnapshot.HistogramDataPointSnapshot dataPoint, long currentTimeMillis) { + if (!dataPoint.hasClassicHistogramData()) { + return null; + } else { + return new HistogramPointDataImpl( + dataPoint.hasSum() ? dataPoint.getSum() : Double.NaN, + dataPoint.hasCount() + ? dataPoint.getCount() + : calculateCount(dataPoint.getClassicBuckets()), + Double.NaN, + Double.NaN, + makeBoundaries(dataPoint.getClassicBuckets()), + makeCounts(dataPoint.getClassicBuckets()), + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes(dataPoint.getLabels()), + convertExemplars(dataPoint.getExemplars())); } + } - private long calculateCount(ClassicHistogramBuckets buckets) { - int result = 0; - for (int i=0; i makeBoundaries(ClassicHistogramBuckets buckets) { - List result = new ArrayList<>(buckets.size()); - for (int i=0; i makeBoundaries(ClassicHistogramBuckets buckets) { + List result = new ArrayList<>(buckets.size()); + for (int i = 0; i < buckets.size(); i++) { + result.add(buckets.getUpperBound(i)); } + return result; + } - private List makeCounts(ClassicHistogramBuckets buckets) { - List result = new ArrayList<>(buckets.size()); - for (int i=0; i makeCounts(ClassicHistogramBuckets buckets) { + List result = new ArrayList<>(buckets.size()); + for (int i = 0; i < buckets.size(); i++) { + result.add(buckets.getCount(i)); } + return result; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusCounter.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusCounter.java index 4ad04975f..f730fb97d 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusCounter.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusCounter.java @@ -1,48 +1,49 @@ package io.prometheus.metrics.exporter.opentelemetry.otelmodel; +import io.prometheus.metrics.model.snapshots.CounterSnapshot; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.AggregationTemporality; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoublePointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.SumData; -import io.prometheus.metrics.model.snapshots.CounterSnapshot; - import java.util.Collection; import java.util.List; import 
java.util.stream.Collectors; -class PrometheusCounter extends PrometheusData implements SumData { - - private final List points; - - public PrometheusCounter(CounterSnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.DOUBLE_SUM); - this.points = snapshot.getDataPoints().stream() - .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) - .collect(Collectors.toList()); - } - - @Override - public boolean isMonotonic() { - return true; - } - - @Override - public AggregationTemporality getAggregationTemporality() { - return AggregationTemporality.CUMULATIVE; - } - - @Override - public Collection getPoints() { - return points; - } - - private DoublePointData toOtelDataPoint(CounterSnapshot.CounterDataPointSnapshot dataPoint, long currentTimeMillis) { - return new DoublePointDataImpl( - dataPoint.getValue(), - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels()), - convertExemplar(dataPoint.getExemplar()) - ); - } +class PrometheusCounter extends PrometheusData + implements SumData { + + private final List points; + + public PrometheusCounter(CounterSnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.DOUBLE_SUM); + this.points = + snapshot.getDataPoints().stream() + .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) + .collect(Collectors.toList()); + } + + @Override + public boolean isMonotonic() { + return true; + } + + @Override + public AggregationTemporality getAggregationTemporality() { + return AggregationTemporality.CUMULATIVE; + } + + @Override + public Collection getPoints() { + return points; + } + + private DoublePointData toOtelDataPoint( + CounterSnapshot.CounterDataPointSnapshot dataPoint, long currentTimeMillis) { + return new DoublePointDataImpl( + dataPoint.getValue(), + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes(dataPoint.getLabels()), + convertExemplar(dataPoint.getExemplar())); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusData.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusData.java index 7d67a22fa..7e09a483e 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusData.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusData.java @@ -11,7 +11,6 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.PointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.internal.data.ImmutableDoubleExemplarData; - import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; @@ -20,78 +19,81 @@ abstract class PrometheusData implements Data { - private final MetricDataType type; + private final MetricDataType type; - public PrometheusData(MetricDataType type) { - this.type = type; - } + public PrometheusData(MetricDataType type) { + this.type = type; + } - public MetricDataType getType() { - return type; - } - - protected Attributes labelsToAttributes(Labels labels) { - if (labels.isEmpty()) { - return Attributes.empty(); - } else { - AttributesBuilder builder = Attributes.builder(); - for (int i=0; i 
convertExemplar(Exemplar exemplar) { - if (exemplar == null) { - return Collections.emptyList(); - } - return convertExemplars(Exemplars.of(exemplar)); + protected Attributes labelsToAttributes(Labels labels) { + if (labels.isEmpty()) { + return Attributes.empty(); + } else { + AttributesBuilder builder = Attributes.builder(); + for (int i = 0; i < labels.size(); i++) { + builder.put(labels.getName(i), labels.getValue(i)); + } + return builder.build(); } + } - protected List convertExemplars(Exemplars exemplars) { - return StreamSupport.stream(exemplars.spliterator(), false) - .map(this::toDoubleExemplarData) - .collect(Collectors.toList()); + protected List convertExemplar(Exemplar exemplar) { + if (exemplar == null) { + return Collections.emptyList(); } + return convertExemplars(Exemplars.of(exemplar)); + } - protected DoubleExemplarData toDoubleExemplarData(Exemplar exemplar) { - if (exemplar == null) { - return null; - } - - AttributesBuilder filteredAttributesBuilder = Attributes.builder(); - String traceId = null; - String spanId = null; - for (Label label : exemplar.getLabels()) { - if (label.getName().equals(Exemplar.TRACE_ID)) { - traceId = label.getValue(); - } - else if (label.getName().equals(Exemplar.SPAN_ID)) { - spanId = label.getValue(); - } else { - filteredAttributesBuilder.put(label.getName(), label.getValue()); - } - } - Attributes filteredAttributes = filteredAttributesBuilder.build(); + protected List convertExemplars(Exemplars exemplars) { + return StreamSupport.stream(exemplars.spliterator(), false) + .map(this::toDoubleExemplarData) + .collect(Collectors.toList()); + } - SpanContext spanContext = (traceId != null && spanId != null) - ? SpanContext.create(traceId, spanId, TraceFlags.getSampled(), TraceState.getDefault()) - : SpanContext.getInvalid(); - - return ImmutableDoubleExemplarData.create( - filteredAttributes, - TimeUnit.MILLISECONDS.toNanos(exemplar.getTimestampMillis()), - spanContext, - exemplar.getValue()); + protected DoubleExemplarData toDoubleExemplarData(Exemplar exemplar) { + if (exemplar == null) { + return null; } - protected long getStartEpochNanos(DataPointSnapshot dataPoint) { - return dataPoint.hasCreatedTimestamp() ? TimeUnit.MILLISECONDS.toNanos(dataPoint.getCreatedTimestampMillis()) : 0L; + AttributesBuilder filteredAttributesBuilder = Attributes.builder(); + String traceId = null; + String spanId = null; + for (Label label : exemplar.getLabels()) { + if (label.getName().equals(Exemplar.TRACE_ID)) { + traceId = label.getValue(); + } else if (label.getName().equals(Exemplar.SPAN_ID)) { + spanId = label.getValue(); + } else { + filteredAttributesBuilder.put(label.getName(), label.getValue()); + } } + Attributes filteredAttributes = filteredAttributesBuilder.build(); - protected long getEpochNanos(DataPointSnapshot dataPoint, long currentTimeMillis) { - return dataPoint.hasScrapeTimestamp() ? TimeUnit.MILLISECONDS.toNanos(dataPoint.getScrapeTimestampMillis()) : TimeUnit.MILLISECONDS.toNanos(currentTimeMillis); - } + SpanContext spanContext = + (traceId != null && spanId != null) + ? SpanContext.create(traceId, spanId, TraceFlags.getSampled(), TraceState.getDefault()) + : SpanContext.getInvalid(); + + return ImmutableDoubleExemplarData.create( + filteredAttributes, + TimeUnit.MILLISECONDS.toNanos(exemplar.getTimestampMillis()), + spanContext, + exemplar.getValue()); + } + + protected long getStartEpochNanos(DataPointSnapshot dataPoint) { + return dataPoint.hasCreatedTimestamp() + ? 
TimeUnit.MILLISECONDS.toNanos(dataPoint.getCreatedTimestampMillis()) + : 0L; + } + protected long getEpochNanos(DataPointSnapshot dataPoint, long currentTimeMillis) { + return dataPoint.hasScrapeTimestamp() + ? TimeUnit.MILLISECONDS.toNanos(dataPoint.getScrapeTimestampMillis()) + : TimeUnit.MILLISECONDS.toNanos(currentTimeMillis); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusGauge.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusGauge.java index 73e176bb6..872d569e6 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusGauge.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusGauge.java @@ -4,34 +4,35 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoublePointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.GaugeData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; - import java.util.Collection; import java.util.List; import java.util.stream.Collectors; -class PrometheusGauge extends PrometheusData implements GaugeData { +class PrometheusGauge extends PrometheusData + implements GaugeData { - private final List points; + private final List points; - public PrometheusGauge(GaugeSnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.DOUBLE_GAUGE); - this.points = snapshot.getDataPoints().stream() - .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) - .collect(Collectors.toList()); - } + public PrometheusGauge(GaugeSnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.DOUBLE_GAUGE); + this.points = + snapshot.getDataPoints().stream() + .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) + .collect(Collectors.toList()); + } - @Override - public Collection getPoints() { - return points; - } + @Override + public Collection getPoints() { + return points; + } - private DoublePointData toOtelDataPoint(GaugeSnapshot.GaugeDataPointSnapshot dataPoint, long currentTimeMillis) { - return new DoublePointDataImpl( - dataPoint.getValue(), - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels()), - convertExemplar(dataPoint.getExemplar()) - ); - } + private DoublePointData toOtelDataPoint( + GaugeSnapshot.GaugeDataPointSnapshot dataPoint, long currentTimeMillis) { + return new DoublePointDataImpl( + dataPoint.getValue(), + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes(dataPoint.getLabels()), + convertExemplar(dataPoint.getExemplar())); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusInfo.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusInfo.java index 6159e77fb..eb5ae48f6 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusInfo.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusInfo.java @@ -5,45 +5,46 @@ import 
io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoublePointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.SumData; - import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; -public class PrometheusInfo extends PrometheusData implements SumData { - - private final List points; - - public PrometheusInfo(InfoSnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.DOUBLE_SUM); - this.points = snapshot.getDataPoints().stream() - .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) - .collect(Collectors.toList()); - } - - @Override - public boolean isMonotonic() { - return false; - } - - @Override - public AggregationTemporality getAggregationTemporality() { - return AggregationTemporality.CUMULATIVE; - } - - @Override - public Collection getPoints() { - return points; - } - - private DoublePointData toOtelDataPoint(InfoSnapshot.InfoDataPointSnapshot dataPoint, long currentTimeMillis) { - return new DoublePointDataImpl( - 1.0, - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels()), - Collections.emptyList() - ); - } +public class PrometheusInfo extends PrometheusData + implements SumData { + + private final List points; + + public PrometheusInfo(InfoSnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.DOUBLE_SUM); + this.points = + snapshot.getDataPoints().stream() + .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) + .collect(Collectors.toList()); + } + + @Override + public boolean isMonotonic() { + return false; + } + + @Override + public AggregationTemporality getAggregationTemporality() { + return AggregationTemporality.CUMULATIVE; + } + + @Override + public Collection getPoints() { + return points; + } + + private DoublePointData toOtelDataPoint( + InfoSnapshot.InfoDataPointSnapshot dataPoint, long currentTimeMillis) { + return new DoublePointDataImpl( + 1.0, + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes(dataPoint.getLabels()), + Collections.emptyList()); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusMetricData.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusMetricData.java index 348ff535f..5d86000b8 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusMetricData.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusMetricData.java @@ -11,124 +11,154 @@ class PrometheusMetricData> implements MetricData { - private final Resource resource; - private final InstrumentationScopeInfo instrumentationScopeInfo; - private final String name; - private final String description; - private final String unit; - T data; + private final Resource resource; + private final InstrumentationScopeInfo instrumentationScopeInfo; + private final String name; + private final String description; + private final String unit; + T data; - PrometheusMetricData(MetricMetadata metricMetadata, T data, InstrumentationScopeInfo instrumentationScopeInfo, Resource resource) { - this.instrumentationScopeInfo = 
instrumentationScopeInfo; - this.resource = resource; - this.name = getNameWithoutUnit(metricMetadata); - this.description = metricMetadata.getHelp(); - this.unit = convertUnit(metricMetadata.getUnit()); - this.data = data; - } + PrometheusMetricData( + MetricMetadata metricMetadata, + T data, + InstrumentationScopeInfo instrumentationScopeInfo, + Resource resource) { + this.instrumentationScopeInfo = instrumentationScopeInfo; + this.resource = resource; + this.name = getNameWithoutUnit(metricMetadata); + this.description = metricMetadata.getHelp(); + this.unit = convertUnit(metricMetadata.getUnit()); + this.data = data; + } - // In OpenTelemetry the unit should not be part of the metric name. - private String getNameWithoutUnit(MetricMetadata metricMetadata) { - String name = metricMetadata.getName(); - if (metricMetadata.getUnit() != null) { - String unit = metricMetadata.getUnit().toString(); - if (name.endsWith(unit)) { - name = name.substring(0, name.length() - unit.length()); - } - while (name.endsWith("_")) { - name = name.substring(0, name.length()-1); - } - } - return name; + // In OpenTelemetry the unit should not be part of the metric name. + private String getNameWithoutUnit(MetricMetadata metricMetadata) { + String name = metricMetadata.getName(); + if (metricMetadata.getUnit() != null) { + String unit = metricMetadata.getUnit().toString(); + if (name.endsWith(unit)) { + name = name.substring(0, name.length() - unit.length()); + } + while (name.endsWith("_")) { + name = name.substring(0, name.length() - 1); + } } + return name; + } - // See https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/6cf4dec6cb42d87d8840e9f67d4acf66d4eb8fda/pkg/translator/prometheus/normalize_name.go#L19 - private String convertUnit(Unit unit) { - if (unit == null) { - return null; - } - switch (unit.toString()) { - // Time - case "days": return "d"; - case "hours": return "h"; - case "minutes": return "min"; - case "seconds": return "s"; - case "milliseconds": return "ms"; - case "microseconds": return "us"; - case "nanoseconds": return "ns"; - // Bytes - case "bytes": return "By"; - case "kibibytes": return "KiBy"; - case "mebibytes": return "MiBy"; - case "gibibytes": return "GiBy"; - case "tibibytes": return "TiBy"; - case "kilobytes": return "KBy"; - case "megabytes": return "MBy"; - case "gigabytes": return "GBy"; - case "terabytes": return "TBy"; - // SI - case "meters": return "m"; - case "volts": return "V"; - case "amperes": return "A"; - case "joules": return "J"; - case "watts": return "W"; - case "grams": return "g"; - // Misc - case "celsius": return "Cel"; - case "hertz": return "Hz"; - case "percent": return "%"; - // default - default: - return unit.toString(); - } + // See + // https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/6cf4dec6cb42d87d8840e9f67d4acf66d4eb8fda/pkg/translator/prometheus/normalize_name.go#L19 + private String convertUnit(Unit unit) { + if (unit == null) { + return null; } - - @Override - public Resource getResource() { - return resource; + switch (unit.toString()) { + // Time + case "days": + return "d"; + case "hours": + return "h"; + case "minutes": + return "min"; + case "seconds": + return "s"; + case "milliseconds": + return "ms"; + case "microseconds": + return "us"; + case "nanoseconds": + return "ns"; + // Bytes + case "bytes": + return "By"; + case "kibibytes": + return "KiBy"; + case "mebibytes": + return "MiBy"; + case "gibibytes": + return "GiBy"; + case "tibibytes": + return "TiBy"; + case "kilobytes": + return 
"KBy"; + case "megabytes": + return "MBy"; + case "gigabytes": + return "GBy"; + case "terabytes": + return "TBy"; + // SI + case "meters": + return "m"; + case "volts": + return "V"; + case "amperes": + return "A"; + case "joules": + return "J"; + case "watts": + return "W"; + case "grams": + return "g"; + // Misc + case "celsius": + return "Cel"; + case "hertz": + return "Hz"; + case "percent": + return "%"; + // default + default: + return unit.toString(); } + } + + @Override + public Resource getResource() { + return resource; + } - @Override - public InstrumentationScopeInfo getInstrumentationScopeInfo() { + @Override + public InstrumentationScopeInfo getInstrumentationScopeInfo() { return instrumentationScopeInfo; - } + } - @Override - public String getName() { - return name; - } + @Override + public String getName() { + return name; + } - @Override - public String getDescription() { - return description; - } + @Override + public String getDescription() { + return description; + } - @Override - public String getUnit() { - return unit; - } + @Override + public String getUnit() { + return unit; + } - @Override - public MetricDataType getType() { - return data.getType(); - } + @Override + public MetricDataType getType() { + return data.getType(); + } - @Override - public T getData() { - return data; - } + @Override + public T getData() { + return data; + } - @Override - public SumData getDoubleSumData() { - if (data instanceof PrometheusCounter) { - return (PrometheusCounter) data; - } - if (data instanceof PrometheusStateSet) { - return (PrometheusStateSet) data; - } - if (data instanceof PrometheusInfo) { - return (PrometheusInfo) data; - } - return MetricData.super.getDoubleSumData(); + @Override + public SumData getDoubleSumData() { + if (data instanceof PrometheusCounter) { + return (PrometheusCounter) data; + } + if (data instanceof PrometheusStateSet) { + return (PrometheusStateSet) data; + } + if (data instanceof PrometheusInfo) { + return (PrometheusInfo) data; } + return MetricData.super.getDoubleSumData(); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusNativeHistogram.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusNativeHistogram.java index 83ce1f14a..eb2a6555e 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusNativeHistogram.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusNativeHistogram.java @@ -7,81 +7,82 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.ExponentialHistogramData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.ExponentialHistogramPointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; - import java.util.Collection; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; -class PrometheusNativeHistogram extends PrometheusData implements ExponentialHistogramData { +class PrometheusNativeHistogram extends PrometheusData + implements ExponentialHistogramData { - private final List points; + private final List points; - PrometheusNativeHistogram(HistogramSnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.EXPONENTIAL_HISTOGRAM); - this.points = 
snapshot.getDataPoints().stream() - .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } + PrometheusNativeHistogram(HistogramSnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.EXPONENTIAL_HISTOGRAM); + this.points = + snapshot.getDataPoints().stream() + .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } - @Override - public AggregationTemporality getAggregationTemporality() { - return AggregationTemporality.CUMULATIVE; - } + @Override + public AggregationTemporality getAggregationTemporality() { + return AggregationTemporality.CUMULATIVE; + } - @Override - public Collection getPoints() { - return points; - } + @Override + public Collection getPoints() { + return points; + } - private ExponentialHistogramPointData toOtelDataPoint(HistogramSnapshot.HistogramDataPointSnapshot dataPoint, long currentTimeMillis) { - if (!dataPoint.hasNativeHistogramData()) { - return null; - } - return new ExponentialHistogramPointDataImpl( - dataPoint.getNativeSchema(), - dataPoint.hasSum() ? dataPoint.getSum() : Double.NaN, - dataPoint.hasCount() ? dataPoint.getCount() : calculateCount(dataPoint), - dataPoint.getNativeZeroCount(), - Double.NaN, - Double.NaN, - convertBuckets(dataPoint.getNativeSchema(), dataPoint.getNativeBucketsForPositiveValues()), - convertBuckets(dataPoint.getNativeSchema(), dataPoint.getNativeBucketsForNegativeValues()), - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels()), - convertExemplars(dataPoint.getExemplars()) - ); + private ExponentialHistogramPointData toOtelDataPoint( + HistogramSnapshot.HistogramDataPointSnapshot dataPoint, long currentTimeMillis) { + if (!dataPoint.hasNativeHistogramData()) { + return null; } + return new ExponentialHistogramPointDataImpl( + dataPoint.getNativeSchema(), + dataPoint.hasSum() ? dataPoint.getSum() : Double.NaN, + dataPoint.hasCount() ? 
dataPoint.getCount() : calculateCount(dataPoint), + dataPoint.getNativeZeroCount(), + Double.NaN, + Double.NaN, + convertBuckets(dataPoint.getNativeSchema(), dataPoint.getNativeBucketsForPositiveValues()), + convertBuckets(dataPoint.getNativeSchema(), dataPoint.getNativeBucketsForNegativeValues()), + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes(dataPoint.getLabels()), + convertExemplars(dataPoint.getExemplars())); + } - private ExponentialHistogramBuckets convertBuckets(int scale, NativeHistogramBuckets buckets) { - if (buckets.size() == 0) { - return new ExponentialHistogramBucketsImpl(scale, 0); - } - int offset = buckets.getBucketIndex(0); - ExponentialHistogramBucketsImpl result = new ExponentialHistogramBucketsImpl(scale, offset-1); - int currentBucket = 0; - for (int i=offset; i<=buckets.getBucketIndex(buckets.size()-1); i++) { - if (buckets.getBucketIndex(currentBucket) == i) { - result.addCount(buckets.getCount(currentBucket)); - currentBucket++; - } else { - result.addCount(0); - } - } - return result; + private ExponentialHistogramBuckets convertBuckets(int scale, NativeHistogramBuckets buckets) { + if (buckets.size() == 0) { + return new ExponentialHistogramBucketsImpl(scale, 0); + } + int offset = buckets.getBucketIndex(0); + ExponentialHistogramBucketsImpl result = new ExponentialHistogramBucketsImpl(scale, offset - 1); + int currentBucket = 0; + for (int i = offset; i <= buckets.getBucketIndex(buckets.size() - 1); i++) { + if (buckets.getBucketIndex(currentBucket) == i) { + result.addCount(buckets.getCount(currentBucket)); + currentBucket++; + } else { + result.addCount(0); + } } + return result; + } - private long calculateCount(HistogramSnapshot.HistogramDataPointSnapshot dataPoint) { - long result = 0L; - for (int i=0; i implements SumData { - - private final List points; - public PrometheusStateSet(StateSetSnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.DOUBLE_SUM); - this.points = new ArrayList<>(); - for (StateSetSnapshot.StateSetDataPointSnapshot dataPoint : snapshot.getDataPoints()) { - for (int i=0; i + implements SumData { - @Override - public AggregationTemporality getAggregationTemporality() { - return AggregationTemporality.CUMULATIVE; - } - - @Override - public Collection getPoints() { - return points; - } + private final List points; - private DoublePointData toOtelDataPoint(StateSetSnapshot snapshot, StateSetSnapshot.StateSetDataPointSnapshot dataPoint, int i, long currentTimeMillis) { - return new DoublePointDataImpl( - dataPoint.isTrue(i) ? 
1.0 : 0.0, - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels().merge(Labels.of(snapshot.getMetadata().getName(), dataPoint.getName(i)))), - Collections.emptyList() - ); + public PrometheusStateSet(StateSetSnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.DOUBLE_SUM); + this.points = new ArrayList<>(); + for (StateSetSnapshot.StateSetDataPointSnapshot dataPoint : snapshot.getDataPoints()) { + for (int i = 0; i < dataPoint.size(); i++) { + this.points.add(toOtelDataPoint(snapshot, dataPoint, i, currentTimeMillis)); + } } + } + + @Override + public boolean isMonotonic() { + return false; + } + + @Override + public AggregationTemporality getAggregationTemporality() { + return AggregationTemporality.CUMULATIVE; + } + + @Override + public Collection getPoints() { + return points; + } + + private DoublePointData toOtelDataPoint( + StateSetSnapshot snapshot, + StateSetSnapshot.StateSetDataPointSnapshot dataPoint, + int i, + long currentTimeMillis) { + return new DoublePointDataImpl( + dataPoint.isTrue(i) ? 1.0 : 0.0, + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes( + dataPoint + .getLabels() + .merge(Labels.of(snapshot.getMetadata().getName(), dataPoint.getName(i)))), + Collections.emptyList()); + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusSummary.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusSummary.java index 11e780018..709b819af 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusSummary.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusSummary.java @@ -5,39 +5,40 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.SummaryData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.SummaryPointData; - import java.util.Collection; import java.util.List; import java.util.stream.Collectors; class PrometheusSummary extends PrometheusData implements SummaryData { - private final List points; + private final List points; - PrometheusSummary(SummarySnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.SUMMARY); - this.points = snapshot.getDataPoints().stream() - .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) - .collect(Collectors.toList()); - } + PrometheusSummary(SummarySnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.SUMMARY); + this.points = + snapshot.getDataPoints().stream() + .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) + .collect(Collectors.toList()); + } - @Override - public Collection getPoints() { - return points; - } + @Override + public Collection getPoints() { + return points; + } - private SummaryPointData toOtelDataPoint(SummarySnapshot.SummaryDataPointSnapshot dataPoint, long currentTimeMillis) { - SummaryPointDataImpl result = new SummaryPointDataImpl( - dataPoint.hasSum() ? dataPoint.getSum() : Double.NaN, - dataPoint.hasCount() ? 
dataPoint.getCount() : 0, - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels()), - convertExemplars(dataPoint.getExemplars()) - ); - for (Quantile quantile : dataPoint.getQuantiles()) { - result.addValue(quantile.getQuantile(), quantile.getValue()); - } - return result; + private SummaryPointData toOtelDataPoint( + SummarySnapshot.SummaryDataPointSnapshot dataPoint, long currentTimeMillis) { + SummaryPointDataImpl result = + new SummaryPointDataImpl( + dataPoint.hasSum() ? dataPoint.getSum() : Double.NaN, + dataPoint.hasCount() ? dataPoint.getCount() : 0, + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes(dataPoint.getLabels()), + convertExemplars(dataPoint.getExemplars())); + for (Quantile quantile : dataPoint.getQuantiles()) { + result.addValue(quantile.getQuantile(), quantile.getValue()); } + return result; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusUnknown.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusUnknown.java index 9bb2f8e76..39c8f508f 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusUnknown.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/PrometheusUnknown.java @@ -4,34 +4,35 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoublePointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.GaugeData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.MetricDataType; - import java.util.Collection; import java.util.List; import java.util.stream.Collectors; -class PrometheusUnknown extends PrometheusData implements GaugeData { +class PrometheusUnknown extends PrometheusData + implements GaugeData { - private final List points; + private final List points; - public PrometheusUnknown(UnknownSnapshot snapshot, long currentTimeMillis) { - super(MetricDataType.DOUBLE_GAUGE); - this.points = snapshot.getDataPoints().stream() - .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) - .collect(Collectors.toList()); - } + public PrometheusUnknown(UnknownSnapshot snapshot, long currentTimeMillis) { + super(MetricDataType.DOUBLE_GAUGE); + this.points = + snapshot.getDataPoints().stream() + .map(dataPoint -> toOtelDataPoint(dataPoint, currentTimeMillis)) + .collect(Collectors.toList()); + } - @Override - public Collection getPoints() { - return points; - } + @Override + public Collection getPoints() { + return points; + } - private DoublePointData toOtelDataPoint(UnknownSnapshot.UnknownDataPointSnapshot dataPoint, long currentTimeMillis) { - return new DoublePointDataImpl( - dataPoint.getValue(), - getStartEpochNanos(dataPoint), - getEpochNanos(dataPoint, currentTimeMillis), - labelsToAttributes(dataPoint.getLabels()), - convertExemplar(dataPoint.getExemplar()) - ); - } + private DoublePointData toOtelDataPoint( + UnknownSnapshot.UnknownDataPointSnapshot dataPoint, long currentTimeMillis) { + return new DoublePointDataImpl( + dataPoint.getValue(), + getStartEpochNanos(dataPoint), + getEpochNanos(dataPoint, currentTimeMillis), + labelsToAttributes(dataPoint.getLabels()), + convertExemplar(dataPoint.getExemplar())); + } } diff --git 
a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/SummaryPointDataImpl.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/SummaryPointDataImpl.java index 83155f05a..eab7da588 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/SummaryPointDataImpl.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/SummaryPointDataImpl.java @@ -4,38 +4,43 @@ import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.DoubleExemplarData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.SummaryPointData; import io.prometheus.metrics.shaded.io_opentelemetry_1_38_0.sdk.metrics.data.ValueAtQuantile; - import java.util.ArrayList; import java.util.List; public class SummaryPointDataImpl extends PointDataImpl implements SummaryPointData { - private final double sum; - private final long count; - private final List values; - - public SummaryPointDataImpl(double sum, long count, long startEpochNanos, long epochNanos, Attributes attributes, List exemplars) { - super(startEpochNanos, epochNanos, attributes, exemplars); - this.sum = sum; - this.count = count; - this.values = new ArrayList<>(); - } - - void addValue(double quantile, double value) { - values.add(new ValueAtQuantileImpl(quantile, value)); - } - - @Override - public long getCount() { - return count; - } - - @Override - public double getSum() { - return sum; - } - - @Override - public List getValues() { - return values; - } + private final double sum; + private final long count; + private final List values; + + public SummaryPointDataImpl( + double sum, + long count, + long startEpochNanos, + long epochNanos, + Attributes attributes, + List exemplars) { + super(startEpochNanos, epochNanos, attributes, exemplars); + this.sum = sum; + this.count = count; + this.values = new ArrayList<>(); + } + + void addValue(double quantile, double value) { + values.add(new ValueAtQuantileImpl(quantile, value)); + } + + @Override + public long getCount() { + return count; + } + + @Override + public double getSum() { + return sum; + } + + @Override + public List getValues() { + return values; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ValueAtQuantileImpl.java b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ValueAtQuantileImpl.java index 8117c5a8c..13b39d3aa 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ValueAtQuantileImpl.java +++ b/prometheus-metrics-exporter-opentelemetry/src/main/java/io/prometheus/metrics/exporter/opentelemetry/otelmodel/ValueAtQuantileImpl.java @@ -4,21 +4,21 @@ public class ValueAtQuantileImpl implements ValueAtQuantile { - private final double quantile; - private final double value; + private final double quantile; + private final double value; - public ValueAtQuantileImpl(double quantile, double value) { - this.quantile = quantile; - this.value = value; - } + public ValueAtQuantileImpl(double quantile, double value) { + this.quantile = quantile; + this.value = value; + } - @Override - public double getQuantile() { - return quantile; - } + @Override + public double getQuantile() { + return quantile; + } - @Override 
- public double getValue() { - return value; - } + @Override + public double getValue() { + return value; + } } diff --git a/prometheus-metrics-exporter-opentelemetry/src/test/java/io/prometheus/metrics/exporter/opentelemetry/ExemplarTest.java b/prometheus-metrics-exporter-opentelemetry/src/test/java/io/prometheus/metrics/exporter/opentelemetry/ExemplarTest.java index 2482d6857..5af5193f6 100644 --- a/prometheus-metrics-exporter-opentelemetry/src/test/java/io/prometheus/metrics/exporter/opentelemetry/ExemplarTest.java +++ b/prometheus-metrics-exporter-opentelemetry/src/test/java/io/prometheus/metrics/exporter/opentelemetry/ExemplarTest.java @@ -1,5 +1,15 @@ package io.prometheus.metrics.exporter.opentelemetry; +import static com.github.tomakehurst.wiremock.client.WireMock.containing; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import static com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.client.WireMock.verify; +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.awaitility.Awaitility.await; + import com.github.tomakehurst.wiremock.http.Request; import com.github.tomakehurst.wiremock.junit.WireMockRule; import com.github.tomakehurst.wiremock.matching.MatchResult; @@ -23,123 +33,113 @@ import org.junit.Rule; import org.junit.Test; -import static com.github.tomakehurst.wiremock.client.WireMock.containing; -import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; -import static com.github.tomakehurst.wiremock.client.WireMock.ok; -import static com.github.tomakehurst.wiremock.client.WireMock.post; -import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static com.github.tomakehurst.wiremock.client.WireMock.verify; -import static java.util.concurrent.TimeUnit.SECONDS; -import static org.awaitility.Awaitility.await; - public class ExemplarTest { - private static final String ENDPOINT_PATH = "/v1/metrics"; - private static final int TIMEOUT = 3; - private static final String INSTRUMENTATION_SCOPE_NAME = "testInstrumentationScope"; - private static final String SPAN_NAME = "test-span"; - public static final String TEST_COUNTER_NAME = "test_counter"; - private Counter testCounter; - private OpenTelemetryExporter openTelemetryExporter; - @Rule - public WireMockRule wireMockRule = new WireMockRule(4317); - - @Before - public void setUp() { - openTelemetryExporter = OpenTelemetryExporter.builder() - .endpoint("http://localhost:4317") - .protocol("http/protobuf") - .intervalSeconds(1) - .buildAndStart(); - - testCounter = Counter.builder() - .name(TEST_COUNTER_NAME) - .withExemplars() - .register(); - - wireMockRule.stubFor(post(ENDPOINT_PATH) - .withHeader("Content-Type", containing("application/x-protobuf")) - .willReturn(ok() - .withHeader("Content-Type", "application/json") - .withBody("{\"partialSuccess\":{}}"))); + private static final String ENDPOINT_PATH = "/v1/metrics"; + private static final int TIMEOUT = 3; + private static final String INSTRUMENTATION_SCOPE_NAME = "testInstrumentationScope"; + private static final String SPAN_NAME = "test-span"; + public static final String TEST_COUNTER_NAME = "test_counter"; + private Counter testCounter; + private 
OpenTelemetryExporter openTelemetryExporter; + @Rule public WireMockRule wireMockRule = new WireMockRule(4317); + + @Before + public void setUp() { + openTelemetryExporter = + OpenTelemetryExporter.builder() + .endpoint("http://localhost:4317") + .protocol("http/protobuf") + .intervalSeconds(1) + .buildAndStart(); + + testCounter = Counter.builder().name(TEST_COUNTER_NAME).withExemplars().register(); + + wireMockRule.stubFor( + post(ENDPOINT_PATH) + .withHeader("Content-Type", containing("application/x-protobuf")) + .willReturn( + ok().withHeader("Content-Type", "application/json") + .withBody("{\"partialSuccess\":{}}"))); + } + + @After + public void tearDown() { + PrometheusRegistry.defaultRegistry.unregister(testCounter); + openTelemetryExporter.close(); + } + + @Test + public void sampledExemplarIsForwarded() { + try (SdkTracerProvider sdkTracerProvider = + SdkTracerProvider.builder().setSampler(Sampler.alwaysOn()).build()) { + + Tracer test = sdkTracerProvider.get(INSTRUMENTATION_SCOPE_NAME); + Span span = test.spanBuilder(SPAN_NAME).startSpan(); + try (Scope scope = span.makeCurrent()) { + testCounter.inc(2); + } } - @After - public void tearDown() { - PrometheusRegistry.defaultRegistry.unregister(testCounter); - openTelemetryExporter.close(); + await() + .atMost(TIMEOUT, SECONDS) + .ignoreException(com.github.tomakehurst.wiremock.client.VerificationException.class) + .until( + () -> { + verify( + postRequestedFor(urlEqualTo(ENDPOINT_PATH)) + .withHeader("Content-Type", equalTo("application/x-protobuf")) + .andMatching(getExemplarCountMatcher(1))); + return true; + }); + } + + @Test(expected = ConditionTimeoutException.class) + public void notSampledExemplarIsNotForwarded() { + try (SdkTracerProvider sdkTracerProvider = + SdkTracerProvider.builder().setSampler(Sampler.alwaysOff()).build()) { + + Tracer test = sdkTracerProvider.get(INSTRUMENTATION_SCOPE_NAME); + Span span = test.spanBuilder(SPAN_NAME).startSpan(); + try (Scope scope = span.makeCurrent()) { + testCounter.inc(2); + } } - @Test - public void sampledExemplarIsForwarded() { - try (SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() - .setSampler(Sampler.alwaysOn()) - .build()) { - - Tracer test = sdkTracerProvider.get(INSTRUMENTATION_SCOPE_NAME); - Span span = test.spanBuilder(SPAN_NAME) - .startSpan(); - try (Scope scope = span.makeCurrent()) { - testCounter.inc(2); + await() + .atMost(TIMEOUT, SECONDS) + .ignoreException(com.github.tomakehurst.wiremock.client.VerificationException.class) + .until( + () -> { + verify( + postRequestedFor(urlEqualTo(ENDPOINT_PATH)) + .withHeader("Content-Type", equalTo("application/x-protobuf")) + .andMatching(getExemplarCountMatcher(1))); + return true; + }); + } + + private static ValueMatcher getExemplarCountMatcher(int expectedCount) { + return request -> { + try { + ExportMetricsServiceRequest exportMetricsServiceRequest = + ExportMetricsServiceRequest.parseFrom(request.getBody()); + for (ResourceMetrics resourceMetrics : + exportMetricsServiceRequest.getResourceMetricsList()) { + for (InstrumentationLibraryMetrics instrumentationLibraryMetrics : + resourceMetrics.getInstrumentationLibraryMetricsList()) { + for (Metric metric : instrumentationLibraryMetrics.getMetricsList()) { + for (NumberDataPoint numberDataPoint : metric.getSum().getDataPointsList()) { + if (numberDataPoint.getExemplarsCount() == expectedCount) { + return MatchResult.exactMatch(); } + } } - - - await().atMost(TIMEOUT, SECONDS) - 
.ignoreException(com.github.tomakehurst.wiremock.client.VerificationException.class) - .until(() -> { - verify(postRequestedFor(urlEqualTo(ENDPOINT_PATH)) - .withHeader("Content-Type", equalTo("application/x-protobuf")) - .andMatching(getExemplarCountMatcher(1))); - return true; - }); - - } - - @Test(expected = ConditionTimeoutException.class) - public void notSampledExemplarIsNotForwarded() { - try (SdkTracerProvider sdkTracerProvider = SdkTracerProvider.builder() - .setSampler(Sampler.alwaysOff()) - .build()) { - - Tracer test = sdkTracerProvider.get(INSTRUMENTATION_SCOPE_NAME); - Span span = test.spanBuilder(SPAN_NAME) - .startSpan(); - try (Scope scope = span.makeCurrent()) { - testCounter.inc(2); - } + } } - - await().atMost(TIMEOUT, SECONDS) - .ignoreException(com.github.tomakehurst.wiremock.client.VerificationException.class) - .until(() -> { - verify(postRequestedFor(urlEqualTo(ENDPOINT_PATH)) - .withHeader("Content-Type", equalTo("application/x-protobuf")) - .andMatching(getExemplarCountMatcher(1))); - return true; - }); - - } - - private static ValueMatcher getExemplarCountMatcher(int expectedCount) { - return request -> { - try { - ExportMetricsServiceRequest exportMetricsServiceRequest = ExportMetricsServiceRequest.parseFrom(request.getBody()); - for (ResourceMetrics resourceMetrics : exportMetricsServiceRequest.getResourceMetricsList()) { - for (InstrumentationLibraryMetrics instrumentationLibraryMetrics : resourceMetrics.getInstrumentationLibraryMetricsList()) { - for (Metric metric : instrumentationLibraryMetrics.getMetricsList()) { - for (NumberDataPoint numberDataPoint : metric.getSum().getDataPointsList()) { - if (numberDataPoint.getExemplarsCount() == expectedCount) { - return MatchResult.exactMatch(); - } - } - } - } - } - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } - return MatchResult.noMatch(); - }; - } + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); + } + return MatchResult.noMatch(); + }; + } } diff --git a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultHttpConnectionFactory.java b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultHttpConnectionFactory.java index f3223cb53..60c110149 100644 --- a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultHttpConnectionFactory.java +++ b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultHttpConnectionFactory.java @@ -6,13 +6,15 @@ /** * This can be used for creating {@link Scheme#HTTP} and {@link Scheme#HTTPS} connections. - *

- * However, if you want to use it with {@link Scheme#HTTPS} you must make sure that the keychain for verifying the server certificate is set up correctly. - * For an example of how to skip certificate verification see {@code PushGatewayTestApp} in {@code integration-tests/it-pushgateway/}. + * + *

However, if you want to use it with {@link Scheme#HTTPS} you must make sure that the keychain + * for verifying the server certificate is set up correctly. For an example of how to skip + * certificate verification see {@code PushGatewayTestApp} in {@code + * integration-tests/it-pushgateway/}. */ public class DefaultHttpConnectionFactory implements HttpConnectionFactory { - @Override - public HttpURLConnection create(URL url) throws IOException { - return (HttpURLConnection) url.openConnection(); - } + @Override + public HttpURLConnection create(URL url) throws IOException { + return (HttpURLConnection) url.openConnection(); + } } diff --git a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultJobLabelDetector.java b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultJobLabelDetector.java index 8d9afb71e..59a11a473 100644 --- a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultJobLabelDetector.java +++ b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/DefaultJobLabelDetector.java @@ -7,54 +7,54 @@ /** * The default {@code job} label is the name of the JAR file being executed. - *

- * This is copy-and-paste from {@code ResourceAttributesFromJarFileName} - * in the {@code prometheus-metrics-exporter-opentelemetry} module. + * + *
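As a concrete illustration of the detection logic in this class, a hypothetical same-package test (not part of this diff; the class name is made up) could look like the following. It relies only on the package-private getDefaultJobLabel() shown here and on the rule that the job label is the JAR file name without its extension, falling back to "unknown_job" when no existing JAR path can be found in sun.java.command.

    // Hypothetical sketch, not part of this diff.
    package io.prometheus.metrics.exporter.pushgateway;

    import static org.junit.Assert.assertEquals;

    import java.nio.file.Files;
    import java.nio.file.Path;
    import org.junit.Test;

    public class DefaultJobLabelDetectorTest {

      @Test
      public void jobLabelIsJarNameWithoutExtension() throws Exception {
        // A real file on disk, so the path lookup in this class accepts it.
        Path jar = Files.createTempFile("my-batch-job", ".jar");
        System.setProperty("sun.java.command", jar + " --some-flag");
        assertEquals(
            jar.getFileName().toString().replace(".jar", ""),
            DefaultJobLabelDetector.getDefaultJobLabel());
      }

      @Test
      public void unknownJobWhenNoJarPathExists() {
        System.setProperty("sun.java.command", "does-not-exist.jar");
        assertEquals("unknown_job", DefaultJobLabelDetector.getDefaultJobLabel());
      }
    }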

This is copy-and-paste from {@code ResourceAttributesFromJarFileName} in the {@code + * prometheus-metrics-exporter-opentelemetry} module. */ class DefaultJobLabelDetector { - static String getDefaultJobLabel() { - Path jarPath = getJarPathFromSunCommandLine(); - if (jarPath == null) { - return "unknown_job"; - } - return getServiceName(jarPath); + static String getDefaultJobLabel() { + Path jarPath = getJarPathFromSunCommandLine(); + if (jarPath == null) { + return "unknown_job"; } + return getServiceName(jarPath); + } - private static String getServiceName(Path jarPath) { - String jarName = jarPath.getFileName().toString(); - int dotIndex = jarName.lastIndexOf("."); - return dotIndex == -1 ? jarName : jarName.substring(0, dotIndex); - } + private static String getServiceName(Path jarPath) { + String jarName = jarPath.getFileName().toString(); + int dotIndex = jarName.lastIndexOf("."); + return dotIndex == -1 ? jarName : jarName.substring(0, dotIndex); + } - private static Path getJarPathFromSunCommandLine() { - String programArguments = System.getProperty("sun.java.command"); - if (programArguments == null) { - return null; - } - // Take the path until the first space. If the path doesn't exist extend it up to the next - // space. Repeat until a path that exists is found or input runs out. - int next = 0; - while (true) { - int nextSpace = programArguments.indexOf(' ', next); - if (nextSpace == -1) { - return pathIfExists(programArguments); - } - Path path = pathIfExists(programArguments.substring(0, nextSpace)); - next = nextSpace + 1; - if (path != null) { - return path; - } - } + private static Path getJarPathFromSunCommandLine() { + String programArguments = System.getProperty("sun.java.command"); + if (programArguments == null) { + return null; + } + // Take the path until the first space. If the path doesn't exist extend it up to the next + // space. Repeat until a path that exists is found or input runs out. + int next = 0; + while (true) { + int nextSpace = programArguments.indexOf(' ', next); + if (nextSpace == -1) { + return pathIfExists(programArguments); + } + Path path = pathIfExists(programArguments.substring(0, nextSpace)); + next = nextSpace + 1; + if (path != null) { + return path; + } } + } - private static Path pathIfExists(String programArguments) { - Path candidate; - try { - candidate = Paths.get(programArguments); - } catch (InvalidPathException e) { - return null; - } - return Files.isRegularFile(candidate) ? candidate : null; + private static Path pathIfExists(String programArguments) { + Path candidate; + try { + candidate = Paths.get(programArguments); + } catch (InvalidPathException e) { + return null; } + return Files.isRegularFile(candidate) ? 
candidate : null; + } } diff --git a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Format.java b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Format.java index 90204cff9..3d1b9d2e6 100644 --- a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Format.java +++ b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Format.java @@ -1,6 +1,6 @@ package io.prometheus.metrics.exporter.pushgateway; public enum Format { - PROMETHEUS_PROTOBUF, - PROMETHEUS_TEXT + PROMETHEUS_PROTOBUF, + PROMETHEUS_TEXT } diff --git a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/HttpConnectionFactory.java b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/HttpConnectionFactory.java index c583b144e..f7a039af7 100644 --- a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/HttpConnectionFactory.java +++ b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/HttpConnectionFactory.java @@ -4,10 +4,8 @@ import java.net.HttpURLConnection; import java.net.URL; -/** - * See {@link DefaultHttpConnectionFactory}. - */ +/** See {@link DefaultHttpConnectionFactory}. */ @FunctionalInterface public interface HttpConnectionFactory { - HttpURLConnection create(URL url) throws IOException; + HttpURLConnection create(URL url) throws IOException; } diff --git a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/PushGateway.java b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/PushGateway.java index 0e6ab8d54..d879c3105 100644 --- a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/PushGateway.java +++ b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/PushGateway.java @@ -1,5 +1,7 @@ package io.prometheus.metrics.exporter.pushgateway; +import static io.prometheus.metrics.exporter.pushgateway.Scheme.HTTP; + import io.prometheus.metrics.config.ExporterPushgatewayProperties; import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.config.PrometheusPropertiesException; @@ -8,24 +10,23 @@ import io.prometheus.metrics.model.registry.Collector; import io.prometheus.metrics.model.registry.MultiCollector; import io.prometheus.metrics.model.registry.PrometheusRegistry; - import java.io.*; import java.net.*; import java.nio.charset.StandardCharsets; import java.util.*; -import static io.prometheus.metrics.exporter.pushgateway.Scheme.HTTP; - /** - * Export metrics via the Prometheus Pushgateway - *

- * The Prometheus Pushgateway exists to allow ephemeral and batch jobs to expose their metrics to Prometheus.
- * Since these kinds of jobs may not exist long enough to be scraped, they can instead push their metrics
- * to a Pushgateway. This Java class allows pushing the contents of a {@link PrometheusRegistry} to a Pushgateway.
- * <p>
- * Example usage:
- * <pre>
- * {@code
+ * Export metrics via the <a href="https://github.com/prometheus/pushgateway">Prometheus
+ * Pushgateway</a>
+ *
+ * <p>The Prometheus Pushgateway exists to allow ephemeral and batch jobs to expose their metrics to
+ * Prometheus. Since these kinds of jobs may not exist long enough to be scraped, they can instead
+ * push their metrics to a Pushgateway. This Java class allows pushing the contents of a {@link
+ * PrometheusRegistry} to a Pushgateway.
+ *
+ * <p>Example usage:
+ *
+ * <pre>{@code
  * void executeBatchJob() throws Exception {
  *     PrometheusRegistry registry = new PrometheusRegistry();
  *     Gauge duration = Gauge.builder()
@@ -53,384 +54,397 @@
  *         pg.pushAdd();
  *     }
  * }
- * }
- * 
- *

- * See https://github.com/prometheus/pushgateway. + * }

+ * + *

See https://github.com/prometheus/pushgateway. */ public class PushGateway { - private static final int MILLISECONDS_PER_SECOND = 1000; + private static final int MILLISECONDS_PER_SECOND = 1000; + + private final URL url; + private final Format format; + private final Map requestHeaders; + private final PrometheusRegistry registry; + private final HttpConnectionFactory connectionFactory; + + private PushGateway( + PrometheusRegistry registry, + Format format, + URL url, + HttpConnectionFactory connectionFactory, + Map requestHeaders) { + this.registry = registry; + this.format = format; + this.url = url; + this.requestHeaders = Collections.unmodifiableMap(new HashMap<>(requestHeaders)); + this.connectionFactory = connectionFactory; + } + + /** + * Push all metrics. All metrics with the same job and grouping key are replaced. + * + *

This uses the PUT HTTP method. + */ + public void push() throws IOException { + doRequest(registry, "PUT"); + } + + /** + * Push a single metric. All metrics with the same job and grouping key are replaced. + * + *

This is useful for pushing a single Gauge. + * + *

This uses the PUT HTTP method. + */ + public void push(Collector collector) throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + doRequest(registry, "PUT"); + } + + /** + * Push a single collector. All metrics with the same job and grouping key are replaced. + * + *

This uses the PUT HTTP method. + */ + public void push(MultiCollector collector) throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + doRequest(registry, "PUT"); + } + + /** + * Like {@link #push()}, but only metrics with the same name as the newly pushed metrics are + * replaced. + * + *

This uses the POST HTTP method. + */ + public void pushAdd() throws IOException { + doRequest(registry, "POST"); + } + + /** + * Like {@link #push(Collector)}, but only the specified metric will be replaced. + * + *

This uses the POST HTTP method. + */ + public void pushAdd(Collector collector) throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + doRequest(registry, "POST"); + } + + /** + * Like {@link #push(MultiCollector)}, but only the metrics from the collector will be replaced. + * + *

This uses the POST HTTP method. + */ + public void pushAdd(MultiCollector collector) throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + doRequest(registry, "POST"); + } + + /** + * Deletes metrics from the Pushgateway. + * + *

This uses the DELETE HTTP method. + */ + public void delete() throws IOException { + doRequest(null, "DELETE"); + } + + private void doRequest(PrometheusRegistry registry, String method) throws IOException { + try { + HttpURLConnection connection = connectionFactory.create(url); + requestHeaders.forEach(connection::setRequestProperty); + if (format == Format.PROMETHEUS_TEXT) { + connection.setRequestProperty("Content-Type", PrometheusTextFormatWriter.CONTENT_TYPE); + } else { + connection.setRequestProperty("Content-Type", PrometheusProtobufWriter.CONTENT_TYPE); + } + if (!method.equals("DELETE")) { + connection.setDoOutput(true); + } + connection.setRequestMethod(method); + + connection.setConnectTimeout(10 * MILLISECONDS_PER_SECOND); + connection.setReadTimeout(10 * MILLISECONDS_PER_SECOND); + connection.connect(); + + try { + if (!method.equals("DELETE")) { + OutputStream outputStream = connection.getOutputStream(); + if (format == Format.PROMETHEUS_TEXT) { + new PrometheusTextFormatWriter(false).write(outputStream, registry.scrape()); + } else { + new PrometheusProtobufWriter().write(outputStream, registry.scrape()); + } + outputStream.flush(); + outputStream.close(); + } - private final URL url; - private final Format format; - private final Map requestHeaders; - private final PrometheusRegistry registry; - private final HttpConnectionFactory connectionFactory; + int response = connection.getResponseCode(); + if (response / 100 != 2) { + String errorMessage; + InputStream errorStream = connection.getErrorStream(); + if (errorStream != null) { + String errBody = readFromStream(errorStream); + errorMessage = + "Response code from " + url + " was " + response + ", response body: " + errBody; + } else { + errorMessage = "Response code from " + url + " was " + response; + } + throw new IOException(errorMessage); + } - private PushGateway(PrometheusRegistry registry, Format format, URL url, HttpConnectionFactory connectionFactory, Map requestHeaders) { - this.registry = registry; - this.format = format; - this.url = url; - this.requestHeaders = Collections.unmodifiableMap(new HashMap<>(requestHeaders)); - this.connectionFactory = connectionFactory; + } finally { + connection.disconnect(); + } + } catch (IOException e) { + String baseUrl = url.getProtocol() + "://" + url.getHost(); + if (url.getPort() != -1) { + baseUrl += ":" + url.getPort(); + } + throw new IOException( + "Failed to push metrics to the Prometheus Pushgateway on " + + baseUrl + + ": " + + e.getMessage(), + e); + } + } + + private static String readFromStream(InputStream is) throws IOException { + ByteArrayOutputStream result = new ByteArrayOutputStream(); + byte[] buffer = new byte[1024]; + int length; + while ((length = is.read(buffer)) != -1) { + result.write(buffer, 0, length); + } + return result.toString("UTF-8"); + } + + public static Builder builder() { + return builder(PrometheusProperties.get()); + } + + /** + * The {@link PrometheusProperties} will be used to override what is set in the {@link Builder}. 
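For reference, a minimal sketch (not part of this diff) contrasting the three HTTP methods used by push(), pushAdd() and delete(). It assumes the Builder defined below and its build() method, which is not shown in this excerpt, and uses placeholder names.

    import io.prometheus.metrics.core.metrics.Gauge;
    import io.prometheus.metrics.exporter.pushgateway.PushGateway;

    public class PushGatewayModes {
      public static void main(String[] args) throws Exception {
        Gauge lastSuccess =
            Gauge.builder()
                .name("my_batch_job_last_success_unixtime")
                .help("Last time the batch job succeeded, in unixtime.")
                .register(); // registers with PrometheusRegistry.defaultRegistry

        PushGateway pg =
            PushGateway.builder()
                .address("localhost:9091") // the default, shown here for clarity
                .job("my_batch_job")
                .build(); // assumed Builder method, not shown in this excerpt

        lastSuccess.set(System.currentTimeMillis() / 1000.0);

        // PUT: replaces all metrics previously pushed for this job + grouping key.
        pg.push();

        // POST: only replaces metrics with the same names as the ones pushed now.
        pg.pushAdd();

        // DELETE: removes every metric of this job + grouping key from the Pushgateway.
        pg.delete();
      }
    }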
+ */ + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } + + public static class Builder { + + private final PrometheusProperties config; + private Format format; + private String address; + private Scheme scheme; + private String job; + private final Map requestHeaders = new HashMap<>(); + private PrometheusRegistry registry = PrometheusRegistry.defaultRegistry; + private HttpConnectionFactory connectionFactory = new DefaultHttpConnectionFactory(); + private Map groupingKey = new TreeMap<>(); + + private Builder(PrometheusProperties config) { + this.config = config; } - /** - * Push all metrics. All metrics with the same job and grouping key are replaced. - *

- * This uses the PUT HTTP method. - */ - public void push() throws IOException { - doRequest(registry, "PUT"); + /** Default is {@link Format#PROMETHEUS_PROTOBUF}. */ + public Builder format(Format format) { + if (format == null) { + throw new NullPointerException(); + } + this.format = format; + return this; } /** - * Push a single metric. All metrics with the same job and grouping key are replaced. - *
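A small sketch (not part of this diff) of switching from the default protobuf format to the text exposition format, for example when the receiving end does not accept protobuf; the job name is a placeholder and build() is assumed from the Builder.

    import io.prometheus.metrics.exporter.pushgateway.Format;
    import io.prometheus.metrics.exporter.pushgateway.PushGateway;

    public class TextFormatPush {
      public static void main(String[] args) throws Exception {
        PushGateway pg =
            PushGateway.builder()
                .job("my_batch_job")
                .format(Format.PROMETHEUS_TEXT) // instead of PROMETHEUS_PROTOBUF
                .build();
        pg.pushAdd(); // pushes PrometheusRegistry.defaultRegistry in text format
      }
    }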

- * This is useful for pushing a single Gauge. - *

- * This uses the PUT HTTP method. + * Address of the Pushgateway in format {@code host:port}. Default is {@code localhost:9091}. + * Can be overwritten at runtime with the {@code io.prometheus.exporter.pushgateway.address} + * property. */ - public void push(Collector collector) throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - doRequest(registry, "PUT"); + public Builder address(String address) { + if (address == null) { + throw new NullPointerException(); + } + this.address = address; + return this; } - /** - * Push a single collector. All metrics with the same job and grouping key are replaced. - *

- * This uses the PUT HTTP method. - */ - public void push(MultiCollector collector) throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - doRequest(registry, "PUT"); + /** Username and password for HTTP basic auth when pushing to the Pushgateway. */ + public Builder basicAuth(String user, String password) { + if (user == null || password == null) { + throw new NullPointerException(); + } + byte[] credentialsBytes = (user + ":" + password).getBytes(StandardCharsets.UTF_8); + String encoded = Base64.getEncoder().encodeToString(credentialsBytes); + requestHeaders.put("Authorization", String.format("Basic %s", encoded)); + return this; + } + + /** Bearer token authorization when pushing to the Pushgateway. */ + public Builder bearerToken(String token) { + if (token == null) { + throw new NullPointerException(); + } + requestHeaders.put("Authorization", String.format("Bearer %s", token)); + return this; } /** - * Like {@link #push()}, but only metrics with the same name as the newly pushed metrics are replaced. - *

- * This uses the POST HTTP method. + * Specify if metrics should be pushed using HTTP or HTTPS. Default is HTTP. Can be overwritten + * at runtime with the {@code io.prometheus.exporter.pushgateway.scheme} property. */ - public void pushAdd() throws IOException { - doRequest(registry, "POST"); + public Builder scheme(Scheme scheme) { + if (scheme == null) { + throw new NullPointerException(); + } + this.scheme = scheme; + return this; } /** - * Like {@link #push(Collector)}, but only the specified metric will be replaced. - *
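A sketch (not part of this diff) combining the scheme and authentication options above; the host, credentials and job name are placeholders, and the server certificate must be trusted by the JVM (see DefaultHttpConnectionFactory).

    import io.prometheus.metrics.exporter.pushgateway.PushGateway;
    import io.prometheus.metrics.exporter.pushgateway.Scheme;

    public class SecurePush {
      public static void main(String[] args) throws Exception {
        PushGateway pg =
            PushGateway.builder()
                .scheme(Scheme.HTTPS)
                .address("pushgateway.example.com:9091")
                .basicAuth("my-user", "my-password") // or .bearerToken("...")
                .job("my_batch_job")
                .build();
        pg.pushAdd();
      }
    }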

- * This uses the POST HTTP method. + * Custom connection factory. Default is {@link DefaultHttpConnectionFactory}. + * + *

The {@code PushGatewayTestApp} in {@code integration-tests/it-pushgateway/} has an example + * of a custom connection factory that skips SSL certificate validation for HTTPS connections. */ - public void pushAdd(Collector collector) throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - doRequest(registry, "POST"); + public Builder connectionFactory(HttpConnectionFactory connectionFactory) { + if (connectionFactory == null) { + throw new NullPointerException(); + } + this.connectionFactory = connectionFactory; + return this; } /** - * Like {@link #push(MultiCollector)}, but only the metrics from the collector will be replaced. - *
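For comparison with DefaultHttpConnectionFactory, a hypothetical custom factory (not part of this diff) that could be passed to connectionFactory(...); the class name, header name and value are made up.

    import io.prometheus.metrics.exporter.pushgateway.HttpConnectionFactory;
    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class HeaderAddingConnectionFactory implements HttpConnectionFactory {

      @Override
      public HttpURLConnection create(URL url) throws IOException {
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        // Extra header sent with every push, e.g. for a proxy in front of the Pushgateway.
        connection.setRequestProperty("X-Custom-Header", "some-value");
        return connection;
      }
    }

It would be wired in via PushGateway.builder().connectionFactory(new HeaderAddingConnectionFactory()).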

- * This uses the POST HTTP method. + * The {@code job} label to be used when pushing metrics. If not provided, the name of the JAR + * file will be used by default. Can be overwritten at runtime with the {@code + * io.prometheus.exporter.pushgateway.job} property. */ - public void pushAdd(MultiCollector collector) throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - doRequest(registry, "POST"); + public Builder job(String job) { + if (job == null) { + throw new NullPointerException(); + } + this.job = job; + return this; } /** - * Deletes metrics from the Pushgateway. - *

- * This uses the DELETE HTTP method. + * Grouping keys to be used when pushing/deleting metrics. Call this method multiple times for + * adding multiple grouping keys. */ - public void delete() throws IOException { - doRequest(null, "DELETE"); + public Builder groupingKey(String name, String value) { + if (name == null || value == null) { + throw new NullPointerException(); + } + groupingKey.put(name, value); + return this; } - private void doRequest(PrometheusRegistry registry, String method) throws IOException { - try { - HttpURLConnection connection = connectionFactory.create(url); - requestHeaders.forEach(connection::setRequestProperty); - if (format == Format.PROMETHEUS_TEXT) { - connection.setRequestProperty("Content-Type", PrometheusTextFormatWriter.CONTENT_TYPE); - } else { - connection.setRequestProperty("Content-Type", PrometheusProtobufWriter.CONTENT_TYPE); - } - if (!method.equals("DELETE")) { - connection.setDoOutput(true); - } - connection.setRequestMethod(method); - - connection.setConnectTimeout(10 * MILLISECONDS_PER_SECOND); - connection.setReadTimeout(10 * MILLISECONDS_PER_SECOND); - connection.connect(); - - try { - if (!method.equals("DELETE")) { - OutputStream outputStream = connection.getOutputStream(); - if (format == Format.PROMETHEUS_TEXT) { - new PrometheusTextFormatWriter(false).write(outputStream, registry.scrape()); - } else { - new PrometheusProtobufWriter().write(outputStream, registry.scrape()); - } - outputStream.flush(); - outputStream.close(); - } - - int response = connection.getResponseCode(); - if (response / 100 != 2) { - String errorMessage; - InputStream errorStream = connection.getErrorStream(); - if (errorStream != null) { - String errBody = readFromStream(errorStream); - errorMessage = "Response code from " + url + " was " + response + ", response body: " + errBody; - } else { - errorMessage = "Response code from " + url + " was " + response; - } - throw new IOException(errorMessage); - } - - } finally { - connection.disconnect(); - } - } catch (IOException e) { - String baseUrl = url.getProtocol() + "://" + url.getHost(); - if (url.getPort() != -1) { - baseUrl += ":" + url.getPort(); - } - throw new IOException("Failed to push metrics to the Prometheus Pushgateway on " + baseUrl + ": " + e.getMessage(), e); - } + /** Convenience method for adding the current IP address as an "instance" label. */ + public Builder instanceIpGroupingKey() throws UnknownHostException { + return groupingKey("instance", InetAddress.getLocalHost().getHostAddress()); } - private static String readFromStream(InputStream is) throws IOException { - ByteArrayOutputStream result = new ByteArrayOutputStream(); - byte[] buffer = new byte[1024]; - int length; - while ((length = is.read(buffer)) != -1) { - result.write(buffer, 0, length); - } - return result.toString("UTF-8"); + /** Push metrics from this registry instead of {@link PrometheusRegistry#defaultRegistry}. 
*/ + public Builder registry(PrometheusRegistry registry) { + if (registry == null) { + throw new NullPointerException(); + } + this.registry = registry; + return this; } - public static Builder builder() { - return builder(PrometheusProperties.get()); + private Scheme getScheme(ExporterPushgatewayProperties properties) { + if (properties != null && properties.getScheme() != null) { + return Scheme.valueOf(properties.getScheme()); + } else if (this.scheme != null) { + return this.scheme; + } else { + return HTTP; + } } - /** - * The {@link PrometheusProperties} will be used to override what is set in the {@link Builder}. - */ - public static Builder builder(PrometheusProperties config) { - return new Builder(config); + private String getAddress(ExporterPushgatewayProperties properties) { + if (properties != null && properties.getAddress() != null) { + return properties.getAddress(); + } else if (this.address != null) { + return this.address; + } else { + return "localhost:9091"; + } } - public static class Builder { - - private final PrometheusProperties config; - private Format format; - private String address; - private Scheme scheme; - private String job; - private final Map requestHeaders = new HashMap<>(); - private PrometheusRegistry registry = PrometheusRegistry.defaultRegistry; - private HttpConnectionFactory connectionFactory = new DefaultHttpConnectionFactory(); - private Map groupingKey = new TreeMap<>(); - - private Builder(PrometheusProperties config) { - this.config = config; - } - - /** - * Default is {@link Format#PROMETHEUS_PROTOBUF}. - */ - public Builder format(Format format) { - if (format == null) { - throw new NullPointerException(); - } - this.format = format; - return this; - } - - /** - * Address of the Pushgateway in format {@code host:port}. - * Default is {@code localhost:9091}. - * Can be overwritten at runtime with the {@code io.prometheus.exporter.pushgateway.address} property. - */ - public Builder address(String address) { - if (address == null) { - throw new NullPointerException(); - } - this.address = address; - return this; - } - - /** - * Username and password for HTTP basic auth when pushing to the Pushgateway. - */ - public Builder basicAuth(String user, String password) { - if (user == null || password == null) { - throw new NullPointerException(); - } - byte[] credentialsBytes = (user + ":" + password).getBytes(StandardCharsets.UTF_8); - String encoded = Base64.getEncoder().encodeToString(credentialsBytes); - requestHeaders.put("Authorization", String.format("Basic %s", encoded)); - return this; - } - - /** - * Bearer token authorization when pushing to the Pushgateway. - */ - public Builder bearerToken(String token) { - if (token == null) { - throw new NullPointerException(); - } - requestHeaders.put("Authorization", String.format("Bearer %s", token)); - return this; - } - - /** - * Specify if metrics should be pushed using HTTP or HTTPS. Default is HTTP. - * Can be overwritten at runtime with the {@code io.prometheus.exporter.pushgateway.scheme} property. - */ - public Builder scheme(Scheme scheme) { - if (scheme == null) { - throw new NullPointerException(); - } - this.scheme = scheme; - return this; - } - - /** - * Custom connection factory. Default is {@link DefaultHttpConnectionFactory}. - *
<p>
- * The {@code PushGatewayTestApp} in {@code integration-tests/it-pushgateway/} has an example of a custom - * connection factory that skips SSL certificate validation for HTTPS connections. - */ - public Builder connectionFactory(HttpConnectionFactory connectionFactory) { - if (connectionFactory == null) { - throw new NullPointerException(); - } - this.connectionFactory = connectionFactory; - return this; - } - - /** - * The {@code job} label to be used when pushing metrics. - * If not provided, the name of the JAR file will be used by default. - * Can be overwritten at runtime with the {@code io.prometheus.exporter.pushgateway.job} property. - */ - public Builder job(String job) { - if (job == null) { - throw new NullPointerException(); - } - this.job = job; - return this; - } - - /** - * Grouping keys to be used when pushing/deleting metrics. - * Call this method multiple times for adding multiple grouping keys. - */ - public Builder groupingKey(String name, String value) { - if (name == null || value == null) { - throw new NullPointerException(); - } - groupingKey.put(name, value); - return this; - } - - /** - * Convenience method for adding the current IP address as an "instance" label. - */ - public Builder instanceIpGroupingKey() throws UnknownHostException { - return groupingKey("instance", InetAddress.getLocalHost().getHostAddress()); - } - - /** - * Push metrics from this registry instead of {@link PrometheusRegistry#defaultRegistry}. - */ - public Builder registry(PrometheusRegistry registry) { - if (registry == null) { - throw new NullPointerException(); - } - this.registry = registry; - return this; - } - - private Scheme getScheme(ExporterPushgatewayProperties properties) { - if (properties != null && properties.getScheme() != null) { - return Scheme.valueOf(properties.getScheme()); - } else if (this.scheme != null) { - return this.scheme; - } else { - return HTTP; - } - } - - private String getAddress(ExporterPushgatewayProperties properties) { - if (properties != null && properties.getAddress() != null) { - return properties.getAddress(); - } else if (this.address != null) { - return this.address; - } else { - return "localhost:9091"; - } - } - - private String getJob(ExporterPushgatewayProperties properties) { - if (properties != null && properties.getJob() != null) { - return properties.getJob(); - } else if (this.job != null) { - return this.job; - } else { - return DefaultJobLabelDetector.getDefaultJobLabel(); - } - } + private String getJob(ExporterPushgatewayProperties properties) { + if (properties != null && properties.getJob() != null) { + return properties.getJob(); + } else if (this.job != null) { + return this.job; + } else { + return DefaultJobLabelDetector.getDefaultJobLabel(); + } + } - private Format getFormat(ExporterPushgatewayProperties properties) { - // currently not configurable via properties - if (this.format != null) { - return this.format; - } - return Format.PROMETHEUS_PROTOBUF; - } + private Format getFormat(ExporterPushgatewayProperties properties) { + // currently not configurable via properties + if (this.format != null) { + return this.format; + } + return Format.PROMETHEUS_PROTOBUF; + } - private URL makeUrl(ExporterPushgatewayProperties properties) throws UnsupportedEncodingException, MalformedURLException { - String url = getScheme(properties) + "://" + getAddress(properties) + "/metrics/"; - String job = getJob(properties); - if (job.contains("/")) { - url += "job@base64/" + base64url(job); - } else { - url += "job/" + 
URLEncoder.encode(job, "UTF-8"); - } - if (groupingKey != null) { - for (Map.Entry entry : groupingKey.entrySet()) { - if (entry.getValue().isEmpty()) { - url += "/" + entry.getKey() + "@base64/="; - } else if (entry.getValue().contains("/")) { - url += "/" + entry.getKey() + "@base64/" + base64url(entry.getValue()); - } else { - url += "/" + entry.getKey() + "/" + URLEncoder.encode(entry.getValue(), "UTF-8"); - } - } - } - return URI.create(url).normalize().toURL(); + private URL makeUrl(ExporterPushgatewayProperties properties) + throws UnsupportedEncodingException, MalformedURLException { + String url = getScheme(properties) + "://" + getAddress(properties) + "/metrics/"; + String job = getJob(properties); + if (job.contains("/")) { + url += "job@base64/" + base64url(job); + } else { + url += "job/" + URLEncoder.encode(job, "UTF-8"); + } + if (groupingKey != null) { + for (Map.Entry entry : groupingKey.entrySet()) { + if (entry.getValue().isEmpty()) { + url += "/" + entry.getKey() + "@base64/="; + } else if (entry.getValue().contains("/")) { + url += "/" + entry.getKey() + "@base64/" + base64url(entry.getValue()); + } else { + url += "/" + entry.getKey() + "/" + URLEncoder.encode(entry.getValue(), "UTF-8"); + } } + } + return URI.create(url).normalize().toURL(); + } - private String base64url(String v) { - return Base64.getEncoder().encodeToString(v.getBytes(StandardCharsets.UTF_8)).replace("+", "-").replace("/", "_"); - } + private String base64url(String v) { + return Base64.getEncoder() + .encodeToString(v.getBytes(StandardCharsets.UTF_8)) + .replace("+", "-") + .replace("/", "_"); + } - public PushGateway build() { - ExporterPushgatewayProperties properties = config == null ? null : config.getExporterPushgatewayProperties(); - try { - return new PushGateway(registry, getFormat(properties), makeUrl(properties), connectionFactory, requestHeaders); - } catch (MalformedURLException e) { - throw new PrometheusPropertiesException(address + ": Invalid address. Expecting :"); - } catch (UnsupportedEncodingException e) { - throw new RuntimeException(e); // cannot happen, UTF-8 is always supported - } - } + public PushGateway build() { + ExporterPushgatewayProperties properties = + config == null ? null : config.getExporterPushgatewayProperties(); + try { + return new PushGateway( + registry, + getFormat(properties), + makeUrl(properties), + connectionFactory, + requestHeaders); + } catch (MalformedURLException e) { + throw new PrometheusPropertiesException( + address + ": Invalid address. 
Expecting :"); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); // cannot happen, UTF-8 is always supported + } } + } } diff --git a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Scheme.java b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Scheme.java index adb7ebf07..51a2e32dd 100644 --- a/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Scheme.java +++ b/prometheus-metrics-exporter-pushgateway/src/main/java/io/prometheus/metrics/exporter/pushgateway/Scheme.java @@ -1,29 +1,29 @@ package io.prometheus.metrics.exporter.pushgateway; public enum Scheme { + HTTP("http"), + HTTPS("https"); - HTTP("http"), - HTTPS("https"); + private final String name; - private final String name; + Scheme(String name) { + this.name = name; + } - Scheme(String name) { - this.name = name; - } - - @Override - public String toString() { - return name; - } + @Override + public String toString() { + return name; + } - public static Scheme fromString(String name) { - switch (name) { - case "http": - return HTTP; - case "https": - return HTTPS; - default: - throw new IllegalArgumentException(name + ": Unsupported scheme. Expecting 'http' or 'https'."); - } + public static Scheme fromString(String name) { + switch (name) { + case "http": + return HTTP; + case "https": + return HTTPS; + default: + throw new IllegalArgumentException( + name + ": Unsupported scheme. Expecting 'http' or 'https'."); } + } } diff --git a/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BasicAuthPushGatewayTest.java b/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BasicAuthPushGatewayTest.java index 78706ee49..7fbb32b10 100644 --- a/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BasicAuthPushGatewayTest.java +++ b/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BasicAuthPushGatewayTest.java @@ -1,48 +1,48 @@ package io.prometheus.metrics.exporter.pushgateway; +import static org.mockserver.model.HttpRequest.request; +import static org.mockserver.model.HttpResponse.response; + import io.prometheus.metrics.core.metrics.Gauge; import io.prometheus.metrics.model.registry.PrometheusRegistry; +import java.io.IOException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.mockserver.client.MockServerClient; import org.mockserver.junit.MockServerRule; -import java.io.IOException; - -import static org.mockserver.model.HttpRequest.request; -import static org.mockserver.model.HttpResponse.response; - public class BasicAuthPushGatewayTest { - @Rule - public MockServerRule mockServerRule = new MockServerRule(this); - private MockServerClient mockServerClient; - - PrometheusRegistry registry; - Gauge gauge; - PushGateway pushGateway; - - @Before - public void setUp() { - registry = new PrometheusRegistry(); - gauge = Gauge.builder().name("g").help("help").build(); - pushGateway = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .basicAuth("testUser", "testPwd") - .registry(registry) - .job("j") - .build(); - } - - @Test - public void testAuthorizedPush() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withHeader("Authorization", "Basic dGVzdFVzZXI6dGVzdFB3ZA==") - .withPath("/metrics/job/j") - 
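The Builder javadocs above describe address, job, grouping keys, authentication, and the target registry. Below is a minimal usage sketch tying them together; the metric name, job name, and grouping-key values are illustrative and not taken from this change. Per the tests that follow, push() issues a PUT, pushAdd() a POST, and delete() a DELETE.

import io.prometheus.metrics.core.metrics.Counter;
import io.prometheus.metrics.exporter.pushgateway.PushGateway;
import io.prometheus.metrics.model.registry.PrometheusRegistry;

public class PushGatewayUsageSketch {
  public static void main(String[] args) throws Exception {
    PrometheusRegistry registry = new PrometheusRegistry();
    Counter jobsTotal =
        Counter.builder().name("batch_jobs_total").help("processed batch jobs").build();
    registry.register(jobsTotal);
    jobsTotal.inc();

    PushGateway pushGateway =
        PushGateway.builder()
            .address("localhost:9091") // host:port, the documented default
            .job("example-batch-job") // job label; defaults to the JAR file name if omitted
            .groupingKey("env", "dev") // call groupingKey(...) again for additional keys
            .registry(registry) // push this registry instead of the default registry
            .build();

    pushGateway.push(); // PUT: replace all metrics for this job/grouping key
    // pushGateway.pushAdd(); // POST: add metrics without replacing the whole group
    // pushGateway.delete();  // DELETE: remove the group from the Pushgateway
  }
}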
).respond(response().withStatusCode(202)); - pushGateway.push(); - } + @Rule public MockServerRule mockServerRule = new MockServerRule(this); + private MockServerClient mockServerClient; + + PrometheusRegistry registry; + Gauge gauge; + PushGateway pushGateway; + + @Before + public void setUp() { + registry = new PrometheusRegistry(); + gauge = Gauge.builder().name("g").help("help").build(); + pushGateway = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .basicAuth("testUser", "testPwd") + .registry(registry) + .job("j") + .build(); + } + + @Test + public void testAuthorizedPush() throws IOException { + mockServerClient + .when( + request() + .withMethod("PUT") + .withHeader("Authorization", "Basic dGVzdFVzZXI6dGVzdFB3ZA==") + .withPath("/metrics/job/j")) + .respond(response().withStatusCode(202)); + pushGateway.push(); + } } diff --git a/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BearerTokenPushGatewayTest.java b/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BearerTokenPushGatewayTest.java index 8687b2d55..144a0f24f 100644 --- a/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BearerTokenPushGatewayTest.java +++ b/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/BearerTokenPushGatewayTest.java @@ -1,48 +1,48 @@ package io.prometheus.metrics.exporter.pushgateway; +import static org.mockserver.model.HttpRequest.request; +import static org.mockserver.model.HttpResponse.response; + import io.prometheus.metrics.core.metrics.Gauge; import io.prometheus.metrics.model.registry.PrometheusRegistry; +import java.io.IOException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.mockserver.client.MockServerClient; import org.mockserver.junit.MockServerRule; -import java.io.IOException; - -import static org.mockserver.model.HttpRequest.request; -import static org.mockserver.model.HttpResponse.response; - public class BearerTokenPushGatewayTest { - @Rule - public MockServerRule mockServerRule = new MockServerRule(this); - private MockServerClient mockServerClient; - - PrometheusRegistry registry; - Gauge gauge; - PushGateway pushGateway; - - @Before - public void setUp() { - registry = new PrometheusRegistry(); - gauge = Gauge.builder().name("g").help("help").build(); - pushGateway = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .bearerToken("xxx") - .registry(registry) - .job("j") - .build(); - } - - @Test - public void testAuthorizedPush() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withHeader("Authorization", "Bearer xxx") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(202)); - pushGateway.push(); - } + @Rule public MockServerRule mockServerRule = new MockServerRule(this); + private MockServerClient mockServerClient; + + PrometheusRegistry registry; + Gauge gauge; + PushGateway pushGateway; + + @Before + public void setUp() { + registry = new PrometheusRegistry(); + gauge = Gauge.builder().name("g").help("help").build(); + pushGateway = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .bearerToken("xxx") + .registry(registry) + .job("j") + .build(); + } + + @Test + public void testAuthorizedPush() throws IOException { + mockServerClient + .when( + request() + .withMethod("PUT") + .withHeader("Authorization", "Bearer xxx") + 
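BasicAuthPushGatewayTest above expects the header value "Basic dGVzdFVzZXI6dGVzdFB3ZA==", which is what Builder.basicAuth(...) produces by Base64-encoding "user:password". A standalone sketch of just that encoding (credentials taken from the test; the class name is illustrative):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthHeaderSketch {
  public static void main(String[] args) {
    // Mirrors Builder.basicAuth(): encode "user:password" and prefix it with "Basic ".
    String credentials = "testUser" + ":" + "testPwd";
    String header =
        "Basic "
            + Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
    System.out.println(header); // prints: Basic dGVzdFVzZXI6dGVzdFB3ZA==
  }
}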
.withPath("/metrics/job/j")) + .respond(response().withStatusCode(202)); + pushGateway.push(); + } } diff --git a/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/PushGatewayTest.java b/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/PushGatewayTest.java index 74ebae7ac..ce88300db 100644 --- a/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/PushGatewayTest.java +++ b/prometheus-metrics-exporter-pushgateway/src/test/java/io/prometheus/metrics/exporter/pushgateway/PushGatewayTest.java @@ -1,8 +1,15 @@ package io.prometheus.metrics.exporter.pushgateway; +import static org.junit.rules.ExpectedException.none; +import static org.mockserver.model.HttpRequest.request; +import static org.mockserver.model.HttpResponse.response; import io.prometheus.metrics.core.metrics.Gauge; import io.prometheus.metrics.model.registry.PrometheusRegistry; +import java.io.IOException; +import java.lang.reflect.Field; +import java.net.InetAddress; +import java.net.URL; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -11,309 +18,276 @@ import org.mockserver.client.MockServerClient; import org.mockserver.junit.MockServerRule; -import java.io.IOException; -import java.lang.reflect.Field; -import java.net.InetAddress; -import java.net.URL; - -import static org.junit.rules.ExpectedException.none; -import static org.mockserver.model.HttpRequest.request; -import static org.mockserver.model.HttpResponse.response; - public class PushGatewayTest { - @Rule - public final ExpectedException thrown = none(); + @Rule public final ExpectedException thrown = none(); - @Rule - public MockServerRule mockServerRule = new MockServerRule(this); - private MockServerClient mockServerClient; + @Rule public MockServerRule mockServerRule = new MockServerRule(this); + private MockServerClient mockServerClient; - PrometheusRegistry registry; - Gauge gauge; + PrometheusRegistry registry; + Gauge gauge; - @Before - public void setUp() { - registry = new PrometheusRegistry(); - gauge = Gauge.builder().name("g").help("help").build(); - } + @Before + public void setUp() { + registry = new PrometheusRegistry(); + gauge = Gauge.builder().name("g").help("help").build(); + } - @Test(expected = RuntimeException.class) - public void testInvalidURLThrowsRuntimeException() { - PushGateway.builder().address("::").build(); // ":" is interpreted as port number, so parsing fails - } + @Test(expected = RuntimeException.class) + public void testInvalidURLThrowsRuntimeException() { + PushGateway.builder() + .address("::") + .build(); // ":" is interpreted as port number, so parsing fails + } - @Test - public void testMultipleSlashesAreStrippedFromURL() throws NoSuchFieldException, IllegalAccessException { - final PushGateway pushGateway = PushGateway.builder() - .address("example.com:1234/context///path//") - .job("test") - .build(); - Assert.assertEquals( - "http://example.com:1234/context/path/metrics/job/test", - getUrl(pushGateway).toString() - ); - } + @Test + public void testMultipleSlashesAreStrippedFromURL() + throws NoSuchFieldException, IllegalAccessException { + final PushGateway pushGateway = + PushGateway.builder().address("example.com:1234/context///path//").job("test").build(); + Assert.assertEquals( + "http://example.com:1234/context/path/metrics/job/test", getUrl(pushGateway).toString()); + } - private URL getUrl(PushGateway pushGateway) throws IllegalAccessException, 
NoSuchFieldException { - Field field = pushGateway.getClass().getDeclaredField("url"); - field.setAccessible(true); - return (URL) field.get(pushGateway); - } + private URL getUrl(PushGateway pushGateway) throws IllegalAccessException, NoSuchFieldException { + Field field = pushGateway.getClass().getDeclaredField("url"); + field.setAccessible(true); + return (URL) field.get(pushGateway); + } - @Test - public void testPush() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .build(); - pg.push(); - } + @Test + public void testPush() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .build(); + pg.push(); + } - @Test - public void testPush200Response() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(200)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .build(); - pg.push(); - } + @Test + public void testPush200Response() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j")) + .respond(response().withStatusCode(200)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .build(); + pg.push(); + } - @Test - public void testNon202ResponseThrows() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(500)); - thrown.expect(IOException.class); - thrown.expectMessage( - "Response code from http://localhost:" - + mockServerRule.getPort() - + "/metrics/job/j was 500"); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .build(); - pg.push(); - } + @Test + public void testNon202ResponseThrows() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j")) + .respond(response().withStatusCode(500)); + thrown.expect(IOException.class); + thrown.expectMessage( + "Response code from http://localhost:" + + mockServerRule.getPort() + + "/metrics/job/j was 500"); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .build(); + pg.push(); + } - @Test - public void testPushCollector() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .build(); - pg.push(); - } + @Test + public void testPushCollector() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .build(); + pg.push(); + } - @Test - public void 
testPushWithGroupingKey() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j/l/v") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .groupingKey("l", "v") - .build(); - pg.push(); - } + @Test + public void testPushWithGroupingKey() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j/l/v")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .groupingKey("l", "v") + .build(); + pg.push(); + } - @Test - public void testPushWithMultiGroupingKey() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j/l/v/l2/v2") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .groupingKey("l", "v") - .groupingKey("l2", "v2") - .build(); - pg.push(); - } + @Test + public void testPushWithMultiGroupingKey() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j/l/v/l2/v2")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .groupingKey("l", "v") + .groupingKey("l2", "v2") + .build(); + pg.push(); + } - @Test - public void testPushWithEmptyLabelGroupingKey() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j/l/v/l2@base64/=") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .groupingKey("l", "v") - .groupingKey("l2", "") - .build(); - pg.push(); - } + @Test + public void testPushWithEmptyLabelGroupingKey() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j/l/v/l2@base64/=")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .groupingKey("l", "v") + .groupingKey("l2", "") + .build(); + pg.push(); + } - @Test - public void testPushWithGroupingKeyWithSlashes() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job@base64/YS9i/l/v/l2@base64/75-_Lw==") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("a/b") - .groupingKey("l", "v") - .groupingKey("l2", "\uF7FF/") - .build(); - pg.push(); - } + @Test + public void testPushWithGroupingKeyWithSlashes() throws IOException { + mockServerClient + .when( + request().withMethod("PUT").withPath("/metrics/job@base64/YS9i/l/v/l2@base64/75-_Lw==")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("a/b") + .groupingKey("l", "v") + .groupingKey("l2", "\uF7FF/") + .build(); + pg.push(); + } - @Test - public void testPushCollectorWithGroupingKey() throws IOException { - mockServerClient.when( - request() - .withMethod("PUT") - .withPath("/metrics/job/j/l/v") - 
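testPushWithGroupingKeyWithSlashes above expects the job "a/b" to appear in the URL as job@base64/YS9i, produced by the builder's private base64url() helper (standard Base64 with '+' and '/' replaced by '-' and '_'). A quick standalone sketch of that encoding, reusing the job value from the test (the class name is illustrative):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class Base64UrlSketch {
  public static void main(String[] args) {
    // Mirrors base64url() in the Builder: URL-safe Base64 of the UTF-8 bytes.
    String job = "a/b"; // a job name containing '/' must be base64url-encoded in the path
    String encoded =
        Base64.getEncoder()
            .encodeToString(job.getBytes(StandardCharsets.UTF_8))
            .replace("+", "-")
            .replace("/", "_");
    System.out.println("/metrics/job@base64/" + encoded); // prints: /metrics/job@base64/YS9i
  }
}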
).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .groupingKey("l", "v") - .build(); - pg.push(gauge); - } + @Test + public void testPushCollectorWithGroupingKey() throws IOException { + mockServerClient + .when(request().withMethod("PUT").withPath("/metrics/job/j/l/v")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .groupingKey("l", "v") + .build(); + pg.push(gauge); + } - @Test - public void testPushAdd() throws IOException { - mockServerClient.when( - request() - .withMethod("POST") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .job("j") - .build(); - pg.pushAdd(); - } + @Test + public void testPushAdd() throws IOException { + mockServerClient + .when(request().withMethod("POST").withPath("/metrics/job/j")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .job("j") + .build(); + pg.pushAdd(); + } - @Test - public void testPushAddCollector() throws IOException { - mockServerClient.when( - request() - .withMethod("POST") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .job("j") - .build(); - pg.pushAdd(gauge); - } + @Test + public void testPushAddCollector() throws IOException { + mockServerClient + .when(request().withMethod("POST").withPath("/metrics/job/j")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder().address("localhost:" + mockServerRule.getPort()).job("j").build(); + pg.pushAdd(gauge); + } - @Test - public void testPushAddWithGroupingKey() throws IOException { - mockServerClient.when( - request() - .withMethod("POST") - .withPath("/metrics/job/j/l/v") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .groupingKey("l", "v") - .job("j") - .build(); - pg.pushAdd(); - } + @Test + public void testPushAddWithGroupingKey() throws IOException { + mockServerClient + .when(request().withMethod("POST").withPath("/metrics/job/j/l/v")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .groupingKey("l", "v") + .job("j") + .build(); + pg.pushAdd(); + } - @Test - public void testPushAddCollectorWithGroupingKey() throws IOException { - mockServerClient.when( - request() - .withMethod("POST") - .withPath("/metrics/job/j/l/v") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .registry(registry) - .groupingKey("l", "v") - .job("j") - .build(); - pg.pushAdd(gauge); - } + @Test + public void testPushAddCollectorWithGroupingKey() throws IOException { + mockServerClient + .when(request().withMethod("POST").withPath("/metrics/job/j/l/v")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .registry(registry) + .groupingKey("l", "v") + .job("j") + 
.build(); + pg.pushAdd(gauge); + } - @Test - public void testDelete() throws IOException { - mockServerClient.when( - request() - .withMethod("DELETE") - .withPath("/metrics/job/j") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .job("j") - .build(); - pg.delete(); - } + @Test + public void testDelete() throws IOException { + mockServerClient + .when(request().withMethod("DELETE").withPath("/metrics/job/j")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder().address("localhost:" + mockServerRule.getPort()).job("j").build(); + pg.delete(); + } - @Test - public void testDeleteWithGroupingKey() throws IOException { - mockServerClient.when( - request() - .withMethod("DELETE") - .withPath("/metrics/job/j/l/v") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .job("j") - .groupingKey("l", "v") - .build(); - pg.delete(); - } + @Test + public void testDeleteWithGroupingKey() throws IOException { + mockServerClient + .when(request().withMethod("DELETE").withPath("/metrics/job/j/l/v")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .job("j") + .groupingKey("l", "v") + .build(); + pg.delete(); + } - @Test - public void testInstanceIpGroupingKey() throws IOException { - String ip = InetAddress.getLocalHost().getHostAddress(); - Assert.assertFalse(ip.isEmpty()); - mockServerClient.when( - request() - .withMethod("DELETE") - .withPath("/metrics/job/j/instance/" + ip + "/l/v") - ).respond(response().withStatusCode(202)); - PushGateway pg = PushGateway.builder() - .address("localhost:" + mockServerRule.getPort()) - .job("j") - .groupingKey("l", "v") - .instanceIpGroupingKey() - .build(); - pg.delete(); - } + @Test + public void testInstanceIpGroupingKey() throws IOException { + String ip = InetAddress.getLocalHost().getHostAddress(); + Assert.assertFalse(ip.isEmpty()); + mockServerClient + .when(request().withMethod("DELETE").withPath("/metrics/job/j/instance/" + ip + "/l/v")) + .respond(response().withStatusCode(202)); + PushGateway pg = + PushGateway.builder() + .address("localhost:" + mockServerRule.getPort()) + .job("j") + .groupingKey("l", "v") + .instanceIpGroupingKey() + .build(); + pg.delete(); + } } diff --git a/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/HttpExchangeAdapter.java b/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/HttpExchangeAdapter.java index afcfb6cbc..6953b1c6d 100644 --- a/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/HttpExchangeAdapter.java +++ b/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/HttpExchangeAdapter.java @@ -5,109 +5,108 @@ import io.prometheus.metrics.exporter.common.PrometheusHttpResponse; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; - import java.io.IOException; import java.io.OutputStream; -import java.net.URI; import java.util.Enumeration; public class HttpExchangeAdapter implements PrometheusHttpExchange { - private final Request request; - private final Response response; + private final Request request; + private final Response response; - public 
HttpExchangeAdapter(HttpServletRequest request, HttpServletResponse response) { - this.request = new Request(request); - this.response = new Response(response); - } + public HttpExchangeAdapter(HttpServletRequest request, HttpServletResponse response) { + this.request = new Request(request); + this.response = new Response(response); + } - @Override - public PrometheusHttpRequest getRequest() { - return request; - } + @Override + public PrometheusHttpRequest getRequest() { + return request; + } - @Override - public PrometheusHttpResponse getResponse() { - return response; + @Override + public PrometheusHttpResponse getResponse() { + return response; + } + + @Override + public void handleException(IOException e) throws IOException { + throw e; // leave exception handling to the servlet container + } + + @Override + public void handleException(RuntimeException e) { + throw e; // leave exception handling to the servlet container + } + + @Override + public void close() { + // nothing to do for Servlets. + } + + public static class Request implements PrometheusHttpRequest { + + private final HttpServletRequest request; + + public Request(HttpServletRequest request) { + this.request = request; } @Override - public void handleException(IOException e) throws IOException { - throw e; // leave exception handling to the servlet container + public String getQueryString() { + return request.getQueryString(); } @Override - public void handleException(RuntimeException e) { - throw e; // leave exception handling to the servlet container + public Enumeration getHeaders(String name) { + return request.getHeaders(name); } @Override - public void close() { - // nothing to do for Servlets. + public String getMethod() { + return request.getMethod(); } - public static class Request implements PrometheusHttpRequest { - - private final HttpServletRequest request; - - public Request(HttpServletRequest request) { - this.request = request; - } - - @Override - public String getQueryString() { - return request.getQueryString(); - } - - @Override - public Enumeration getHeaders(String name) { - return request.getHeaders(name); - } - - @Override - public String getMethod() { - return request.getMethod(); - } - - @Override - public String getRequestPath() { - StringBuilder uri = new StringBuilder(); - String contextPath = request.getContextPath(); - if (contextPath.startsWith("/")) { - uri.append(contextPath); - } - String servletPath = request.getServletPath(); - if (servletPath.startsWith("/")) { - uri.append(servletPath); - } - String pathInfo = request.getPathInfo(); - if (pathInfo != null) { - uri.append(pathInfo); - } - return uri.toString(); - } + @Override + public String getRequestPath() { + StringBuilder uri = new StringBuilder(); + String contextPath = request.getContextPath(); + if (contextPath.startsWith("/")) { + uri.append(contextPath); + } + String servletPath = request.getServletPath(); + if (servletPath.startsWith("/")) { + uri.append(servletPath); + } + String pathInfo = request.getPathInfo(); + if (pathInfo != null) { + uri.append(pathInfo); + } + return uri.toString(); } + } - public static class Response implements PrometheusHttpResponse { + public static class Response implements PrometheusHttpResponse { - private final HttpServletResponse response; + private final HttpServletResponse response; - public Response(HttpServletResponse response) { - this.response = response; - } + public Response(HttpServletResponse response) { + this.response = response; + } - @Override - public void setHeader(String 
name, String value) { - response.setHeader(name, value); - } + @Override + public void setHeader(String name, String value) { + response.setHeader(name, value); + } - @Override - public OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) throws IOException { - if (response.getHeader("Content-Length") == null && contentLength > 0) { - response.setContentLength(contentLength); - } - response.setStatus(statusCode); - return response.getOutputStream(); - } + @Override + public OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) + throws IOException { + if (response.getHeader("Content-Length") == null && contentLength > 0) { + response.setContentLength(contentLength); + } + response.setStatus(statusCode); + return response.getOutputStream(); } + } } diff --git a/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/PrometheusMetricsServlet.java b/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/PrometheusMetricsServlet.java index 94dcf83eb..28728dc94 100644 --- a/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/PrometheusMetricsServlet.java +++ b/prometheus-metrics-exporter-servlet-jakarta/src/main/java/io/prometheus/metrics/exporter/servlet/jakarta/PrometheusMetricsServlet.java @@ -6,36 +6,37 @@ import jakarta.servlet.http.HttpServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; - import java.io.IOException; /** * Initial example exporter so that we can try the new metrics library out. - *
<p>
- * We'll add a Jakarta servlet, the built-in HTTPServer, etc. soon, and likely move common code into a common module. + * + *
<p>
We'll add a Jakarta servlet, the built-in HTTPServer, etc. soon, and likely move common code + * into a common module. */ public class PrometheusMetricsServlet extends HttpServlet { - private final PrometheusScrapeHandler handler; + private final PrometheusScrapeHandler handler; - public PrometheusMetricsServlet() { - this(PrometheusProperties.get(), PrometheusRegistry.defaultRegistry); - } + public PrometheusMetricsServlet() { + this(PrometheusProperties.get(), PrometheusRegistry.defaultRegistry); + } - public PrometheusMetricsServlet(PrometheusRegistry registry) { - this(PrometheusProperties.get(), registry); - } + public PrometheusMetricsServlet(PrometheusRegistry registry) { + this(PrometheusProperties.get(), registry); + } - public PrometheusMetricsServlet(PrometheusProperties config) { - this(config, PrometheusRegistry.defaultRegistry); - } + public PrometheusMetricsServlet(PrometheusProperties config) { + this(config, PrometheusRegistry.defaultRegistry); + } - public PrometheusMetricsServlet(PrometheusProperties config, PrometheusRegistry registry) { - this.handler = new PrometheusScrapeHandler(config, registry); - } + public PrometheusMetricsServlet(PrometheusProperties config, PrometheusRegistry registry) { + this.handler = new PrometheusScrapeHandler(config, registry); + } - @Override - protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { - handler.handleRequest(new HttpExchangeAdapter(request, response)); - } + @Override + protected void doGet(HttpServletRequest request, HttpServletResponse response) + throws IOException { + handler.handleRequest(new HttpExchangeAdapter(request, response)); + } } diff --git a/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/HttpExchangeAdapter.java b/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/HttpExchangeAdapter.java index 0073db368..939b4d259 100644 --- a/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/HttpExchangeAdapter.java +++ b/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/HttpExchangeAdapter.java @@ -3,151 +3,143 @@ import io.prometheus.metrics.exporter.common.PrometheusHttpExchange; import io.prometheus.metrics.exporter.common.PrometheusHttpRequest; import io.prometheus.metrics.exporter.common.PrometheusHttpResponse; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.OutputStream; import java.util.Enumeration; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; /** * This class is an adapter for HTTP exchanges, implementing the PrometheusHttpExchange interface. - * It wraps HttpServletRequest and HttpServletResponse objects into Request and Response inner classes. + * It wraps HttpServletRequest and HttpServletResponse objects into Request and Response inner + * classes. */ public class HttpExchangeAdapter implements PrometheusHttpExchange { - private final Request request; - private final Response response; + private final Request request; + private final Response response; + + /** + * Constructs a new HttpExchangeAdapter with the given HttpServletRequest and HttpServletResponse. 
+ * + * @param request the HttpServletRequest to be adapted + * @param response the HttpServletResponse to be adapted + */ + public HttpExchangeAdapter(HttpServletRequest request, HttpServletResponse response) { + this.request = new Request(request); + this.response = new Response(response); + } + + /** + * Returns the adapted HttpServletRequest. + * + * @return the adapted HttpServletRequest + */ + @Override + public PrometheusHttpRequest getRequest() { + return request; + } + + /** + * Returns the adapted HttpServletResponse. + * + * @return the adapted HttpServletResponse + */ + @Override + public PrometheusHttpResponse getResponse() { + return response; + } + + @Override + public void handleException(IOException e) throws IOException { + throw e; // leave exception handling to the servlet container + } + + @Override + public void handleException(RuntimeException e) { + throw e; // leave exception handling to the servlet container + } + + @Override + public void close() { + // nothing to do for Servlets. + } + + /** This inner class adapts a HttpServletRequest to a PrometheusHttpRequest. */ + public static class Request implements PrometheusHttpRequest { + + private final HttpServletRequest request; /** - * Constructs a new HttpExchangeAdapter with the given HttpServletRequest and HttpServletResponse. + * Constructs a new Request with the given HttpServletRequest. * - * @param request the HttpServletRequest to be adapted - * @param response the HttpServletResponse to be adapted + * @param request the HttpServletRequest to be adapted */ - public HttpExchangeAdapter(HttpServletRequest request, HttpServletResponse response) { - this.request = new Request(request); - this.response = new Response(response); + public Request(HttpServletRequest request) { + this.request = request; } - /** - * Returns the adapted HttpServletRequest. - * - * @return the adapted HttpServletRequest - */ @Override - public PrometheusHttpRequest getRequest() { - return request; + public String getQueryString() { + return request.getQueryString(); } - /** - * Returns the adapted HttpServletResponse. - * - * @return the adapted HttpServletResponse - */ @Override - public PrometheusHttpResponse getResponse() { - return response; + public Enumeration getHeaders(String name) { + return request.getHeaders(name); } @Override - public void handleException(IOException e) throws IOException { - throw e; // leave exception handling to the servlet container + public String getMethod() { + return request.getMethod(); } @Override - public void handleException(RuntimeException e) { - throw e; // leave exception handling to the servlet container + public String getRequestPath() { + StringBuilder uri = new StringBuilder(); + String contextPath = request.getContextPath(); + if (contextPath.startsWith("/")) { + uri.append(contextPath); + } + String servletPath = request.getServletPath(); + if (servletPath.startsWith("/")) { + uri.append(servletPath); + } + String pathInfo = request.getPathInfo(); + if (pathInfo != null) { + uri.append(pathInfo); + } + return uri.toString(); } + } - @Override - public void close() { - // nothing to do for Servlets. - } + /** This inner class adapts a HttpServletResponse to a PrometheusHttpResponse. */ + public static class Response implements PrometheusHttpResponse { + + private final HttpServletResponse response; /** - * This inner class adapts a HttpServletRequest to a PrometheusHttpRequest. + * Constructs a new Response with the given HttpServletResponse. 
+ * + * @param response the HttpServletResponse to be adapted */ - public static class Request implements PrometheusHttpRequest { - - private final HttpServletRequest request; - - /** - * Constructs a new Request with the given HttpServletRequest. - * - * @param request the HttpServletRequest to be adapted - */ - public Request(HttpServletRequest request) { - this.request = request; - } - - @Override - public String getQueryString() { - return request.getQueryString(); - } - - - @Override - public Enumeration getHeaders(String name) { - return request.getHeaders(name); - } - - - @Override - public String getMethod() { - return request.getMethod(); - } - - - @Override - public String getRequestPath() { - StringBuilder uri = new StringBuilder(); - String contextPath = request.getContextPath(); - if (contextPath.startsWith("/")) { - uri.append(contextPath); - } - String servletPath = request.getServletPath(); - if (servletPath.startsWith("/")) { - uri.append(servletPath); - } - String pathInfo = request.getPathInfo(); - if (pathInfo != null) { - uri.append(pathInfo); - } - return uri.toString(); - } + public Response(HttpServletResponse response) { + this.response = response; } - /** - * This inner class adapts a HttpServletResponse to a PrometheusHttpResponse. - */ - public static class Response implements PrometheusHttpResponse { - - private final HttpServletResponse response; - - /** - * Constructs a new Response with the given HttpServletResponse. - * - * @param response the HttpServletResponse to be adapted - */ - public Response(HttpServletResponse response) { - this.response = response; - } - - - @Override - public void setHeader(String name, String value) { - response.setHeader(name, value); - } - - - @Override - public OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) throws IOException { - if (response.getHeader("Content-Length") == null && contentLength > 0) { - response.setContentLength(contentLength); - } - response.setStatus(statusCode); - return response.getOutputStream(); - } + @Override + public void setHeader(String name, String value) { + response.setHeader(name, value); + } + + @Override + public OutputStream sendHeadersAndGetBody(int statusCode, int contentLength) + throws IOException { + if (response.getHeader("Content-Length") == null && contentLength > 0) { + response.setContentLength(contentLength); + } + response.setStatus(statusCode); + return response.getOutputStream(); } -} \ No newline at end of file + } +} diff --git a/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/PrometheusMetricsServlet.java b/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/PrometheusMetricsServlet.java index 88564d118..fe81fe96b 100644 --- a/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/PrometheusMetricsServlet.java +++ b/prometheus-metrics-exporter-servlet-javax/src/main/java/io/prometheus/metrics/exporter/servlet/javax/PrometheusMetricsServlet.java @@ -3,65 +3,64 @@ import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.exporter.common.PrometheusScrapeHandler; import io.prometheus.metrics.model.registry.PrometheusRegistry; - +import java.io.IOException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.io.IOException; /** - * This class extends HttpServlet to create a servlet for 
exporting Prometheus metrics. - * It uses a PrometheusScrapeHandler to handle HTTP GET requests and export metrics. - * The servlet can be configured with custom PrometheusProperties and a PrometheusRegistry. + * This class extends HttpServlet to create a servlet for exporting Prometheus metrics. It uses a + * PrometheusScrapeHandler to handle HTTP GET requests and export metrics. The servlet can be + * configured with custom PrometheusProperties and a PrometheusRegistry. */ public class PrometheusMetricsServlet extends HttpServlet { - private final PrometheusScrapeHandler handler; + private final PrometheusScrapeHandler handler; - /** - * Default constructor. Uses the default PrometheusProperties and PrometheusRegistry. - */ - public PrometheusMetricsServlet() { - this(PrometheusProperties.get(), PrometheusRegistry.defaultRegistry); - } + /** Default constructor. Uses the default PrometheusProperties and PrometheusRegistry. */ + public PrometheusMetricsServlet() { + this(PrometheusProperties.get(), PrometheusRegistry.defaultRegistry); + } - /** - * Constructor with a custom PrometheusRegistry. Uses the default PrometheusProperties. - * - * @param registry the PrometheusRegistry to use - */ - public PrometheusMetricsServlet(PrometheusRegistry registry) { - this(PrometheusProperties.get(), registry); - } + /** + * Constructor with a custom PrometheusRegistry. Uses the default PrometheusProperties. + * + * @param registry the PrometheusRegistry to use + */ + public PrometheusMetricsServlet(PrometheusRegistry registry) { + this(PrometheusProperties.get(), registry); + } - /** - * Constructor with custom PrometheusProperties. Uses the default PrometheusRegistry. - * - * @param config the PrometheusProperties to use - */ - public PrometheusMetricsServlet(PrometheusProperties config) { - this(config, PrometheusRegistry.defaultRegistry); - } + /** + * Constructor with custom PrometheusProperties. Uses the default PrometheusRegistry. + * + * @param config the PrometheusProperties to use + */ + public PrometheusMetricsServlet(PrometheusProperties config) { + this(config, PrometheusRegistry.defaultRegistry); + } - /** - * Constructor with custom PrometheusProperties and PrometheusRegistry. - * - * @param config the PrometheusProperties to use - * @param registry the PrometheusRegistry to use - */ - public PrometheusMetricsServlet(PrometheusProperties config, PrometheusRegistry registry) { - this.handler = new PrometheusScrapeHandler(config, registry); - } + /** + * Constructor with custom PrometheusProperties and PrometheusRegistry. + * + * @param config the PrometheusProperties to use + * @param registry the PrometheusRegistry to use + */ + public PrometheusMetricsServlet(PrometheusProperties config, PrometheusRegistry registry) { + this.handler = new PrometheusScrapeHandler(config, registry); + } - /** - * Handles HTTP GET requests. Exports Prometheus metrics by delegating to the PrometheusScrapeHandler. - * - * @param request the HttpServletRequest - * @param response the HttpServletResponse - * @throws IOException if an I/O error occurs - */ - @Override - protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { - handler.handleRequest(new HttpExchangeAdapter(request, response)); - } -} \ No newline at end of file + /** + * Handles HTTP GET requests. Exports Prometheus metrics by delegating to the + * PrometheusScrapeHandler. 
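The servlet only needs to be mapped to a scrape path. A minimal programmatic registration sketch using the standard Servlet 3.0 API follows; the listener class name, the servlet name "prometheus-metrics", and the /metrics mapping are assumptions for illustration, not part of this change:

import io.prometheus.metrics.exporter.servlet.javax.PrometheusMetricsServlet;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.ServletRegistration;
import javax.servlet.annotation.WebListener;

@WebListener
public class MetricsServletInitializer implements ServletContextListener {

  @Override
  public void contextInitialized(ServletContextEvent sce) {
    // Register the exporter servlet and expose it on /metrics (path chosen for illustration).
    ServletRegistration.Dynamic registration =
        sce.getServletContext().addServlet("prometheus-metrics", new PrometheusMetricsServlet());
    registration.addMapping("/metrics");
  }

  @Override
  public void contextDestroyed(ServletContextEvent sce) {
    // nothing to clean up
  }
}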
+ * + * @param request the HttpServletRequest + * @param response the HttpServletResponse + * @throws IOException if an I/O error occurs + */ + @Override + protected void doGet(HttpServletRequest request, HttpServletResponse response) + throws IOException { + handler.handleRequest(new HttpExchangeAdapter(request, response)); + } +} diff --git a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormatWriter.java b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormatWriter.java index 3db354c56..e693e0c15 100644 --- a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormatWriter.java +++ b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormatWriter.java @@ -1,16 +1,14 @@ package io.prometheus.metrics.expositionformats; import io.prometheus.metrics.model.snapshots.MetricSnapshots; - import java.io.IOException; import java.io.OutputStream; public interface ExpositionFormatWriter { - boolean accepts(String acceptHeader); + boolean accepts(String acceptHeader); + + /** Text formats use UTF-8 encoding. */ + void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException; - /** - * Text formats use UTF-8 encoding. - */ - void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException; - String getContentType(); + String getContentType(); } diff --git a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormats.java b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormats.java index f913753cd..01ea9ef40 100644 --- a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormats.java +++ b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ExpositionFormats.java @@ -5,49 +5,50 @@ public class ExpositionFormats { - private final PrometheusProtobufWriter prometheusProtobufWriter; - private final PrometheusTextFormatWriter prometheusTextFormatWriter; - private final OpenMetricsTextFormatWriter openMetricsTextFormatWriter; - - private ExpositionFormats(PrometheusProtobufWriter prometheusProtobufWriter, - PrometheusTextFormatWriter prometheusTextFormatWriter, - OpenMetricsTextFormatWriter openMetricsTextFormatWriter) { - this.prometheusProtobufWriter = prometheusProtobufWriter; - this.prometheusTextFormatWriter = prometheusTextFormatWriter; - this.openMetricsTextFormatWriter = openMetricsTextFormatWriter; + private final PrometheusProtobufWriter prometheusProtobufWriter; + private final PrometheusTextFormatWriter prometheusTextFormatWriter; + private final OpenMetricsTextFormatWriter openMetricsTextFormatWriter; + + private ExpositionFormats( + PrometheusProtobufWriter prometheusProtobufWriter, + PrometheusTextFormatWriter prometheusTextFormatWriter, + OpenMetricsTextFormatWriter openMetricsTextFormatWriter) { + this.prometheusProtobufWriter = prometheusProtobufWriter; + this.prometheusTextFormatWriter = prometheusTextFormatWriter; + this.openMetricsTextFormatWriter = openMetricsTextFormatWriter; + } + + public static ExpositionFormats init() { + return init(PrometheusProperties.get().getExporterProperties()); + } + + public static ExpositionFormats init(ExporterProperties properties) { + return new ExpositionFormats( + new PrometheusProtobufWriter(), + new 
PrometheusTextFormatWriter(properties.getIncludeCreatedTimestamps()), + new OpenMetricsTextFormatWriter( + properties.getIncludeCreatedTimestamps(), properties.getExemplarsOnAllMetricTypes())); + } + + public ExpositionFormatWriter findWriter(String acceptHeader) { + if (prometheusProtobufWriter.accepts(acceptHeader)) { + return prometheusProtobufWriter; } - - public static ExpositionFormats init() { - return init(PrometheusProperties.get().getExporterProperties()); - } - - public static ExpositionFormats init(ExporterProperties properties) { - return new ExpositionFormats( - new PrometheusProtobufWriter(), - new PrometheusTextFormatWriter(properties.getIncludeCreatedTimestamps()), - new OpenMetricsTextFormatWriter(properties.getIncludeCreatedTimestamps(), properties.getExemplarsOnAllMetricTypes()) - ); - } - - public ExpositionFormatWriter findWriter(String acceptHeader) { - if (prometheusProtobufWriter.accepts(acceptHeader)) { - return prometheusProtobufWriter; - } - if (openMetricsTextFormatWriter.accepts(acceptHeader)) { - return openMetricsTextFormatWriter; - } - return prometheusTextFormatWriter; + if (openMetricsTextFormatWriter.accepts(acceptHeader)) { + return openMetricsTextFormatWriter; } + return prometheusTextFormatWriter; + } - public PrometheusProtobufWriter getPrometheusProtobufWriter() { - return prometheusProtobufWriter; - } + public PrometheusProtobufWriter getPrometheusProtobufWriter() { + return prometheusProtobufWriter; + } - public PrometheusTextFormatWriter getPrometheusTextFormatWriter() { - return prometheusTextFormatWriter; - } + public PrometheusTextFormatWriter getPrometheusTextFormatWriter() { + return prometheusTextFormatWriter; + } - public OpenMetricsTextFormatWriter getOpenMetricsTextFormatWriter() { - return openMetricsTextFormatWriter; - } + public OpenMetricsTextFormatWriter getOpenMetricsTextFormatWriter() { + return openMetricsTextFormatWriter; + } } diff --git a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/OpenMetricsTextFormatWriter.java b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/OpenMetricsTextFormatWriter.java index dd92391ff..00ac6b9fc 100644 --- a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/OpenMetricsTextFormatWriter.java +++ b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/OpenMetricsTextFormatWriter.java @@ -1,5 +1,11 @@ package io.prometheus.metrics.expositionformats; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeDouble; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeEscapedLabelValue; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLabels; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLong; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeTimestamp; + import io.prometheus.metrics.model.snapshots.ClassicHistogramBuckets; import io.prometheus.metrics.model.snapshots.CounterSnapshot; import io.prometheus.metrics.model.snapshots.DataPointSnapshot; @@ -17,317 +23,356 @@ import io.prometheus.metrics.model.snapshots.StateSetSnapshot; import io.prometheus.metrics.model.snapshots.SummarySnapshot; import io.prometheus.metrics.model.snapshots.UnknownSnapshot; - import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.util.List; -import 
static io.prometheus.metrics.expositionformats.TextFormatUtil.writeDouble; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeEscapedLabelValue; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLabels; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLong; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeTimestamp; - /** - * Write the OpenMetrics text format as defined on https://openmetrics.io. + * Write the OpenMetrics text format as defined on https://openmetrics.io. */ public class OpenMetricsTextFormatWriter implements ExpositionFormatWriter { - public static final String CONTENT_TYPE = "application/openmetrics-text; version=1.0.0; charset=utf-8"; - private final boolean createdTimestampsEnabled; - private final boolean exemplarsOnAllMetricTypesEnabled; + public static final String CONTENT_TYPE = + "application/openmetrics-text; version=1.0.0; charset=utf-8"; + private final boolean createdTimestampsEnabled; + private final boolean exemplarsOnAllMetricTypesEnabled; - /** - * @param createdTimestampsEnabled defines if {@code _created} timestamps should be included in the output or not. - */ - public OpenMetricsTextFormatWriter(boolean createdTimestampsEnabled, boolean exemplarsOnAllMetricTypesEnabled) { - this.createdTimestampsEnabled = createdTimestampsEnabled; - this.exemplarsOnAllMetricTypesEnabled = exemplarsOnAllMetricTypesEnabled; - } + /** + * @param createdTimestampsEnabled defines if {@code _created} timestamps should be included in + * the output or not. + */ + public OpenMetricsTextFormatWriter( + boolean createdTimestampsEnabled, boolean exemplarsOnAllMetricTypesEnabled) { + this.createdTimestampsEnabled = createdTimestampsEnabled; + this.exemplarsOnAllMetricTypesEnabled = exemplarsOnAllMetricTypesEnabled; + } - @Override - public boolean accepts(String acceptHeader) { - if (acceptHeader == null) { - return false; - } - return acceptHeader.contains("application/openmetrics-text"); + @Override + public boolean accepts(String acceptHeader) { + if (acceptHeader == null) { + return false; } + return acceptHeader.contains("application/openmetrics-text"); + } - @Override - public String getContentType() { - return CONTENT_TYPE; - } + @Override + public String getContentType() { + return CONTENT_TYPE; + } - public void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException { - OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8); - for (MetricSnapshot snapshot : metricSnapshots) { - if (snapshot.getDataPoints().size() > 0) { - if (snapshot instanceof CounterSnapshot) { - writeCounter(writer, (CounterSnapshot) snapshot); - } else if (snapshot instanceof GaugeSnapshot) { - writeGauge(writer, (GaugeSnapshot) snapshot); - } else if (snapshot instanceof HistogramSnapshot) { - writeHistogram(writer, (HistogramSnapshot) snapshot); - } else if (snapshot instanceof SummarySnapshot) { - writeSummary(writer, (SummarySnapshot) snapshot); - } else if (snapshot instanceof InfoSnapshot) { - writeInfo(writer, (InfoSnapshot) snapshot); - } else if (snapshot instanceof StateSetSnapshot) { - writeStateSet(writer, (StateSetSnapshot) snapshot); - } else if (snapshot instanceof UnknownSnapshot) { - writeUnknown(writer, (UnknownSnapshot) snapshot); - } - } + public void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException { + OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8); + for (MetricSnapshot 
snapshot : metricSnapshots) { + if (snapshot.getDataPoints().size() > 0) { + if (snapshot instanceof CounterSnapshot) { + writeCounter(writer, (CounterSnapshot) snapshot); + } else if (snapshot instanceof GaugeSnapshot) { + writeGauge(writer, (GaugeSnapshot) snapshot); + } else if (snapshot instanceof HistogramSnapshot) { + writeHistogram(writer, (HistogramSnapshot) snapshot); + } else if (snapshot instanceof SummarySnapshot) { + writeSummary(writer, (SummarySnapshot) snapshot); + } else if (snapshot instanceof InfoSnapshot) { + writeInfo(writer, (InfoSnapshot) snapshot); + } else if (snapshot instanceof StateSetSnapshot) { + writeStateSet(writer, (StateSetSnapshot) snapshot); + } else if (snapshot instanceof UnknownSnapshot) { + writeUnknown(writer, (UnknownSnapshot) snapshot); } - writer.write("# EOF\n"); - writer.flush(); + } } + writer.write("# EOF\n"); + writer.flush(); + } - private void writeCounter(OutputStreamWriter writer, CounterSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "counter", metadata); - for (CounterSnapshot.CounterDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_total", data.getLabels()); - writeDouble(writer, data.getValue()); - writeScrapeTimestampAndExemplar(writer, data, data.getExemplar()); - writeCreated(writer, metadata, data); - } + private void writeCounter(OutputStreamWriter writer, CounterSnapshot snapshot) + throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "counter", metadata); + for (CounterSnapshot.CounterDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_total", data.getLabels()); + writeDouble(writer, data.getValue()); + writeScrapeTimestampAndExemplar(writer, data, data.getExemplar()); + writeCreated(writer, metadata, data); } + } - private void writeGauge(OutputStreamWriter writer, GaugeSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "gauge", metadata); - for (GaugeSnapshot.GaugeDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); - writeDouble(writer, data.getValue()); - if (exemplarsOnAllMetricTypesEnabled) { - writeScrapeTimestampAndExemplar(writer, data, data.getExemplar()); - } else { - writeScrapeTimestampAndExemplar(writer, data, null); - } - } + private void writeGauge(OutputStreamWriter writer, GaugeSnapshot snapshot) throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "gauge", metadata); + for (GaugeSnapshot.GaugeDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); + writeDouble(writer, data.getValue()); + if (exemplarsOnAllMetricTypesEnabled) { + writeScrapeTimestampAndExemplar(writer, data, data.getExemplar()); + } else { + writeScrapeTimestampAndExemplar(writer, data, null); + } } + } - private void writeHistogram(OutputStreamWriter writer, HistogramSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - if (snapshot.isGaugeHistogram()) { - writeMetadata(writer, "gaugehistogram", metadata); - writeClassicHistogramBuckets(writer, metadata, "_gcount", "_gsum", snapshot.getDataPoints()); - } else { - writeMetadata(writer, "histogram", metadata); - writeClassicHistogramBuckets(writer, 
metadata, "_count", "_sum", snapshot.getDataPoints()); - } - } - - private void writeClassicHistogramBuckets(OutputStreamWriter writer, MetricMetadata metadata, String countSuffix, String sumSuffix, List dataList) throws IOException { - for (HistogramSnapshot.HistogramDataPointSnapshot data : dataList) { - ClassicHistogramBuckets buckets = getClassicBuckets(data); - Exemplars exemplars = data.getExemplars(); - long cumulativeCount = 0; - for (int i = 0; i < buckets.size(); i++) { - cumulativeCount += buckets.getCount(i); - writeNameAndLabels(writer, metadata.getPrometheusName(), "_bucket", data.getLabels(), "le", buckets.getUpperBound(i)); - writeLong(writer, cumulativeCount); - Exemplar exemplar; - if (i == 0) { - exemplar = exemplars.get(Double.NEGATIVE_INFINITY, buckets.getUpperBound(i)); - } else { - exemplar = exemplars.get(buckets.getUpperBound(i - 1), buckets.getUpperBound(i)); - } - writeScrapeTimestampAndExemplar(writer, data, exemplar); - } - // In OpenMetrics format, histogram _count and _sum are either both present or both absent. - if (data.hasCount() && data.hasSum()) { - writeCountAndSum(writer, metadata, data, countSuffix, sumSuffix, exemplars); - } - writeCreated(writer, metadata, data); - } + private void writeHistogram(OutputStreamWriter writer, HistogramSnapshot snapshot) + throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + if (snapshot.isGaugeHistogram()) { + writeMetadata(writer, "gaugehistogram", metadata); + writeClassicHistogramBuckets(writer, metadata, "_gcount", "_gsum", snapshot.getDataPoints()); + } else { + writeMetadata(writer, "histogram", metadata); + writeClassicHistogramBuckets(writer, metadata, "_count", "_sum", snapshot.getDataPoints()); } + } - private ClassicHistogramBuckets getClassicBuckets(HistogramSnapshot.HistogramDataPointSnapshot data) { - if (data.getClassicBuckets().isEmpty()) { - return ClassicHistogramBuckets.of( - new double[]{Double.POSITIVE_INFINITY}, - new long[]{data.getCount()} - ); + private void writeClassicHistogramBuckets( + OutputStreamWriter writer, + MetricMetadata metadata, + String countSuffix, + String sumSuffix, + List dataList) + throws IOException { + for (HistogramSnapshot.HistogramDataPointSnapshot data : dataList) { + ClassicHistogramBuckets buckets = getClassicBuckets(data); + Exemplars exemplars = data.getExemplars(); + long cumulativeCount = 0; + for (int i = 0; i < buckets.size(); i++) { + cumulativeCount += buckets.getCount(i); + writeNameAndLabels( + writer, + metadata.getPrometheusName(), + "_bucket", + data.getLabels(), + "le", + buckets.getUpperBound(i)); + writeLong(writer, cumulativeCount); + Exemplar exemplar; + if (i == 0) { + exemplar = exemplars.get(Double.NEGATIVE_INFINITY, buckets.getUpperBound(i)); } else { - return data.getClassicBuckets(); + exemplar = exemplars.get(buckets.getUpperBound(i - 1), buckets.getUpperBound(i)); } + writeScrapeTimestampAndExemplar(writer, data, exemplar); + } + // In OpenMetrics format, histogram _count and _sum are either both present or both absent. 
+ if (data.hasCount() && data.hasSum()) { + writeCountAndSum(writer, metadata, data, countSuffix, sumSuffix, exemplars); + } + writeCreated(writer, metadata, data); } + } - private void writeSummary(OutputStreamWriter writer, SummarySnapshot snapshot) throws IOException { - boolean metadataWritten = false; - MetricMetadata metadata = snapshot.getMetadata(); - for (SummarySnapshot.SummaryDataPointSnapshot data : snapshot.getDataPoints()) { - if (data.getQuantiles().size() == 0 && !data.hasCount() && !data.hasSum()) { - continue; - } - if (!metadataWritten) { - writeMetadata(writer, "summary", metadata); - metadataWritten = true; - } - Exemplars exemplars = data.getExemplars(); - // Exemplars for summaries are new, and there's no best practice yet which Exemplars to choose for which - // time series. We select exemplars[0] for _count, exemplars[1] for _sum, and exemplars[2...] for the - // quantiles, all indexes modulo exemplars.length. - int exemplarIndex = 1; - for (Quantile quantile : data.getQuantiles()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels(), "quantile", quantile.getQuantile()); - writeDouble(writer, quantile.getValue()); - if (exemplars.size() > 0 && exemplarsOnAllMetricTypesEnabled) { - exemplarIndex = (exemplarIndex + 1) % exemplars.size(); - writeScrapeTimestampAndExemplar(writer, data, exemplars.get(exemplarIndex)); - } else { - writeScrapeTimestampAndExemplar(writer, data, null); - } - } - // Unlike histograms, summaries can have only a count or only a sum according to OpenMetrics. - writeCountAndSum(writer, metadata, data, "_count", "_sum", exemplars); - writeCreated(writer, metadata, data); - } + private ClassicHistogramBuckets getClassicBuckets( + HistogramSnapshot.HistogramDataPointSnapshot data) { + if (data.getClassicBuckets().isEmpty()) { + return ClassicHistogramBuckets.of( + new double[] {Double.POSITIVE_INFINITY}, new long[] {data.getCount()}); + } else { + return data.getClassicBuckets(); } + } - private void writeInfo(OutputStreamWriter writer, InfoSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "info", metadata); - for (InfoSnapshot.InfoDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_info", data.getLabels()); - writer.write("1"); - writeScrapeTimestampAndExemplar(writer, data, null); + private void writeSummary(OutputStreamWriter writer, SummarySnapshot snapshot) + throws IOException { + boolean metadataWritten = false; + MetricMetadata metadata = snapshot.getMetadata(); + for (SummarySnapshot.SummaryDataPointSnapshot data : snapshot.getDataPoints()) { + if (data.getQuantiles().size() == 0 && !data.hasCount() && !data.hasSum()) { + continue; + } + if (!metadataWritten) { + writeMetadata(writer, "summary", metadata); + metadataWritten = true; + } + Exemplars exemplars = data.getExemplars(); + // Exemplars for summaries are new, and there's no best practice yet which Exemplars to choose + // for which + // time series. We select exemplars[0] for _count, exemplars[1] for _sum, and exemplars[2...] + // for the + // quantiles, all indexes modulo exemplars.length. 
+ int exemplarIndex = 1; + for (Quantile quantile : data.getQuantiles()) { + writeNameAndLabels( + writer, + metadata.getPrometheusName(), + null, + data.getLabels(), + "quantile", + quantile.getQuantile()); + writeDouble(writer, quantile.getValue()); + if (exemplars.size() > 0 && exemplarsOnAllMetricTypesEnabled) { + exemplarIndex = (exemplarIndex + 1) % exemplars.size(); + writeScrapeTimestampAndExemplar(writer, data, exemplars.get(exemplarIndex)); + } else { + writeScrapeTimestampAndExemplar(writer, data, null); } + } + // Unlike histograms, summaries can have only a count or only a sum according to OpenMetrics. + writeCountAndSum(writer, metadata, data, "_count", "_sum", exemplars); + writeCreated(writer, metadata, data); } + } - private void writeStateSet(OutputStreamWriter writer, StateSetSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "stateset", metadata); - for (StateSetSnapshot.StateSetDataPointSnapshot data : snapshot.getDataPoints()) { - for (int i = 0; i < data.size(); i++) { - writer.write(metadata.getPrometheusName()); - writer.write('{'); - for (int j = 0; j < data.getLabels().size(); j++) { - if (j > 0) { - writer.write(","); - } - writer.write(data.getLabels().getPrometheusName(j)); - writer.write("=\""); - writeEscapedLabelValue(writer, data.getLabels().getValue(j)); - writer.write("\""); - } - if (!data.getLabels().isEmpty()) { - writer.write(","); - } - writer.write(metadata.getPrometheusName()); - writer.write("=\""); - writeEscapedLabelValue(writer, data.getName(i)); - writer.write("\"} "); - if (data.isTrue(i)) { - writer.write("1"); - } else { - writer.write("0"); - } - writeScrapeTimestampAndExemplar(writer, data, null); - } - } + private void writeInfo(OutputStreamWriter writer, InfoSnapshot snapshot) throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "info", metadata); + for (InfoSnapshot.InfoDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_info", data.getLabels()); + writer.write("1"); + writeScrapeTimestampAndExemplar(writer, data, null); } + } - private void writeUnknown(OutputStreamWriter writer, UnknownSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "unknown", metadata); - for (UnknownSnapshot.UnknownDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); - writeDouble(writer, data.getValue()); - if (exemplarsOnAllMetricTypesEnabled) { - writeScrapeTimestampAndExemplar(writer, data, data.getExemplar()); - } else { - writeScrapeTimestampAndExemplar(writer, data, null); - } + private void writeStateSet(OutputStreamWriter writer, StateSetSnapshot snapshot) + throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "stateset", metadata); + for (StateSetSnapshot.StateSetDataPointSnapshot data : snapshot.getDataPoints()) { + for (int i = 0; i < data.size(); i++) { + writer.write(metadata.getPrometheusName()); + writer.write('{'); + for (int j = 0; j < data.getLabels().size(); j++) { + if (j > 0) { + writer.write(","); + } + writer.write(data.getLabels().getPrometheusName(j)); + writer.write("=\""); + writeEscapedLabelValue(writer, data.getLabels().getValue(j)); + writer.write("\""); } - } - - private void writeCountAndSum(OutputStreamWriter writer, MetricMetadata metadata, DistributionDataPointSnapshot 
data, String countSuffix, String sumSuffix, Exemplars exemplars) throws IOException { - if (data.hasCount()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), countSuffix, data.getLabels()); - writeLong(writer, data.getCount()); - if (exemplarsOnAllMetricTypesEnabled) { - writeScrapeTimestampAndExemplar(writer, data, exemplars.getLatest()); - } else { - writeScrapeTimestampAndExemplar(writer, data, null); - } + if (!data.getLabels().isEmpty()) { + writer.write(","); } - if (data.hasSum()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), sumSuffix, data.getLabels()); - writeDouble(writer, data.getSum()); - writeScrapeTimestampAndExemplar(writer, data, null); + writer.write(metadata.getPrometheusName()); + writer.write("=\""); + writeEscapedLabelValue(writer, data.getName(i)); + writer.write("\"} "); + if (data.isTrue(i)) { + writer.write("1"); + } else { + writer.write("0"); } + writeScrapeTimestampAndExemplar(writer, data, null); + } } + } - private void writeCreated(OutputStreamWriter writer, MetricMetadata metadata, DataPointSnapshot data) throws IOException { - if (createdTimestampsEnabled && data.hasCreatedTimestamp()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_created", data.getLabels()); - writeTimestamp(writer, data.getCreatedTimestampMillis()); - if (data.hasScrapeTimestamp()) { - writer.write(' '); - writeTimestamp(writer, data.getScrapeTimestampMillis()); - } - writer.write('\n'); - } + private void writeUnknown(OutputStreamWriter writer, UnknownSnapshot snapshot) + throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "unknown", metadata); + for (UnknownSnapshot.UnknownDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); + writeDouble(writer, data.getValue()); + if (exemplarsOnAllMetricTypesEnabled) { + writeScrapeTimestampAndExemplar(writer, data, data.getExemplar()); + } else { + writeScrapeTimestampAndExemplar(writer, data, null); + } } + } - private void writeNameAndLabels(OutputStreamWriter writer, String name, String suffix, Labels labels) throws IOException { - writeNameAndLabels(writer, name, suffix, labels, null, 0.0); + private void writeCountAndSum( + OutputStreamWriter writer, + MetricMetadata metadata, + DistributionDataPointSnapshot data, + String countSuffix, + String sumSuffix, + Exemplars exemplars) + throws IOException { + if (data.hasCount()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), countSuffix, data.getLabels()); + writeLong(writer, data.getCount()); + if (exemplarsOnAllMetricTypesEnabled) { + writeScrapeTimestampAndExemplar(writer, data, exemplars.getLatest()); + } else { + writeScrapeTimestampAndExemplar(writer, data, null); + } } + if (data.hasSum()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), sumSuffix, data.getLabels()); + writeDouble(writer, data.getSum()); + writeScrapeTimestampAndExemplar(writer, data, null); + } + } - private void writeNameAndLabels(OutputStreamWriter writer, String name, String suffix, Labels labels, - String additionalLabelName, double additionalLabelValue) throws IOException { - writer.write(name); - if (suffix != null) { - writer.write(suffix); - } - if (!labels.isEmpty() || additionalLabelName != null) { - writeLabels(writer, labels, additionalLabelName, additionalLabelValue); - } + private void writeCreated( + OutputStreamWriter writer, MetricMetadata metadata, DataPointSnapshot data) + throws IOException { + if 
(createdTimestampsEnabled && data.hasCreatedTimestamp()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_created", data.getLabels()); + writeTimestamp(writer, data.getCreatedTimestampMillis()); + if (data.hasScrapeTimestamp()) { writer.write(' '); + writeTimestamp(writer, data.getScrapeTimestampMillis()); + } + writer.write('\n'); } + } - private void writeScrapeTimestampAndExemplar(OutputStreamWriter writer, DataPointSnapshot data, Exemplar exemplar) throws IOException { - if (data.hasScrapeTimestamp()) { - writer.write(' '); - writeTimestamp(writer, data.getScrapeTimestampMillis()); - } - if (exemplar != null) { - writer.write(" # "); - writeLabels(writer, exemplar.getLabels(), null, 0); - writer.write(' '); - writeDouble(writer, exemplar.getValue()); - if (exemplar.hasTimestamp()) { - writer.write(' '); - writeTimestamp(writer, exemplar.getTimestampMillis()); - } - } - writer.write('\n'); + private void writeNameAndLabels( + OutputStreamWriter writer, String name, String suffix, Labels labels) throws IOException { + writeNameAndLabels(writer, name, suffix, labels, null, 0.0); + } + + private void writeNameAndLabels( + OutputStreamWriter writer, + String name, + String suffix, + Labels labels, + String additionalLabelName, + double additionalLabelValue) + throws IOException { + writer.write(name); + if (suffix != null) { + writer.write(suffix); } + if (!labels.isEmpty() || additionalLabelName != null) { + writeLabels(writer, labels, additionalLabelName, additionalLabelValue); + } + writer.write(' '); + } - private void writeMetadata(OutputStreamWriter writer, String typeName, MetricMetadata metadata) throws IOException { - writer.write("# TYPE "); - writer.write(metadata.getPrometheusName()); + private void writeScrapeTimestampAndExemplar( + OutputStreamWriter writer, DataPointSnapshot data, Exemplar exemplar) throws IOException { + if (data.hasScrapeTimestamp()) { + writer.write(' '); + writeTimestamp(writer, data.getScrapeTimestampMillis()); + } + if (exemplar != null) { + writer.write(" # "); + writeLabels(writer, exemplar.getLabels(), null, 0); + writer.write(' '); + writeDouble(writer, exemplar.getValue()); + if (exemplar.hasTimestamp()) { writer.write(' '); - writer.write(typeName); - writer.write('\n'); - if (metadata.getUnit() != null) { - writer.write("# UNIT "); - writer.write(metadata.getPrometheusName()); - writer.write(' '); - writeEscapedLabelValue(writer, metadata.getUnit().toString()); - writer.write('\n'); - } - if (metadata.getHelp() != null && !metadata.getHelp().isEmpty()) { - writer.write("# HELP "); - writer.write(metadata.getPrometheusName()); - writer.write(' '); - writeEscapedLabelValue(writer, metadata.getHelp()); - writer.write('\n'); - } + writeTimestamp(writer, exemplar.getTimestampMillis()); + } + } + writer.write('\n'); + } + + private void writeMetadata(OutputStreamWriter writer, String typeName, MetricMetadata metadata) + throws IOException { + writer.write("# TYPE "); + writer.write(metadata.getPrometheusName()); + writer.write(' '); + writer.write(typeName); + writer.write('\n'); + if (metadata.getUnit() != null) { + writer.write("# UNIT "); + writer.write(metadata.getPrometheusName()); + writer.write(' '); + writeEscapedLabelValue(writer, metadata.getUnit().toString()); + writer.write('\n'); + } + if (metadata.getHelp() != null && !metadata.getHelp().isEmpty()) { + writer.write("# HELP "); + writer.write(metadata.getPrometheusName()); + writer.write(' '); + writeEscapedLabelValue(writer, metadata.getHelp()); + writer.write('\n'); } + } } 
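The exposition writers reformatted above are usually not instantiated directly; they are selected through ExpositionFormats, whose findWriter() prefers the protobuf writer when the Accept header asks for it, then the OpenMetrics text writer, and falls back to the Prometheus text writer. Below is a minimal sketch of that flow using only the API surface visible in this diff (ExpositionFormats.init(), findWriter(), getContentType(), write()); the PrometheusRegistry.scrape() call that produces the MetricSnapshots is an assumption about the surrounding metrics-core API, and the Accept header value is purely illustrative.

import io.prometheus.metrics.expositionformats.ExpositionFormatWriter;
import io.prometheus.metrics.expositionformats.ExpositionFormats;
import io.prometheus.metrics.model.registry.PrometheusRegistry;
import io.prometheus.metrics.model.snapshots.MetricSnapshots;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class ExpositionFormatExample {

  public static void main(String[] args) throws IOException {
    // Accept header as a scraper might send it; hypothetical value for illustration.
    String acceptHeader = "application/openmetrics-text; version=1.0.0; charset=utf-8";

    // Content negotiation: protobuf > OpenMetrics text > Prometheus text (the fallback).
    ExpositionFormatWriter writer = ExpositionFormats.init().findWriter(acceptHeader);

    // Assumption: scrape() snapshots all collectors registered with the default registry.
    MetricSnapshots snapshots = PrometheusRegistry.defaultRegistry.scrape();

    ByteArrayOutputStream body = new ByteArrayOutputStream();
    writer.write(body, snapshots);

    System.out.println("Content-Type: " + writer.getContentType());
    System.out.println(body.toString("UTF-8"));
  }
}

A real exporter would copy writer.getContentType() into the HTTP response's Content-Type header before streaming the body, which is roughly what the PrometheusScrapeHandler behind PrometheusMetricsServlet does for each GET request.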
diff --git a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusProtobufWriter.java b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusProtobufWriter.java index e0103059a..1c1535de3 100644 --- a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusProtobufWriter.java +++ b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusProtobufWriter.java @@ -1,16 +1,17 @@ package io.prometheus.metrics.expositionformats; -import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat; +import static io.prometheus.metrics.expositionformats.ProtobufUtil.timestampFromMillis; + import io.prometheus.metrics.expositionformats.generated.com_google_protobuf_3_25_3.Metrics; import io.prometheus.metrics.model.snapshots.ClassicHistogramBuckets; import io.prometheus.metrics.model.snapshots.CounterSnapshot; import io.prometheus.metrics.model.snapshots.CounterSnapshot.CounterDataPointSnapshot; +import io.prometheus.metrics.model.snapshots.DataPointSnapshot; import io.prometheus.metrics.model.snapshots.Exemplar; import io.prometheus.metrics.model.snapshots.GaugeSnapshot; import io.prometheus.metrics.model.snapshots.HistogramSnapshot; import io.prometheus.metrics.model.snapshots.InfoSnapshot; import io.prometheus.metrics.model.snapshots.Labels; -import io.prometheus.metrics.model.snapshots.DataPointSnapshot; import io.prometheus.metrics.model.snapshots.MetricMetadata; import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.MetricSnapshots; @@ -19,348 +20,362 @@ import io.prometheus.metrics.model.snapshots.StateSetSnapshot; import io.prometheus.metrics.model.snapshots.SummarySnapshot; import io.prometheus.metrics.model.snapshots.UnknownSnapshot; - +import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat; import java.io.IOException; import java.io.OutputStream; -import static io.prometheus.metrics.expositionformats.ProtobufUtil.timestampFromMillis; - /** - * Write the Prometheus protobuf format as defined in - * github.com/prometheus/client_model. - *

- * As of today, this is the only exposition format that supports native histograms. + * Write the Prometheus protobuf format as defined in github.com/prometheus/client_model. + * + *
As of today, this is the only exposition format that supports native histograms. */ public class PrometheusProtobufWriter implements ExpositionFormatWriter { - public static final String CONTENT_TYPE = "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited"; + public static final String CONTENT_TYPE = + "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited"; - @Override - public boolean accepts(String acceptHeader) { - if (acceptHeader == null) { - return false; - } else { - return acceptHeader.contains("application/vnd.google.protobuf") - && acceptHeader.contains("proto=io.prometheus.client.MetricFamily"); - } + @Override + public boolean accepts(String acceptHeader) { + if (acceptHeader == null) { + return false; + } else { + return acceptHeader.contains("application/vnd.google.protobuf") + && acceptHeader.contains("proto=io.prometheus.client.MetricFamily"); } + } - @Override - public String getContentType() { - return CONTENT_TYPE; - } + @Override + public String getContentType() { + return CONTENT_TYPE; + } - public String toDebugString(MetricSnapshots metricSnapshots) { - StringBuilder stringBuilder = new StringBuilder(); - for (MetricSnapshot snapshot : metricSnapshots) { - if (snapshot.getDataPoints().size() > 0) { - stringBuilder.append(TextFormat.printer().printToString(convert(snapshot))); - } - } - return stringBuilder.toString(); + public String toDebugString(MetricSnapshots metricSnapshots) { + StringBuilder stringBuilder = new StringBuilder(); + for (MetricSnapshot snapshot : metricSnapshots) { + if (snapshot.getDataPoints().size() > 0) { + stringBuilder.append(TextFormat.printer().printToString(convert(snapshot))); + } } + return stringBuilder.toString(); + } - @Override - public void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException { - for (MetricSnapshot snapshot : metricSnapshots) { - if (snapshot.getDataPoints().size() > 0) { - convert(snapshot).writeDelimitedTo(out); - } - } - } - - public Metrics.MetricFamily convert(MetricSnapshot snapshot) { - Metrics.MetricFamily.Builder builder = Metrics.MetricFamily.newBuilder(); - if (snapshot instanceof CounterSnapshot) { - for (CounterDataPointSnapshot data : ((CounterSnapshot) snapshot).getDataPoints()) { - builder.addMetric(convert(data)); - } - setMetadataUnlessEmpty(builder, snapshot.getMetadata(), "_total", Metrics.MetricType.COUNTER); - } else if (snapshot instanceof GaugeSnapshot) { - for (GaugeSnapshot.GaugeDataPointSnapshot data : ((GaugeSnapshot) snapshot).getDataPoints()) { - builder.addMetric(convert(data)); - } - setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.GAUGE); - } else if (snapshot instanceof HistogramSnapshot) { - HistogramSnapshot histogram = (HistogramSnapshot) snapshot; - for (HistogramSnapshot.HistogramDataPointSnapshot data : histogram.getDataPoints()) { - builder.addMetric(convert(data)); - } - Metrics.MetricType type = histogram.isGaugeHistogram() ? 
Metrics.MetricType.GAUGE_HISTOGRAM : Metrics.MetricType.HISTOGRAM; - setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, type); - } else if (snapshot instanceof SummarySnapshot) { - for (SummarySnapshot.SummaryDataPointSnapshot data : ((SummarySnapshot) snapshot).getDataPoints()) { - if (data.hasCount() || data.hasSum() || data.getQuantiles().size() > 0) { - builder.addMetric(convert(data)); - } - } - setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.SUMMARY); - } else if (snapshot instanceof InfoSnapshot) { - for (InfoSnapshot.InfoDataPointSnapshot data : ((InfoSnapshot) snapshot).getDataPoints()) { - builder.addMetric(convert(data)); - } - setMetadataUnlessEmpty(builder, snapshot.getMetadata(), "_info", Metrics.MetricType.GAUGE); - } else if (snapshot instanceof StateSetSnapshot) { - for (StateSetSnapshot.StateSetDataPointSnapshot data : ((StateSetSnapshot) snapshot).getDataPoints()) { - for (int i = 0; i < data.size(); i++) { - builder.addMetric(convert(data, snapshot.getMetadata().getPrometheusName(), i)); - } - } - setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.GAUGE); - } else if (snapshot instanceof UnknownSnapshot) { - for (UnknownSnapshot.UnknownDataPointSnapshot data : ((UnknownSnapshot) snapshot).getDataPoints()) { - builder.addMetric(convert(data)); - } - setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.UNTYPED); - } - return builder.build(); + @Override + public void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException { + for (MetricSnapshot snapshot : metricSnapshots) { + if (snapshot.getDataPoints().size() > 0) { + convert(snapshot).writeDelimitedTo(out); + } } + } - private void setMetadataUnlessEmpty(Metrics.MetricFamily.Builder builder, MetricMetadata metadata, String nameSuffix, Metrics.MetricType type) { - if (builder.getMetricCount() == 0) { - return; + public Metrics.MetricFamily convert(MetricSnapshot snapshot) { + Metrics.MetricFamily.Builder builder = Metrics.MetricFamily.newBuilder(); + if (snapshot instanceof CounterSnapshot) { + for (CounterDataPointSnapshot data : ((CounterSnapshot) snapshot).getDataPoints()) { + builder.addMetric(convert(data)); + } + setMetadataUnlessEmpty(builder, snapshot.getMetadata(), "_total", Metrics.MetricType.COUNTER); + } else if (snapshot instanceof GaugeSnapshot) { + for (GaugeSnapshot.GaugeDataPointSnapshot data : ((GaugeSnapshot) snapshot).getDataPoints()) { + builder.addMetric(convert(data)); + } + setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.GAUGE); + } else if (snapshot instanceof HistogramSnapshot) { + HistogramSnapshot histogram = (HistogramSnapshot) snapshot; + for (HistogramSnapshot.HistogramDataPointSnapshot data : histogram.getDataPoints()) { + builder.addMetric(convert(data)); + } + Metrics.MetricType type = + histogram.isGaugeHistogram() + ? 
Metrics.MetricType.GAUGE_HISTOGRAM + : Metrics.MetricType.HISTOGRAM; + setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, type); + } else if (snapshot instanceof SummarySnapshot) { + for (SummarySnapshot.SummaryDataPointSnapshot data : + ((SummarySnapshot) snapshot).getDataPoints()) { + if (data.hasCount() || data.hasSum() || data.getQuantiles().size() > 0) { + builder.addMetric(convert(data)); } - if (nameSuffix == null) { - builder.setName(metadata.getPrometheusName()); - } else { - builder.setName(metadata.getPrometheusName() + nameSuffix); - } - if (metadata.getHelp() != null) { - builder.setHelp(metadata.getHelp()); + } + setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.SUMMARY); + } else if (snapshot instanceof InfoSnapshot) { + for (InfoSnapshot.InfoDataPointSnapshot data : ((InfoSnapshot) snapshot).getDataPoints()) { + builder.addMetric(convert(data)); + } + setMetadataUnlessEmpty(builder, snapshot.getMetadata(), "_info", Metrics.MetricType.GAUGE); + } else if (snapshot instanceof StateSetSnapshot) { + for (StateSetSnapshot.StateSetDataPointSnapshot data : + ((StateSetSnapshot) snapshot).getDataPoints()) { + for (int i = 0; i < data.size(); i++) { + builder.addMetric(convert(data, snapshot.getMetadata().getPrometheusName(), i)); } - builder.setType(type); + } + setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.GAUGE); + } else if (snapshot instanceof UnknownSnapshot) { + for (UnknownSnapshot.UnknownDataPointSnapshot data : + ((UnknownSnapshot) snapshot).getDataPoints()) { + builder.addMetric(convert(data)); + } + setMetadataUnlessEmpty(builder, snapshot.getMetadata(), null, Metrics.MetricType.UNTYPED); } + return builder.build(); + } - private Metrics.Metric.Builder convert(CounterDataPointSnapshot data) { - Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); - Metrics.Counter.Builder counterBuilder = Metrics.Counter.newBuilder(); - counterBuilder.setValue(data.getValue()); - if (data.getExemplar() != null) { - counterBuilder.setExemplar(convert(data.getExemplar())); - } - addLabels(metricBuilder, data.getLabels()); - metricBuilder.setCounter(counterBuilder.build()); - setScrapeTimestamp(metricBuilder, data); - return metricBuilder; + private void setMetadataUnlessEmpty( + Metrics.MetricFamily.Builder builder, + MetricMetadata metadata, + String nameSuffix, + Metrics.MetricType type) { + if (builder.getMetricCount() == 0) { + return; } + if (nameSuffix == null) { + builder.setName(metadata.getPrometheusName()); + } else { + builder.setName(metadata.getPrometheusName() + nameSuffix); + } + if (metadata.getHelp() != null) { + builder.setHelp(metadata.getHelp()); + } + builder.setType(type); + } - private Metrics.Metric.Builder convert(GaugeSnapshot.GaugeDataPointSnapshot data) { - Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); - Metrics.Gauge.Builder gaugeBuilder = Metrics.Gauge.newBuilder(); - gaugeBuilder.setValue(data.getValue()); - addLabels(metricBuilder, data.getLabels()); - metricBuilder.setGauge(gaugeBuilder); - setScrapeTimestamp(metricBuilder, data); - return metricBuilder; + private Metrics.Metric.Builder convert(CounterDataPointSnapshot data) { + Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); + Metrics.Counter.Builder counterBuilder = Metrics.Counter.newBuilder(); + counterBuilder.setValue(data.getValue()); + if (data.getExemplar() != null) { + counterBuilder.setExemplar(convert(data.getExemplar())); } + addLabels(metricBuilder, 
data.getLabels()); + metricBuilder.setCounter(counterBuilder.build()); + setScrapeTimestamp(metricBuilder, data); + return metricBuilder; + } - private Metrics.Metric.Builder convert(HistogramSnapshot.HistogramDataPointSnapshot data) { - Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); - Metrics.Histogram.Builder histogramBuilder = Metrics.Histogram.newBuilder(); - if (data.hasNativeHistogramData()) { - histogramBuilder.setSchema(data.getNativeSchema()); - histogramBuilder.setZeroCount(data.getNativeZeroCount()); - histogramBuilder.setZeroThreshold(data.getNativeZeroThreshold()); - addBuckets(histogramBuilder, data.getNativeBucketsForPositiveValues(), +1); - addBuckets(histogramBuilder, data.getNativeBucketsForNegativeValues(), -1); + private Metrics.Metric.Builder convert(GaugeSnapshot.GaugeDataPointSnapshot data) { + Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); + Metrics.Gauge.Builder gaugeBuilder = Metrics.Gauge.newBuilder(); + gaugeBuilder.setValue(data.getValue()); + addLabels(metricBuilder, data.getLabels()); + metricBuilder.setGauge(gaugeBuilder); + setScrapeTimestamp(metricBuilder, data); + return metricBuilder; + } - if (!data.hasClassicHistogramData()) { // native only - // Add a single +Inf bucket for the exemplar. - Exemplar exemplar = data.getExemplars().getLatest(); - if (exemplar != null) { - Metrics.Bucket.Builder bucketBuilder = Metrics.Bucket.newBuilder() - .setCumulativeCount(getNativeCount(data)) - .setUpperBound(Double.POSITIVE_INFINITY); - bucketBuilder.setExemplar(convert(exemplar)); - histogramBuilder.addBucket(bucketBuilder); - } - } - } - if (data.hasClassicHistogramData()) { + private Metrics.Metric.Builder convert(HistogramSnapshot.HistogramDataPointSnapshot data) { + Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); + Metrics.Histogram.Builder histogramBuilder = Metrics.Histogram.newBuilder(); + if (data.hasNativeHistogramData()) { + histogramBuilder.setSchema(data.getNativeSchema()); + histogramBuilder.setZeroCount(data.getNativeZeroCount()); + histogramBuilder.setZeroThreshold(data.getNativeZeroThreshold()); + addBuckets(histogramBuilder, data.getNativeBucketsForPositiveValues(), +1); + addBuckets(histogramBuilder, data.getNativeBucketsForNegativeValues(), -1); - ClassicHistogramBuckets buckets = data.getClassicBuckets(); - double lowerBound = Double.NEGATIVE_INFINITY; - long cumulativeCount = 0; - for (int i = 0; i < buckets.size(); i++) { - cumulativeCount += buckets.getCount(i); - double upperBound = buckets.getUpperBound(i); - Metrics.Bucket.Builder bucketBuilder = Metrics.Bucket.newBuilder() - .setCumulativeCount(cumulativeCount) - .setUpperBound(upperBound); - Exemplar exemplar = data.getExemplars().get(lowerBound, upperBound); - if (exemplar != null) { - bucketBuilder.setExemplar(convert(exemplar)); - } - histogramBuilder.addBucket(bucketBuilder); - lowerBound = upperBound; - } - } - addLabels(metricBuilder, data.getLabels()); - setScrapeTimestamp(metricBuilder, data); - if (data.hasCount()) { - histogramBuilder.setSampleCount(data.getCount()); + if (!data.hasClassicHistogramData()) { // native only + // Add a single +Inf bucket for the exemplar. 
+ Exemplar exemplar = data.getExemplars().getLatest(); + if (exemplar != null) { + Metrics.Bucket.Builder bucketBuilder = + Metrics.Bucket.newBuilder() + .setCumulativeCount(getNativeCount(data)) + .setUpperBound(Double.POSITIVE_INFINITY); + bucketBuilder.setExemplar(convert(exemplar)); + histogramBuilder.addBucket(bucketBuilder); } - if (data.hasSum()) { - histogramBuilder.setSampleSum(data.getSum()); - } - metricBuilder.setHistogram(histogramBuilder.build()); - return metricBuilder; + } } + if (data.hasClassicHistogramData()) { - private long getNativeCount(HistogramSnapshot.HistogramDataPointSnapshot data) { - if (data.hasCount()) { - return data.getCount(); - } else { - long count = data.getNativeZeroCount(); - for (int i = 0; i < data.getNativeBucketsForPositiveValues().size(); i++) { - count += data.getNativeBucketsForPositiveValues().getCount(i); - } - for (int i = 0; i < data.getNativeBucketsForNegativeValues().size(); i++) { - count += data.getNativeBucketsForNegativeValues().getCount(i); - } - return count; + ClassicHistogramBuckets buckets = data.getClassicBuckets(); + double lowerBound = Double.NEGATIVE_INFINITY; + long cumulativeCount = 0; + for (int i = 0; i < buckets.size(); i++) { + cumulativeCount += buckets.getCount(i); + double upperBound = buckets.getUpperBound(i); + Metrics.Bucket.Builder bucketBuilder = + Metrics.Bucket.newBuilder() + .setCumulativeCount(cumulativeCount) + .setUpperBound(upperBound); + Exemplar exemplar = data.getExemplars().get(lowerBound, upperBound); + if (exemplar != null) { + bucketBuilder.setExemplar(convert(exemplar)); } + histogramBuilder.addBucket(bucketBuilder); + lowerBound = upperBound; + } + } + addLabels(metricBuilder, data.getLabels()); + setScrapeTimestamp(metricBuilder, data); + if (data.hasCount()) { + histogramBuilder.setSampleCount(data.getCount()); + } + if (data.hasSum()) { + histogramBuilder.setSampleSum(data.getSum()); } + metricBuilder.setHistogram(histogramBuilder.build()); + return metricBuilder; + } - private void addBuckets(Metrics.Histogram.Builder histogramBuilder, NativeHistogramBuckets buckets, int sgn) { - if (buckets.size() > 0) { - Metrics.BucketSpan.Builder currentSpan = Metrics.BucketSpan.newBuilder(); - currentSpan.setOffset(buckets.getBucketIndex(0)); - currentSpan.setLength(0); - int previousIndex = currentSpan.getOffset(); - long previousCount = 0; - for (int i = 0; i < buckets.size(); i++) { - if (buckets.getBucketIndex(i) > previousIndex + 1) { - // If the gap between bucketIndex and previousIndex is just 1 or 2, - // we don't start a new span but continue the existing span and add 1 or 2 empty buckets. 
- if (buckets.getBucketIndex(i) <= previousIndex + 3) { - while (buckets.getBucketIndex(i) > previousIndex + 1) { - currentSpan.setLength(currentSpan.getLength() + 1); - previousIndex++; - if (sgn > 0) { - histogramBuilder.addPositiveDelta(-previousCount); - } else { - histogramBuilder.addNegativeDelta(-previousCount); - } - previousCount = 0; - } - } else { - if (sgn > 0) { - histogramBuilder.addPositiveSpan(currentSpan.build()); - } else { - histogramBuilder.addNegativeSpan(currentSpan.build()); - } - currentSpan = Metrics.BucketSpan.newBuilder(); - currentSpan.setOffset(buckets.getBucketIndex(i) - (previousIndex + 1)); - } - } - currentSpan.setLength(currentSpan.getLength() + 1); - previousIndex = buckets.getBucketIndex(i); - if (sgn > 0) { - histogramBuilder.addPositiveDelta(buckets.getCount(i) - previousCount); - } else { - histogramBuilder.addNegativeDelta(buckets.getCount(i) - previousCount); - } - previousCount = buckets.getCount(i); + private long getNativeCount(HistogramSnapshot.HistogramDataPointSnapshot data) { + if (data.hasCount()) { + return data.getCount(); + } else { + long count = data.getNativeZeroCount(); + for (int i = 0; i < data.getNativeBucketsForPositiveValues().size(); i++) { + count += data.getNativeBucketsForPositiveValues().getCount(i); + } + for (int i = 0; i < data.getNativeBucketsForNegativeValues().size(); i++) { + count += data.getNativeBucketsForNegativeValues().getCount(i); + } + return count; + } + } + + private void addBuckets( + Metrics.Histogram.Builder histogramBuilder, NativeHistogramBuckets buckets, int sgn) { + if (buckets.size() > 0) { + Metrics.BucketSpan.Builder currentSpan = Metrics.BucketSpan.newBuilder(); + currentSpan.setOffset(buckets.getBucketIndex(0)); + currentSpan.setLength(0); + int previousIndex = currentSpan.getOffset(); + long previousCount = 0; + for (int i = 0; i < buckets.size(); i++) { + if (buckets.getBucketIndex(i) > previousIndex + 1) { + // If the gap between bucketIndex and previousIndex is just 1 or 2, + // we don't start a new span but continue the existing span and add 1 or 2 empty buckets. 
+ if (buckets.getBucketIndex(i) <= previousIndex + 3) { + while (buckets.getBucketIndex(i) > previousIndex + 1) { + currentSpan.setLength(currentSpan.getLength() + 1); + previousIndex++; + if (sgn > 0) { + histogramBuilder.addPositiveDelta(-previousCount); + } else { + histogramBuilder.addNegativeDelta(-previousCount); + } + previousCount = 0; } + } else { if (sgn > 0) { - histogramBuilder.addPositiveSpan(currentSpan.build()); + histogramBuilder.addPositiveSpan(currentSpan.build()); } else { - histogramBuilder.addNegativeSpan(currentSpan.build()); + histogramBuilder.addNegativeSpan(currentSpan.build()); } + currentSpan = Metrics.BucketSpan.newBuilder(); + currentSpan.setOffset(buckets.getBucketIndex(i) - (previousIndex + 1)); + } } - } - - private Metrics.Metric.Builder convert(SummarySnapshot.SummaryDataPointSnapshot data) { - Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); - Metrics.Summary.Builder summaryBuilder = Metrics.Summary.newBuilder(); - if (data.hasCount()) { - summaryBuilder.setSampleCount(data.getCount()); - } - if (data.hasSum()) { - summaryBuilder.setSampleSum(data.getSum()); - } - Quantiles quantiles = data.getQuantiles(); - for (int i = 0; i < quantiles.size(); i++) { - summaryBuilder.addQuantile(Metrics.Quantile.newBuilder() - .setQuantile(quantiles.get(i).getQuantile()) - .setValue(quantiles.get(i).getValue()) - .build()); + currentSpan.setLength(currentSpan.getLength() + 1); + previousIndex = buckets.getBucketIndex(i); + if (sgn > 0) { + histogramBuilder.addPositiveDelta(buckets.getCount(i) - previousCount); + } else { + histogramBuilder.addNegativeDelta(buckets.getCount(i) - previousCount); } - addLabels(metricBuilder, data.getLabels()); - metricBuilder.setSummary(summaryBuilder.build()); - setScrapeTimestamp(metricBuilder, data); - return metricBuilder; + previousCount = buckets.getCount(i); + } + if (sgn > 0) { + histogramBuilder.addPositiveSpan(currentSpan.build()); + } else { + histogramBuilder.addNegativeSpan(currentSpan.build()); + } } + } - private Metrics.Metric.Builder convert(InfoSnapshot.InfoDataPointSnapshot data) { - Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); - Metrics.Gauge.Builder gaugeBuilder = Metrics.Gauge.newBuilder(); - gaugeBuilder.setValue(1); - addLabels(metricBuilder, data.getLabels()); - metricBuilder.setGauge(gaugeBuilder); - setScrapeTimestamp(metricBuilder, data); - return metricBuilder; + private Metrics.Metric.Builder convert(SummarySnapshot.SummaryDataPointSnapshot data) { + Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); + Metrics.Summary.Builder summaryBuilder = Metrics.Summary.newBuilder(); + if (data.hasCount()) { + summaryBuilder.setSampleCount(data.getCount()); } - - private Metrics.Metric.Builder convert(StateSetSnapshot.StateSetDataPointSnapshot data, String name, int i) { - Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); - Metrics.Gauge.Builder gaugeBuilder = Metrics.Gauge.newBuilder(); - addLabels(metricBuilder, data.getLabels()); - metricBuilder.addLabel(Metrics.LabelPair.newBuilder() - .setName(name) - .setValue(data.getName(i)) - .build()); - if (data.isTrue(i)) { - gaugeBuilder.setValue(1); - } else { - gaugeBuilder.setValue(0); - } - metricBuilder.setGauge(gaugeBuilder); - setScrapeTimestamp(metricBuilder, data); - return metricBuilder; + if (data.hasSum()) { + summaryBuilder.setSampleSum(data.getSum()); } + Quantiles quantiles = data.getQuantiles(); + for (int i = 0; i < quantiles.size(); i++) { + summaryBuilder.addQuantile( + 
Metrics.Quantile.newBuilder() + .setQuantile(quantiles.get(i).getQuantile()) + .setValue(quantiles.get(i).getValue()) + .build()); + } + addLabels(metricBuilder, data.getLabels()); + metricBuilder.setSummary(summaryBuilder.build()); + setScrapeTimestamp(metricBuilder, data); + return metricBuilder; + } + + private Metrics.Metric.Builder convert(InfoSnapshot.InfoDataPointSnapshot data) { + Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); + Metrics.Gauge.Builder gaugeBuilder = Metrics.Gauge.newBuilder(); + gaugeBuilder.setValue(1); + addLabels(metricBuilder, data.getLabels()); + metricBuilder.setGauge(gaugeBuilder); + setScrapeTimestamp(metricBuilder, data); + return metricBuilder; + } - private Metrics.Metric.Builder convert(UnknownSnapshot.UnknownDataPointSnapshot data) { - Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); - Metrics.Untyped.Builder untypedBuilder = Metrics.Untyped.newBuilder(); - untypedBuilder.setValue(data.getValue()); - addLabels(metricBuilder, data.getLabels()); - metricBuilder.setUntyped(untypedBuilder); - return metricBuilder; + private Metrics.Metric.Builder convert( + StateSetSnapshot.StateSetDataPointSnapshot data, String name, int i) { + Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); + Metrics.Gauge.Builder gaugeBuilder = Metrics.Gauge.newBuilder(); + addLabels(metricBuilder, data.getLabels()); + metricBuilder.addLabel( + Metrics.LabelPair.newBuilder().setName(name).setValue(data.getName(i)).build()); + if (data.isTrue(i)) { + gaugeBuilder.setValue(1); + } else { + gaugeBuilder.setValue(0); } + metricBuilder.setGauge(gaugeBuilder); + setScrapeTimestamp(metricBuilder, data); + return metricBuilder; + } - private void addLabels(Metrics.Metric.Builder metricBuilder, Labels labels) { - for (int i = 0; i < labels.size(); i++) { - metricBuilder.addLabel(Metrics.LabelPair.newBuilder() - .setName(labels.getPrometheusName(i)) - .setValue(labels.getValue(i)) - .build()); - } + private Metrics.Metric.Builder convert(UnknownSnapshot.UnknownDataPointSnapshot data) { + Metrics.Metric.Builder metricBuilder = Metrics.Metric.newBuilder(); + Metrics.Untyped.Builder untypedBuilder = Metrics.Untyped.newBuilder(); + untypedBuilder.setValue(data.getValue()); + addLabels(metricBuilder, data.getLabels()); + metricBuilder.setUntyped(untypedBuilder); + return metricBuilder; + } + + private void addLabels(Metrics.Metric.Builder metricBuilder, Labels labels) { + for (int i = 0; i < labels.size(); i++) { + metricBuilder.addLabel( + Metrics.LabelPair.newBuilder() + .setName(labels.getPrometheusName(i)) + .setValue(labels.getValue(i)) + .build()); } + } - private void addLabels(Metrics.Exemplar.Builder metricBuilder, Labels labels) { - for (int i = 0; i < labels.size(); i++) { - metricBuilder.addLabel(Metrics.LabelPair.newBuilder() - .setName(labels.getPrometheusName(i)) - .setValue(labels.getValue(i)) - .build()); - } + private void addLabels(Metrics.Exemplar.Builder metricBuilder, Labels labels) { + for (int i = 0; i < labels.size(); i++) { + metricBuilder.addLabel( + Metrics.LabelPair.newBuilder() + .setName(labels.getPrometheusName(i)) + .setValue(labels.getValue(i)) + .build()); } + } - private Metrics.Exemplar.Builder convert(Exemplar exemplar) { - Metrics.Exemplar.Builder builder = Metrics.Exemplar.newBuilder(); - builder.setValue(exemplar.getValue()); - addLabels(builder, exemplar.getLabels()); - if (exemplar.hasTimestamp()) { - builder.setTimestamp(timestampFromMillis(exemplar.getTimestampMillis())); - } - return builder; + 
private Metrics.Exemplar.Builder convert(Exemplar exemplar) { + Metrics.Exemplar.Builder builder = Metrics.Exemplar.newBuilder(); + builder.setValue(exemplar.getValue()); + addLabels(builder, exemplar.getLabels()); + if (exemplar.hasTimestamp()) { + builder.setTimestamp(timestampFromMillis(exemplar.getTimestampMillis())); } + return builder; + } - private void setScrapeTimestamp(Metrics.Metric.Builder metricBuilder, DataPointSnapshot data) { - if (data.hasScrapeTimestamp()) { - metricBuilder.setTimestampMs(data.getScrapeTimestampMillis()); - } + private void setScrapeTimestamp(Metrics.Metric.Builder metricBuilder, DataPointSnapshot data) { + if (data.hasScrapeTimestamp()) { + metricBuilder.setTimestampMs(data.getScrapeTimestampMillis()); } + } } diff --git a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusTextFormatWriter.java b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusTextFormatWriter.java index cf9bc3d10..28d049f25 100644 --- a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusTextFormatWriter.java +++ b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/PrometheusTextFormatWriter.java @@ -1,12 +1,18 @@ package io.prometheus.metrics.expositionformats; -import io.prometheus.metrics.model.snapshots.CounterSnapshot; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeDouble; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeEscapedLabelValue; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLabels; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLong; +import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeTimestamp; + import io.prometheus.metrics.model.snapshots.ClassicHistogramBuckets; +import io.prometheus.metrics.model.snapshots.CounterSnapshot; +import io.prometheus.metrics.model.snapshots.DataPointSnapshot; import io.prometheus.metrics.model.snapshots.GaugeSnapshot; import io.prometheus.metrics.model.snapshots.HistogramSnapshot; import io.prometheus.metrics.model.snapshots.InfoSnapshot; import io.prometheus.metrics.model.snapshots.Labels; -import io.prometheus.metrics.model.snapshots.DataPointSnapshot; import io.prometheus.metrics.model.snapshots.MetricMetadata; import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.MetricSnapshots; @@ -14,334 +20,356 @@ import io.prometheus.metrics.model.snapshots.StateSetSnapshot; import io.prometheus.metrics.model.snapshots.SummarySnapshot; import io.prometheus.metrics.model.snapshots.UnknownSnapshot; - import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeDouble; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeEscapedLabelValue; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLabels; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeLong; -import static io.prometheus.metrics.expositionformats.TextFormatUtil.writeTimestamp; - /** - * Write the Prometheus text format. This is the default if you view a Prometheus endpoint with your Web browser. + * Write the Prometheus text format. 
This is the default if you view a Prometheus endpoint with your + * Web browser. */ public class PrometheusTextFormatWriter implements ExpositionFormatWriter { - public static final String CONTENT_TYPE = "text/plain; version=0.0.4; charset=utf-8"; + public static final String CONTENT_TYPE = "text/plain; version=0.0.4; charset=utf-8"; - private final boolean writeCreatedTimestamps; + private final boolean writeCreatedTimestamps; - public PrometheusTextFormatWriter(boolean writeCreatedTimestamps) { - this.writeCreatedTimestamps = writeCreatedTimestamps; - } + public PrometheusTextFormatWriter(boolean writeCreatedTimestamps) { + this.writeCreatedTimestamps = writeCreatedTimestamps; + } - @Override - public boolean accepts(String acceptHeader) { - if (acceptHeader == null) { - return false; - } else { - return acceptHeader.contains("text/plain"); - } + @Override + public boolean accepts(String acceptHeader) { + if (acceptHeader == null) { + return false; + } else { + return acceptHeader.contains("text/plain"); } + } - @Override - public String getContentType() { - return CONTENT_TYPE; - } + @Override + public String getContentType() { + return CONTENT_TYPE; + } - public void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException { - // See https://prometheus.io/docs/instrumenting/exposition_formats/ - // "unknown", "gauge", "counter", "stateset", "info", "histogram", "gaugehistogram", and "summary". - OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8); - for (MetricSnapshot snapshot : metricSnapshots) { - if (snapshot.getDataPoints().size() > 0) { - if (snapshot instanceof CounterSnapshot) { - writeCounter(writer, (CounterSnapshot) snapshot); - } else if (snapshot instanceof GaugeSnapshot) { - writeGauge(writer, (GaugeSnapshot) snapshot); - } else if (snapshot instanceof HistogramSnapshot) { - writeHistogram(writer, (HistogramSnapshot) snapshot); - } else if (snapshot instanceof SummarySnapshot) { - writeSummary(writer, (SummarySnapshot) snapshot); - } else if (snapshot instanceof InfoSnapshot) { - writeInfo(writer, (InfoSnapshot) snapshot); - } else if (snapshot instanceof StateSetSnapshot) { - writeStateSet(writer, (StateSetSnapshot) snapshot); - } else if (snapshot instanceof UnknownSnapshot) { - writeUnknown(writer, (UnknownSnapshot) snapshot); - } - } - } - if (writeCreatedTimestamps) { - for (MetricSnapshot snapshot : metricSnapshots) { - if (snapshot.getDataPoints().size() > 0) { - if (snapshot instanceof CounterSnapshot) { - writeCreated(writer, snapshot); - } else if (snapshot instanceof HistogramSnapshot) { - writeCreated(writer, snapshot); - } else if (snapshot instanceof SummarySnapshot) { - writeCreated(writer, snapshot); - } - } - } + public void write(OutputStream out, MetricSnapshots metricSnapshots) throws IOException { + // See https://prometheus.io/docs/instrumenting/exposition_formats/ + // "unknown", "gauge", "counter", "stateset", "info", "histogram", "gaugehistogram", and + // "summary". 
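+    // The writers below map each snapshot type to the closest classic-text-format type:
+    // Info and StateSet are exposed as "gauge", Unknown as "untyped", and a gauge histogram's
+    // _gcount/_gsum are appended as separate gauge series. For illustration, a counter data
+    // point without a scrape timestamp is rendered as:
+    //   # HELP service_time_seconds_total total time spent serving
+    //   # TYPE service_time_seconds_total counter
+    //   service_time_seconds_total{path="/hello",status="200"} 0.8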
+ OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8); + for (MetricSnapshot snapshot : metricSnapshots) { + if (snapshot.getDataPoints().size() > 0) { + if (snapshot instanceof CounterSnapshot) { + writeCounter(writer, (CounterSnapshot) snapshot); + } else if (snapshot instanceof GaugeSnapshot) { + writeGauge(writer, (GaugeSnapshot) snapshot); + } else if (snapshot instanceof HistogramSnapshot) { + writeHistogram(writer, (HistogramSnapshot) snapshot); + } else if (snapshot instanceof SummarySnapshot) { + writeSummary(writer, (SummarySnapshot) snapshot); + } else if (snapshot instanceof InfoSnapshot) { + writeInfo(writer, (InfoSnapshot) snapshot); + } else if (snapshot instanceof StateSetSnapshot) { + writeStateSet(writer, (StateSetSnapshot) snapshot); + } else if (snapshot instanceof UnknownSnapshot) { + writeUnknown(writer, (UnknownSnapshot) snapshot); } - writer.flush(); - } - - public void writeCreated(OutputStreamWriter writer, MetricSnapshot snapshot) throws IOException { - boolean metadataWritten = false; - MetricMetadata metadata = snapshot.getMetadata(); - for (DataPointSnapshot data : snapshot.getDataPoints()) { - if (data.hasCreatedTimestamp()) { - if (!metadataWritten) { - writeMetadata(writer, "_created", "gauge", metadata); - metadataWritten = true; - } - writeNameAndLabels(writer, metadata.getPrometheusName(), "_created", data.getLabels()); - writeTimestamp(writer, data.getCreatedTimestampMillis()); - writeScrapeTimestampAndNewline(writer, data); - } - } - + } } - - private void writeCounter(OutputStreamWriter writer, CounterSnapshot snapshot) throws IOException { + if (writeCreatedTimestamps) { + for (MetricSnapshot snapshot : metricSnapshots) { if (snapshot.getDataPoints().size() > 0) { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "_total", "counter", metadata); - for (CounterSnapshot.CounterDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_total", data.getLabels()); - writeDouble(writer, data.getValue()); - writeScrapeTimestampAndNewline(writer, data); - } + if (snapshot instanceof CounterSnapshot) { + writeCreated(writer, snapshot); + } else if (snapshot instanceof HistogramSnapshot) { + writeCreated(writer, snapshot); + } else if (snapshot instanceof SummarySnapshot) { + writeCreated(writer, snapshot); + } } + } } + writer.flush(); + } - private void writeGauge(OutputStreamWriter writer, GaugeSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "", "gauge", metadata); - for (GaugeSnapshot.GaugeDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); - writeDouble(writer, data.getValue()); - writeScrapeTimestampAndNewline(writer, data); + public void writeCreated(OutputStreamWriter writer, MetricSnapshot snapshot) throws IOException { + boolean metadataWritten = false; + MetricMetadata metadata = snapshot.getMetadata(); + for (DataPointSnapshot data : snapshot.getDataPoints()) { + if (data.hasCreatedTimestamp()) { + if (!metadataWritten) { + writeMetadata(writer, "_created", "gauge", metadata); + metadataWritten = true; } + writeNameAndLabels(writer, metadata.getPrometheusName(), "_created", data.getLabels()); + writeTimestamp(writer, data.getCreatedTimestampMillis()); + writeScrapeTimestampAndNewline(writer, data); + } } + } - private void writeHistogram(OutputStreamWriter writer, HistogramSnapshot snapshot) 
throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "", "histogram", metadata); - for (HistogramSnapshot.HistogramDataPointSnapshot data : snapshot.getDataPoints()) { - ClassicHistogramBuckets buckets = getClassicBuckets(data); - long cumulativeCount = 0; - for (int i = 0; i < buckets.size(); i++) { - cumulativeCount += buckets.getCount(i); - writeNameAndLabels(writer, metadata.getPrometheusName(), "_bucket", data.getLabels(), "le", buckets.getUpperBound(i)); - writeLong(writer, cumulativeCount); - writeScrapeTimestampAndNewline(writer, data); - } - if (!snapshot.isGaugeHistogram()) { - if (data.hasCount()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_count", data.getLabels()); - writeLong(writer, data.getCount()); - writeScrapeTimestampAndNewline(writer, data); - } - if (data.hasSum()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_sum", data.getLabels()); - writeDouble(writer, data.getSum()); - writeScrapeTimestampAndNewline(writer, data); - } - } - } - if (snapshot.isGaugeHistogram()) { - writeGaugeCountSum(writer, snapshot, metadata); - } + private void writeCounter(OutputStreamWriter writer, CounterSnapshot snapshot) + throws IOException { + if (snapshot.getDataPoints().size() > 0) { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "_total", "counter", metadata); + for (CounterSnapshot.CounterDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_total", data.getLabels()); + writeDouble(writer, data.getValue()); + writeScrapeTimestampAndNewline(writer, data); + } } + } - private ClassicHistogramBuckets getClassicBuckets(HistogramSnapshot.HistogramDataPointSnapshot data) { - if (data.getClassicBuckets().isEmpty()) { - return ClassicHistogramBuckets.of( - new double[]{Double.POSITIVE_INFINITY}, - new long[]{data.getCount()} - ); - } else { - return data.getClassicBuckets(); - } + private void writeGauge(OutputStreamWriter writer, GaugeSnapshot snapshot) throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "", "gauge", metadata); + for (GaugeSnapshot.GaugeDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); + writeDouble(writer, data.getValue()); + writeScrapeTimestampAndNewline(writer, data); } + } - private void writeGaugeCountSum(OutputStreamWriter writer, HistogramSnapshot snapshot, MetricMetadata metadata) throws IOException { - // Prometheus text format does not support gaugehistogram's _gcount and _gsum. - // So we append _gcount and _gsum as gauge metrics. 
- boolean metadataWritten = false; - for (HistogramSnapshot.HistogramDataPointSnapshot data : snapshot.getDataPoints()) { - if (data.hasCount()) { - if (!metadataWritten) { - writeMetadata(writer, "_gcount", "gauge", metadata); - metadataWritten = true; - } - writeNameAndLabels(writer, metadata.getPrometheusName(), "_gcount", data.getLabels()); - writeLong(writer, data.getCount()); - writeScrapeTimestampAndNewline(writer, data); - } + private void writeHistogram(OutputStreamWriter writer, HistogramSnapshot snapshot) + throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "", "histogram", metadata); + for (HistogramSnapshot.HistogramDataPointSnapshot data : snapshot.getDataPoints()) { + ClassicHistogramBuckets buckets = getClassicBuckets(data); + long cumulativeCount = 0; + for (int i = 0; i < buckets.size(); i++) { + cumulativeCount += buckets.getCount(i); + writeNameAndLabels( + writer, + metadata.getPrometheusName(), + "_bucket", + data.getLabels(), + "le", + buckets.getUpperBound(i)); + writeLong(writer, cumulativeCount); + writeScrapeTimestampAndNewline(writer, data); + } + if (!snapshot.isGaugeHistogram()) { + if (data.hasCount()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_count", data.getLabels()); + writeLong(writer, data.getCount()); + writeScrapeTimestampAndNewline(writer, data); } - metadataWritten = false; - for (HistogramSnapshot.HistogramDataPointSnapshot data : snapshot.getDataPoints()) { - if (data.hasSum()) { - if (!metadataWritten) { - writeMetadata(writer, "_gsum", "gauge", metadata); - metadataWritten = true; - } - writeNameAndLabels(writer, metadata.getPrometheusName(), "_gsum", data.getLabels()); - writeDouble(writer, data.getSum()); - writeScrapeTimestampAndNewline(writer, data); - } + if (data.hasSum()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_sum", data.getLabels()); + writeDouble(writer, data.getSum()); + writeScrapeTimestampAndNewline(writer, data); } + } } - - private void writeSummary(OutputStreamWriter writer, SummarySnapshot snapshot) throws IOException { - boolean metadataWritten = false; - MetricMetadata metadata = snapshot.getMetadata(); - for (SummarySnapshot.SummaryDataPointSnapshot data : snapshot.getDataPoints()) { - if (data.getQuantiles().size() == 0 && !data.hasCount() && !data.hasSum()) { - continue; - } - if (!metadataWritten) { - writeMetadata(writer, "", "summary", metadata); - metadataWritten = true; - } - for (Quantile quantile : data.getQuantiles()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels(), "quantile", quantile.getQuantile()); - writeDouble(writer, quantile.getValue()); - writeScrapeTimestampAndNewline(writer, data); - } - if (data.hasCount()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_count", data.getLabels()); - writeLong(writer, data.getCount()); - writeScrapeTimestampAndNewline(writer, data); - } - if (data.hasSum()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), "_sum", data.getLabels()); - writeDouble(writer, data.getSum()); - writeScrapeTimestampAndNewline(writer, data); - } - } + if (snapshot.isGaugeHistogram()) { + writeGaugeCountSum(writer, snapshot, metadata); } + } - private void writeInfo(OutputStreamWriter writer, InfoSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "_info", "gauge", metadata); - for (InfoSnapshot.InfoDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, 
metadata.getPrometheusName(), "_info", data.getLabels()); - writer.write("1"); - writeScrapeTimestampAndNewline(writer, data); - } + private ClassicHistogramBuckets getClassicBuckets( + HistogramSnapshot.HistogramDataPointSnapshot data) { + if (data.getClassicBuckets().isEmpty()) { + return ClassicHistogramBuckets.of( + new double[] {Double.POSITIVE_INFINITY}, new long[] {data.getCount()}); + } else { + return data.getClassicBuckets(); } + } - private void writeStateSet(OutputStreamWriter writer, StateSetSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "", "gauge", metadata); - for (StateSetSnapshot.StateSetDataPointSnapshot data : snapshot.getDataPoints()) { - for (int i = 0; i < data.size(); i++) { - writer.write(metadata.getPrometheusName()); - writer.write('{'); - for (int j = 0; j < data.getLabels().size(); j++) { - if (j > 0) { - writer.write(","); - } - writer.write(data.getLabels().getPrometheusName(j)); - writer.write("=\""); - writeEscapedLabelValue(writer, data.getLabels().getValue(j)); - writer.write("\""); - } - if (!data.getLabels().isEmpty()) { - writer.write(","); - } - writer.write(metadata.getPrometheusName()); - writer.write("=\""); - writeEscapedLabelValue(writer, data.getName(i)); - writer.write("\"} "); - if (data.isTrue(i)) { - writer.write("1"); - } else { - writer.write("0"); - } - writeScrapeTimestampAndNewline(writer, data); - } + private void writeGaugeCountSum( + OutputStreamWriter writer, HistogramSnapshot snapshot, MetricMetadata metadata) + throws IOException { + // Prometheus text format does not support gaugehistogram's _gcount and _gsum. + // So we append _gcount and _gsum as gauge metrics. + boolean metadataWritten = false; + for (HistogramSnapshot.HistogramDataPointSnapshot data : snapshot.getDataPoints()) { + if (data.hasCount()) { + if (!metadataWritten) { + writeMetadata(writer, "_gcount", "gauge", metadata); + metadataWritten = true; } + writeNameAndLabels(writer, metadata.getPrometheusName(), "_gcount", data.getLabels()); + writeLong(writer, data.getCount()); + writeScrapeTimestampAndNewline(writer, data); + } } - - private void writeUnknown(OutputStreamWriter writer, UnknownSnapshot snapshot) throws IOException { - MetricMetadata metadata = snapshot.getMetadata(); - writeMetadata(writer, "", "untyped", metadata); - for (UnknownSnapshot.UnknownDataPointSnapshot data : snapshot.getDataPoints()) { - writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); - writeDouble(writer, data.getValue()); - writeScrapeTimestampAndNewline(writer, data); + metadataWritten = false; + for (HistogramSnapshot.HistogramDataPointSnapshot data : snapshot.getDataPoints()) { + if (data.hasSum()) { + if (!metadataWritten) { + writeMetadata(writer, "_gsum", "gauge", metadata); + metadataWritten = true; } + writeNameAndLabels(writer, metadata.getPrometheusName(), "_gsum", data.getLabels()); + writeDouble(writer, data.getSum()); + writeScrapeTimestampAndNewline(writer, data); + } } + } - private void writeNameAndLabels(OutputStreamWriter writer, String name, String suffix, Labels labels) throws IOException { - writeNameAndLabels(writer, name, suffix, labels, null, 0.0); + private void writeSummary(OutputStreamWriter writer, SummarySnapshot snapshot) + throws IOException { + boolean metadataWritten = false; + MetricMetadata metadata = snapshot.getMetadata(); + for (SummarySnapshot.SummaryDataPointSnapshot data : snapshot.getDataPoints()) { + if (data.getQuantiles().size() == 0 && 
!data.hasCount() && !data.hasSum()) { + continue; + } + if (!metadataWritten) { + writeMetadata(writer, "", "summary", metadata); + metadataWritten = true; + } + for (Quantile quantile : data.getQuantiles()) { + writeNameAndLabels( + writer, + metadata.getPrometheusName(), + null, + data.getLabels(), + "quantile", + quantile.getQuantile()); + writeDouble(writer, quantile.getValue()); + writeScrapeTimestampAndNewline(writer, data); + } + if (data.hasCount()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_count", data.getLabels()); + writeLong(writer, data.getCount()); + writeScrapeTimestampAndNewline(writer, data); + } + if (data.hasSum()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_sum", data.getLabels()); + writeDouble(writer, data.getSum()); + writeScrapeTimestampAndNewline(writer, data); + } } + } - private void writeNameAndLabels(OutputStreamWriter writer, String name, String suffix, Labels labels, - String additionalLabelName, double additionalLabelValue) throws IOException { - writer.write(name); - if (suffix != null) { - writer.write(suffix); - } - if (!labels.isEmpty() || additionalLabelName != null) { - writeLabels(writer, labels, additionalLabelName, additionalLabelValue); - } - writer.write(' '); + private void writeInfo(OutputStreamWriter writer, InfoSnapshot snapshot) throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "_info", "gauge", metadata); + for (InfoSnapshot.InfoDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), "_info", data.getLabels()); + writer.write("1"); + writeScrapeTimestampAndNewline(writer, data); } + } - private void writeMetadata(OutputStreamWriter writer, String suffix, String typeString, MetricMetadata metadata) throws IOException { - if (metadata.getHelp() != null && !metadata.getHelp().isEmpty()) { - writer.write("# HELP "); - writer.write(metadata.getPrometheusName()); - if (suffix != null) { - writer.write(suffix); - } - writer.write(' '); - writeEscapedHelp(writer, metadata.getHelp()); - writer.write('\n'); + private void writeStateSet(OutputStreamWriter writer, StateSetSnapshot snapshot) + throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "", "gauge", metadata); + for (StateSetSnapshot.StateSetDataPointSnapshot data : snapshot.getDataPoints()) { + for (int i = 0; i < data.size(); i++) { + writer.write(metadata.getPrometheusName()); + writer.write('{'); + for (int j = 0; j < data.getLabels().size(); j++) { + if (j > 0) { + writer.write(","); + } + writer.write(data.getLabels().getPrometheusName(j)); + writer.write("=\""); + writeEscapedLabelValue(writer, data.getLabels().getValue(j)); + writer.write("\""); + } + if (!data.getLabels().isEmpty()) { + writer.write(","); } - writer.write("# TYPE "); writer.write(metadata.getPrometheusName()); - if (suffix != null) { - writer.write(suffix); + writer.write("=\""); + writeEscapedLabelValue(writer, data.getName(i)); + writer.write("\"} "); + if (data.isTrue(i)) { + writer.write("1"); + } else { + writer.write("0"); } - writer.write(' '); - writer.write(typeString); - writer.write('\n'); + writeScrapeTimestampAndNewline(writer, data); + } } + } - private void writeEscapedHelp(Writer writer, String s) throws IOException { - for (int i = 0; i < s.length(); i++) { - char c = s.charAt(i); - switch (c) { - case '\\': - writer.append("\\\\"); - break; - case '\n': - writer.append("\\n"); - break; - default: - writer.append(c); - 
} - } + private void writeUnknown(OutputStreamWriter writer, UnknownSnapshot snapshot) + throws IOException { + MetricMetadata metadata = snapshot.getMetadata(); + writeMetadata(writer, "", "untyped", metadata); + for (UnknownSnapshot.UnknownDataPointSnapshot data : snapshot.getDataPoints()) { + writeNameAndLabels(writer, metadata.getPrometheusName(), null, data.getLabels()); + writeDouble(writer, data.getValue()); + writeScrapeTimestampAndNewline(writer, data); } + } - private void writeScrapeTimestampAndNewline(OutputStreamWriter writer, DataPointSnapshot data) throws IOException { - if (data.hasScrapeTimestamp()) { - writer.write(' '); - writeTimestamp(writer, data.getScrapeTimestampMillis()); - } - writer.write('\n'); + private void writeNameAndLabels( + OutputStreamWriter writer, String name, String suffix, Labels labels) throws IOException { + writeNameAndLabels(writer, name, suffix, labels, null, 0.0); + } + + private void writeNameAndLabels( + OutputStreamWriter writer, + String name, + String suffix, + Labels labels, + String additionalLabelName, + double additionalLabelValue) + throws IOException { + writer.write(name); + if (suffix != null) { + writer.write(suffix); + } + if (!labels.isEmpty() || additionalLabelName != null) { + writeLabels(writer, labels, additionalLabelName, additionalLabelValue); + } + writer.write(' '); + } + + private void writeMetadata( + OutputStreamWriter writer, String suffix, String typeString, MetricMetadata metadata) + throws IOException { + if (metadata.getHelp() != null && !metadata.getHelp().isEmpty()) { + writer.write("# HELP "); + writer.write(metadata.getPrometheusName()); + if (suffix != null) { + writer.write(suffix); + } + writer.write(' '); + writeEscapedHelp(writer, metadata.getHelp()); + writer.write('\n'); + } + writer.write("# TYPE "); + writer.write(metadata.getPrometheusName()); + if (suffix != null) { + writer.write(suffix); + } + writer.write(' '); + writer.write(typeString); + writer.write('\n'); + } + + private void writeEscapedHelp(Writer writer, String s) throws IOException { + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + switch (c) { + case '\\': + writer.append("\\\\"); + break; + case '\n': + writer.append("\\n"); + break; + default: + writer.append(c); + } + } + } + + private void writeScrapeTimestampAndNewline(OutputStreamWriter writer, DataPointSnapshot data) + throws IOException { + if (data.hasScrapeTimestamp()) { + writer.write(' '); + writeTimestamp(writer, data.getScrapeTimestampMillis()); } + writer.write('\n'); + } } diff --git a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ProtobufUtil.java b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ProtobufUtil.java index ad3a28828..fee5d8eb1 100644 --- a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ProtobufUtil.java +++ b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/ProtobufUtil.java @@ -4,10 +4,10 @@ public class ProtobufUtil { - static Timestamp timestampFromMillis(long timestampMillis) { - return Timestamp.newBuilder() - .setSeconds(timestampMillis / 1000L) - .setNanos((int) (timestampMillis % 1000L * 1000000L)) - .build(); - } + static Timestamp timestampFromMillis(long timestampMillis) { + return Timestamp.newBuilder() + .setSeconds(timestampMillis / 1000L) + .setNanos((int) (timestampMillis % 1000L * 1000000L)) + .build(); + } } diff --git 
a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/TextFormatUtil.java b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/TextFormatUtil.java index 423fa6692..54daaaa3e 100644 --- a/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/TextFormatUtil.java +++ b/prometheus-metrics-exposition-formats/src/main/java/io/prometheus/metrics/expositionformats/TextFormatUtil.java @@ -1,82 +1,84 @@ package io.prometheus.metrics.expositionformats; import io.prometheus.metrics.model.snapshots.Labels; - import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; -import static io.prometheus.metrics.model.snapshots.PrometheusNaming.prometheusName; - public class TextFormatUtil { - static void writeLong(OutputStreamWriter writer, long value) throws IOException { - writer.append(Long.toString(value)); - } + static void writeLong(OutputStreamWriter writer, long value) throws IOException { + writer.append(Long.toString(value)); + } - static void writeDouble(OutputStreamWriter writer, double d) throws IOException { - if (d == Double.POSITIVE_INFINITY) { - writer.write("+Inf"); - } else if (d == Double.NEGATIVE_INFINITY) { - writer.write("-Inf"); - } else { - writer.write(Double.toString(d)); - // FloatingDecimal.getBinaryToASCIIConverter(d).appendTo(writer); - } + static void writeDouble(OutputStreamWriter writer, double d) throws IOException { + if (d == Double.POSITIVE_INFINITY) { + writer.write("+Inf"); + } else if (d == Double.NEGATIVE_INFINITY) { + writer.write("-Inf"); + } else { + writer.write(Double.toString(d)); + // FloatingDecimal.getBinaryToASCIIConverter(d).appendTo(writer); } + } - static void writeTimestamp(OutputStreamWriter writer, long timestampMs) throws IOException { - writer.write(Long.toString(timestampMs / 1000L)); - writer.write("."); - long ms = timestampMs % 1000; - if (ms < 100) { - writer.write("0"); - } - if (ms < 10) { - writer.write("0"); - } - writer.write(Long.toString(ms)); + static void writeTimestamp(OutputStreamWriter writer, long timestampMs) throws IOException { + writer.write(Long.toString(timestampMs / 1000L)); + writer.write("."); + long ms = timestampMs % 1000; + if (ms < 100) { + writer.write("0"); } + if (ms < 10) { + writer.write("0"); + } + writer.write(Long.toString(ms)); + } - static void writeEscapedLabelValue(Writer writer, String s) throws IOException { - for (int i = 0; i < s.length(); i++) { - char c = s.charAt(i); - switch (c) { - case '\\': - writer.append("\\\\"); - break; - case '\"': - writer.append("\\\""); - break; - case '\n': - writer.append("\\n"); - break; - default: - writer.append(c); - } - } + static void writeEscapedLabelValue(Writer writer, String s) throws IOException { + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + switch (c) { + case '\\': + writer.append("\\\\"); + break; + case '\"': + writer.append("\\\""); + break; + case '\n': + writer.append("\\n"); + break; + default: + writer.append(c); + } } + } - static void writeLabels(OutputStreamWriter writer, Labels labels, String additionalLabelName, double additionalLabelValue) throws IOException { - writer.write('{'); - for (int i = 0; i < labels.size(); i++) { - if (i > 0) { - writer.write(","); - } - writer.write(labels.getPrometheusName(i)); - writer.write("=\""); - writeEscapedLabelValue(writer, labels.getValue(i)); - writer.write("\""); - } - if (additionalLabelName != null) { - if (!labels.isEmpty()) { - 
writer.write(","); - } - writer.write(additionalLabelName); - writer.write("=\""); - writeDouble(writer, additionalLabelValue); - writer.write("\""); - } - writer.write('}'); + static void writeLabels( + OutputStreamWriter writer, + Labels labels, + String additionalLabelName, + double additionalLabelValue) + throws IOException { + writer.write('{'); + for (int i = 0; i < labels.size(); i++) { + if (i > 0) { + writer.write(","); + } + writer.write(labels.getPrometheusName(i)); + writer.write("=\""); + writeEscapedLabelValue(writer, labels.getValue(i)); + writer.write("\""); + } + if (additionalLabelName != null) { + if (!labels.isEmpty()) { + writer.write(","); + } + writer.write(additionalLabelName); + writer.write("=\""); + writeDouble(writer, additionalLabelValue); + writer.write("\""); } + writer.write('}'); + } } diff --git a/prometheus-metrics-exposition-formats/src/test/java/io/prometheus/metrics/expositionformats/ExpositionFormatsTest.java b/prometheus-metrics-exposition-formats/src/test/java/io/prometheus/metrics/expositionformats/ExpositionFormatsTest.java index 09600eafd..1ce3f2840 100644 --- a/prometheus-metrics-exposition-formats/src/test/java/io/prometheus/metrics/expositionformats/ExpositionFormatsTest.java +++ b/prometheus-metrics-exposition-formats/src/test/java/io/prometheus/metrics/expositionformats/ExpositionFormatsTest.java @@ -1,1938 +1,2766 @@ package io.prometheus.metrics.expositionformats; -import io.prometheus.metrics.model.snapshots.*; -import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat; import io.prometheus.metrics.expositionformats.generated.com_google_protobuf_3_25_3.Metrics; +import io.prometheus.metrics.model.snapshots.*; import io.prometheus.metrics.model.snapshots.CounterSnapshot.CounterDataPointSnapshot; import io.prometheus.metrics.model.snapshots.GaugeSnapshot.GaugeDataPointSnapshot; import io.prometheus.metrics.model.snapshots.SummarySnapshot.SummaryDataPointSnapshot; import io.prometheus.metrics.model.snapshots.UnknownSnapshot.UnknownDataPointSnapshot; -import org.junit.Assert; -import org.junit.Test; - +import io.prometheus.metrics.shaded.com_google_protobuf_3_25_3.TextFormat; import java.io.ByteArrayOutputStream; import java.io.IOException; +import org.junit.Assert; +import org.junit.Test; public class ExpositionFormatsTest { - private final String exemplar1String = "{env=\"prod\",span_id=\"12345\",trace_id=\"abcde\"} 1.7 1672850685.829"; - private final String exemplar2String = "{env=\"dev\",span_id=\"23456\",trace_id=\"bcdef\"} 2.4 1672850685.830"; - private final String exemplarWithDotsString = "{some_exemplar_key=\"some value\"} 3.0 1690298864.383"; + private final String exemplar1String = + "{env=\"prod\",span_id=\"12345\",trace_id=\"abcde\"} 1.7 1672850685.829"; + private final String exemplar2String = + "{env=\"dev\",span_id=\"23456\",trace_id=\"bcdef\"} 2.4 1672850685.830"; + private final String exemplarWithDotsString = + "{some_exemplar_key=\"some value\"} 3.0 1690298864.383"; - private final String exemplar1protoString = "exemplar { " + - "label { name: \"env\" value: \"prod\" } " + - "label { name: \"span_id\" value: \"12345\" } " + - "label { name: \"trace_id\" value: \"abcde\" } " + - "value: 1.7 " + - "timestamp { seconds: 1672850685 nanos: 829000000 } }"; + private final String exemplar1protoString = + "exemplar { " + + "label { name: \"env\" value: \"prod\" } " + + "label { name: \"span_id\" value: \"12345\" } " + + "label { name: \"trace_id\" value: \"abcde\" } " + + "value: 1.7 " + + "timestamp { seconds: 
1672850685 nanos: 829000000 } }"; - private final String exemplar2protoString = "exemplar { " + - "label { name: \"env\" value: \"dev\" } " + - "label { name: \"span_id\" value: \"23456\" } " + - "label { name: \"trace_id\" value: \"bcdef\" } " + - "value: 2.4 " + - "timestamp { seconds: 1672850685 nanos: 830000000 } }"; + private final String exemplar2protoString = + "exemplar { " + + "label { name: \"env\" value: \"dev\" } " + + "label { name: \"span_id\" value: \"23456\" } " + + "label { name: \"trace_id\" value: \"bcdef\" } " + + "value: 2.4 " + + "timestamp { seconds: 1672850685 nanos: 830000000 } }"; - private final String exemplarWithDotsProtoString = "exemplar { " + - "label { name: \"some_exemplar_key\" value: \"some value\" } " + - "value: 3.0 " + - "timestamp { seconds: 1690298864 nanos: 383000000 } }"; + private final String exemplarWithDotsProtoString = + "exemplar { " + + "label { name: \"some_exemplar_key\" value: \"some value\" } " + + "value: 3.0 " + + "timestamp { seconds: 1690298864 nanos: 383000000 } }"; - private final String createdTimestamp1s = "1672850385.800"; - private final long createdTimestamp1 = (long) (1000 * Double.parseDouble(createdTimestamp1s)); - private final String createdTimestamp2s = "1672850285.000"; - private final long createdTimestamp2 = (long) (1000 * Double.parseDouble(createdTimestamp2s)); - private final String scrapeTimestamp1s = "1672850685.829"; - private final long scrapeTimestamp1 = (long) (1000 * Double.parseDouble(scrapeTimestamp1s)); - private final String scrapeTimestamp2s = "1672850585.820"; - private final long scrapeTimestamp2 = (long) (1000 * Double.parseDouble(scrapeTimestamp2s)); + private final String createdTimestamp1s = "1672850385.800"; + private final long createdTimestamp1 = (long) (1000 * Double.parseDouble(createdTimestamp1s)); + private final String createdTimestamp2s = "1672850285.000"; + private final long createdTimestamp2 = (long) (1000 * Double.parseDouble(createdTimestamp2s)); + private final String scrapeTimestamp1s = "1672850685.829"; + private final long scrapeTimestamp1 = (long) (1000 * Double.parseDouble(scrapeTimestamp1s)); + private final String scrapeTimestamp2s = "1672850585.820"; + private final long scrapeTimestamp2 = (long) (1000 * Double.parseDouble(scrapeTimestamp2s)); - private final Exemplar exemplar1 = Exemplar.builder() - .spanId("12345") - .traceId("abcde") - .labels(Labels.of("env", "prod")) - .value(1.7) - .timestampMillis(1672850685829L) - .build(); + private final Exemplar exemplar1 = + Exemplar.builder() + .spanId("12345") + .traceId("abcde") + .labels(Labels.of("env", "prod")) + .value(1.7) + .timestampMillis(1672850685829L) + .build(); - private final Exemplar exemplar2 = Exemplar.builder() - .spanId("23456") - .traceId("bcdef") - .labels(Labels.of("env", "dev")) - .value(2.4) - .timestampMillis(1672850685830L) - .build(); + private final Exemplar exemplar2 = + Exemplar.builder() + .spanId("23456") + .traceId("bcdef") + .labels(Labels.of("env", "dev")) + .value(2.4) + .timestampMillis(1672850685830L) + .build(); - private final Exemplar exemplarWithDots = Exemplar.builder() - .labels(Labels.of("some.exemplar.key", "some value")) - .value(3.0) - .timestampMillis(1690298864383L) - .build(); + private final Exemplar exemplarWithDots = + Exemplar.builder() + .labels(Labels.of("some.exemplar.key", "some value")) + .value(3.0) + .timestampMillis(1690298864383L) + .build(); - @Test - public void testCounterComplete() throws IOException { - String openMetricsText = "" + - "# TYPE 
service_time_seconds counter\n" + - "# UNIT service_time_seconds seconds\n" + - "# HELP service_time_seconds total time spent serving\n" + - "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "service_time_seconds_created{path=\"/hello\",status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "service_time_seconds_created{path=\"/hello\",status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP service_time_seconds_total total time spent serving\n" + - "# TYPE service_time_seconds_total counter\n" + - "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + scrapeTimestamp1s + "\n" + - "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + scrapeTimestamp2s + "\n" + - "# HELP service_time_seconds_created total time spent serving\n" + - "# TYPE service_time_seconds_created gauge\n" + - "service_time_seconds_created{path=\"/hello\",status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "service_time_seconds_created{path=\"/hello\",status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n"; - String openMetricsTextWithoutCreated = "" + - "# TYPE service_time_seconds counter\n" + - "# UNIT service_time_seconds seconds\n" + - "# HELP service_time_seconds total time spent serving\n" + - "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "# EOF\n"; - String prometheusTextWithoutCreated = "" + - "# HELP service_time_seconds_total total time spent serving\n" + - "# TYPE service_time_seconds_total counter\n" + - "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + scrapeTimestamp1s + "\n" + - "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + scrapeTimestamp2s + "\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"service_time_seconds_total\" " + - "help: \"total time spent serving\" " + - "type: COUNTER " + - "metric { " + - "label { name: \"path\" value: \"/hello\" } " + - "label { name: \"status\" value: \"200\" } " + - "counter { " + - "value: 0.8 " + - exemplar1protoString + " " + - "} " + - "timestamp_ms: 1672850685829 " + - "} " + - "metric { " + - "label { name: \"path\" value: \"/hello\" } " + - "label { name: \"status\" value: \"500\" } " + - "counter { " + - "value: 0.9 " + - exemplar2protoString + " " + - "} " + - "timestamp_ms: 1672850585820 " + - "}"; - //@formatter:on + @Test + public void testCounterComplete() throws IOException { + String openMetricsText = + "" + + "# TYPE service_time_seconds counter\n" + + "# UNIT service_time_seconds seconds\n" + + "# HELP service_time_seconds total time spent serving\n" + + "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "service_time_seconds_created{path=\"/hello\",status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "service_time_seconds_created{path=\"/hello\",status=\"500\"} " + + createdTimestamp2s + + " " + + 
scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP service_time_seconds_total total time spent serving\n" + + "# TYPE service_time_seconds_total counter\n" + + "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + + scrapeTimestamp1s + + "\n" + + "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + + scrapeTimestamp2s + + "\n" + + "# HELP service_time_seconds_created total time spent serving\n" + + "# TYPE service_time_seconds_created gauge\n" + + "service_time_seconds_created{path=\"/hello\",status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "service_time_seconds_created{path=\"/hello\",status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n"; + String openMetricsTextWithoutCreated = + "" + + "# TYPE service_time_seconds counter\n" + + "# UNIT service_time_seconds seconds\n" + + "# HELP service_time_seconds total time spent serving\n" + + "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "# EOF\n"; + String prometheusTextWithoutCreated = + "" + + "# HELP service_time_seconds_total total time spent serving\n" + + "# TYPE service_time_seconds_total counter\n" + + "service_time_seconds_total{path=\"/hello\",status=\"200\"} 0.8 " + + scrapeTimestamp1s + + "\n" + + "service_time_seconds_total{path=\"/hello\",status=\"500\"} 0.9 " + + scrapeTimestamp2s + + "\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"service_time_seconds_total\" " + + "help: \"total time spent serving\" " + + "type: COUNTER " + + "metric { " + + "label { name: \"path\" value: \"/hello\" } " + + "label { name: \"status\" value: \"200\" } " + + "counter { " + + "value: 0.8 " + + exemplar1protoString + + " " + + "} " + + "timestamp_ms: 1672850685829 " + + "} " + + "metric { " + + "label { name: \"path\" value: \"/hello\" } " + + "label { name: \"status\" value: \"500\" } " + + "counter { " + + "value: 0.9 " + + exemplar2protoString + + " " + + "} " + + "timestamp_ms: 1672850585820 " + + "}"; + // @formatter:on - CounterSnapshot counter = CounterSnapshot.builder() - .name("service_time_seconds") - .help("total time spent serving") - .unit(Unit.SECONDS) - .dataPoint(CounterDataPointSnapshot.builder() - .value(0.8) - .labels(Labels.builder() - .label("path", "/hello") - .label("status", "200") - .build()) - .exemplar(exemplar1) - .createdTimestampMillis(createdTimestamp1) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .dataPoint(CounterDataPointSnapshot.builder() - .value(0.9) - .labels(Labels.builder() - .label("path", "/hello") - .label("status", "500") - .build()) - .exemplar(exemplar2) - .createdTimestampMillis(createdTimestamp2) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, counter); - assertPrometheusText(prometheusText, counter); - assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, counter); - assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, counter); - assertPrometheusProtobuf(prometheusProtobuf, counter); - } + CounterSnapshot counter = + CounterSnapshot.builder() + .name("service_time_seconds") + .help("total time spent serving") + .unit(Unit.SECONDS) + .dataPoint( + CounterDataPointSnapshot.builder() + .value(0.8) + .labels(Labels.builder().label("path", 
"/hello").label("status", "200").build()) + .exemplar(exemplar1) + .createdTimestampMillis(createdTimestamp1) + .scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .dataPoint( + CounterDataPointSnapshot.builder() + .value(0.9) + .labels(Labels.builder().label("path", "/hello").label("status", "500").build()) + .exemplar(exemplar2) + .createdTimestampMillis(createdTimestamp2) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, counter); + assertPrometheusText(prometheusText, counter); + assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, counter); + assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, counter); + assertPrometheusProtobuf(prometheusProtobuf, counter); + } - @Test - public void testCounterMinimal() throws IOException { - String openMetricsText = "" + - "# TYPE my_counter counter\n" + - "my_counter_total 1.1\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE my_counter_total counter\n" + - "my_counter_total 1.1\n"; - String prometheusProtobuf = "" + - "name: \"my_counter_total\" type: COUNTER metric { counter { value: 1.1 } }"; - CounterSnapshot counter = CounterSnapshot.builder() - .name("my_counter") - .dataPoint(CounterDataPointSnapshot.builder().value(1.1).build()) - .build(); - assertOpenMetricsText(openMetricsText, counter); - assertPrometheusText(prometheusText, counter); - assertOpenMetricsTextWithoutCreated(openMetricsText, counter); - assertPrometheusTextWithoutCreated(prometheusText, counter); - assertPrometheusProtobuf(prometheusProtobuf, counter); - } + @Test + public void testCounterMinimal() throws IOException { + String openMetricsText = + "" + "# TYPE my_counter counter\n" + "my_counter_total 1.1\n" + "# EOF\n"; + String prometheusText = "" + "# TYPE my_counter_total counter\n" + "my_counter_total 1.1\n"; + String prometheusProtobuf = + "" + "name: \"my_counter_total\" type: COUNTER metric { counter { value: 1.1 } }"; + CounterSnapshot counter = + CounterSnapshot.builder() + .name("my_counter") + .dataPoint(CounterDataPointSnapshot.builder().value(1.1).build()) + .build(); + assertOpenMetricsText(openMetricsText, counter); + assertPrometheusText(prometheusText, counter); + assertOpenMetricsTextWithoutCreated(openMetricsText, counter); + assertPrometheusTextWithoutCreated(prometheusText, counter); + assertPrometheusProtobuf(prometheusProtobuf, counter); + } - @Test - public void testCounterWithDots() throws IOException { - String openMetricsText = "" + - "# TYPE my_request_count counter\n" + - "my_request_count_total{http_path=\"/hello\"} 3.0 # " + exemplarWithDotsString + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE my_request_count_total counter\n" + - "my_request_count_total{http_path=\"/hello\"} 3.0\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"my_request_count_total\" " + - "type: COUNTER " + - "metric { " + - "label { name: \"http_path\" value: \"/hello\" } " + - "counter { " + - "value: 3.0 " + exemplarWithDotsProtoString + " " + - "} " + - "}"; - //@formatter:on + @Test + public void testCounterWithDots() throws IOException { + String openMetricsText = + "" + + "# TYPE my_request_count counter\n" + + "my_request_count_total{http_path=\"/hello\"} 3.0 # " + + exemplarWithDotsString + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# TYPE my_request_count_total counter\n" + + "my_request_count_total{http_path=\"/hello\"} 3.0\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: 
\"my_request_count_total\" " + + "type: COUNTER " + + "metric { " + + "label { name: \"http_path\" value: \"/hello\" } " + + "counter { " + + "value: 3.0 " + + exemplarWithDotsProtoString + + " " + + "} " + + "}"; + // @formatter:on - CounterSnapshot counter = CounterSnapshot.builder() - .name("my.request.count") - .dataPoint(CounterDataPointSnapshot.builder() - .value(3.0) - .labels(Labels.builder() - .label("http.path", "/hello") - .build()) - .exemplar(exemplarWithDots) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, counter); - assertPrometheusText(prometheusText, counter); - assertPrometheusProtobuf(prometheusProtobuf, counter); - } + CounterSnapshot counter = + CounterSnapshot.builder() + .name("my.request.count") + .dataPoint( + CounterDataPointSnapshot.builder() + .value(3.0) + .labels(Labels.builder().label("http.path", "/hello").build()) + .exemplar(exemplarWithDots) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, counter); + assertPrometheusText(prometheusText, counter); + assertPrometheusProtobuf(prometheusProtobuf, counter); + } - @Test - public void testGaugeComplete() throws IOException { - String openMetricsText = "" + - "# TYPE disk_usage_ratio gauge\n" + - "# UNIT disk_usage_ratio ratio\n" + - "# HELP disk_usage_ratio percentage used\n" + - "disk_usage_ratio{device=\"/dev/sda1\"} 0.2 " + scrapeTimestamp1s + "\n" + - "disk_usage_ratio{device=\"/dev/sda2\"} 0.7 " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE disk_usage_ratio gauge\n" + - "# UNIT disk_usage_ratio ratio\n" + - "# HELP disk_usage_ratio percentage used\n" + - "disk_usage_ratio{device=\"/dev/sda1\"} 0.2 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "disk_usage_ratio{device=\"/dev/sda2\"} 0.7 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP disk_usage_ratio percentage used\n" + - "# TYPE disk_usage_ratio gauge\n" + - "disk_usage_ratio{device=\"/dev/sda1\"} 0.2 " + scrapeTimestamp1s + "\n" + - "disk_usage_ratio{device=\"/dev/sda2\"} 0.7 " + scrapeTimestamp2s + "\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"disk_usage_ratio\" " + - "help: \"percentage used\" " + - "type: GAUGE " + - "metric { " + - "label { name: \"device\" value: \"/dev/sda1\" } " + - "gauge { value: 0.2 } " + - "timestamp_ms: 1672850685829 " + - "} metric { " + - "label { name: \"device\" value: \"/dev/sda2\" } " + - "gauge { value: 0.7 } " + - "timestamp_ms: 1672850585820 " + - "}"; - //@formatter:on - GaugeSnapshot gauge = GaugeSnapshot.builder() - .name("disk_usage_ratio") - .help("percentage used") - .unit(new Unit("ratio")) - .dataPoint(GaugeDataPointSnapshot.builder() - .value(0.7) - .labels(Labels.builder() - .label("device", "/dev/sda2") - .build()) - .exemplar(exemplar2) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .dataPoint(GaugeDataPointSnapshot.builder() - .value(0.2) - .labels(Labels.builder() - .label("device", "/dev/sda1") - .build()) - .exemplar(exemplar1) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, gauge); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, gauge); - assertPrometheusText(prometheusText, gauge); - assertOpenMetricsTextWithoutCreated(openMetricsText, gauge); - assertPrometheusTextWithoutCreated(prometheusText, gauge); - assertPrometheusProtobuf(prometheusProtobuf, gauge); - } + @Test + public void 
testGaugeComplete() throws IOException { + String openMetricsText = + "" + + "# TYPE disk_usage_ratio gauge\n" + + "# UNIT disk_usage_ratio ratio\n" + + "# HELP disk_usage_ratio percentage used\n" + + "disk_usage_ratio{device=\"/dev/sda1\"} 0.2 " + + scrapeTimestamp1s + + "\n" + + "disk_usage_ratio{device=\"/dev/sda2\"} 0.7 " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE disk_usage_ratio gauge\n" + + "# UNIT disk_usage_ratio ratio\n" + + "# HELP disk_usage_ratio percentage used\n" + + "disk_usage_ratio{device=\"/dev/sda1\"} 0.2 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "disk_usage_ratio{device=\"/dev/sda2\"} 0.7 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP disk_usage_ratio percentage used\n" + + "# TYPE disk_usage_ratio gauge\n" + + "disk_usage_ratio{device=\"/dev/sda1\"} 0.2 " + + scrapeTimestamp1s + + "\n" + + "disk_usage_ratio{device=\"/dev/sda2\"} 0.7 " + + scrapeTimestamp2s + + "\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"disk_usage_ratio\" " + + "help: \"percentage used\" " + + "type: GAUGE " + + "metric { " + + "label { name: \"device\" value: \"/dev/sda1\" } " + + "gauge { value: 0.2 } " + + "timestamp_ms: 1672850685829 " + + "} metric { " + + "label { name: \"device\" value: \"/dev/sda2\" } " + + "gauge { value: 0.7 } " + + "timestamp_ms: 1672850585820 " + + "}"; + // @formatter:on + GaugeSnapshot gauge = + GaugeSnapshot.builder() + .name("disk_usage_ratio") + .help("percentage used") + .unit(new Unit("ratio")) + .dataPoint( + GaugeDataPointSnapshot.builder() + .value(0.7) + .labels(Labels.builder().label("device", "/dev/sda2").build()) + .exemplar(exemplar2) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .dataPoint( + GaugeDataPointSnapshot.builder() + .value(0.2) + .labels(Labels.builder().label("device", "/dev/sda1").build()) + .exemplar(exemplar1) + .scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, gauge); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, gauge); + assertPrometheusText(prometheusText, gauge); + assertOpenMetricsTextWithoutCreated(openMetricsText, gauge); + assertPrometheusTextWithoutCreated(prometheusText, gauge); + assertPrometheusProtobuf(prometheusProtobuf, gauge); + } - @Test - public void testGaugeMinimal() throws IOException { - String openMetricsText = "" + - "# TYPE temperature_centigrade gauge\n" + - "temperature_centigrade 22.3\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE temperature_centigrade gauge\n" + - "temperature_centigrade 22.3\n"; - String prometheusProtobuf = "" + - "name: \"temperature_centigrade\" type: GAUGE metric { gauge { value: 22.3 } }"; - GaugeSnapshot gauge = GaugeSnapshot.builder() - .name("temperature_centigrade") - .dataPoint(GaugeDataPointSnapshot.builder() - .value(22.3) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, gauge); - assertPrometheusText(prometheusText, gauge); - assertOpenMetricsTextWithoutCreated(openMetricsText, gauge); - assertPrometheusTextWithoutCreated(prometheusText, gauge); - assertPrometheusProtobuf(prometheusProtobuf, gauge); - } + @Test + public void testGaugeMinimal() throws IOException { + String openMetricsText = + "" + "# TYPE temperature_centigrade gauge\n" + "temperature_centigrade 22.3\n" + "# EOF\n"; + String prometheusText = + "" + "# TYPE 
temperature_centigrade gauge\n" + "temperature_centigrade 22.3\n"; + String prometheusProtobuf = + "" + "name: \"temperature_centigrade\" type: GAUGE metric { gauge { value: 22.3 } }"; + GaugeSnapshot gauge = + GaugeSnapshot.builder() + .name("temperature_centigrade") + .dataPoint(GaugeDataPointSnapshot.builder().value(22.3).build()) + .build(); + assertOpenMetricsText(openMetricsText, gauge); + assertPrometheusText(prometheusText, gauge); + assertOpenMetricsTextWithoutCreated(openMetricsText, gauge); + assertPrometheusTextWithoutCreated(prometheusText, gauge); + assertPrometheusProtobuf(prometheusProtobuf, gauge); + } - @Test - public void testGaugeWithDots() throws IOException { - String openMetricsText = "" + - "# TYPE my_temperature_celsius gauge\n" + - "# UNIT my_temperature_celsius celsius\n" + - "# HELP my_temperature_celsius Temperature\n" + - "my_temperature_celsius{location_id=\"data-center-1\"} 23.0\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE my_temperature_celsius gauge\n" + - "# UNIT my_temperature_celsius celsius\n" + - "# HELP my_temperature_celsius Temperature\n" + - "my_temperature_celsius{location_id=\"data-center-1\"} 23.0 # " + exemplarWithDotsString + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP my_temperature_celsius Temperature\n" + - "# TYPE my_temperature_celsius gauge\n" + - "my_temperature_celsius{location_id=\"data-center-1\"} 23.0\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"my_temperature_celsius\" " + - "help: \"Temperature\" " + - "type: GAUGE " + - "metric { " + - "label { name: \"location_id\" value: \"data-center-1\" } " + - "gauge { " + - "value: 23.0 " + - "} " + - "}"; - //@formatter:on + @Test + public void testGaugeWithDots() throws IOException { + String openMetricsText = + "" + + "# TYPE my_temperature_celsius gauge\n" + + "# UNIT my_temperature_celsius celsius\n" + + "# HELP my_temperature_celsius Temperature\n" + + "my_temperature_celsius{location_id=\"data-center-1\"} 23.0\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE my_temperature_celsius gauge\n" + + "# UNIT my_temperature_celsius celsius\n" + + "# HELP my_temperature_celsius Temperature\n" + + "my_temperature_celsius{location_id=\"data-center-1\"} 23.0 # " + + exemplarWithDotsString + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP my_temperature_celsius Temperature\n" + + "# TYPE my_temperature_celsius gauge\n" + + "my_temperature_celsius{location_id=\"data-center-1\"} 23.0\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"my_temperature_celsius\" " + + "help: \"Temperature\" " + + "type: GAUGE " + + "metric { " + + "label { name: \"location_id\" value: \"data-center-1\" } " + + "gauge { " + + "value: 23.0 " + + "} " + + "}"; + // @formatter:on - GaugeSnapshot gauge = GaugeSnapshot.builder() - .name("my.temperature.celsius") - .help("Temperature") - .unit(Unit.CELSIUS) - .dataPoint(GaugeDataPointSnapshot.builder() - .value(23.0) - .labels(Labels.builder() - .label("location.id", "data-center-1") - .build()) - .exemplar(exemplarWithDots) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, gauge); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, gauge); - assertPrometheusText(prometheusText, gauge); - assertPrometheusProtobuf(prometheusProtobuf, gauge); - } + GaugeSnapshot gauge = + GaugeSnapshot.builder() + .name("my.temperature.celsius") + .help("Temperature") + 
.unit(Unit.CELSIUS) + .dataPoint( + GaugeDataPointSnapshot.builder() + .value(23.0) + .labels(Labels.builder().label("location.id", "data-center-1").build()) + .exemplar(exemplarWithDots) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, gauge); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, gauge); + assertPrometheusText(prometheusText, gauge); + assertPrometheusProtobuf(prometheusProtobuf, gauge); + } - @Test - public void testSummaryComplete() throws IOException { - String openMetricsText = "" + - "# TYPE http_request_duration_seconds summary\n" + - "# UNIT http_request_duration_seconds seconds\n" + - "# HELP http_request_duration_seconds request duration\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_count{status=\"500\"} 7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE http_request_duration_seconds summary\n" + - "# UNIT http_request_duration_seconds seconds\n" + - "# HELP http_request_duration_seconds request duration\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "http_request_duration_seconds_count{status=\"200\"} 3 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "http_request_duration_seconds_count{status=\"500\"} 7 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_created{status=\"500\"} " + 
createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP http_request_duration_seconds request duration\n" + - "# TYPE http_request_duration_seconds summary\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_count{status=\"500\"} 7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + scrapeTimestamp2s + "\n" + - "# HELP http_request_duration_seconds_created request duration\n" + - "# TYPE http_request_duration_seconds_created gauge\n" + - "http_request_duration_seconds_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n"; - String openMetricsTextWithoutCreated = "" + - "# TYPE http_request_duration_seconds summary\n" + - "# UNIT http_request_duration_seconds seconds\n" + - "# HELP http_request_duration_seconds request duration\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_count{status=\"500\"} 7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusTextWithoutCreated = "" + - "# HELP http_request_duration_seconds request duration\n" + - "# TYPE http_request_duration_seconds summary\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + scrapeTimestamp1s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + scrapeTimestamp2s + "\n" + - 
"http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_count{status=\"500\"} 7 " + scrapeTimestamp2s + "\n" + - "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + scrapeTimestamp2s + "\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"http_request_duration_seconds\" " + - "help: \"request duration\" " + - "type: SUMMARY " + - "metric { " + - "label { name: \"status\" value: \"200\" } " + - "summary { " + - "sample_count: 3 " + - "sample_sum: 1.2 " + - "quantile { quantile: 0.5 value: 225.3 } " + - "quantile { quantile: 0.9 value: 240.7 } " + - "quantile { quantile: 0.95 value: 245.1 } " + - "} " + - "timestamp_ms: 1672850685829 " + - "} metric { " + - "label { name: \"status\" value: \"500\" } " + - "summary { " + - "sample_count: 7 " + - "sample_sum: 2.2 " + - "quantile { quantile: 0.5 value: 225.3 } " + - "quantile { quantile: 0.9 value: 240.7 } " + - "quantile { quantile: 0.95 value: 245.1 } " + - "} " + "" + - "timestamp_ms: 1672850585820 " + - "}"; - //@formatter:on - SummarySnapshot summary = SummarySnapshot.builder() - .name("http_request_duration_seconds") - .help("request duration") - .unit(Unit.SECONDS) - .dataPoint(SummaryDataPointSnapshot.builder() - .count(7) - .sum(2.2) - .quantiles(Quantiles.builder() - .quantile(0.5, 225.3) - .quantile(0.9, 240.7) - .quantile(0.95, 245.1) - .build()) - .labels(Labels.builder() - .label("status", "500") - .build()) - .exemplars(Exemplars.of(exemplar2)) - .createdTimestampMillis(createdTimestamp2) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .dataPoint(SummaryDataPointSnapshot.builder() - .count(3) - .sum(1.2) - .quantiles(Quantiles.builder() - .quantile(0.5, 225.3) - .quantile(0.9, 240.7) - .quantile(0.95, 245.1) - .build()) - .labels(Labels.builder() - .label("status", "200") - .build()) - .exemplars(Exemplars.of(exemplar1)) - .createdTimestampMillis(createdTimestamp1) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, summary); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, summary); - assertPrometheusText(prometheusText, summary); - assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, summary); - assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, summary); - assertPrometheusProtobuf(prometheusProtobuf, summary); - } + @Test + public void testSummaryComplete() throws IOException { + String openMetricsText = + "" + + "# TYPE http_request_duration_seconds summary\n" + + "# UNIT http_request_duration_seconds seconds\n" + + "# HELP http_request_duration_seconds request duration\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp2s 
+ + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_count{status=\"500\"} 7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE http_request_duration_seconds summary\n" + + "# UNIT http_request_duration_seconds seconds\n" + + "# HELP http_request_duration_seconds request duration\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "http_request_duration_seconds_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "http_request_duration_seconds_count{status=\"500\"} 7 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP http_request_duration_seconds request duration\n" + + "# TYPE http_request_duration_seconds summary\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_count{status=\"500\"} 7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + + scrapeTimestamp2s + + "\n" + + "# HELP http_request_duration_seconds_created request duration\n" + + "# TYPE 
http_request_duration_seconds_created gauge\n" + + "http_request_duration_seconds_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n"; + String openMetricsTextWithoutCreated = + "" + + "# TYPE http_request_duration_seconds summary\n" + + "# UNIT http_request_duration_seconds seconds\n" + + "# HELP http_request_duration_seconds request duration\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_count{status=\"500\"} 7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusTextWithoutCreated = + "" + + "# HELP http_request_duration_seconds request duration\n" + + "# TYPE http_request_duration_seconds summary\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"200\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds_sum{status=\"200\"} 1.2 " + + scrapeTimestamp1s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.5\"} 225.3 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.9\"} 240.7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds{status=\"500\",quantile=\"0.95\"} 245.1 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_count{status=\"500\"} 7 " + + scrapeTimestamp2s + + "\n" + + "http_request_duration_seconds_sum{status=\"500\"} 2.2 " + + scrapeTimestamp2s + + "\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"http_request_duration_seconds\" " + + "help: \"request duration\" " + + "type: SUMMARY " + + "metric { " + + "label { name: \"status\" value: \"200\" } " + + "summary { " + + "sample_count: 3 " + + "sample_sum: 1.2 " + + "quantile { quantile: 0.5 value: 225.3 } " + + "quantile { quantile: 0.9 value: 240.7 } " + + "quantile { quantile: 0.95 value: 245.1 } " + + "} " + + "timestamp_ms: 1672850685829 " + + "} metric { " + + "label { name: \"status\" value: \"500\" } " + + "summary { " + + "sample_count: 7 " + + "sample_sum: 2.2 " + + "quantile { quantile: 0.5 value: 225.3 } " + + "quantile { quantile: 0.9 value: 240.7 } " + + "quantile { quantile: 0.95 value: 245.1 } " + + "} " + + "" + + "timestamp_ms: 1672850585820 " + + "}"; + // @formatter:on + SummarySnapshot 
summary = + SummarySnapshot.builder() + .name("http_request_duration_seconds") + .help("request duration") + .unit(Unit.SECONDS) + .dataPoint( + SummaryDataPointSnapshot.builder() + .count(7) + .sum(2.2) + .quantiles( + Quantiles.builder() + .quantile(0.5, 225.3) + .quantile(0.9, 240.7) + .quantile(0.95, 245.1) + .build()) + .labels(Labels.builder().label("status", "500").build()) + .exemplars(Exemplars.of(exemplar2)) + .createdTimestampMillis(createdTimestamp2) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .dataPoint( + SummaryDataPointSnapshot.builder() + .count(3) + .sum(1.2) + .quantiles( + Quantiles.builder() + .quantile(0.5, 225.3) + .quantile(0.9, 240.7) + .quantile(0.95, 245.1) + .build()) + .labels(Labels.builder().label("status", "200").build()) + .exemplars(Exemplars.of(exemplar1)) + .createdTimestampMillis(createdTimestamp1) + .scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, summary); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, summary); + assertPrometheusText(prometheusText, summary); + assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, summary); + assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, summary); + assertPrometheusProtobuf(prometheusProtobuf, summary); + } - @Test - public void testSummaryWithoutQuantiles() throws IOException { - String openMetricsText = "" + - "# TYPE latency_seconds summary\n" + - "# UNIT latency_seconds seconds\n" + - "# HELP latency_seconds latency\n" + - "latency_seconds_count 3\n" + - "latency_seconds_sum 1.2\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP latency_seconds latency\n" + - "# TYPE latency_seconds summary\n" + - "latency_seconds_count 3\n" + - "latency_seconds_sum 1.2\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"latency_seconds\" " + - "help: \"latency\" " + - "type: SUMMARY " + - "metric { " + - "summary { " + - "sample_count: 3 " + - "sample_sum: 1.2 " + - "} " + - "}"; - //@formatter:on - SummarySnapshot summary = SummarySnapshot.builder() - .name("latency_seconds") - .help("latency") - .unit(Unit.SECONDS) - .dataPoint(SummaryDataPointSnapshot.builder() - .count(3) - .sum(1.2) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, summary); - assertPrometheusText(prometheusText, summary); - assertOpenMetricsTextWithoutCreated(openMetricsText, summary); - assertPrometheusTextWithoutCreated(prometheusText, summary); - assertPrometheusProtobuf(prometheusProtobuf, summary); - } + @Test + public void testSummaryWithoutQuantiles() throws IOException { + String openMetricsText = + "" + + "# TYPE latency_seconds summary\n" + + "# UNIT latency_seconds seconds\n" + + "# HELP latency_seconds latency\n" + + "latency_seconds_count 3\n" + + "latency_seconds_sum 1.2\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP latency_seconds latency\n" + + "# TYPE latency_seconds summary\n" + + "latency_seconds_count 3\n" + + "latency_seconds_sum 1.2\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"latency_seconds\" " + + "help: \"latency\" " + + "type: SUMMARY " + + "metric { " + + "summary { " + + "sample_count: 3 " + + "sample_sum: 1.2 " + + "} " + + "}"; + // @formatter:on + SummarySnapshot summary = + SummarySnapshot.builder() + .name("latency_seconds") + .help("latency") + .unit(Unit.SECONDS) + .dataPoint(SummaryDataPointSnapshot.builder().count(3).sum(1.2).build()) + .build(); + 
assertOpenMetricsText(openMetricsText, summary); + assertPrometheusText(prometheusText, summary); + assertOpenMetricsTextWithoutCreated(openMetricsText, summary); + assertPrometheusTextWithoutCreated(prometheusText, summary); + assertPrometheusProtobuf(prometheusProtobuf, summary); + } - @Test - public void testSummaryNoCountAndSum() throws IOException { - String openMetricsText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds{quantile=\"0.95\"} 200.0\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds{quantile=\"0.95\"} 200.0\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"latency_seconds\" " + - "type: SUMMARY " + - "metric { " + - "summary { " + - "quantile { quantile: 0.95 value: 200.0 } " + - "} " + - "}"; - //@formatter:on - SummarySnapshot summary = SummarySnapshot.builder() - .name("latency_seconds") - .dataPoint(SummaryDataPointSnapshot.builder() - .quantiles(Quantiles.builder().quantile(0.95, 200.0).build()) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, summary); - assertPrometheusText(prometheusText, summary); - assertOpenMetricsTextWithoutCreated(openMetricsText, summary); - assertPrometheusTextWithoutCreated(prometheusText, summary); - assertPrometheusProtobuf(prometheusProtobuf, summary); - } + @Test + public void testSummaryNoCountAndSum() throws IOException { + String openMetricsText = + "" + + "# TYPE latency_seconds summary\n" + + "latency_seconds{quantile=\"0.95\"} 200.0\n" + + "# EOF\n"; + String prometheusText = + "" + "# TYPE latency_seconds summary\n" + "latency_seconds{quantile=\"0.95\"} 200.0\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"latency_seconds\" " + + "type: SUMMARY " + + "metric { " + + "summary { " + + "quantile { quantile: 0.95 value: 200.0 } " + + "} " + + "}"; + // @formatter:on + SummarySnapshot summary = + SummarySnapshot.builder() + .name("latency_seconds") + .dataPoint( + SummaryDataPointSnapshot.builder() + .quantiles(Quantiles.builder().quantile(0.95, 200.0).build()) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, summary); + assertPrometheusText(prometheusText, summary); + assertOpenMetricsTextWithoutCreated(openMetricsText, summary); + assertPrometheusTextWithoutCreated(prometheusText, summary); + assertPrometheusProtobuf(prometheusProtobuf, summary); + } - @Test - public void testSummaryJustCount() throws IOException { - String openMetricsText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds_count 1\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds_count 1\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"latency_seconds\" " + - "type: SUMMARY " + - "metric { " + - "summary { " + - "sample_count: 1 " + - "} " + - "}"; - //@formatter:on - SummarySnapshot summary = SummarySnapshot.builder() - .name("latency_seconds") - .dataPoint(SummaryDataPointSnapshot.builder() - .count(1) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, summary); - assertPrometheusText(prometheusText, summary); - assertOpenMetricsTextWithoutCreated(openMetricsText, summary); - assertPrometheusTextWithoutCreated(prometheusText, summary); - assertPrometheusProtobuf(prometheusProtobuf, summary); - } + @Test + public void testSummaryJustCount() throws IOException { + String openMetricsText = + "" + "# TYPE latency_seconds summary\n" + "latency_seconds_count 1\n" + "# EOF\n"; + String prometheusText = "" + "# TYPE 
latency_seconds summary\n" + "latency_seconds_count 1\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"latency_seconds\" " + + "type: SUMMARY " + + "metric { " + + "summary { " + + "sample_count: 1 " + + "} " + + "}"; + // @formatter:on + SummarySnapshot summary = + SummarySnapshot.builder() + .name("latency_seconds") + .dataPoint(SummaryDataPointSnapshot.builder().count(1).build()) + .build(); + assertOpenMetricsText(openMetricsText, summary); + assertPrometheusText(prometheusText, summary); + assertOpenMetricsTextWithoutCreated(openMetricsText, summary); + assertPrometheusTextWithoutCreated(prometheusText, summary); + assertPrometheusProtobuf(prometheusProtobuf, summary); + } - @Test - public void testSummaryJustSum() throws IOException { - String openMetricsText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds_sum 12.3\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds_sum 12.3\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"latency_seconds\" " + - "type: SUMMARY " + - "metric { " + - "summary { " + - "sample_sum: 12.3 " + - "} " + - "}"; - //@formatter:on - SummarySnapshot summary = SummarySnapshot.builder() - .name("latency_seconds") - .dataPoint(SummaryDataPointSnapshot.builder() - .sum(12.3) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, summary); - assertPrometheusText(prometheusText, summary); - assertOpenMetricsTextWithoutCreated(openMetricsText, summary); - assertPrometheusTextWithoutCreated(prometheusText, summary); - assertPrometheusProtobuf(prometheusProtobuf, summary); - } + @Test + public void testSummaryJustSum() throws IOException { + String openMetricsText = + "" + "# TYPE latency_seconds summary\n" + "latency_seconds_sum 12.3\n" + "# EOF\n"; + String prometheusText = "" + "# TYPE latency_seconds summary\n" + "latency_seconds_sum 12.3\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"latency_seconds\" " + + "type: SUMMARY " + + "metric { " + + "summary { " + + "sample_sum: 12.3 " + + "} " + + "}"; + // @formatter:on + SummarySnapshot summary = + SummarySnapshot.builder() + .name("latency_seconds") + .dataPoint(SummaryDataPointSnapshot.builder().sum(12.3).build()) + .build(); + assertOpenMetricsText(openMetricsText, summary); + assertPrometheusText(prometheusText, summary); + assertOpenMetricsTextWithoutCreated(openMetricsText, summary); + assertPrometheusTextWithoutCreated(prometheusText, summary); + assertPrometheusProtobuf(prometheusProtobuf, summary); + } - @Test - public void testSummaryEmptyData() throws IOException { - // SummaryData can be present but empty (no count, no sum, no quantiles). - // This should be treated like no data is present. - SummarySnapshot summary = SummarySnapshot.builder() - .name("latency_seconds") - .help("latency") - .unit(Unit.SECONDS) - .dataPoint(SummaryDataPointSnapshot.builder().build()) - .build(); - assertOpenMetricsText("# EOF\n", summary); - assertPrometheusText("", summary); - assertOpenMetricsTextWithoutCreated("# EOF\n", summary); - assertPrometheusTextWithoutCreated("", summary); - assertPrometheusProtobuf("", summary); - } + @Test + public void testSummaryEmptyData() throws IOException { + // SummaryData can be present but empty (no count, no sum, no quantiles). + // This should be treated like no data is present. 
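// As the assertions below show, such an empty data point yields only "# EOF\n" in the
// OpenMetrics text format and empty output in the Prometheus text and protobuf formats.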
+ SummarySnapshot summary = + SummarySnapshot.builder() + .name("latency_seconds") + .help("latency") + .unit(Unit.SECONDS) + .dataPoint(SummaryDataPointSnapshot.builder().build()) + .build(); + assertOpenMetricsText("# EOF\n", summary); + assertPrometheusText("", summary); + assertOpenMetricsTextWithoutCreated("# EOF\n", summary); + assertPrometheusTextWithoutCreated("", summary); + assertPrometheusProtobuf("", summary); + } - @Test - public void testSummaryEmptyAndNonEmpty() throws IOException { - String openMetricsText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds_count{path=\"/v2\"} 2\n" + - "latency_seconds_sum{path=\"/v2\"} 10.7\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE latency_seconds summary\n" + - "latency_seconds_count{path=\"/v2\"} 2\n" + - "latency_seconds_sum{path=\"/v2\"} 10.7\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"latency_seconds\" " + - "type: SUMMARY " + - "metric { " + - "label { name: \"path\" value: \"/v2\" } " + - "summary { " + - "sample_count: 2 " + - "sample_sum: 10.7 " + - "} " + - "}"; - //@formatter:on - SummarySnapshot summary = SummarySnapshot.builder() - .name("latency_seconds") - .dataPoint(SummaryDataPointSnapshot.builder() - .labels(Labels.of("path", "/v1")) - .build()) - .dataPoint(SummaryDataPointSnapshot.builder() - .labels(Labels.of("path", "/v2")) - .count(2) - .sum(10.7) - .build()) - .dataPoint(SummaryDataPointSnapshot.builder() - .labels(Labels.of("path", "/v3")) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, summary); - assertPrometheusText(prometheusText, summary); - assertOpenMetricsTextWithoutCreated(openMetricsText, summary); - assertPrometheusTextWithoutCreated(prometheusText, summary); - assertPrometheusProtobuf(prometheusProtobuf, summary); - } + @Test + public void testSummaryEmptyAndNonEmpty() throws IOException { + String openMetricsText = + "" + + "# TYPE latency_seconds summary\n" + + "latency_seconds_count{path=\"/v2\"} 2\n" + + "latency_seconds_sum{path=\"/v2\"} 10.7\n" + + "# EOF\n"; + String prometheusText = + "" + + "# TYPE latency_seconds summary\n" + + "latency_seconds_count{path=\"/v2\"} 2\n" + + "latency_seconds_sum{path=\"/v2\"} 10.7\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"latency_seconds\" " + + "type: SUMMARY " + + "metric { " + + "label { name: \"path\" value: \"/v2\" } " + + "summary { " + + "sample_count: 2 " + + "sample_sum: 10.7 " + + "} " + + "}"; + // @formatter:on + SummarySnapshot summary = + SummarySnapshot.builder() + .name("latency_seconds") + .dataPoint(SummaryDataPointSnapshot.builder().labels(Labels.of("path", "/v1")).build()) + .dataPoint( + SummaryDataPointSnapshot.builder() + .labels(Labels.of("path", "/v2")) + .count(2) + .sum(10.7) + .build()) + .dataPoint(SummaryDataPointSnapshot.builder().labels(Labels.of("path", "/v3")).build()) + .build(); + assertOpenMetricsText(openMetricsText, summary); + assertPrometheusText(prometheusText, summary); + assertOpenMetricsTextWithoutCreated(openMetricsText, summary); + assertPrometheusTextWithoutCreated(prometheusText, summary); + assertPrometheusProtobuf(prometheusProtobuf, summary); + } - @Test - public void testSummaryWithDots() throws IOException { - String openMetricsText = "" + - "# TYPE my_request_duration_seconds summary\n" + - "# UNIT my_request_duration_seconds seconds\n" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 1\n" + - 
"my_request_duration_seconds_sum{http_path=\"/hello\"} 0.03\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE my_request_duration_seconds summary\n" + - "# UNIT my_request_duration_seconds seconds\n" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 1 # " + exemplarWithDotsString + "\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.03\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "# TYPE my_request_duration_seconds summary\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 1\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.03\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"my_request_duration_seconds\" " + - "help: \"Request duration in seconds\" " + - "type: SUMMARY " + - "metric { " + - "label { name: \"http_path\" value: \"/hello\" } " + - "summary { sample_count: 1 sample_sum: 0.03 } " + - "}"; - //@formatter:on + @Test + public void testSummaryWithDots() throws IOException { + String openMetricsText = + "" + + "# TYPE my_request_duration_seconds summary\n" + + "# UNIT my_request_duration_seconds seconds\n" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 1\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.03\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE my_request_duration_seconds summary\n" + + "# UNIT my_request_duration_seconds seconds\n" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 1 # " + + exemplarWithDotsString + + "\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.03\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "# TYPE my_request_duration_seconds summary\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 1\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.03\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"my_request_duration_seconds\" " + + "help: \"Request duration in seconds\" " + + "type: SUMMARY " + + "metric { " + + "label { name: \"http_path\" value: \"/hello\" } " + + "summary { sample_count: 1 sample_sum: 0.03 } " + + "}"; + // @formatter:on - SummarySnapshot summary = SummarySnapshot.builder() - .name("my.request.duration.seconds") - .help("Request duration in seconds") - .unit(Unit.SECONDS) - .dataPoint(SummaryDataPointSnapshot.builder() - .count(1) - .sum(0.03) - .labels(Labels.builder() - .label("http.path", "/hello") - .build()) - .exemplars(Exemplars.of(exemplarWithDots)) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, summary); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, summary); - assertPrometheusText(prometheusText, summary); - assertPrometheusProtobuf(prometheusProtobuf, summary); - } + SummarySnapshot summary = + SummarySnapshot.builder() + .name("my.request.duration.seconds") + .help("Request duration in seconds") + .unit(Unit.SECONDS) + .dataPoint( + SummaryDataPointSnapshot.builder() + .count(1) + .sum(0.03) + .labels(Labels.builder().label("http.path", "/hello").build()) + .exemplars(Exemplars.of(exemplarWithDots)) + .build()) + .build(); + 
assertOpenMetricsText(openMetricsText, summary); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, summary); + assertPrometheusText(prometheusText, summary); + assertPrometheusProtobuf(prometheusProtobuf, summary); + } - @Test - public void testClassicHistogramComplete() throws Exception { - String openMetricsText = "" + - "# TYPE response_size_bytes histogram\n" + - "# UNIT response_size_bytes bytes\n" + - "# HELP response_size_bytes help\n" + - "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.1 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + scrapeTimestamp2s + " # " + exemplar1String + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"500\"} 5 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE response_size_bytes histogram\n" + - "# UNIT response_size_bytes bytes\n" + - "# HELP response_size_bytes help\n" + - "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"200\"} 3 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.1 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + scrapeTimestamp2s + " # " + exemplar1String + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"500\"} 5 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP response_size_bytes help\n" + - "# TYPE response_size_bytes histogram\n" + - "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.1 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + scrapeTimestamp2s + "\n" + - 
"response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_count{status=\"500\"} 5 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n" + - "# HELP response_size_bytes_created help\n" + - "# TYPE response_size_bytes_created gauge\n" + - "response_size_bytes_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "response_size_bytes_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n"; - String openMetricsTextWithoutCreated = "" + - "# TYPE response_size_bytes histogram\n" + - "# UNIT response_size_bytes bytes\n" + - "# HELP response_size_bytes help\n" + - "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.1 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + scrapeTimestamp2s + " # " + exemplar1String + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"500\"} 5 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusTextWithoutCreated = "" + - "# HELP response_size_bytes help\n" + - "# TYPE response_size_bytes histogram\n" + - "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_count{status=\"200\"} 3 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.1 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_count{status=\"500\"} 5 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"response_size_bytes\" " + - "help: \"help\" " + - "type: HISTOGRAM " + - "metric { " + - "label { name: \"status\" value: \"200\" } " + - "timestamp_ms: 1672850685829 " + - "histogram { " + - "sample_count: 3 " + - "sample_sum: 4.1 " + - "bucket { " + - "cumulative_count: 2 " + - "upper_bound: 2.2 " + - exemplar1protoString + " " + - "} bucket { " + - "cumulative_count: 3 " + - "upper_bound: Infinity " + - exemplar2protoString + " " + - "} " + - "} " + - "} metric { " + - "label { name: \"status\" value: \"500\" } " + - "timestamp_ms: 1672850585820 " + - "histogram { " + - "sample_count: 5 " + - "sample_sum: 3.2 " + - "bucket { " + - "cumulative_count: 3 " + - "upper_bound: 1.0 " + - "} bucket { " + - "cumulative_count: 5 " + - "upper_bound: 2.2 " + - exemplar1protoString + " " + - "} bucket { " + - "cumulative_count: 5 " + - "upper_bound: Infinity " + - exemplar2protoString + " " + - "} " + - "} " + - "}"; - //@formatter:on - HistogramSnapshot histogram = 
HistogramSnapshot.builder() - .name("response_size_bytes") - .help("help") - .unit(Unit.BYTES) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(3.2) - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(1.0, 3) - .bucket(2.2, 2) - .bucket(Double.POSITIVE_INFINITY, 0) - .build()) - .labels(Labels.of("status", "500")) - .exemplars(Exemplars.of(exemplar1, exemplar2)) - .createdTimestampMillis(createdTimestamp2) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(4.1) - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(2.2, 2) - .bucket(Double.POSITIVE_INFINITY, 1) - .build()) - .labels(Labels.of("status", "200")) - .exemplars(Exemplars.of(exemplar1, exemplar2)) - .createdTimestampMillis(createdTimestamp1) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, histogram); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, histogram); - assertPrometheusText(prometheusText, histogram); - assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, histogram); - assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, histogram); - assertPrometheusProtobuf(prometheusProtobuf, histogram); - } + @Test + public void testClassicHistogramComplete() throws Exception { + String openMetricsText = + "" + + "# TYPE response_size_bytes histogram\n" + + "# UNIT response_size_bytes bytes\n" + + "# HELP response_size_bytes help\n" + + "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.1 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + + scrapeTimestamp2s + + " # " + + exemplar1String + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"500\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE response_size_bytes histogram\n" + + "# UNIT response_size_bytes bytes\n" + + "# HELP response_size_bytes help\n" + + "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.1 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + + 
scrapeTimestamp2s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + + scrapeTimestamp2s + + " # " + + exemplar1String + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"500\"} 5 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP response_size_bytes help\n" + + "# TYPE response_size_bytes histogram\n" + + "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.1 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_count{status=\"500\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "# HELP response_size_bytes_created help\n" + + "# TYPE response_size_bytes_created gauge\n" + + "response_size_bytes_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n"; + String openMetricsTextWithoutCreated = + "" + + "# TYPE response_size_bytes histogram\n" + + "# UNIT response_size_bytes bytes\n" + + "# HELP response_size_bytes help\n" + + "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.1 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + + scrapeTimestamp2s + + " # " + + exemplar1String + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"500\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusTextWithoutCreated = + "" + + "# HELP response_size_bytes help\n" + + "# TYPE response_size_bytes histogram\n" + + "response_size_bytes_bucket{status=\"200\",le=\"2.2\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 3 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_count{status=\"200\"} 3 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.1 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"1.0\"} 3 " + + scrapeTimestamp2s + + "\n" + + 
"response_size_bytes_bucket{status=\"500\",le=\"2.2\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_count{status=\"500\"} 5 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"response_size_bytes\" " + + "help: \"help\" " + + "type: HISTOGRAM " + + "metric { " + + "label { name: \"status\" value: \"200\" } " + + "timestamp_ms: 1672850685829 " + + "histogram { " + + "sample_count: 3 " + + "sample_sum: 4.1 " + + "bucket { " + + "cumulative_count: 2 " + + "upper_bound: 2.2 " + + exemplar1protoString + + " " + + "} bucket { " + + "cumulative_count: 3 " + + "upper_bound: Infinity " + + exemplar2protoString + + " " + + "} " + + "} " + + "} metric { " + + "label { name: \"status\" value: \"500\" } " + + "timestamp_ms: 1672850585820 " + + "histogram { " + + "sample_count: 5 " + + "sample_sum: 3.2 " + + "bucket { " + + "cumulative_count: 3 " + + "upper_bound: 1.0 " + + "} bucket { " + + "cumulative_count: 5 " + + "upper_bound: 2.2 " + + exemplar1protoString + + " " + + "} bucket { " + + "cumulative_count: 5 " + + "upper_bound: Infinity " + + exemplar2protoString + + " " + + "} " + + "} " + + "}"; + // @formatter:on + HistogramSnapshot histogram = + HistogramSnapshot.builder() + .name("response_size_bytes") + .help("help") + .unit(Unit.BYTES) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(3.2) + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(1.0, 3) + .bucket(2.2, 2) + .bucket(Double.POSITIVE_INFINITY, 0) + .build()) + .labels(Labels.of("status", "500")) + .exemplars(Exemplars.of(exemplar1, exemplar2)) + .createdTimestampMillis(createdTimestamp2) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(4.1) + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(2.2, 2) + .bucket(Double.POSITIVE_INFINITY, 1) + .build()) + .labels(Labels.of("status", "200")) + .exemplars(Exemplars.of(exemplar1, exemplar2)) + .createdTimestampMillis(createdTimestamp1) + .scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, histogram); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, histogram); + assertPrometheusText(prometheusText, histogram); + assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, histogram); + assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, histogram); + assertPrometheusProtobuf(prometheusProtobuf, histogram); + } - @Test - public void testClassicHistogramMinimal() throws Exception { - // In OpenMetrics a histogram can have a _count if and only if it has a _sum. - // In Prometheus format, a histogram can have a _count without a _sum. 
- String openMetricsText = "" + - "# TYPE request_latency_seconds histogram\n" + - "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE request_latency_seconds histogram\n" + - "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + - "request_latency_seconds_count 2\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"request_latency_seconds\" " + - "type: HISTOGRAM " + - "metric { " + - "histogram { " + - "sample_count: 2 " + - "bucket { " + - "cumulative_count: 2 " + - "upper_bound: Infinity " + - "} " + - "} " + - "}"; - //@formatter:on - HistogramSnapshot histogram = HistogramSnapshot.builder() - .name("request_latency_seconds") - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(Double.POSITIVE_INFINITY, 2) - .build()) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, histogram); - assertPrometheusText(prometheusText, histogram); - assertOpenMetricsTextWithoutCreated(openMetricsText, histogram); - assertPrometheusTextWithoutCreated(prometheusText, histogram); - assertPrometheusProtobuf(prometheusProtobuf, histogram); - } + @Test + public void testClassicHistogramMinimal() throws Exception { + // In OpenMetrics a histogram can have a _count if and only if it has a _sum. + // In Prometheus format, a histogram can have a _count without a _sum. + String openMetricsText = + "" + + "# TYPE request_latency_seconds histogram\n" + + "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + + "# EOF\n"; + String prometheusText = + "" + + "# TYPE request_latency_seconds histogram\n" + + "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + + "request_latency_seconds_count 2\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"request_latency_seconds\" " + + "type: HISTOGRAM " + + "metric { " + + "histogram { " + + "sample_count: 2 " + + "bucket { " + + "cumulative_count: 2 " + + "upper_bound: Infinity " + + "} " + + "} " + + "}"; + // @formatter:on + HistogramSnapshot histogram = + HistogramSnapshot.builder() + .name("request_latency_seconds") + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(Double.POSITIVE_INFINITY, 2) + .build()) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, histogram); + assertPrometheusText(prometheusText, histogram); + assertOpenMetricsTextWithoutCreated(openMetricsText, histogram); + assertPrometheusTextWithoutCreated(prometheusText, histogram); + assertPrometheusProtobuf(prometheusProtobuf, histogram); + } - @Test - public void testClassicHistogramCountAndSum() throws Exception { - String openMetricsText = "" + - "# TYPE request_latency_seconds histogram\n" + - "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + - "request_latency_seconds_count 2\n" + - "request_latency_seconds_sum 3.2\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE request_latency_seconds histogram\n" + - "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + - "request_latency_seconds_count 2\n" + - "request_latency_seconds_sum 3.2\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"request_latency_seconds\" " + - "type: HISTOGRAM " + - "metric { " + - "histogram { " + - "sample_count: 2 " + - "sample_sum: 3.2 " + - "bucket { " + - "cumulative_count: 2 " + - "upper_bound: Infinity " + - "} " + - "} " + - "}"; - //@formatter:on - HistogramSnapshot histogram = HistogramSnapshot.builder() - 
.name("request_latency_seconds") - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(3.2) - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(Double.POSITIVE_INFINITY, 2) - .build()) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, histogram); - assertPrometheusText(prometheusText, histogram); - assertOpenMetricsTextWithoutCreated(openMetricsText, histogram); - assertPrometheusTextWithoutCreated(prometheusText, histogram); - assertPrometheusProtobuf(prometheusProtobuf, histogram); - } + @Test + public void testClassicHistogramCountAndSum() throws Exception { + String openMetricsText = + "" + + "# TYPE request_latency_seconds histogram\n" + + "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + + "request_latency_seconds_count 2\n" + + "request_latency_seconds_sum 3.2\n" + + "# EOF\n"; + String prometheusText = + "" + + "# TYPE request_latency_seconds histogram\n" + + "request_latency_seconds_bucket{le=\"+Inf\"} 2\n" + + "request_latency_seconds_count 2\n" + + "request_latency_seconds_sum 3.2\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"request_latency_seconds\" " + + "type: HISTOGRAM " + + "metric { " + + "histogram { " + + "sample_count: 2 " + + "sample_sum: 3.2 " + + "bucket { " + + "cumulative_count: 2 " + + "upper_bound: Infinity " + + "} " + + "} " + + "}"; + // @formatter:on + HistogramSnapshot histogram = + HistogramSnapshot.builder() + .name("request_latency_seconds") + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(3.2) + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(Double.POSITIVE_INFINITY, 2) + .build()) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, histogram); + assertPrometheusText(prometheusText, histogram); + assertOpenMetricsTextWithoutCreated(openMetricsText, histogram); + assertPrometheusTextWithoutCreated(prometheusText, histogram); + assertPrometheusProtobuf(prometheusProtobuf, histogram); + } - @Test - public void testClassicGaugeHistogramComplete() throws IOException { - String openMetricsText = "" + - "# TYPE cache_size_bytes gaugehistogram\n" + - "# UNIT cache_size_bytes bytes\n" + - "# HELP cache_size_bytes number of bytes in the cache\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gcount{db=\"items\"} 4 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_gsum{db=\"items\"} 17.0 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_created{db=\"items\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + scrapeTimestamp2s + " # " + exemplar1String + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gcount{db=\"options\"} 4 " + scrapeTimestamp2s + "\n" + - "cache_size_bytes_gsum{db=\"options\"} 18.0 " + scrapeTimestamp2s + "\n" + - "cache_size_bytes_created{db=\"options\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE cache_size_bytes gaugehistogram\n" + - "# UNIT cache_size_bytes bytes\n" + - "# HELP cache_size_bytes number of bytes in the cache\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - 
"cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gcount{db=\"items\"} 4 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gsum{db=\"items\"} 17.0 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_created{db=\"items\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + scrapeTimestamp2s + " # " + exemplar1String + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gcount{db=\"options\"} 4 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gsum{db=\"options\"} 18.0 " + scrapeTimestamp2s + "\n" + - "cache_size_bytes_created{db=\"options\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP cache_size_bytes number of bytes in the cache\n" + - "# TYPE cache_size_bytes histogram\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + scrapeTimestamp2s + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + scrapeTimestamp2s + "\n" + - "# HELP cache_size_bytes_gcount number of bytes in the cache\n" + - "# TYPE cache_size_bytes_gcount gauge\n" + - "cache_size_bytes_gcount{db=\"items\"} 4 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_gcount{db=\"options\"} 4 " + scrapeTimestamp2s + "\n" + - "# HELP cache_size_bytes_gsum number of bytes in the cache\n" + - "# TYPE cache_size_bytes_gsum gauge\n" + - "cache_size_bytes_gsum{db=\"items\"} 17.0 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_gsum{db=\"options\"} 18.0 " + scrapeTimestamp2s + "\n" + - "# HELP cache_size_bytes_created number of bytes in the cache\n" + - "# TYPE cache_size_bytes_created gauge\n" + - "cache_size_bytes_created{db=\"items\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_created{db=\"options\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n"; - String openMetricsTextWithoutCreated = "" + - "# TYPE cache_size_bytes gaugehistogram\n" + - "# UNIT cache_size_bytes bytes\n" + - "# HELP cache_size_bytes number of bytes in the cache\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gcount{db=\"items\"} 4 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_gsum{db=\"items\"} 17.0 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + scrapeTimestamp2s + " # " + exemplar1String + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "cache_size_bytes_gcount{db=\"options\"} 4 " + scrapeTimestamp2s + "\n" + - "cache_size_bytes_gsum{db=\"options\"} 18.0 " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusTextWithoutCreated = "" + - "# HELP cache_size_bytes number of bytes in the cache\n" + - "# TYPE cache_size_bytes histogram\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + scrapeTimestamp1s + "\n" + - 
"cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + scrapeTimestamp2s + "\n" + - "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + scrapeTimestamp2s + "\n" + - "# HELP cache_size_bytes_gcount number of bytes in the cache\n" + - "# TYPE cache_size_bytes_gcount gauge\n" + - "cache_size_bytes_gcount{db=\"items\"} 4 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_gcount{db=\"options\"} 4 " + scrapeTimestamp2s + "\n" + - "# HELP cache_size_bytes_gsum number of bytes in the cache\n" + - "# TYPE cache_size_bytes_gsum gauge\n" + - "cache_size_bytes_gsum{db=\"items\"} 17.0 " + scrapeTimestamp1s + "\n" + - "cache_size_bytes_gsum{db=\"options\"} 18.0 " + scrapeTimestamp2s + "\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"cache_size_bytes\" " + - "help: \"number of bytes in the cache\" " + - "type: GAUGE_HISTOGRAM " + - "metric { " + - "label { name: \"db\" value: \"items\" } " + - "timestamp_ms: 1672850685829 " + - "histogram { " + - "sample_count: 4 " + - "sample_sum: 17.0 " + - "bucket { " + - "cumulative_count: 3 " + - "upper_bound: 2.0 " + - exemplar1protoString + " " + - "} bucket { " + - "cumulative_count: 4 " + - "upper_bound: Infinity " + - exemplar2protoString + " " + - "} " + - "} " + - "} metric { " + - "label { name: \"db\" value: \"options\" } " + - "timestamp_ms: 1672850585820 " + - "histogram { " + - "sample_count: 4 " + - "sample_sum: 18.0 " + - "bucket { " + - "cumulative_count: 4 " + - "upper_bound: 2.0 " + - exemplar1protoString + " " + - "} bucket { " + - "cumulative_count: 4 " + - "upper_bound: Infinity " + - exemplar2protoString + " " + - "} " + - "} " + - "}"; - //@formatter:on - HistogramSnapshot gaugeHistogram = HistogramSnapshot.builder() - .gaugeHistogram(true) - .name("cache_size_bytes") - .help("number of bytes in the cache") - .unit(Unit.BYTES) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(17) - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(2.0, 3) - .bucket(Double.POSITIVE_INFINITY, 1) - .build()) - .labels(Labels.of("db", "items")) - .exemplars(Exemplars.of(exemplar1, exemplar2)) - .createdTimestampMillis(createdTimestamp1) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(18) - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(2.0, 4) - .bucket(Double.POSITIVE_INFINITY, 0) - .build() - ) - .labels(Labels.of("db", "options")) - .exemplars(Exemplars.of(exemplar1, exemplar2)) - .createdTimestampMillis(createdTimestamp2) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, gaugeHistogram); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, gaugeHistogram); - assertPrometheusText(prometheusText, gaugeHistogram); - assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, gaugeHistogram); - assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, gaugeHistogram); - assertPrometheusProtobuf(prometheusProtobuf, gaugeHistogram); - } + @Test + public void testClassicGaugeHistogramComplete() throws IOException { + String openMetricsText = + "" + + "# TYPE cache_size_bytes gaugehistogram\n" + + "# UNIT cache_size_bytes bytes\n" + + "# HELP cache_size_bytes number of bytes in the cache\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + + 
scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gcount{db=\"items\"} 4 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_gsum{db=\"items\"} 17.0 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_created{db=\"items\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + + scrapeTimestamp2s + + " # " + + exemplar1String + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gcount{db=\"options\"} 4 " + + scrapeTimestamp2s + + "\n" + + "cache_size_bytes_gsum{db=\"options\"} 18.0 " + + scrapeTimestamp2s + + "\n" + + "cache_size_bytes_created{db=\"options\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE cache_size_bytes gaugehistogram\n" + + "# UNIT cache_size_bytes bytes\n" + + "# HELP cache_size_bytes number of bytes in the cache\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gcount{db=\"items\"} 4 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gsum{db=\"items\"} 17.0 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_created{db=\"items\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + + scrapeTimestamp2s + + " # " + + exemplar1String + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gcount{db=\"options\"} 4 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gsum{db=\"options\"} 18.0 " + + scrapeTimestamp2s + + "\n" + + "cache_size_bytes_created{db=\"options\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP cache_size_bytes number of bytes in the cache\n" + + "# TYPE cache_size_bytes histogram\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + + scrapeTimestamp2s + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + + scrapeTimestamp2s + + "\n" + + "# HELP cache_size_bytes_gcount number of bytes in the cache\n" + + "# TYPE cache_size_bytes_gcount gauge\n" + + "cache_size_bytes_gcount{db=\"items\"} 4 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_gcount{db=\"options\"} 4 " + + scrapeTimestamp2s + + "\n" + + "# HELP cache_size_bytes_gsum number of bytes in the cache\n" + + "# TYPE cache_size_bytes_gsum gauge\n" + + "cache_size_bytes_gsum{db=\"items\"} 17.0 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_gsum{db=\"options\"} 18.0 " + + scrapeTimestamp2s + + "\n" + + "# HELP cache_size_bytes_created number of bytes in the cache\n" + + "# TYPE cache_size_bytes_created gauge\n" + + "cache_size_bytes_created{db=\"items\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_created{db=\"options\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n"; + String 
openMetricsTextWithoutCreated = + "" + + "# TYPE cache_size_bytes gaugehistogram\n" + + "# UNIT cache_size_bytes bytes\n" + + "# HELP cache_size_bytes number of bytes in the cache\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gcount{db=\"items\"} 4 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_gsum{db=\"items\"} 17.0 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + + scrapeTimestamp2s + + " # " + + exemplar1String + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "cache_size_bytes_gcount{db=\"options\"} 4 " + + scrapeTimestamp2s + + "\n" + + "cache_size_bytes_gsum{db=\"options\"} 18.0 " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusTextWithoutCreated = + "" + + "# HELP cache_size_bytes number of bytes in the cache\n" + + "# TYPE cache_size_bytes histogram\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"2.0\"} 3 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_bucket{db=\"items\",le=\"+Inf\"} 4 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"2.0\"} 4 " + + scrapeTimestamp2s + + "\n" + + "cache_size_bytes_bucket{db=\"options\",le=\"+Inf\"} 4 " + + scrapeTimestamp2s + + "\n" + + "# HELP cache_size_bytes_gcount number of bytes in the cache\n" + + "# TYPE cache_size_bytes_gcount gauge\n" + + "cache_size_bytes_gcount{db=\"items\"} 4 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_gcount{db=\"options\"} 4 " + + scrapeTimestamp2s + + "\n" + + "# HELP cache_size_bytes_gsum number of bytes in the cache\n" + + "# TYPE cache_size_bytes_gsum gauge\n" + + "cache_size_bytes_gsum{db=\"items\"} 17.0 " + + scrapeTimestamp1s + + "\n" + + "cache_size_bytes_gsum{db=\"options\"} 18.0 " + + scrapeTimestamp2s + + "\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"cache_size_bytes\" " + + "help: \"number of bytes in the cache\" " + + "type: GAUGE_HISTOGRAM " + + "metric { " + + "label { name: \"db\" value: \"items\" } " + + "timestamp_ms: 1672850685829 " + + "histogram { " + + "sample_count: 4 " + + "sample_sum: 17.0 " + + "bucket { " + + "cumulative_count: 3 " + + "upper_bound: 2.0 " + + exemplar1protoString + + " " + + "} bucket { " + + "cumulative_count: 4 " + + "upper_bound: Infinity " + + exemplar2protoString + + " " + + "} " + + "} " + + "} metric { " + + "label { name: \"db\" value: \"options\" } " + + "timestamp_ms: 1672850585820 " + + "histogram { " + + "sample_count: 4 " + + "sample_sum: 18.0 " + + "bucket { " + + "cumulative_count: 4 " + + "upper_bound: 2.0 " + + exemplar1protoString + + " " + + "} bucket { " + + "cumulative_count: 4 " + + "upper_bound: Infinity " + + exemplar2protoString + + " " + + "} " + + "} " + + "}"; + // @formatter:on + HistogramSnapshot gaugeHistogram = + HistogramSnapshot.builder() + .gaugeHistogram(true) + .name("cache_size_bytes") + .help("number of bytes in the cache") + .unit(Unit.BYTES) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(17) + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(2.0, 3) + .bucket(Double.POSITIVE_INFINITY, 1) + .build()) + .labels(Labels.of("db", "items")) + .exemplars(Exemplars.of(exemplar1, exemplar2)) + .createdTimestampMillis(createdTimestamp1) + 
.scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(18) + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(2.0, 4) + .bucket(Double.POSITIVE_INFINITY, 0) + .build()) + .labels(Labels.of("db", "options")) + .exemplars(Exemplars.of(exemplar1, exemplar2)) + .createdTimestampMillis(createdTimestamp2) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, gaugeHistogram); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, gaugeHistogram); + assertPrometheusText(prometheusText, gaugeHistogram); + assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, gaugeHistogram); + assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, gaugeHistogram); + assertPrometheusProtobuf(prometheusProtobuf, gaugeHistogram); + } - @Test - public void testClassicGaugeHistogramMinimal() throws IOException { - // In OpenMetrics a histogram can have a _count if and only if it has a _sum. - // In Prometheus format, a histogram can have a _count without a _sum. - String openMetricsText = "" + - "# TYPE queue_size_bytes gaugehistogram\n" + - "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE queue_size_bytes histogram\n" + - "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + - "# TYPE queue_size_bytes_gcount gauge\n" + - "queue_size_bytes_gcount 130\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"queue_size_bytes\" " + - "type: GAUGE_HISTOGRAM " + - "metric { " + - "histogram { " + - "sample_count: 130 " + - "bucket { " + - "cumulative_count: 130 " + - "upper_bound: Infinity " + - "} " + - "} " + - "}"; - //@formatter:on - HistogramSnapshot gaugeHistogram = HistogramSnapshot.builder() - .gaugeHistogram(true) - .name("queue_size_bytes") - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(Double.POSITIVE_INFINITY, 130) - .build()) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, gaugeHistogram); - assertPrometheusText(prometheusText, gaugeHistogram); - assertOpenMetricsTextWithoutCreated(openMetricsText, gaugeHistogram); - assertPrometheusTextWithoutCreated(prometheusText, gaugeHistogram); - assertPrometheusProtobuf(prometheusProtobuf, gaugeHistogram); - } + @Test + public void testClassicGaugeHistogramMinimal() throws IOException { + // In OpenMetrics a histogram can have a _count if and only if it has a _sum. + // In Prometheus format, a histogram can have a _count without a _sum. 
+ String openMetricsText = + "" + + "# TYPE queue_size_bytes gaugehistogram\n" + + "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + + "# EOF\n"; + String prometheusText = + "" + + "# TYPE queue_size_bytes histogram\n" + + "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + + "# TYPE queue_size_bytes_gcount gauge\n" + + "queue_size_bytes_gcount 130\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"queue_size_bytes\" " + + "type: GAUGE_HISTOGRAM " + + "metric { " + + "histogram { " + + "sample_count: 130 " + + "bucket { " + + "cumulative_count: 130 " + + "upper_bound: Infinity " + + "} " + + "} " + + "}"; + // @formatter:on + HistogramSnapshot gaugeHistogram = + HistogramSnapshot.builder() + .gaugeHistogram(true) + .name("queue_size_bytes") + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(Double.POSITIVE_INFINITY, 130) + .build()) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, gaugeHistogram); + assertPrometheusText(prometheusText, gaugeHistogram); + assertOpenMetricsTextWithoutCreated(openMetricsText, gaugeHistogram); + assertPrometheusTextWithoutCreated(prometheusText, gaugeHistogram); + assertPrometheusProtobuf(prometheusProtobuf, gaugeHistogram); + } - @Test - public void testClassicGaugeHistogramCountAndSum() throws IOException { - String openMetricsText = "" + - "# TYPE queue_size_bytes gaugehistogram\n" + - "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + - "queue_size_bytes_gcount 130\n" + - "queue_size_bytes_gsum 27000.0\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE queue_size_bytes histogram\n" + - "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + - "# TYPE queue_size_bytes_gcount gauge\n" + - "queue_size_bytes_gcount 130\n" + - "# TYPE queue_size_bytes_gsum gauge\n" + - "queue_size_bytes_gsum 27000.0\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"queue_size_bytes\" " + - "type: GAUGE_HISTOGRAM " + - "metric { " + - "histogram { " + - "sample_count: 130 " + - "sample_sum: 27000.0 " + - "bucket { " + - "cumulative_count: 130 " + - "upper_bound: Infinity " + - "} " + - "} " + - "}"; - //@formatter:on - HistogramSnapshot gaugeHistogram = HistogramSnapshot.builder() - .gaugeHistogram(true) - .name("queue_size_bytes") - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(27000) - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(Double.POSITIVE_INFINITY, 130) - .build()) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, gaugeHistogram); - assertPrometheusText(prometheusText, gaugeHistogram); - assertOpenMetricsTextWithoutCreated(openMetricsText, gaugeHistogram); - assertPrometheusTextWithoutCreated(prometheusText, gaugeHistogram); - assertPrometheusProtobuf(prometheusProtobuf, gaugeHistogram); - } + @Test + public void testClassicGaugeHistogramCountAndSum() throws IOException { + String openMetricsText = + "" + + "# TYPE queue_size_bytes gaugehistogram\n" + + "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + + "queue_size_bytes_gcount 130\n" + + "queue_size_bytes_gsum 27000.0\n" + + "# EOF\n"; + String prometheusText = + "" + + "# TYPE queue_size_bytes histogram\n" + + "queue_size_bytes_bucket{le=\"+Inf\"} 130\n" + + "# TYPE queue_size_bytes_gcount gauge\n" + + "queue_size_bytes_gcount 130\n" + + "# TYPE queue_size_bytes_gsum gauge\n" + + "queue_size_bytes_gsum 27000.0\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"queue_size_bytes\" " + + "type: 
GAUGE_HISTOGRAM " + + "metric { " + + "histogram { " + + "sample_count: 130 " + + "sample_sum: 27000.0 " + + "bucket { " + + "cumulative_count: 130 " + + "upper_bound: Infinity " + + "} " + + "} " + + "}"; + // @formatter:on + HistogramSnapshot gaugeHistogram = + HistogramSnapshot.builder() + .gaugeHistogram(true) + .name("queue_size_bytes") + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(27000) + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(Double.POSITIVE_INFINITY, 130) + .build()) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, gaugeHistogram); + assertPrometheusText(prometheusText, gaugeHistogram); + assertOpenMetricsTextWithoutCreated(openMetricsText, gaugeHistogram); + assertPrometheusTextWithoutCreated(prometheusText, gaugeHistogram); + assertPrometheusProtobuf(prometheusProtobuf, gaugeHistogram); + } - @Test - public void testClassicHistogramWithDots() throws IOException { - String openMetricsText = "" + - "# TYPE my_request_duration_seconds histogram\n" + - "# UNIT my_request_duration_seconds seconds\n" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 130 # " + exemplarWithDotsString + "\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 130\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.01\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE my_request_duration_seconds histogram\n" + - "# UNIT my_request_duration_seconds seconds\n" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 130 # " + exemplarWithDotsString + "\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 130 # " + exemplarWithDotsString + "\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.01\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "# TYPE my_request_duration_seconds histogram\n" + - "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 130\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 130\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.01\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"my_request_duration_seconds\" " + - "help: \"Request duration in seconds\" " + - "type: HISTOGRAM " + - "metric { " + - "label { name: \"http_path\" value: \"/hello\" } " + - "histogram { " + - "sample_count: 130 " + - "sample_sum: 0.01 " + - "bucket { cumulative_count: 130 upper_bound: Infinity " + exemplarWithDotsProtoString + " } " + - "} " + - "}"; - //@formatter:on + @Test + public void testClassicHistogramWithDots() throws IOException { + String openMetricsText = + "" + + "# TYPE my_request_duration_seconds histogram\n" + + "# UNIT my_request_duration_seconds seconds\n" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 130 # " + + exemplarWithDotsString + + "\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 130\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.01\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE my_request_duration_seconds histogram\n" + + "# UNIT my_request_duration_seconds seconds\n" + + "# HELP my_request_duration_seconds Request duration in 
seconds\n" + + "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 130 # " + + exemplarWithDotsString + + "\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 130 # " + + exemplarWithDotsString + + "\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.01\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "# TYPE my_request_duration_seconds histogram\n" + + "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 130\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 130\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 0.01\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"my_request_duration_seconds\" " + + "help: \"Request duration in seconds\" " + + "type: HISTOGRAM " + + "metric { " + + "label { name: \"http_path\" value: \"/hello\" } " + + "histogram { " + + "sample_count: 130 " + + "sample_sum: 0.01 " + + "bucket { cumulative_count: 130 upper_bound: Infinity " + + exemplarWithDotsProtoString + + " } " + + "} " + + "}"; + // @formatter:on - HistogramSnapshot histogram = HistogramSnapshot.builder() - .name("my.request.duration.seconds") - .help("Request duration in seconds") - .unit(Unit.SECONDS) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(0.01) - .labels(Labels.builder() - .label("http.path", "/hello") - .build()) - .classicHistogramBuckets(ClassicHistogramBuckets.builder() - .bucket(Double.POSITIVE_INFINITY, 130) - .build()) - .exemplars(Exemplars.of(exemplarWithDots)) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, histogram); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, histogram); - assertPrometheusText(prometheusText, histogram); - assertPrometheusProtobuf(prometheusProtobuf, histogram); - } + HistogramSnapshot histogram = + HistogramSnapshot.builder() + .name("my.request.duration.seconds") + .help("Request duration in seconds") + .unit(Unit.SECONDS) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(0.01) + .labels(Labels.builder().label("http.path", "/hello").build()) + .classicHistogramBuckets( + ClassicHistogramBuckets.builder() + .bucket(Double.POSITIVE_INFINITY, 130) + .build()) + .exemplars(Exemplars.of(exemplarWithDots)) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, histogram); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, histogram); + assertPrometheusText(prometheusText, histogram); + assertPrometheusProtobuf(prometheusProtobuf, histogram); + } - @Test - public void testNativeHistogramComplete() throws IOException { - String openMetricsText = "" + - "# TYPE response_size_bytes histogram\n" + - "# UNIT response_size_bytes bytes\n" + - "# HELP response_size_bytes help\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"200\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"500\"} 55 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + 
scrapeTimestamp2s + "\n" + - "response_size_bytes_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE response_size_bytes histogram\n" + - "# UNIT response_size_bytes bytes\n" + - "# HELP response_size_bytes help\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"200\"} 2 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"500\"} 55 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP response_size_bytes help\n" + - "# TYPE response_size_bytes histogram\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_count{status=\"200\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_count{status=\"500\"} 55 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n" + - "# HELP response_size_bytes_created help\n" + - "# TYPE response_size_bytes_created gauge\n" + - "response_size_bytes_created{status=\"200\"} " + createdTimestamp1s + " " + scrapeTimestamp1s + "\n" + - "response_size_bytes_created{status=\"500\"} " + createdTimestamp2s + " " + scrapeTimestamp2s + "\n"; - String openMetricsTextWithoutCreated = "" + - "# TYPE response_size_bytes histogram\n" + - "# UNIT response_size_bytes bytes\n" + - "# HELP response_size_bytes help\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + scrapeTimestamp1s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"200\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "response_size_bytes_count{status=\"500\"} 55 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheusTextWithoutCreated = "" + - "# HELP response_size_bytes help\n" + - "# TYPE response_size_bytes histogram\n" + - "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_count{status=\"200\"} 2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_sum{status=\"200\"} 4.2 " + scrapeTimestamp1s + "\n" + - "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_count{status=\"500\"} 55 " + scrapeTimestamp2s + "\n" + - "response_size_bytes_sum{status=\"500\"} 3.2 " + scrapeTimestamp2s + "\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"response_size_bytes\" " + - "help: \"help\" " + - "type: 
HISTOGRAM " + - "metric { " + - "label { name: \"status\" value: \"200\" } " + - "timestamp_ms: 1672850685829 " + - "histogram { " + - "sample_count: 2 " + - "sample_sum: 4.2 " + - "bucket { cumulative_count: 2 upper_bound: Infinity " + exemplar2protoString + " } " + - "schema: 5 " + - "zero_threshold: 0.0 " + - "zero_count: 0 " + - "positive_span { offset: 0 length: 1 } " + - "positive_delta: 2 " + - "} " + - "} metric { " + - "label { name: \"status\" value: \"500\" } " + - "timestamp_ms: 1672850585820 " + - "histogram { " + - "sample_count: 55 " + // bucket counts + zero count - "sample_sum: 3.2 " + - "bucket { cumulative_count: 55 upper_bound: Infinity " + exemplar2protoString + " } " + - "schema: 5 " + - "zero_threshold: 0.0 " + - "zero_count: 1 " + - "negative_span { offset: 0 length: 1 } " + - "negative_span { offset: 9 length: 1 } " + - "negative_delta: 1 " + - "negative_delta: -1 " + // span with count 0 - "positive_span { offset: 2 length: 3 } " + // span with 3 buckets (indexes 2-4) - "positive_span { offset: 7 length: 1 } " + // span with 1 bucket (index 12) - "positive_span { offset: 9 length: 4 } " + // span with gap of size 1 (indexes 22-25) - "positive_span { offset: 6 length: 5 } " + // span with gap of size 2 (indexes 32-36) - "positive_span { offset: 4 length: 2 } " + // span with gap of size 3 part 1 (indexes 41-42) - "positive_span { offset: 3 length: 2 } " + // span with gap of size 3 part 2 (indexes 46-47) - "positive_delta: 3 " + // index 2, count 3 - "positive_delta: 2 " + // index 3, count 5 - "positive_delta: -1 " + // index 4, count 4 - "positive_delta: 2 " + // index 12, count 6 - "positive_delta: -4 " + // index 22, count 2 - "positive_delta: -2 " + // index 23, gap - "positive_delta: 1 " + // index 24, count 1 - "positive_delta: 2 " + // index 25, count 3 - "positive_delta: 1 " + // index 32, count 4 - "positive_delta: -1 " + // index 33, count 3 - "positive_delta: -3 " + // index 34, gap - "positive_delta: 0 " + // index 35, gap - "positive_delta: 7 " + // index 36, count 7 - "positive_delta: -4 " + // index 41, count 3 - "positive_delta: 6 " + // index 42, count 9 - "positive_delta: -7 " + // index 46, count 2 - "positive_delta: -1 " + // index 47, count 1 - "} " + - "}"; - //@formatter:on - HistogramSnapshot nativeHistogram = HistogramSnapshot.builder() - .name("response_size_bytes") - .help("help") - .unit(Unit.BYTES) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(3.2) - .nativeSchema(5) - .nativeZeroCount(1) - .nativeBucketsForPositiveValues(NativeHistogramBuckets.builder() - // span with 3 buckets - .bucket(2, 3) - .bucket(3, 5) - .bucket(4, 4) - // span with just 1 bucket - .bucket(12, 6) - // span with gap of size 1 - .bucket(22, 2) - .bucket(24, 1) - .bucket(25, 3) - // span with gap of size 2 - .bucket(32, 4) - .bucket(33, 3) - .bucket(36, 7) - // span with gap of size 3 - .bucket(41, 3) - .bucket(42, 9) - .bucket(46, 2) - .bucket(47, 1) - .build()) - .nativeBucketsForNegativeValues(NativeHistogramBuckets.builder() - .bucket(0, 1) - .bucket(10, 0) // bucket with count 0 - .build()) - .labels(Labels.of("status", "500")) - .exemplars(Exemplars.of(exemplar1, exemplar2)) - .createdTimestampMillis(createdTimestamp2) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .sum(4.2) - .nativeSchema(5) - .nativeBucketsForPositiveValues(NativeHistogramBuckets.builder() - .bucket(0, 2) - .build()) - .labels(Labels.of("status", "200")) - 
.exemplars(Exemplars.of(exemplar1, exemplar2)) - .createdTimestampMillis(createdTimestamp1) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, nativeHistogram); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, nativeHistogram); - assertPrometheusText(prometheusText, nativeHistogram); - assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, nativeHistogram); - assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, nativeHistogram); - assertPrometheusProtobuf(prometheusProtobuf, nativeHistogram); - } + @Test + public void testNativeHistogramComplete() throws IOException { + String openMetricsText = + "" + + "# TYPE response_size_bytes histogram\n" + + "# UNIT response_size_bytes bytes\n" + + "# HELP response_size_bytes help\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"200\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"500\"} 55 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE response_size_bytes histogram\n" + + "# UNIT response_size_bytes bytes\n" + + "# HELP response_size_bytes help\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"200\"} 2 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_created{status=\"200\"} " + + createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"500\"} 55 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP response_size_bytes help\n" + + "# TYPE response_size_bytes histogram\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_count{status=\"200\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_count{status=\"500\"} 55 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "# HELP response_size_bytes_created help\n" + + "# TYPE response_size_bytes_created gauge\n" + + "response_size_bytes_created{status=\"200\"} " + + 
createdTimestamp1s + + " " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_created{status=\"500\"} " + + createdTimestamp2s + + " " + + scrapeTimestamp2s + + "\n"; + String openMetricsTextWithoutCreated = + "" + + "# TYPE response_size_bytes histogram\n" + + "# UNIT response_size_bytes bytes\n" + + "# HELP response_size_bytes help\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + + scrapeTimestamp1s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"200\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "response_size_bytes_count{status=\"500\"} 55 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheusTextWithoutCreated = + "" + + "# HELP response_size_bytes help\n" + + "# TYPE response_size_bytes histogram\n" + + "response_size_bytes_bucket{status=\"200\",le=\"+Inf\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_count{status=\"200\"} 2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_sum{status=\"200\"} 4.2 " + + scrapeTimestamp1s + + "\n" + + "response_size_bytes_bucket{status=\"500\",le=\"+Inf\"} 55 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_count{status=\"500\"} 55 " + + scrapeTimestamp2s + + "\n" + + "response_size_bytes_sum{status=\"500\"} 3.2 " + + scrapeTimestamp2s + + "\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"response_size_bytes\" " + + "help: \"help\" " + + "type: HISTOGRAM " + + "metric { " + + "label { name: \"status\" value: \"200\" } " + + "timestamp_ms: 1672850685829 " + + "histogram { " + + "sample_count: 2 " + + "sample_sum: 4.2 " + + "bucket { cumulative_count: 2 upper_bound: Infinity " + + exemplar2protoString + + " } " + + "schema: 5 " + + "zero_threshold: 0.0 " + + "zero_count: 0 " + + "positive_span { offset: 0 length: 1 } " + + "positive_delta: 2 " + + "} " + + "} metric { " + + "label { name: \"status\" value: \"500\" } " + + "timestamp_ms: 1672850585820 " + + "histogram { " + + "sample_count: 55 " + + // bucket counts + zero count + "sample_sum: 3.2 " + + "bucket { cumulative_count: 55 upper_bound: Infinity " + + exemplar2protoString + + " } " + + "schema: 5 " + + "zero_threshold: 0.0 " + + "zero_count: 1 " + + "negative_span { offset: 0 length: 1 } " + + "negative_span { offset: 9 length: 1 } " + + "negative_delta: 1 " + + "negative_delta: -1 " + + // span with count 0 + "positive_span { offset: 2 length: 3 } " + + // span with 3 buckets (indexes 2-4) + "positive_span { offset: 7 length: 1 } " + + // span with 1 bucket (index 12) + "positive_span { offset: 9 length: 4 } " + + // span with gap of size 1 (indexes 22-25) + "positive_span { offset: 6 length: 5 } " + + // span with gap of size 2 (indexes 32-36) + "positive_span { offset: 4 length: 2 } " + + // span with gap of size 3 part 1 (indexes 41-42) + "positive_span { offset: 3 length: 2 } " + + // span with gap of size 3 part 2 (indexes 46-47) + "positive_delta: 3 " + + // index 2, count 3 + "positive_delta: 2 " + + // index 3, count 5 + "positive_delta: -1 " + + // index 4, count 4 + "positive_delta: 2 " + + // index 12, count 6 + "positive_delta: -4 " + + // index 22, count 2 + "positive_delta: -2 " + + // index 23, gap + "positive_delta: 1 " + + // index 24, count 1 + "positive_delta: 2 " + + 
// index 25, count 3 + "positive_delta: 1 " + + // index 32, count 4 + "positive_delta: -1 " + + // index 33, count 3 + "positive_delta: -3 " + + // index 34, gap + "positive_delta: 0 " + + // index 35, gap + "positive_delta: 7 " + + // index 36, count 7 + "positive_delta: -4 " + + // index 41, count 3 + "positive_delta: 6 " + + // index 42, count 9 + "positive_delta: -7 " + + // index 46, count 2 + "positive_delta: -1 " + + // index 47, count 1 + "} " + + "}"; + // @formatter:on + HistogramSnapshot nativeHistogram = + HistogramSnapshot.builder() + .name("response_size_bytes") + .help("help") + .unit(Unit.BYTES) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(3.2) + .nativeSchema(5) + .nativeZeroCount(1) + .nativeBucketsForPositiveValues( + NativeHistogramBuckets.builder() + // span with 3 buckets + .bucket(2, 3) + .bucket(3, 5) + .bucket(4, 4) + // span with just 1 bucket + .bucket(12, 6) + // span with gap of size 1 + .bucket(22, 2) + .bucket(24, 1) + .bucket(25, 3) + // span with gap of size 2 + .bucket(32, 4) + .bucket(33, 3) + .bucket(36, 7) + // span with gap of size 3 + .bucket(41, 3) + .bucket(42, 9) + .bucket(46, 2) + .bucket(47, 1) + .build()) + .nativeBucketsForNegativeValues( + NativeHistogramBuckets.builder() + .bucket(0, 1) + .bucket(10, 0) // bucket with count 0 + .build()) + .labels(Labels.of("status", "500")) + .exemplars(Exemplars.of(exemplar1, exemplar2)) + .createdTimestampMillis(createdTimestamp2) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .sum(4.2) + .nativeSchema(5) + .nativeBucketsForPositiveValues( + NativeHistogramBuckets.builder().bucket(0, 2).build()) + .labels(Labels.of("status", "200")) + .exemplars(Exemplars.of(exemplar1, exemplar2)) + .createdTimestampMillis(createdTimestamp1) + .scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, nativeHistogram); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, nativeHistogram); + assertPrometheusText(prometheusText, nativeHistogram); + assertOpenMetricsTextWithoutCreated(openMetricsTextWithoutCreated, nativeHistogram); + assertPrometheusTextWithoutCreated(prometheusTextWithoutCreated, nativeHistogram); + assertPrometheusProtobuf(prometheusProtobuf, nativeHistogram); + } - @Test - public void testNativeHistogramMinimal() throws IOException { - String openMetricsText = "" + - "# TYPE latency_seconds histogram\n" + - "latency_seconds_bucket{le=\"+Inf\"} 0\n" + - "# EOF\n"; - String prometheusText = "" + - "# TYPE latency_seconds histogram\n" + - "latency_seconds_bucket{le=\"+Inf\"} 0\n" + - "latency_seconds_count 0\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"latency_seconds\" " + - "type: HISTOGRAM " + - "metric { " + - "histogram { " + - "sample_count: 0 " + - "schema: 5 " + - "zero_threshold: 0.0 " + - "zero_count: 0 " + - "} " + - "}"; - //@formatter:on - HistogramSnapshot nativeHistogram = HistogramSnapshot.builder() - .name("latency_seconds") - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .nativeSchema(5) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, nativeHistogram); - assertPrometheusText(prometheusText, nativeHistogram); - assertPrometheusProtobuf(prometheusProtobuf, nativeHistogram); - } + @Test + public void testNativeHistogramMinimal() throws IOException { + String openMetricsText = + "" + + "# TYPE latency_seconds histogram\n" + + 
"latency_seconds_bucket{le=\"+Inf\"} 0\n" + + "# EOF\n"; + String prometheusText = + "" + + "# TYPE latency_seconds histogram\n" + + "latency_seconds_bucket{le=\"+Inf\"} 0\n" + + "latency_seconds_count 0\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"latency_seconds\" " + + "type: HISTOGRAM " + + "metric { " + + "histogram { " + + "sample_count: 0 " + + "schema: 5 " + + "zero_threshold: 0.0 " + + "zero_count: 0 " + + "} " + + "}"; + // @formatter:on + HistogramSnapshot nativeHistogram = + HistogramSnapshot.builder() + .name("latency_seconds") + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder().nativeSchema(5).build()) + .build(); + assertOpenMetricsText(openMetricsText, nativeHistogram); + assertPrometheusText(prometheusText, nativeHistogram); + assertPrometheusProtobuf(prometheusProtobuf, nativeHistogram); + } - @Test - public void testNativeHistogramWithDots() throws IOException { - String openMetricsText = "" + - "# TYPE my_request_duration_seconds histogram\n" + - "# UNIT my_request_duration_seconds seconds\n" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 4 # " + exemplarWithDotsString + "\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 4\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 3.2\n" + - "# EOF\n"; - String openMetricsTextWithExemplarsOnAllTimeSeries = "" + - "# TYPE my_request_duration_seconds histogram\n" + - "# UNIT my_request_duration_seconds seconds\n" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 4 # " + exemplarWithDotsString + "\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 4 # " + exemplarWithDotsString + "\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 3.2\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP my_request_duration_seconds Request duration in seconds\n" + - "# TYPE my_request_duration_seconds histogram\n" + - "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 4\n" + - "my_request_duration_seconds_count{http_path=\"/hello\"} 4\n" + - "my_request_duration_seconds_sum{http_path=\"/hello\"} 3.2\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"my_request_duration_seconds\" " + - "help: \"Request duration in seconds\" " + - "type: HISTOGRAM " + - "metric { " + - "label { name: \"http_path\" value: \"/hello\" } " + - "histogram { " + - "sample_count: 4 " + - "sample_sum: 3.2 " + - "bucket { cumulative_count: 4 upper_bound: Infinity " + exemplarWithDotsProtoString + " } " + - "schema: 5 " + - "zero_threshold: 0.0 " + - "zero_count: 1 " + - "positive_span { offset: 2 length: 1 } " + - "positive_delta: 3 " + - "} " + - "}"; - //@formatter:on + @Test + public void testNativeHistogramWithDots() throws IOException { + String openMetricsText = + "" + + "# TYPE my_request_duration_seconds histogram\n" + + "# UNIT my_request_duration_seconds seconds\n" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 4 # " + + exemplarWithDotsString + + "\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 4\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 3.2\n" + + "# EOF\n"; + String openMetricsTextWithExemplarsOnAllTimeSeries = + "" + + "# TYPE my_request_duration_seconds histogram\n" + + "# UNIT 
my_request_duration_seconds seconds\n" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 4 # " + + exemplarWithDotsString + + "\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 4 # " + + exemplarWithDotsString + + "\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 3.2\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP my_request_duration_seconds Request duration in seconds\n" + + "# TYPE my_request_duration_seconds histogram\n" + + "my_request_duration_seconds_bucket{http_path=\"/hello\",le=\"+Inf\"} 4\n" + + "my_request_duration_seconds_count{http_path=\"/hello\"} 4\n" + + "my_request_duration_seconds_sum{http_path=\"/hello\"} 3.2\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"my_request_duration_seconds\" " + + "help: \"Request duration in seconds\" " + + "type: HISTOGRAM " + + "metric { " + + "label { name: \"http_path\" value: \"/hello\" } " + + "histogram { " + + "sample_count: 4 " + + "sample_sum: 3.2 " + + "bucket { cumulative_count: 4 upper_bound: Infinity " + + exemplarWithDotsProtoString + + " } " + + "schema: 5 " + + "zero_threshold: 0.0 " + + "zero_count: 1 " + + "positive_span { offset: 2 length: 1 } " + + "positive_delta: 3 " + + "} " + + "}"; + // @formatter:on - HistogramSnapshot histogram = HistogramSnapshot.builder() - .name("my.request.duration.seconds") - .help("Request duration in seconds") - .unit(Unit.SECONDS) - .dataPoint(HistogramSnapshot.HistogramDataPointSnapshot.builder() - .labels(Labels.builder() - .label("http.path", "/hello") - .build()) - .sum(3.2) - .nativeSchema(5) - .nativeZeroCount(1) - .nativeBucketsForPositiveValues(NativeHistogramBuckets.builder() - .bucket(2, 3) - .build() - ) - .exemplars(Exemplars.of(exemplarWithDots)) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, histogram); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsTextWithExemplarsOnAllTimeSeries, histogram); - assertPrometheusText(prometheusText, histogram); - assertPrometheusProtobuf(prometheusProtobuf, histogram); - } - // TODO: Gauge Native Histogram + HistogramSnapshot histogram = + HistogramSnapshot.builder() + .name("my.request.duration.seconds") + .help("Request duration in seconds") + .unit(Unit.SECONDS) + .dataPoint( + HistogramSnapshot.HistogramDataPointSnapshot.builder() + .labels(Labels.builder().label("http.path", "/hello").build()) + .sum(3.2) + .nativeSchema(5) + .nativeZeroCount(1) + .nativeBucketsForPositiveValues( + NativeHistogramBuckets.builder().bucket(2, 3).build()) + .exemplars(Exemplars.of(exemplarWithDots)) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, histogram); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsTextWithExemplarsOnAllTimeSeries, histogram); + assertPrometheusText(prometheusText, histogram); + assertPrometheusProtobuf(prometheusProtobuf, histogram); + } - @Test - public void testInfo() throws IOException { - String openMetrics = "" + - "# TYPE version info\n" + - "# HELP version version information\n" + - "version_info{version=\"1.2.3\"} 1\n" + - "# EOF\n"; - String prometheus = "" + - "# HELP version_info version information\n" + - "# TYPE version_info gauge\n" + - "version_info{version=\"1.2.3\"} 1\n"; - InfoSnapshot info = InfoSnapshot.builder() - .name("version") - .help("version information") - .dataPoint(InfoSnapshot.InfoDataPointSnapshot.builder() - .labels(Labels.of("version", "1.2.3")) - .build()) - .build(); - 
assertOpenMetricsText(openMetrics, info); - assertPrometheusText(prometheus, info); - assertOpenMetricsTextWithoutCreated(openMetrics, info); - assertPrometheusTextWithoutCreated(prometheus, info); - } + // TODO: Gauge Native Histogram - @Test - public void testInfoWithDots() throws IOException { - String openMetricsText = "" + - "# TYPE jvm_status info\n" + - "# HELP jvm_status JVM status info\n" + - "jvm_status_info{jvm_version=\"1.2.3\"} 1\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP jvm_status_info JVM status info\n" + - "# TYPE jvm_status_info gauge\n" + - "jvm_status_info{jvm_version=\"1.2.3\"} 1\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"jvm_status_info\" " + - "help: \"JVM status info\" " + - "type: GAUGE " + - "metric { " + "" + - "label { name: \"jvm_version\" value: \"1.2.3\" } " + - "gauge { value: 1.0 } " + - "}"; - //@formatter:on - InfoSnapshot info = InfoSnapshot.builder() - .name("jvm.status") - .help("JVM status info") - .dataPoint(InfoSnapshot.InfoDataPointSnapshot.builder() - .labels(Labels.of("jvm.version", "1.2.3")) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, info); - assertPrometheusText(prometheusText, info); - assertPrometheusProtobuf(prometheusProtobuf, info); - } + @Test + public void testInfo() throws IOException { + String openMetrics = + "" + + "# TYPE version info\n" + + "# HELP version version information\n" + + "version_info{version=\"1.2.3\"} 1\n" + + "# EOF\n"; + String prometheus = + "" + + "# HELP version_info version information\n" + + "# TYPE version_info gauge\n" + + "version_info{version=\"1.2.3\"} 1\n"; + InfoSnapshot info = + InfoSnapshot.builder() + .name("version") + .help("version information") + .dataPoint( + InfoSnapshot.InfoDataPointSnapshot.builder() + .labels(Labels.of("version", "1.2.3")) + .build()) + .build(); + assertOpenMetricsText(openMetrics, info); + assertPrometheusText(prometheus, info); + assertOpenMetricsTextWithoutCreated(openMetrics, info); + assertPrometheusTextWithoutCreated(prometheus, info); + } - @Test - public void testStateSetComplete() throws IOException { - String openMetrics = "" + - "# TYPE state stateset\n" + - "# HELP state complete state set example\n" + - "state{env=\"dev\",state=\"state1\"} 1 " + scrapeTimestamp1s + "\n" + - "state{env=\"dev\",state=\"state2\"} 0 " + scrapeTimestamp1s + "\n" + - "state{env=\"prod\",state=\"state1\"} 0 " + scrapeTimestamp2s + "\n" + - "state{env=\"prod\",state=\"state2\"} 1 " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String prometheus = "" + - "# HELP state complete state set example\n" + - "# TYPE state gauge\n" + - "state{env=\"dev\",state=\"state1\"} 1 " + scrapeTimestamp1s + "\n" + - "state{env=\"dev\",state=\"state2\"} 0 " + scrapeTimestamp1s + "\n" + - "state{env=\"prod\",state=\"state1\"} 0 " + scrapeTimestamp2s + "\n" + - "state{env=\"prod\",state=\"state2\"} 1 " + scrapeTimestamp2s + "\n"; - StateSetSnapshot stateSet = StateSetSnapshot.builder() - .name("state") - .help("complete state set example") - .dataPoint(StateSetSnapshot.StateSetDataPointSnapshot.builder() - .labels(Labels.of("env", "prod")) - .state("state1", false) - .state("state2", true) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .dataPoint(StateSetSnapshot.StateSetDataPointSnapshot.builder() - .labels(Labels.of("env", "dev")) - .state("state2", false) - .state("state1", true) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .build(); - assertOpenMetricsText(openMetrics, stateSet); - assertPrometheusText(prometheus, 
stateSet); - assertOpenMetricsTextWithoutCreated(openMetrics, stateSet); - assertPrometheusTextWithoutCreated(prometheus, stateSet); - } + @Test + public void testInfoWithDots() throws IOException { + String openMetricsText = + "" + + "# TYPE jvm_status info\n" + + "# HELP jvm_status JVM status info\n" + + "jvm_status_info{jvm_version=\"1.2.3\"} 1\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP jvm_status_info JVM status info\n" + + "# TYPE jvm_status_info gauge\n" + + "jvm_status_info{jvm_version=\"1.2.3\"} 1\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"jvm_status_info\" " + + "help: \"JVM status info\" " + + "type: GAUGE " + + "metric { " + + "" + + "label { name: \"jvm_version\" value: \"1.2.3\" } " + + "gauge { value: 1.0 } " + + "}"; + // @formatter:on + InfoSnapshot info = + InfoSnapshot.builder() + .name("jvm.status") + .help("JVM status info") + .dataPoint( + InfoSnapshot.InfoDataPointSnapshot.builder() + .labels(Labels.of("jvm.version", "1.2.3")) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, info); + assertPrometheusText(prometheusText, info); + assertPrometheusProtobuf(prometheusProtobuf, info); + } - @Test - public void testStateSetMinimal() throws IOException { - String openMetrics = "" + - "# TYPE state stateset\n" + - "state{state=\"a\"} 1\n" + - "state{state=\"bb\"} 0\n" + - "# EOF\n"; - String prometheus = "" + - "# TYPE state gauge\n" + - "state{state=\"a\"} 1\n" + - "state{state=\"bb\"} 0\n"; - StateSetSnapshot stateSet = StateSetSnapshot.builder() - .name("state") - .dataPoint(StateSetSnapshot.StateSetDataPointSnapshot.builder() - .state("a", true) - .state("bb", false) - .build()) - .build(); - assertOpenMetricsText(openMetrics, stateSet); - assertPrometheusText(prometheus, stateSet); - assertOpenMetricsTextWithoutCreated(openMetrics, stateSet); - assertPrometheusTextWithoutCreated(prometheus, stateSet); - } + @Test + public void testStateSetComplete() throws IOException { + String openMetrics = + "" + + "# TYPE state stateset\n" + + "# HELP state complete state set example\n" + + "state{env=\"dev\",state=\"state1\"} 1 " + + scrapeTimestamp1s + + "\n" + + "state{env=\"dev\",state=\"state2\"} 0 " + + scrapeTimestamp1s + + "\n" + + "state{env=\"prod\",state=\"state1\"} 0 " + + scrapeTimestamp2s + + "\n" + + "state{env=\"prod\",state=\"state2\"} 1 " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String prometheus = + "" + + "# HELP state complete state set example\n" + + "# TYPE state gauge\n" + + "state{env=\"dev\",state=\"state1\"} 1 " + + scrapeTimestamp1s + + "\n" + + "state{env=\"dev\",state=\"state2\"} 0 " + + scrapeTimestamp1s + + "\n" + + "state{env=\"prod\",state=\"state1\"} 0 " + + scrapeTimestamp2s + + "\n" + + "state{env=\"prod\",state=\"state2\"} 1 " + + scrapeTimestamp2s + + "\n"; + StateSetSnapshot stateSet = + StateSetSnapshot.builder() + .name("state") + .help("complete state set example") + .dataPoint( + StateSetSnapshot.StateSetDataPointSnapshot.builder() + .labels(Labels.of("env", "prod")) + .state("state1", false) + .state("state2", true) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .dataPoint( + StateSetSnapshot.StateSetDataPointSnapshot.builder() + .labels(Labels.of("env", "dev")) + .state("state2", false) + .state("state1", true) + .scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .build(); + assertOpenMetricsText(openMetrics, stateSet); + assertPrometheusText(prometheus, stateSet); + assertOpenMetricsTextWithoutCreated(openMetrics, stateSet); + 
assertPrometheusTextWithoutCreated(prometheus, stateSet); + } - @Test - public void testStateSetWithDots() throws IOException { - String openMetricsText = "" + - "# TYPE my_application_state stateset\n" + - "# HELP my_application_state My application state\n" + - "my_application_state{data_center=\"us east\",my_application_state=\"feature.enabled\"} 1\n" + - "my_application_state{data_center=\"us east\",my_application_state=\"is.alpha.version\"} 0\n" + - "# EOF\n"; - String prometheusText = "" + - "# HELP my_application_state My application state\n" + - "# TYPE my_application_state gauge\n" + - "my_application_state{data_center=\"us east\",my_application_state=\"feature.enabled\"} 1\n" + - "my_application_state{data_center=\"us east\",my_application_state=\"is.alpha.version\"} 0\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"my_application_state\" " + - "help: \"My application state\" " + - "type: GAUGE " + - "metric { " + - "label { name: \"data_center\" value: \"us east\" } " + - "label { name: \"my_application_state\" value: \"feature.enabled\" } " + - "gauge { value: 1.0 } " + - "} metric { " + - "label { name: \"data_center\" value: \"us east\" } " + - "label { name: \"my_application_state\" value: \"is.alpha.version\" } " + - "gauge { value: 0.0 } " + - "}"; - //@formatter:on - StateSetSnapshot stateSet = StateSetSnapshot.builder() - .name("my.application.state") - .help("My application state") - .dataPoint(StateSetSnapshot.StateSetDataPointSnapshot.builder() - .labels(Labels.of("data.center", "us east")) - .state("feature.enabled", true) - .state("is.alpha.version", false) - .build()) - .build(); - assertOpenMetricsText(openMetricsText, stateSet); - assertPrometheusText(prometheusText, stateSet); - assertPrometheusProtobuf(prometheusProtobuf, stateSet); - } + @Test + public void testStateSetMinimal() throws IOException { + String openMetrics = + "" + + "# TYPE state stateset\n" + + "state{state=\"a\"} 1\n" + + "state{state=\"bb\"} 0\n" + + "# EOF\n"; + String prometheus = + "" + "# TYPE state gauge\n" + "state{state=\"a\"} 1\n" + "state{state=\"bb\"} 0\n"; + StateSetSnapshot stateSet = + StateSetSnapshot.builder() + .name("state") + .dataPoint( + StateSetSnapshot.StateSetDataPointSnapshot.builder() + .state("a", true) + .state("bb", false) + .build()) + .build(); + assertOpenMetricsText(openMetrics, stateSet); + assertPrometheusText(prometheus, stateSet); + assertOpenMetricsTextWithoutCreated(openMetrics, stateSet); + assertPrometheusTextWithoutCreated(prometheus, stateSet); + } - @Test - public void testUnknownComplete() throws IOException { - String openMetrics = "" + - "# TYPE my_special_thing_bytes unknown\n" + - "# UNIT my_special_thing_bytes bytes\n" + - "# HELP my_special_thing_bytes help message\n" + - "my_special_thing_bytes{env=\"dev\"} 0.2 " + scrapeTimestamp1s + "\n" + - "my_special_thing_bytes{env=\"prod\"} 0.7 " + scrapeTimestamp2s + "\n" + - "# EOF\n"; - String openMetricsWithExemplarsOnAllTimeSeries = "" + - "# TYPE my_special_thing_bytes unknown\n" + - "# UNIT my_special_thing_bytes bytes\n" + - "# HELP my_special_thing_bytes help message\n" + - "my_special_thing_bytes{env=\"dev\"} 0.2 " + scrapeTimestamp1s + " # " + exemplar1String + "\n" + - "my_special_thing_bytes{env=\"prod\"} 0.7 " + scrapeTimestamp2s + " # " + exemplar2String + "\n" + - "# EOF\n"; - String prometheus = "" + - "# HELP my_special_thing_bytes help message\n" + - "# TYPE my_special_thing_bytes untyped\n" + - "my_special_thing_bytes{env=\"dev\"} 0.2 " + scrapeTimestamp1s + 
"\n" + - "my_special_thing_bytes{env=\"prod\"} 0.7 " + scrapeTimestamp2s + "\n"; - UnknownSnapshot unknown = UnknownSnapshot.builder() - .name("my_special_thing_bytes") - .help("help message") - .unit(Unit.BYTES) - .dataPoint(UnknownDataPointSnapshot.builder() - .value(0.7) - .labels(Labels.of("env", "prod")) - .exemplar(exemplar2) - .scrapeTimestampMillis(scrapeTimestamp2) - .build()) - .dataPoint(UnknownDataPointSnapshot.builder() - .value(0.2) - .labels(Labels.of("env", "dev")) - .exemplar(exemplar1) - .scrapeTimestampMillis(scrapeTimestamp1) - .build()) - .build(); - assertOpenMetricsText(openMetrics, unknown); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsWithExemplarsOnAllTimeSeries, unknown); - assertPrometheusText(prometheus, unknown); - assertOpenMetricsTextWithoutCreated(openMetrics, unknown); - assertPrometheusTextWithoutCreated(prometheus, unknown); - } + @Test + public void testStateSetWithDots() throws IOException { + String openMetricsText = + "" + + "# TYPE my_application_state stateset\n" + + "# HELP my_application_state My application state\n" + + "my_application_state{data_center=\"us east\",my_application_state=\"feature.enabled\"} 1\n" + + "my_application_state{data_center=\"us east\",my_application_state=\"is.alpha.version\"} 0\n" + + "# EOF\n"; + String prometheusText = + "" + + "# HELP my_application_state My application state\n" + + "# TYPE my_application_state gauge\n" + + "my_application_state{data_center=\"us east\",my_application_state=\"feature.enabled\"} 1\n" + + "my_application_state{data_center=\"us east\",my_application_state=\"is.alpha.version\"} 0\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"my_application_state\" " + + "help: \"My application state\" " + + "type: GAUGE " + + "metric { " + + "label { name: \"data_center\" value: \"us east\" } " + + "label { name: \"my_application_state\" value: \"feature.enabled\" } " + + "gauge { value: 1.0 } " + + "} metric { " + + "label { name: \"data_center\" value: \"us east\" } " + + "label { name: \"my_application_state\" value: \"is.alpha.version\" } " + + "gauge { value: 0.0 } " + + "}"; + // @formatter:on + StateSetSnapshot stateSet = + StateSetSnapshot.builder() + .name("my.application.state") + .help("My application state") + .dataPoint( + StateSetSnapshot.StateSetDataPointSnapshot.builder() + .labels(Labels.of("data.center", "us east")) + .state("feature.enabled", true) + .state("is.alpha.version", false) + .build()) + .build(); + assertOpenMetricsText(openMetricsText, stateSet); + assertPrometheusText(prometheusText, stateSet); + assertPrometheusProtobuf(prometheusProtobuf, stateSet); + } - @Test - public void testUnknownMinimal() throws IOException { - String openMetrics = "" + - "# TYPE other unknown\n" + - "other 22.3\n" + - "# EOF\n"; - String prometheus = "" + - "# TYPE other untyped\n" + - "other 22.3\n"; - UnknownSnapshot unknown = UnknownSnapshot.builder() - .name("other") - .dataPoint(UnknownDataPointSnapshot.builder() - .value(22.3) - .build()) - .build(); - assertOpenMetricsText(openMetrics, unknown); - assertPrometheusText(prometheus, unknown); - assertOpenMetricsTextWithoutCreated(openMetrics, unknown); - assertPrometheusTextWithoutCreated(prometheus, unknown); - } + @Test + public void testUnknownComplete() throws IOException { + String openMetrics = + "" + + "# TYPE my_special_thing_bytes unknown\n" + + "# UNIT my_special_thing_bytes bytes\n" + + "# HELP my_special_thing_bytes help message\n" + + "my_special_thing_bytes{env=\"dev\"} 0.2 " + + 
scrapeTimestamp1s + + "\n" + + "my_special_thing_bytes{env=\"prod\"} 0.7 " + + scrapeTimestamp2s + + "\n" + + "# EOF\n"; + String openMetricsWithExemplarsOnAllTimeSeries = + "" + + "# TYPE my_special_thing_bytes unknown\n" + + "# UNIT my_special_thing_bytes bytes\n" + + "# HELP my_special_thing_bytes help message\n" + + "my_special_thing_bytes{env=\"dev\"} 0.2 " + + scrapeTimestamp1s + + " # " + + exemplar1String + + "\n" + + "my_special_thing_bytes{env=\"prod\"} 0.7 " + + scrapeTimestamp2s + + " # " + + exemplar2String + + "\n" + + "# EOF\n"; + String prometheus = + "" + + "# HELP my_special_thing_bytes help message\n" + + "# TYPE my_special_thing_bytes untyped\n" + + "my_special_thing_bytes{env=\"dev\"} 0.2 " + + scrapeTimestamp1s + + "\n" + + "my_special_thing_bytes{env=\"prod\"} 0.7 " + + scrapeTimestamp2s + + "\n"; + UnknownSnapshot unknown = + UnknownSnapshot.builder() + .name("my_special_thing_bytes") + .help("help message") + .unit(Unit.BYTES) + .dataPoint( + UnknownDataPointSnapshot.builder() + .value(0.7) + .labels(Labels.of("env", "prod")) + .exemplar(exemplar2) + .scrapeTimestampMillis(scrapeTimestamp2) + .build()) + .dataPoint( + UnknownDataPointSnapshot.builder() + .value(0.2) + .labels(Labels.of("env", "dev")) + .exemplar(exemplar1) + .scrapeTimestampMillis(scrapeTimestamp1) + .build()) + .build(); + assertOpenMetricsText(openMetrics, unknown); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsWithExemplarsOnAllTimeSeries, unknown); + assertPrometheusText(prometheus, unknown); + assertOpenMetricsTextWithoutCreated(openMetrics, unknown); + assertPrometheusTextWithoutCreated(prometheus, unknown); + } - @Test - public void testUnknownWithDots() throws IOException { - String openMetrics = "" + - "# TYPE some_unknown_metric_bytes unknown\n" + - "# UNIT some_unknown_metric_bytes bytes\n" + - "# HELP some_unknown_metric_bytes help message\n" + - "some_unknown_metric_bytes{test_env=\"7\"} 0.7\n" + - "# EOF\n"; - String openMetricsWithExemplarsOnAllTimeSeries = "" + - "# TYPE some_unknown_metric_bytes unknown\n" + - "# UNIT some_unknown_metric_bytes bytes\n" + - "# HELP some_unknown_metric_bytes help message\n" + - "some_unknown_metric_bytes{test_env=\"7\"} 0.7 # " + exemplarWithDotsString + "\n" + - "# EOF\n"; - String prometheus = "" + - "# HELP some_unknown_metric_bytes help message\n" + - "# TYPE some_unknown_metric_bytes untyped\n" + - "some_unknown_metric_bytes{test_env=\"7\"} 0.7\n"; - String prometheusProtobuf = "" + - //@formatter:off - "name: \"some_unknown_metric_bytes\" " + - "help: \"help message\" " + - "type: UNTYPED " + - "metric { " + - "label { name: \"test_env\" value: \"7\" } " + - "untyped { value: 0.7 } " + - "}"; - //@formatter:on - UnknownSnapshot unknown = UnknownSnapshot.builder() - .name(PrometheusNaming.sanitizeMetricName("some.unknown.metric", Unit.BYTES)) - .help("help message") - .unit(Unit.BYTES) - .dataPoint(UnknownDataPointSnapshot.builder() - .value(0.7) - .labels(Labels.of("test.env", "7")) - .exemplar(exemplarWithDots) - .build()) - .build(); - assertOpenMetricsText(openMetrics, unknown); - assertOpenMetricsTextWithExemplarsOnAllTimeSeries(openMetricsWithExemplarsOnAllTimeSeries, unknown); - assertPrometheusText(prometheus, unknown); - assertPrometheusProtobuf(prometheusProtobuf, unknown); - } + @Test + public void testUnknownMinimal() throws IOException { + String openMetrics = "" + "# TYPE other unknown\n" + "other 22.3\n" + "# EOF\n"; + String prometheus = "" + "# TYPE other untyped\n" + "other 22.3\n"; + UnknownSnapshot unknown 
= + UnknownSnapshot.builder() + .name("other") + .dataPoint(UnknownDataPointSnapshot.builder().value(22.3).build()) + .build(); + assertOpenMetricsText(openMetrics, unknown); + assertPrometheusText(prometheus, unknown); + assertOpenMetricsTextWithoutCreated(openMetrics, unknown); + assertPrometheusTextWithoutCreated(prometheus, unknown); + } - @Test - public void testHelpEscape() throws IOException { - String openMetrics = "" + - "# TYPE test counter\n" + - "# HELP test Some text and \\n some \\\" escaping\n" + - "test_total 1.0\n" + - "# EOF\n"; - String prometheus = "" + - "# HELP test_total Some text and \\n some \" escaping\n" + - "# TYPE test_total counter\n" + - "test_total 1.0\n"; - CounterSnapshot counter = CounterSnapshot.builder() - .name("test") - .help("Some text and \n some \" escaping") // example from https://openMetrics.io - .dataPoint(CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - assertOpenMetricsText(openMetrics, counter); - assertPrometheusText(prometheus, counter); - assertOpenMetricsTextWithoutCreated(openMetrics, counter); - assertPrometheusTextWithoutCreated(prometheus, counter); - } + @Test + public void testUnknownWithDots() throws IOException { + String openMetrics = + "" + + "# TYPE some_unknown_metric_bytes unknown\n" + + "# UNIT some_unknown_metric_bytes bytes\n" + + "# HELP some_unknown_metric_bytes help message\n" + + "some_unknown_metric_bytes{test_env=\"7\"} 0.7\n" + + "# EOF\n"; + String openMetricsWithExemplarsOnAllTimeSeries = + "" + + "# TYPE some_unknown_metric_bytes unknown\n" + + "# UNIT some_unknown_metric_bytes bytes\n" + + "# HELP some_unknown_metric_bytes help message\n" + + "some_unknown_metric_bytes{test_env=\"7\"} 0.7 # " + + exemplarWithDotsString + + "\n" + + "# EOF\n"; + String prometheus = + "" + + "# HELP some_unknown_metric_bytes help message\n" + + "# TYPE some_unknown_metric_bytes untyped\n" + + "some_unknown_metric_bytes{test_env=\"7\"} 0.7\n"; + String prometheusProtobuf = + "" + + + // @formatter:off + "name: \"some_unknown_metric_bytes\" " + + "help: \"help message\" " + + "type: UNTYPED " + + "metric { " + + "label { name: \"test_env\" value: \"7\" } " + + "untyped { value: 0.7 } " + + "}"; + // @formatter:on + UnknownSnapshot unknown = + UnknownSnapshot.builder() + .name(PrometheusNaming.sanitizeMetricName("some.unknown.metric", Unit.BYTES)) + .help("help message") + .unit(Unit.BYTES) + .dataPoint( + UnknownDataPointSnapshot.builder() + .value(0.7) + .labels(Labels.of("test.env", "7")) + .exemplar(exemplarWithDots) + .build()) + .build(); + assertOpenMetricsText(openMetrics, unknown); + assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + openMetricsWithExemplarsOnAllTimeSeries, unknown); + assertPrometheusText(prometheus, unknown); + assertPrometheusProtobuf(prometheusProtobuf, unknown); + } - @Test - public void testLabelValueEscape() throws IOException { - String openMetrics = "" + - "# TYPE test counter\n" + - "test_total{a=\"x\",b=\"escaping\\\" example \\n \"} 1.0\n" + - "# EOF\n"; - String prometheus = "" + - "# TYPE test_total counter\n" + - "test_total{a=\"x\",b=\"escaping\\\" example \\n \"} 1.0\n"; - CounterSnapshot counter = CounterSnapshot.builder() - .name("test") - .dataPoint(CounterDataPointSnapshot.builder() - // example from https://openMetrics.io - .labels(Labels.of("a", "x", "b", "escaping\" example \n ")) - .value(1.0) - .build()) - .build(); - assertOpenMetricsText(openMetrics, counter); - assertPrometheusText(prometheus, counter); - } + @Test + public void testHelpEscape() throws 
IOException { + String openMetrics = + "" + + "# TYPE test counter\n" + + "# HELP test Some text and \\n some \\\" escaping\n" + + "test_total 1.0\n" + + "# EOF\n"; + String prometheus = + "" + + "# HELP test_total Some text and \\n some \" escaping\n" + + "# TYPE test_total counter\n" + + "test_total 1.0\n"; + CounterSnapshot counter = + CounterSnapshot.builder() + .name("test") + .help("Some text and \n some \" escaping") // example from https://openMetrics.io + .dataPoint(CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + assertOpenMetricsText(openMetrics, counter); + assertPrometheusText(prometheus, counter); + assertOpenMetricsTextWithoutCreated(openMetrics, counter); + assertPrometheusTextWithoutCreated(prometheus, counter); + } + + @Test + public void testLabelValueEscape() throws IOException { + String openMetrics = + "" + + "# TYPE test counter\n" + + "test_total{a=\"x\",b=\"escaping\\\" example \\n \"} 1.0\n" + + "# EOF\n"; + String prometheus = + "" + + "# TYPE test_total counter\n" + + "test_total{a=\"x\",b=\"escaping\\\" example \\n \"} 1.0\n"; + CounterSnapshot counter = + CounterSnapshot.builder() + .name("test") + .dataPoint( + CounterDataPointSnapshot.builder() + // example from https://openMetrics.io + .labels(Labels.of("a", "x", "b", "escaping\" example \n ")) + .value(1.0) + .build()) + .build(); + assertOpenMetricsText(openMetrics, counter); + assertPrometheusText(prometheus, counter); + } - private void assertOpenMetricsText(String expected, MetricSnapshot snapshot) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, false); - writer.write(out, MetricSnapshots.of(snapshot)); - Assert.assertEquals(expected, out.toString()); - } + private void assertOpenMetricsText(String expected, MetricSnapshot snapshot) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, false); + writer.write(out, MetricSnapshots.of(snapshot)); + Assert.assertEquals(expected, out.toString()); + } - private void assertOpenMetricsTextWithExemplarsOnAllTimeSeries(String expected, MetricSnapshot snapshot) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); - writer.write(out, MetricSnapshots.of(snapshot)); - Assert.assertEquals(expected, out.toString()); - } + private void assertOpenMetricsTextWithExemplarsOnAllTimeSeries( + String expected, MetricSnapshot snapshot) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + writer.write(out, MetricSnapshots.of(snapshot)); + Assert.assertEquals(expected, out.toString()); + } - private void assertOpenMetricsTextWithoutCreated(String expected, MetricSnapshot snapshot) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(false, false); - writer.write(out, MetricSnapshots.of(snapshot)); - Assert.assertEquals(expected, out.toString()); - } + private void assertOpenMetricsTextWithoutCreated(String expected, MetricSnapshot snapshot) + throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(false, false); + writer.write(out, 
MetricSnapshots.of(snapshot)); + Assert.assertEquals(expected, out.toString()); + } - private void assertPrometheusText(String expected, MetricSnapshot snapshot) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - PrometheusTextFormatWriter writer = new PrometheusTextFormatWriter(true); - writer.write(out, MetricSnapshots.of(snapshot)); - Assert.assertEquals(expected, out.toString()); - } + private void assertPrometheusText(String expected, MetricSnapshot snapshot) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + PrometheusTextFormatWriter writer = new PrometheusTextFormatWriter(true); + writer.write(out, MetricSnapshots.of(snapshot)); + Assert.assertEquals(expected, out.toString()); + } - private void assertPrometheusTextWithoutCreated(String expected, MetricSnapshot snapshot) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - PrometheusTextFormatWriter writer = new PrometheusTextFormatWriter(false); - writer.write(out, MetricSnapshots.of(snapshot)); - Assert.assertEquals(expected, out.toString()); - } + private void assertPrometheusTextWithoutCreated(String expected, MetricSnapshot snapshot) + throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + PrometheusTextFormatWriter writer = new PrometheusTextFormatWriter(false); + writer.write(out, MetricSnapshots.of(snapshot)); + Assert.assertEquals(expected, out.toString()); + } - private void assertPrometheusProtobuf(String expected, MetricSnapshot snapshot) { - PrometheusProtobufWriter writer = new PrometheusProtobufWriter(); - Metrics.MetricFamily protobufData = writer.convert(snapshot); - String actual = TextFormat.printer().shortDebugString(protobufData); - Assert.assertEquals(expected, actual); - } + private void assertPrometheusProtobuf(String expected, MetricSnapshot snapshot) { + PrometheusProtobufWriter writer = new PrometheusProtobufWriter(); + Metrics.MetricFamily protobufData = writer.convert(snapshot); + String actual = TextFormat.printer().shortDebugString(protobufData); + Assert.assertEquals(expected, actual); + } } diff --git a/prometheus-metrics-instrumentation-caffeine/src/main/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollector.java b/prometheus-metrics-instrumentation-caffeine/src/main/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollector.java index 985a15711..69b852a41 100644 --- a/prometheus-metrics-instrumentation-caffeine/src/main/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollector.java +++ b/prometheus-metrics-instrumentation-caffeine/src/main/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollector.java @@ -10,19 +10,18 @@ import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.MetricSnapshots; import io.prometheus.metrics.model.snapshots.SummarySnapshot; - import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; - /** * Collect metrics from Caffeine's com.github.benmanes.caffeine.cache.Cache. + * *

- * <pre>{@code
  *
+ * <pre>{@code
  * // Note that `recordStats()` is required to gather non-zero statistics
  * Cache cache = Caffeine.newBuilder().recordStats().build();
  * CacheMetricsCollector cacheMetrics = new CacheMetricsCollector().register();
@@ -32,7 +31,8 @@
  *
  * Exposed metrics are labeled with the provided cache name.
  *
- * With the example above, sample metric names would be:
+ * <p>With the example above, sample metric names would be:
+ *
  * <pre>
  *     caffeine_cache_hit_total{cache="mycache"} 10.0
  *     caffeine_cache_miss_total{cache="mycache"} 3.0
@@ -42,191 +42,183 @@
  * </pre>
  *
  * Additionally, if the cache includes a loader, the following metrics would be provided:
+ *
  * <pre>
  *     caffeine_cache_load_failure_total{cache="mycache"} 2.0
  *     caffeine_cache_loads_total{cache="mycache"} 7.0
  *     caffeine_cache_load_duration_seconds_count{cache="mycache"} 7.0
  *     caffeine_cache_load_duration_seconds_sum{cache="mycache"} 0.0034
  * </pre>
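+ *
+ * <p>A minimal registration sketch, mirroring the tests in this change (the cache name
+ * "mycache" and the generic parameters are placeholders, not part of this class):
+ *
+ * <pre>{@code
+ * Cache<String, String> cache = Caffeine.newBuilder().recordStats().build();
+ * CacheMetricsCollector cacheMetrics = new CacheMetricsCollector();
+ * PrometheusRegistry registry = new PrometheusRegistry();
+ * registry.register(cacheMetrics);
+ * cacheMetrics.addCache("mycache", cache);
+ * }</pre>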
- * */ public class CacheMetricsCollector implements MultiCollector { - private static final double NANOSECONDS_PER_SECOND = 1_000_000_000.0; - - protected final ConcurrentMap children = new ConcurrentHashMap(); - - /** - * Add or replace the cache with the given name. - *

- * Any references any previous cache with this name is invalidated. - * - * @param cacheName The name of the cache, will be the metrics label value - * @param cache The cache being monitored - */ - public void addCache(String cacheName, Cache cache) { - children.put(cacheName, cache); - } - - /** - * Add or replace the cache with the given name. - *

- * Any references any previous cache with this name is invalidated. - * - * @param cacheName The name of the cache, will be the metrics label value - * @param cache The cache being monitored - */ - public void addCache(String cacheName, AsyncCache cache) { - children.put(cacheName, cache.synchronous()); - } - - /** - * Remove the cache with the given name. - *

- * Any references to the cache are invalidated. - * - * @param cacheName cache to be removed - */ - public Cache removeCache(String cacheName) { - return children.remove(cacheName); - } - - /** - * Remove all caches. - *

- * Any references to all caches are invalidated. - */ - public void clear(){ - children.clear(); - } - - @Override - public MetricSnapshots collect() { - final MetricSnapshots.Builder metricSnapshotsBuilder = MetricSnapshots.builder(); - final List labelNames = Arrays.asList("cache"); - - final CounterSnapshot.Builder cacheHitTotal = CounterSnapshot.builder() - .name("caffeine_cache_hit") - .help("Cache hit totals"); - - final CounterSnapshot.Builder cacheMissTotal = CounterSnapshot.builder() - .name("caffeine_cache_miss") - .help("Cache miss totals"); - - final CounterSnapshot.Builder cacheRequestsTotal = CounterSnapshot.builder() + private static final double NANOSECONDS_PER_SECOND = 1_000_000_000.0; + + protected final ConcurrentMap children = new ConcurrentHashMap(); + + /** + * Add or replace the cache with the given name. + * + *

Any references any previous cache with this name is invalidated. + * + * @param cacheName The name of the cache, will be the metrics label value + * @param cache The cache being monitored + */ + public void addCache(String cacheName, Cache cache) { + children.put(cacheName, cache); + } + + /** + * Add or replace the cache with the given name. + * + *

Any references any previous cache with this name is invalidated. + * + * @param cacheName The name of the cache, will be the metrics label value + * @param cache The cache being monitored + */ + public void addCache(String cacheName, AsyncCache cache) { + children.put(cacheName, cache.synchronous()); + } + + /** + * Remove the cache with the given name. + * + *

Any references to the cache are invalidated. + * + * @param cacheName cache to be removed + */ + public Cache removeCache(String cacheName) { + return children.remove(cacheName); + } + + /** + * Remove all caches. + * + *

Any references to all caches are invalidated. + */ + public void clear() { + children.clear(); + } + + @Override + public MetricSnapshots collect() { + final MetricSnapshots.Builder metricSnapshotsBuilder = MetricSnapshots.builder(); + final List labelNames = Arrays.asList("cache"); + + final CounterSnapshot.Builder cacheHitTotal = + CounterSnapshot.builder().name("caffeine_cache_hit").help("Cache hit totals"); + + final CounterSnapshot.Builder cacheMissTotal = + CounterSnapshot.builder().name("caffeine_cache_miss").help("Cache miss totals"); + + final CounterSnapshot.Builder cacheRequestsTotal = + CounterSnapshot.builder() .name("caffeine_cache_requests") .help("Cache request totals, hits + misses"); - final CounterSnapshot.Builder cacheEvictionTotal = CounterSnapshot.builder() + final CounterSnapshot.Builder cacheEvictionTotal = + CounterSnapshot.builder() .name("caffeine_cache_eviction") .help("Cache eviction totals, doesn't include manually removed entries"); - final GaugeSnapshot.Builder cacheEvictionWeight = GaugeSnapshot.builder() + final GaugeSnapshot.Builder cacheEvictionWeight = + GaugeSnapshot.builder() .name("caffeine_cache_eviction_weight") .help("Cache eviction weight"); - final CounterSnapshot.Builder cacheLoadFailure = CounterSnapshot.builder() - .name("caffeine_cache_load_failure") - .help("Cache load failures"); + final CounterSnapshot.Builder cacheLoadFailure = + CounterSnapshot.builder().name("caffeine_cache_load_failure").help("Cache load failures"); - final CounterSnapshot.Builder cacheLoadTotal = CounterSnapshot.builder() + final CounterSnapshot.Builder cacheLoadTotal = + CounterSnapshot.builder() .name("caffeine_cache_loads") .help("Cache loads: both success and failures"); - final GaugeSnapshot.Builder cacheSize = GaugeSnapshot.builder() - .name("caffeine_cache_estimated_size") - .help("Estimated cache size"); + final GaugeSnapshot.Builder cacheSize = + GaugeSnapshot.builder().name("caffeine_cache_estimated_size").help("Estimated cache size"); - final SummarySnapshot.Builder cacheLoadSummary = SummarySnapshot.builder() + final SummarySnapshot.Builder cacheLoadSummary = + SummarySnapshot.builder() .name("caffeine_cache_load_duration_seconds") .help("Cache load duration: both success and failures"); - for (final Map.Entry c: children.entrySet()) { - final List cacheName = Arrays.asList(c.getKey()); - final Labels labels = Labels.of(labelNames, cacheName); - - final CacheStats stats = c.getValue().stats(); - - try { - cacheEvictionWeight.dataPoint( - GaugeSnapshot.GaugeDataPointSnapshot.builder() - .labels(labels) - .value(stats.evictionWeight()) - .build() - ); - } catch (Exception e) { - // EvictionWeight metric is unavailable, newer version of Caffeine is needed. 
- } - - cacheHitTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.hitCount()) - .build() - ); - - cacheMissTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.missCount()) - .build() - ); - - cacheRequestsTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.requestCount()) - .build() - ); - - cacheEvictionTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.evictionCount()) - .build() - ); - - cacheSize.dataPoint( - GaugeSnapshot.GaugeDataPointSnapshot.builder() - .labels(labels) - .value(c.getValue().estimatedSize()) - .build() - ); - - if (c.getValue() instanceof LoadingCache) { - cacheLoadFailure.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.loadFailureCount()) - .build() - ); - - cacheLoadTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.loadCount()) - .build() - ); - - cacheLoadSummary.dataPoint( - SummarySnapshot.SummaryDataPointSnapshot.builder() - .labels(labels) - .count(stats.loadCount()) - .sum(stats.totalLoadTime() / NANOSECONDS_PER_SECOND) - .build() - ); - } - } - - return metricSnapshotsBuilder - .metricSnapshot(cacheHitTotal.build()) - .metricSnapshot(cacheMissTotal.build()) - .metricSnapshot(cacheRequestsTotal.build()) - .metricSnapshot(cacheEvictionTotal.build()) - .metricSnapshot(cacheEvictionWeight.build()) - .metricSnapshot(cacheLoadFailure.build()) - .metricSnapshot(cacheLoadTotal.build()) - .metricSnapshot(cacheSize.build()) - .metricSnapshot(cacheLoadSummary.build()) - .build(); + for (final Map.Entry c : children.entrySet()) { + final List cacheName = Arrays.asList(c.getKey()); + final Labels labels = Labels.of(labelNames, cacheName); + + final CacheStats stats = c.getValue().stats(); + + try { + cacheEvictionWeight.dataPoint( + GaugeSnapshot.GaugeDataPointSnapshot.builder() + .labels(labels) + .value(stats.evictionWeight()) + .build()); + } catch (Exception e) { + // EvictionWeight metric is unavailable, newer version of Caffeine is needed. 
+ } + + cacheHitTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.hitCount()) + .build()); + + cacheMissTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.missCount()) + .build()); + + cacheRequestsTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.requestCount()) + .build()); + + cacheEvictionTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.evictionCount()) + .build()); + + cacheSize.dataPoint( + GaugeSnapshot.GaugeDataPointSnapshot.builder() + .labels(labels) + .value(c.getValue().estimatedSize()) + .build()); + + if (c.getValue() instanceof LoadingCache) { + cacheLoadFailure.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.loadFailureCount()) + .build()); + + cacheLoadTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.loadCount()) + .build()); + + cacheLoadSummary.dataPoint( + SummarySnapshot.SummaryDataPointSnapshot.builder() + .labels(labels) + .count(stats.loadCount()) + .sum(stats.totalLoadTime() / NANOSECONDS_PER_SECOND) + .build()); + } } + + return metricSnapshotsBuilder + .metricSnapshot(cacheHitTotal.build()) + .metricSnapshot(cacheMissTotal.build()) + .metricSnapshot(cacheRequestsTotal.build()) + .metricSnapshot(cacheEvictionTotal.build()) + .metricSnapshot(cacheEvictionWeight.build()) + .metricSnapshot(cacheLoadFailure.build()) + .metricSnapshot(cacheLoadTotal.build()) + .metricSnapshot(cacheSize.build()) + .metricSnapshot(cacheLoadSummary.build()) + .build(); + } } diff --git a/prometheus-metrics-instrumentation-caffeine/src/test/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollectorTest.java b/prometheus-metrics-instrumentation-caffeine/src/test/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollectorTest.java index 86f7ed185..acbfe3e1e 100644 --- a/prometheus-metrics-instrumentation-caffeine/src/test/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollectorTest.java +++ b/prometheus-metrics-instrumentation-caffeine/src/test/java/io/prometheus/metrics/instrumentation/caffeine/CacheMetricsCollectorTest.java @@ -1,5 +1,11 @@ package io.prometheus.metrics.instrumentation.caffeine; +import static org.assertj.core.api.Java6Assertions.assertThat; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.CacheLoader; import com.github.benmanes.caffeine.cache.Caffeine; @@ -10,144 +16,143 @@ import io.prometheus.metrics.model.snapshots.DataPointSnapshot; import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.SummarySnapshot; -import org.junit.Test; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.concurrent.Executor; - -import static org.assertj.core.api.Java6Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import org.junit.Test; public class CacheMetricsCollectorTest { - @Test - public void 
cacheExposesMetricsForHitMissAndEviction() { - final Cache cache = Caffeine.newBuilder().maximumSize(2).recordStats().executor(new Executor() { - @Override - public void execute(Runnable command) { - // Run cleanup in same thread, to remove async behavior with evictions - command.run(); - } - }).build(); - - final CacheMetricsCollector collector = new CacheMetricsCollector(); - collector.addCache("users", cache); - - final PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - - cache.getIfPresent("user1"); - cache.getIfPresent("user1"); - cache.put("user1", "First User"); - cache.getIfPresent("user1"); - - // Add to cache to trigger eviction. - cache.put("user2", "Second User"); - cache.put("user3", "Third User"); - cache.put("user4", "Fourth User"); - - assertCounterMetric(registry, "caffeine_cache_hit", "users", 1.0); - assertCounterMetric(registry, "caffeine_cache_miss", "users", 2.0); - assertCounterMetric(registry, "caffeine_cache_requests", "users", 3.0); - assertCounterMetric(registry, "caffeine_cache_eviction", "users", 2.0); - - final String expected = "# TYPE caffeine_cache_estimated_size gauge\n" + - "# HELP caffeine_cache_estimated_size Estimated cache size\n" + - "caffeine_cache_estimated_size{cache=\"users\"} 2.0\n" + - "# TYPE caffeine_cache_eviction counter\n" + - "# HELP caffeine_cache_eviction Cache eviction totals, doesn't include manually removed entries\n" + - "caffeine_cache_eviction_total{cache=\"users\"} 2.0\n" + - "# TYPE caffeine_cache_eviction_weight gauge\n" + - "# HELP caffeine_cache_eviction_weight Cache eviction weight\n" + - "caffeine_cache_eviction_weight{cache=\"users\"} 2.0\n" + - "# TYPE caffeine_cache_hit counter\n" + - "# HELP caffeine_cache_hit Cache hit totals\n" + - "caffeine_cache_hit_total{cache=\"users\"} 1.0\n" + - "# TYPE caffeine_cache_miss counter\n" + - "# HELP caffeine_cache_miss Cache miss totals\n" + - "caffeine_cache_miss_total{cache=\"users\"} 2.0\n" + - "# TYPE caffeine_cache_requests counter\n" + - "# HELP caffeine_cache_requests Cache request totals, hits + misses\n" + - "caffeine_cache_requests_total{cache=\"users\"} 3.0\n" + - "# EOF\n"; - - assertEquals(expected, convertToOpenMetricsFormat(registry)); - } - - @SuppressWarnings("unchecked") - @Test - public void loadingCacheExposesMetricsForLoadsAndExceptions() throws Exception { - final CacheLoader loader = mock(CacheLoader.class); - when(loader.load(anyString())) - .thenReturn("First User") - .thenThrow(new RuntimeException("Seconds time fails")) - .thenReturn("Third User"); - - final LoadingCache cache = Caffeine.newBuilder().recordStats().build(loader); - final CacheMetricsCollector collector = new CacheMetricsCollector(); - - collector.addCache("loadingusers", cache); - - final PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - - cache.get("user1"); - cache.get("user1"); - try { - cache.get("user2"); - } catch (Exception e) { - // ignoring. 
- } - cache.get("user3"); - - assertCounterMetric(registry, "caffeine_cache_hit", "loadingusers", 1.0); - assertCounterMetric(registry, "caffeine_cache_miss", "loadingusers", 3.0); - - assertCounterMetric(registry, "caffeine_cache_load_failure", "loadingusers", 1.0); - assertCounterMetric(registry, "caffeine_cache_loads", "loadingusers", 3.0); - - final SummarySnapshot.SummaryDataPointSnapshot loadDuration = (SummarySnapshot.SummaryDataPointSnapshot) getDataPointSnapshot( - registry, - "caffeine_cache_load_duration_seconds", - "loadingusers" - ); - - assertEquals(3, loadDuration.getCount()); - assertThat(loadDuration.getSum()).isGreaterThan(0); - } - - private void assertCounterMetric(PrometheusRegistry registry, String name, String cacheName, double value) { - final CounterSnapshot.CounterDataPointSnapshot dataPointSnapshot = - (CounterSnapshot.CounterDataPointSnapshot) getDataPointSnapshot(registry, name, cacheName); - - assertEquals(value, dataPointSnapshot.getValue(), 0); - } - - private DataPointSnapshot getDataPointSnapshot(PrometheusRegistry registry, String name, String cacheName) - { - final Labels labels = Labels.of(new String[]{"cache"}, new String[]{cacheName}); - - return registry.scrape(name::equals).stream() - .flatMap(metricSnapshot -> metricSnapshot.getDataPoints().stream()) - .filter(dataPoint -> dataPoint.getLabels().equals(labels)) - .findFirst() - .get(); + @Test + public void cacheExposesMetricsForHitMissAndEviction() { + final Cache cache = + Caffeine.newBuilder() + .maximumSize(2) + .recordStats() + .executor( + new Executor() { + @Override + public void execute(Runnable command) { + // Run cleanup in same thread, to remove async behavior with evictions + command.run(); + } + }) + .build(); + + final CacheMetricsCollector collector = new CacheMetricsCollector(); + collector.addCache("users", cache); + + final PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + + cache.getIfPresent("user1"); + cache.getIfPresent("user1"); + cache.put("user1", "First User"); + cache.getIfPresent("user1"); + + // Add to cache to trigger eviction. 
+ cache.put("user2", "Second User"); + cache.put("user3", "Third User"); + cache.put("user4", "Fourth User"); + + assertCounterMetric(registry, "caffeine_cache_hit", "users", 1.0); + assertCounterMetric(registry, "caffeine_cache_miss", "users", 2.0); + assertCounterMetric(registry, "caffeine_cache_requests", "users", 3.0); + assertCounterMetric(registry, "caffeine_cache_eviction", "users", 2.0); + + final String expected = + "# TYPE caffeine_cache_estimated_size gauge\n" + + "# HELP caffeine_cache_estimated_size Estimated cache size\n" + + "caffeine_cache_estimated_size{cache=\"users\"} 2.0\n" + + "# TYPE caffeine_cache_eviction counter\n" + + "# HELP caffeine_cache_eviction Cache eviction totals, doesn't include manually removed entries\n" + + "caffeine_cache_eviction_total{cache=\"users\"} 2.0\n" + + "# TYPE caffeine_cache_eviction_weight gauge\n" + + "# HELP caffeine_cache_eviction_weight Cache eviction weight\n" + + "caffeine_cache_eviction_weight{cache=\"users\"} 2.0\n" + + "# TYPE caffeine_cache_hit counter\n" + + "# HELP caffeine_cache_hit Cache hit totals\n" + + "caffeine_cache_hit_total{cache=\"users\"} 1.0\n" + + "# TYPE caffeine_cache_miss counter\n" + + "# HELP caffeine_cache_miss Cache miss totals\n" + + "caffeine_cache_miss_total{cache=\"users\"} 2.0\n" + + "# TYPE caffeine_cache_requests counter\n" + + "# HELP caffeine_cache_requests Cache request totals, hits + misses\n" + + "caffeine_cache_requests_total{cache=\"users\"} 3.0\n" + + "# EOF\n"; + + assertEquals(expected, convertToOpenMetricsFormat(registry)); + } + + @SuppressWarnings("unchecked") + @Test + public void loadingCacheExposesMetricsForLoadsAndExceptions() throws Exception { + final CacheLoader loader = mock(CacheLoader.class); + when(loader.load(anyString())) + .thenReturn("First User") + .thenThrow(new RuntimeException("Seconds time fails")) + .thenReturn("Third User"); + + final LoadingCache cache = Caffeine.newBuilder().recordStats().build(loader); + final CacheMetricsCollector collector = new CacheMetricsCollector(); + + collector.addCache("loadingusers", cache); + + final PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + + cache.get("user1"); + cache.get("user1"); + try { + cache.get("user2"); + } catch (Exception e) { + // ignoring. 
} - - private String convertToOpenMetricsFormat(PrometheusRegistry registry) { - final ByteArrayOutputStream out = new ByteArrayOutputStream(); - final OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); - try { - writer.write(out, registry.scrape()); - return out.toString(StandardCharsets.UTF_8.name()); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + cache.get("user3"); + + assertCounterMetric(registry, "caffeine_cache_hit", "loadingusers", 1.0); + assertCounterMetric(registry, "caffeine_cache_miss", "loadingusers", 3.0); + + assertCounterMetric(registry, "caffeine_cache_load_failure", "loadingusers", 1.0); + assertCounterMetric(registry, "caffeine_cache_loads", "loadingusers", 3.0); + + final SummarySnapshot.SummaryDataPointSnapshot loadDuration = + (SummarySnapshot.SummaryDataPointSnapshot) + getDataPointSnapshot(registry, "caffeine_cache_load_duration_seconds", "loadingusers"); + + assertEquals(3, loadDuration.getCount()); + assertThat(loadDuration.getSum()).isGreaterThan(0); + } + + private void assertCounterMetric( + PrometheusRegistry registry, String name, String cacheName, double value) { + final CounterSnapshot.CounterDataPointSnapshot dataPointSnapshot = + (CounterSnapshot.CounterDataPointSnapshot) getDataPointSnapshot(registry, name, cacheName); + + assertEquals(value, dataPointSnapshot.getValue(), 0); + } + + private DataPointSnapshot getDataPointSnapshot( + PrometheusRegistry registry, String name, String cacheName) { + final Labels labels = Labels.of(new String[] {"cache"}, new String[] {cacheName}); + + return registry.scrape(name::equals).stream() + .flatMap(metricSnapshot -> metricSnapshot.getDataPoints().stream()) + .filter(dataPoint -> dataPoint.getLabels().equals(labels)) + .findFirst() + .get(); + } + + private String convertToOpenMetricsFormat(PrometheusRegistry registry) { + final ByteArrayOutputStream out = new ByteArrayOutputStream(); + final OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + try { + writer.write(out, registry.scrape()); + return out.toString(StandardCharsets.UTF_8.name()); + } catch (IOException e) { + throw new UncheckedIOException(e); } + } } diff --git a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExports.java b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExports.java index ec2bfa2e4..c87f4aa90 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExports.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExports.java @@ -1,221 +1,252 @@ package io.prometheus.metrics.instrumentation.dropwizard5; -import io.dropwizard.metrics5.Timer; import io.dropwizard.metrics5.*; +import io.dropwizard.metrics5.Timer; import io.prometheus.metrics.instrumentation.dropwizard5.labels.CustomLabelMapper; import io.prometheus.metrics.model.registry.MultiCollector; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.*; - import java.util.*; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; -/** - * Collect Dropwizard metrics from a MetricRegistry. - */ +/** Collect Dropwizard metrics from a MetricRegistry. 
*/ public class DropwizardExports implements MultiCollector { - private static final Logger LOGGER = Logger.getLogger(DropwizardExports.class.getName()); - private final MetricRegistry registry; - private final MetricFilter metricFilter; - private final Optional labelMapper; - - /** - * Creates a new DropwizardExports and {@link MetricFilter#ALL}. - * - * @param registry a metric registry to export in prometheus. - */ - public DropwizardExports(MetricRegistry registry) { - super(); - this.registry = registry; - this.metricFilter = MetricFilter.ALL; - this.labelMapper = Optional.empty(); + private static final Logger LOGGER = Logger.getLogger(DropwizardExports.class.getName()); + private final MetricRegistry registry; + private final MetricFilter metricFilter; + private final Optional labelMapper; + + /** + * Creates a new DropwizardExports and {@link MetricFilter#ALL}. + * + * @param registry a metric registry to export in prometheus. + */ + public DropwizardExports(MetricRegistry registry) { + super(); + this.registry = registry; + this.metricFilter = MetricFilter.ALL; + this.labelMapper = Optional.empty(); + } + + /** + * Creates a new DropwizardExports with a custom {@link MetricFilter}. + * + * @param registry a metric registry to export in prometheus. + * @param metricFilter a custom metric filter. + */ + public DropwizardExports(MetricRegistry registry, MetricFilter metricFilter) { + this.registry = registry; + this.metricFilter = metricFilter; + this.labelMapper = Optional.empty(); + } + + /** + * @param registry a metric registry to export in prometheus. + * @param metricFilter a custom metric filter. + * @param labelMapper a labelMapper to use to map labels. + */ + public DropwizardExports( + MetricRegistry registry, MetricFilter metricFilter, CustomLabelMapper labelMapper) { + this.registry = registry; + this.metricFilter = metricFilter; + this.labelMapper = Optional.ofNullable(labelMapper); + } + + private static String getHelpMessage(String metricName, Metric metric) { + return String.format( + "Generated from Dropwizard metric import (metric=%s, type=%s)", + metricName, metric.getClass().getName()); + } + + private MetricMetadata getMetricMetaData(String metricName, Metric metric) { + String name = labelMapper.isPresent() ? labelMapper.get().getName(metricName) : metricName; + return new MetricMetadata( + PrometheusNaming.sanitizeMetricName(name), getHelpMessage(metricName, metric)); + } + + /** + * Export counter as Prometheus Gauge. + */ + MetricSnapshot fromCounter(String dropwizardName, Counter counter) { + MetricMetadata metadata = getMetricMetaData(dropwizardName, counter); + CounterSnapshot.CounterDataPointSnapshot.Builder dataPointBuilder = + CounterSnapshot.CounterDataPointSnapshot.builder() + .value(Long.valueOf(counter.getCount()).doubleValue()); + labelMapper.map( + mapper -> + dataPointBuilder.labels( + mapper.getLabels( + dropwizardName, Collections.emptyList(), Collections.emptyList()))); + return new CounterSnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); + } + + /** Export gauge as a prometheus gauge. */ + MetricSnapshot fromGauge(String dropwizardName, Gauge gauge) { + Object obj = gauge.getValue(); + double value; + if (obj instanceof Number) { + value = ((Number) obj).doubleValue(); + } else if (obj instanceof Boolean) { + value = ((Boolean) obj) ? 1 : 0; + } else { + LOGGER.log( + Level.FINE, + String.format( + "Invalid type for Gauge %s: %s", + PrometheusNaming.sanitizeMetricName(dropwizardName), + obj == null ? 
"null" : obj.getClass().getName())); + return null; } - - /** - * Creates a new DropwizardExports with a custom {@link MetricFilter}. - * - * @param registry a metric registry to export in prometheus. - * @param metricFilter a custom metric filter. - */ - public DropwizardExports(MetricRegistry registry, MetricFilter metricFilter) { - this.registry = registry; - this.metricFilter = metricFilter; - this.labelMapper = Optional.empty(); + MetricMetadata metadata = getMetricMetaData(dropwizardName, gauge); + GaugeSnapshot.GaugeDataPointSnapshot.Builder dataPointBuilder = + GaugeSnapshot.GaugeDataPointSnapshot.builder().value(value); + labelMapper.map( + mapper -> + dataPointBuilder.labels( + mapper.getLabels( + dropwizardName, Collections.emptyList(), Collections.emptyList()))); + return new GaugeSnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); + } + + /** + * Export a histogram snapshot as a prometheus SUMMARY. + * + * @param dropwizardName metric name. + * @param snapshot the histogram snapshot. + * @param count the total sample count for this snapshot. + * @param factor a factor to apply to histogram values. + */ + MetricSnapshot fromSnapshotAndCount( + String dropwizardName, Snapshot snapshot, long count, double factor, String helpMessage) { + Quantiles quantiles = + Quantiles.builder() + .quantile(0.5, snapshot.getMedian() * factor) + .quantile(0.75, snapshot.get75thPercentile() * factor) + .quantile(0.95, snapshot.get95thPercentile() * factor) + .quantile(0.98, snapshot.get98thPercentile() * factor) + .quantile(0.99, snapshot.get99thPercentile() * factor) + .quantile(0.999, snapshot.get999thPercentile() * factor) + .build(); + + MetricMetadata metadata = + new MetricMetadata(PrometheusNaming.sanitizeMetricName(dropwizardName), helpMessage); + SummarySnapshot.SummaryDataPointSnapshot.Builder dataPointBuilder = + SummarySnapshot.SummaryDataPointSnapshot.builder().quantiles(quantiles).count(count); + labelMapper.map( + mapper -> + dataPointBuilder.labels( + mapper.getLabels( + dropwizardName, Collections.emptyList(), Collections.emptyList()))); + return new SummarySnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); + } + + /** Convert histogram snapshot. */ + MetricSnapshot fromHistogram(String dropwizardName, Histogram histogram) { + return fromSnapshotAndCount( + dropwizardName, + histogram.getSnapshot(), + histogram.getCount(), + 1.0, + getHelpMessage(dropwizardName, histogram)); + } + + /** Export Dropwizard Timer as a histogram. Use TIME_UNIT as time unit. */ + MetricSnapshot fromTimer(String dropwizardName, Timer timer) { + return fromSnapshotAndCount( + dropwizardName, + timer.getSnapshot(), + timer.getCount(), + 1.0D / TimeUnit.SECONDS.toNanos(1L), + getHelpMessage(dropwizardName, timer)); + } + + /** Export a Meter as a prometheus COUNTER. 
*/ + MetricSnapshot fromMeter(String dropwizardName, Meter meter) { + MetricMetadata metadata = getMetricMetaData(dropwizardName + "_total", meter); + CounterSnapshot.CounterDataPointSnapshot.Builder dataPointBuilder = + CounterSnapshot.CounterDataPointSnapshot.builder().value(meter.getCount()); + labelMapper.map( + mapper -> + dataPointBuilder.labels( + mapper.getLabels( + dropwizardName, Collections.emptyList(), Collections.emptyList()))); + return new CounterSnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); + } + + @Override + public MetricSnapshots collect() { + MetricSnapshots.Builder metricSnapshots = MetricSnapshots.builder(); + for (SortedMap.Entry entry : registry.getGauges(metricFilter).entrySet()) { + Optional.ofNullable(fromGauge(entry.getKey().getKey(), entry.getValue())) + .map(metricSnapshots::metricSnapshot); } - - /** - * @param registry a metric registry to export in prometheus. - * @param metricFilter a custom metric filter. - * @param labelMapper a labelMapper to use to map labels. - */ - public DropwizardExports(MetricRegistry registry, MetricFilter metricFilter, CustomLabelMapper labelMapper) { - this.registry = registry; - this.metricFilter = metricFilter; - this.labelMapper = Optional.ofNullable(labelMapper); + for (SortedMap.Entry entry : + registry.getCounters(metricFilter).entrySet()) { + metricSnapshots.metricSnapshot(fromCounter(entry.getKey().getKey(), entry.getValue())); } - - private static String getHelpMessage(String metricName, Metric metric) { - return String.format("Generated from Dropwizard metric import (metric=%s, type=%s)", - metricName, metric.getClass().getName()); + for (SortedMap.Entry entry : + registry.getHistograms(metricFilter).entrySet()) { + metricSnapshots.metricSnapshot(fromHistogram(entry.getKey().getKey(), entry.getValue())); } - - private MetricMetadata getMetricMetaData(String metricName, Metric metric) { - String name = labelMapper.isPresent() ? labelMapper.get().getName(metricName) : metricName; - return new MetricMetadata(PrometheusNaming.sanitizeMetricName(name), getHelpMessage(metricName, metric)); + for (SortedMap.Entry entry : registry.getTimers(metricFilter).entrySet()) { + metricSnapshots.metricSnapshot(fromTimer(entry.getKey().getKey(), entry.getValue())); } - - /** - * Export counter as Prometheus Gauge. - */ - MetricSnapshot fromCounter(String dropwizardName, Counter counter) { - MetricMetadata metadata = getMetricMetaData(dropwizardName, counter); - CounterSnapshot.CounterDataPointSnapshot.Builder dataPointBuilder = CounterSnapshot.CounterDataPointSnapshot.builder().value(Long.valueOf(counter.getCount()).doubleValue()); - labelMapper.map(mapper -> dataPointBuilder.labels(mapper.getLabels(dropwizardName, Collections.emptyList(), Collections.emptyList()))); - return new CounterSnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); + for (SortedMap.Entry entry : registry.getMeters(metricFilter).entrySet()) { + metricSnapshots.metricSnapshot(fromMeter(entry.getKey().getKey(), entry.getValue())); } + return metricSnapshots.build(); + } - /** - * Export gauge as a prometheus gauge. - */ - MetricSnapshot fromGauge(String dropwizardName, Gauge gauge) { - Object obj = gauge.getValue(); - double value; - if (obj instanceof Number) { - value = ((Number) obj).doubleValue(); - } else if (obj instanceof Boolean) { - value = ((Boolean) obj) ? 
1 : 0; - } else { - LOGGER.log(Level.FINE, String.format("Invalid type for Gauge %s: %s", PrometheusNaming.sanitizeMetricName(dropwizardName), - obj == null ? "null" : obj.getClass().getName())); - return null; - } - MetricMetadata metadata = getMetricMetaData(dropwizardName, gauge); - GaugeSnapshot.GaugeDataPointSnapshot.Builder dataPointBuilder = GaugeSnapshot.GaugeDataPointSnapshot.builder().value(value); - labelMapper.map(mapper -> dataPointBuilder.labels(mapper.getLabels(dropwizardName, Collections.emptyList(), Collections.emptyList()))); - return new GaugeSnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); - } + public static Builder builder() { + return new Builder(); + } - /** - * Export a histogram snapshot as a prometheus SUMMARY. - * - * @param dropwizardName metric name. - * @param snapshot the histogram snapshot. - * @param count the total sample count for this snapshot. - * @param factor a factor to apply to histogram values. - */ - MetricSnapshot fromSnapshotAndCount(String dropwizardName, Snapshot snapshot, long count, double factor, String helpMessage) { - Quantiles quantiles = Quantiles.builder() - .quantile(0.5, snapshot.getMedian() * factor) - .quantile(0.75, snapshot.get75thPercentile() * factor) - .quantile(0.95, snapshot.get95thPercentile() * factor) - .quantile(0.98, snapshot.get98thPercentile() * factor) - .quantile(0.99, snapshot.get99thPercentile() * factor) - .quantile(0.999, snapshot.get999thPercentile() * factor) - .build(); - - MetricMetadata metadata = new MetricMetadata(PrometheusNaming.sanitizeMetricName(dropwizardName), helpMessage); - SummarySnapshot.SummaryDataPointSnapshot.Builder dataPointBuilder = SummarySnapshot.SummaryDataPointSnapshot.builder().quantiles(quantiles).count(count); - labelMapper.map(mapper -> dataPointBuilder.labels(mapper.getLabels(dropwizardName, Collections.emptyList(), Collections.emptyList()))); - return new SummarySnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); - } + // Builder class for DropwizardExports + public static class Builder { + private MetricRegistry registry; + private MetricFilter metricFilter; + private CustomLabelMapper labelMapper; - /** - * Convert histogram snapshot. - */ - MetricSnapshot fromHistogram(String dropwizardName, Histogram histogram) { - return fromSnapshotAndCount(dropwizardName, histogram.getSnapshot(), histogram.getCount(), 1.0, - getHelpMessage(dropwizardName, histogram)); + private Builder() { + this.metricFilter = MetricFilter.ALL; } - /** - * Export Dropwizard Timer as a histogram. Use TIME_UNIT as time unit. - */ - MetricSnapshot fromTimer(String dropwizardName, Timer timer) { - return fromSnapshotAndCount(dropwizardName, timer.getSnapshot(), timer.getCount(), - 1.0D / TimeUnit.SECONDS.toNanos(1L), getHelpMessage(dropwizardName, timer)); + public Builder dropwizardRegistry(MetricRegistry registry) { + this.registry = registry; + return this; } - /** - * Export a Meter as a prometheus COUNTER. 
- */ - MetricSnapshot fromMeter(String dropwizardName, Meter meter) { - MetricMetadata metadata = getMetricMetaData(dropwizardName + "_total", meter); - CounterSnapshot.CounterDataPointSnapshot.Builder dataPointBuilder = CounterSnapshot.CounterDataPointSnapshot.builder().value(meter.getCount()); - labelMapper.map(mapper -> dataPointBuilder.labels(mapper.getLabels(dropwizardName, Collections.emptyList(), Collections.emptyList()))); - return new CounterSnapshot(metadata, Collections.singletonList(dataPointBuilder.build())); + public Builder metricFilter(MetricFilter metricFilter) { + this.metricFilter = metricFilter; + return this; } - @Override - public MetricSnapshots collect() { - MetricSnapshots.Builder metricSnapshots = MetricSnapshots.builder(); - for (SortedMap.Entry entry : registry.getGauges(metricFilter).entrySet()) { - Optional.ofNullable(fromGauge(entry.getKey().getKey(), entry.getValue())).map(metricSnapshots::metricSnapshot); - } - for (SortedMap.Entry entry : registry.getCounters(metricFilter).entrySet()) { - metricSnapshots.metricSnapshot(fromCounter(entry.getKey().getKey(), entry.getValue())); - } - for (SortedMap.Entry entry : registry.getHistograms(metricFilter).entrySet()) { - metricSnapshots.metricSnapshot(fromHistogram(entry.getKey().getKey(), entry.getValue())); - } - for (SortedMap.Entry entry : registry.getTimers(metricFilter).entrySet()) { - metricSnapshots.metricSnapshot(fromTimer(entry.getKey().getKey(), entry.getValue())); - } - for (SortedMap.Entry entry : registry.getMeters(metricFilter).entrySet()) { - metricSnapshots.metricSnapshot(fromMeter(entry.getKey().getKey(), entry.getValue())); - } - return metricSnapshots.build(); + public Builder customLabelMapper(CustomLabelMapper labelMapper) { + this.labelMapper = labelMapper; + return this; } - public static Builder builder() { - return new Builder(); + DropwizardExports build() { + if (registry == null) { + throw new IllegalArgumentException("MetricRegistry must be set"); + } + if (labelMapper == null) { + return new DropwizardExports(registry, metricFilter); + } else { + return new DropwizardExports(registry, metricFilter, labelMapper); + } } - //Builder class for DropwizardExports - public static class Builder { - private MetricRegistry registry; - private MetricFilter metricFilter; - private CustomLabelMapper labelMapper; - - private Builder() { - this.metricFilter = MetricFilter.ALL; - } - - public Builder dropwizardRegistry(MetricRegistry registry) { - this.registry = registry; - return this; - } - - public Builder metricFilter(MetricFilter metricFilter) { - this.metricFilter = metricFilter; - return this; - } - - public Builder customLabelMapper(CustomLabelMapper labelMapper) { - this.labelMapper = labelMapper; - return this; - } - - DropwizardExports build() { - if (registry == null) { - throw new IllegalArgumentException("MetricRegistry must be set"); - } - if (labelMapper == null) { - return new DropwizardExports(registry, metricFilter); - } else { - return new DropwizardExports(registry, metricFilter, labelMapper); - } - } - - public void register() { - register(PrometheusRegistry.defaultRegistry); - } - - public void register(PrometheusRegistry registry) { - DropwizardExports dropwizardExports = build(); - registry.register(dropwizardExports); - } + public void register() { + register(PrometheusRegistry.defaultRegistry); } -} \ No newline at end of file + public void register(PrometheusRegistry registry) { + DropwizardExports dropwizardExports = build(); + registry.register(dropwizardExports); + } + 
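+
+    // Illustrative builder usage (a minimal sketch, not part of this change; the Dropwizard
+    // MetricRegistry "dropwizardRegistry" is assumed to exist in the calling code):
+    //
+    //   MetricRegistry dropwizardRegistry = new MetricRegistry();
+    //   DropwizardExports.builder()
+    //       .dropwizardRegistry(dropwizardRegistry)
+    //       .metricFilter(MetricFilter.ALL)
+    //       .register(PrometheusRegistry.defaultRegistry);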
} +} diff --git a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapper.java b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapper.java index db5f6a64f..af45769cc 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapper.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapper.java @@ -1,117 +1,120 @@ package io.prometheus.metrics.instrumentation.dropwizard5.labels; import io.prometheus.metrics.model.snapshots.Labels; - import java.util.ArrayList; import java.util.List; import java.util.Map; /** - * A LabelMapper to allow Dropwizard metrics to be translated to Prometheus metrics including custom labels and names. - * Prometheus metric name and labels are extracted from the Dropwizard name based on the provided list of {@link MapperConfig}s. - * The FIRST matching config will be used. + * A LabelMapper to allow Dropwizard metrics to be translated to Prometheus metrics including custom + * labels and names. Prometheus metric name and labels are extracted from the Dropwizard name based + * on the provided list of {@link MapperConfig}s. The FIRST matching config will be used. */ -public class CustomLabelMapper { - private final List compiledMapperConfigs; - - public CustomLabelMapper(final List mapperConfigs) { - if (mapperConfigs == null || mapperConfigs.isEmpty()) { - throw new IllegalArgumentException("CustomLabelMapper needs some mapper configs!"); - } - - this.compiledMapperConfigs = new ArrayList(mapperConfigs.size()); - for (MapperConfig config : mapperConfigs) { - this.compiledMapperConfigs.add(new CompiledMapperConfig(config)); - } +public class CustomLabelMapper { + private final List compiledMapperConfigs; + + public CustomLabelMapper(final List mapperConfigs) { + if (mapperConfigs == null || mapperConfigs.isEmpty()) { + throw new IllegalArgumentException("CustomLabelMapper needs some mapper configs!"); } - public String getName(final String dropwizardName){ - if (dropwizardName == null) { - throw new IllegalArgumentException("Dropwizard metric name cannot be null"); - } - - CompiledMapperConfig matchingConfig = null; - for (CompiledMapperConfig config : this.compiledMapperConfigs) { - if (config.pattern.matches(dropwizardName)) { - matchingConfig = config; - break; - } - } - - if (matchingConfig != null) { - final Map params = matchingConfig.pattern.extractParameters(dropwizardName); - final NameAndLabels nameAndLabels = getNameAndLabels(matchingConfig.mapperConfig, params); - return nameAndLabels.name; - } - - return dropwizardName; + this.compiledMapperConfigs = new ArrayList(mapperConfigs.size()); + for (MapperConfig config : mapperConfigs) { + this.compiledMapperConfigs.add(new CompiledMapperConfig(config)); } + } + public String getName(final String dropwizardName) { + if (dropwizardName == null) { + throw new IllegalArgumentException("Dropwizard metric name cannot be null"); + } + + CompiledMapperConfig matchingConfig = null; + for (CompiledMapperConfig config : this.compiledMapperConfigs) { + if (config.pattern.matches(dropwizardName)) { + matchingConfig = config; + break; + } + } - public Labels getLabels(final String dropwizardName, final List additionalLabelNames, final List additionalLabelValues){ - if (dropwizardName == null) { - 
throw new IllegalArgumentException("Dropwizard metric name cannot be null"); - } - - CompiledMapperConfig matchingConfig = null; - for (CompiledMapperConfig config : this.compiledMapperConfigs) { - if (config.pattern.matches(dropwizardName)) { - matchingConfig = config; - break; - } - } - - if (matchingConfig != null) { - final Map params = matchingConfig.pattern.extractParameters(dropwizardName); - final NameAndLabels nameAndLabels = getNameAndLabels(matchingConfig.mapperConfig, params); - nameAndLabels.labelNames.addAll(additionalLabelNames); - nameAndLabels.labelValues.addAll(additionalLabelValues); - return Labels.of(nameAndLabels.labelNames, nameAndLabels.labelValues); - } - - return Labels.of(additionalLabelNames, additionalLabelValues); + if (matchingConfig != null) { + final Map params = matchingConfig.pattern.extractParameters(dropwizardName); + final NameAndLabels nameAndLabels = getNameAndLabels(matchingConfig.mapperConfig, params); + return nameAndLabels.name; } - protected NameAndLabels getNameAndLabels(final MapperConfig config, final Map parameters) { - final String metricName = formatTemplate(config.getName(), parameters); - final List labels = new ArrayList(config.getLabels().size()); - final List labelValues = new ArrayList(config.getLabels().size()); - for (Map.Entry entry : config.getLabels().entrySet()) { - labels.add(entry.getKey()); - labelValues.add(formatTemplate(entry.getValue(), parameters)); - } + return dropwizardName; + } - return new NameAndLabels(metricName, labels, labelValues); + public Labels getLabels( + final String dropwizardName, + final List additionalLabelNames, + final List additionalLabelValues) { + if (dropwizardName == null) { + throw new IllegalArgumentException("Dropwizard metric name cannot be null"); } - private String formatTemplate(final String template, final Map params) { - String result = template; - for (Map.Entry entry : params.entrySet()) { - result = result.replace(entry.getKey(), entry.getValue()); - } + CompiledMapperConfig matchingConfig = null; + for (CompiledMapperConfig config : this.compiledMapperConfigs) { + if (config.pattern.matches(dropwizardName)) { + matchingConfig = config; + break; + } + } + + if (matchingConfig != null) { + final Map params = matchingConfig.pattern.extractParameters(dropwizardName); + final NameAndLabels nameAndLabels = getNameAndLabels(matchingConfig.mapperConfig, params); + nameAndLabels.labelNames.addAll(additionalLabelNames); + nameAndLabels.labelValues.addAll(additionalLabelValues); + return Labels.of(nameAndLabels.labelNames, nameAndLabels.labelValues); + } - return result; + return Labels.of(additionalLabelNames, additionalLabelValues); + } + + protected NameAndLabels getNameAndLabels( + final MapperConfig config, final Map parameters) { + final String metricName = formatTemplate(config.getName(), parameters); + final List labels = new ArrayList(config.getLabels().size()); + final List labelValues = new ArrayList(config.getLabels().size()); + for (Map.Entry entry : config.getLabels().entrySet()) { + labels.add(entry.getKey()); + labelValues.add(formatTemplate(entry.getValue(), parameters)); } - static class CompiledMapperConfig { - final MapperConfig mapperConfig; - final GraphiteNamePattern pattern; + return new NameAndLabels(metricName, labels, labelValues); + } - CompiledMapperConfig(final MapperConfig mapperConfig) { - this.mapperConfig = mapperConfig; - this.pattern = new GraphiteNamePattern(mapperConfig.getMatch()); - } + private String formatTemplate(final String template, final Map 
params) { + String result = template; + for (Map.Entry entry : params.entrySet()) { + result = result.replace(entry.getKey(), entry.getValue()); } - static class NameAndLabels { - final String name; - final List labelNames; - final List labelValues; + return result; + } - NameAndLabels(final String name, final List labelNames, final List labelValues) { - this.name = name; - this.labelNames = labelNames; - this.labelValues = labelValues; - } + static class CompiledMapperConfig { + final MapperConfig mapperConfig; + final GraphiteNamePattern pattern; + + CompiledMapperConfig(final MapperConfig mapperConfig) { + this.mapperConfig = mapperConfig; + this.pattern = new GraphiteNamePattern(mapperConfig.getMatch()); + } + } + + static class NameAndLabels { + final String name; + final List labelNames; + final List labelValues; + + NameAndLabels( + final String name, final List labelNames, final List labelValues) { + this.name = name; + this.labelNames = labelNames; + this.labelValues = labelValues; } + } } diff --git a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePattern.java b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePattern.java index 4823c736f..bd37fe04c 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePattern.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePattern.java @@ -1,94 +1,95 @@ package io.prometheus.metrics.instrumentation.dropwizard5.labels; +import static io.prometheus.metrics.instrumentation.dropwizard5.labels.MapperConfig.METRIC_GLOB_REGEX; + import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; -import static io.prometheus.metrics.instrumentation.dropwizard5.labels.MapperConfig.METRIC_GLOB_REGEX; - /** - * GraphiteNamePattern is initialised with a simplified glob pattern that only allows '*' as special character. - * Examples of valid patterns: + * GraphiteNamePattern is initialised with a simplified glob pattern that only allows '*' as special + * character. Examples of valid patterns: + * *

- *     <li>org.test.controller.gather.status.400
- *     <li>org.test.controller.gather.status.*
- *     <li>org.test.controller.*.status.*
- *     <li>*.test.controller.*.status.*
+ *   <li>org.test.controller.gather.status.400
+ *   <li>org.test.controller.gather.status.*
+ *   <li>org.test.controller.*.status.*
+ *   <li>*.test.controller.*.status.*
 * </ul>
- *
- * It contains logic to match a metric name and to extract named parameters from it.
+ *
+ * <p>
It contains logic to match a metric name and to extract named parameters from it. */ class GraphiteNamePattern { - private static final Pattern VALIDATION_PATTERN = Pattern.compile(METRIC_GLOB_REGEX); + private static final Pattern VALIDATION_PATTERN = Pattern.compile(METRIC_GLOB_REGEX); - private Pattern pattern; - private String patternStr; + private Pattern pattern; + private String patternStr; - /** - * Creates a new GraphiteNamePattern from the given simplified glob pattern. - * - * @param pattern The glob style pattern to be used. - */ - GraphiteNamePattern(final String pattern) throws IllegalArgumentException { - if (!VALIDATION_PATTERN.matcher(pattern).matches()) { - throw new IllegalArgumentException(String.format("Provided pattern [%s] does not matches [%s]", pattern, METRIC_GLOB_REGEX)); - } - initializePattern(pattern); + /** + * Creates a new GraphiteNamePattern from the given simplified glob pattern. + * + * @param pattern The glob style pattern to be used. + */ + GraphiteNamePattern(final String pattern) throws IllegalArgumentException { + if (!VALIDATION_PATTERN.matcher(pattern).matches()) { + throw new IllegalArgumentException( + String.format("Provided pattern [%s] does not matches [%s]", pattern, METRIC_GLOB_REGEX)); } + initializePattern(pattern); + } - /** - * Matches the metric name against the pattern. - * - * @param metricName The metric name to be tested. - * @return {@code true} if the name is matched, {@code false} otherwise. - */ - boolean matches(final String metricName) { - return metricName != null && pattern.matcher(metricName).matches(); - } - - /** - * Extracts parameters from the given metric name based on the pattern. - * The resulting map has keys named as '${n}' where n is the 0 based position in the pattern. - * E.g.: - * pattern: org.test.controller.*.status.* - * extractParameters("org.test.controller.gather.status.400") -> - * {${0} -> "gather", ${1} -> "400"} - * - * @param metricName The metric name to extract parameters from. - * @return A parameter map where keys are named '${n}' where n is 0 based parameter position in the pattern. - */ - Map extractParameters(final String metricName) { - final Matcher matcher = this.pattern.matcher(metricName); - final Map params = new HashMap(); - if (matcher.find()) { - for (int i = 1; i <= matcher.groupCount(); i++) { - params.put(String.format("${%d}", i - 1), matcher.group(i)); - } - } + /** + * Matches the metric name against the pattern. + * + * @param metricName The metric name to be tested. + * @return {@code true} if the name is matched, {@code false} otherwise. + */ + boolean matches(final String metricName) { + return metricName != null && pattern.matcher(metricName).matches(); + } - return params; + /** + * Extracts parameters from the given metric name based on the pattern. The resulting map has keys + * named as '${n}' where n is the 0 based position in the pattern. E.g.: pattern: + * org.test.controller.*.status.* extractParameters("org.test.controller.gather.status.400") -> + * {${0} -> "gather", ${1} -> "400"} + * + * @param metricName The metric name to extract parameters from. + * @return A parameter map where keys are named '${n}' where n is 0 based parameter position in + * the pattern. 
+ */ + Map extractParameters(final String metricName) { + final Matcher matcher = this.pattern.matcher(metricName); + final Map params = new HashMap(); + if (matcher.find()) { + for (int i = 1; i <= matcher.groupCount(); i++) { + params.put(String.format("${%d}", i - 1), matcher.group(i)); + } } - /** - * Turns the GLOB pattern into a REGEX. - * - * @param pattern The pattern to use - */ - private void initializePattern(final String pattern) { - final String[] split = pattern.split(Pattern.quote("*"), -1); - final StringBuilder escapedPattern = new StringBuilder(Pattern.quote(split[0])); - for (int i = 1; i < split.length; i++) { - String quoted = Pattern.quote(split[i]); - escapedPattern.append("([^.]*)").append(quoted); - } + return params; + } - final String regex = "^" + escapedPattern.toString() + "$"; - this.patternStr = regex; - this.pattern = Pattern.compile(regex); + /** + * Turns the GLOB pattern into a REGEX. + * + * @param pattern The pattern to use + */ + private void initializePattern(final String pattern) { + final String[] split = pattern.split(Pattern.quote("*"), -1); + final StringBuilder escapedPattern = new StringBuilder(Pattern.quote(split[0])); + for (int i = 1; i < split.length; i++) { + String quoted = Pattern.quote(split[i]); + escapedPattern.append("([^.]*)").append(quoted); } - String getPatternString() { - return this.patternStr; - } + final String regex = "^" + escapedPattern.toString() + "$"; + this.patternStr = regex; + this.pattern = Pattern.compile(regex); + } + + String getPatternString() { + return this.patternStr; + } } diff --git a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfig.java b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfig.java index c1eb1ae08..19e52d788 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfig.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/main/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfig.java @@ -5,155 +5,143 @@ import java.util.regex.Pattern; /** - * POJO containing info on how to map a graphite metric to a prometheus one. - * Example mapping in yaml format: - *

- * match: test.dispatcher.*.*.*
- * name: dispatcher_events_total
- * labels:
- *   action: ${1}
- *   outcome: ${2}_out
- *   processor: ${0}
- *   status: ${1}_${2}
- * </pre>
- * Dropwizard metrics that match the "match" pattern will be further processed to have a new name and new labels based on this config.
+ * POJO containing info on how to map a graphite metric to a prometheus one. Example mapping in yaml
+ * format:
+ *
+ * <pre>
+ * match: test.dispatcher.*.*.* name: dispatcher_events_total labels: action: ${1} outcome:
+ * ${2}_out processor: ${0} status: ${1}_${2}
+ * </pre>
+ *
+ * <p>
Dropwizard metrics that match the "match" pattern will be further processed to have a new name + * and new labels based on this config. */ public class MapperConfig { - // each part of the metric name between dots - private static final String METRIC_PART_REGEX = "[a-zA-Z_0-9](-?[a-zA-Z0-9_])+"; - // Simplified GLOB: we can have "*." at the beginning and "*" only at the end - static final String METRIC_GLOB_REGEX = "^(\\*\\.|" + METRIC_PART_REGEX + "\\.)+(\\*|" + METRIC_PART_REGEX + ")$"; - // Labels validation. - private static final String LABEL_REGEX = "^[a-zA-Z_][a-zA-Z0-9_]+$"; - private static final Pattern MATCH_EXPRESSION_PATTERN = Pattern.compile(METRIC_GLOB_REGEX); - private static final Pattern LABEL_PATTERN = Pattern.compile(LABEL_REGEX); - - /** - * Regex used to match incoming metric name. - * Uses a simplified glob syntax where only '*' are allowed. - * E.g: - * org.company.controller.*.status.* - * Will be used to match - * org.company.controller.controller1.status.200 - * and - * org.company.controller.controller2.status.400 - */ - private String match; - - /** - * New metric name. Can contain placeholders to be replaced with actual values from the incoming metric name. - * Placeholders are in the ${n} format where n is the zero based index of the group to extract from the original metric name. - * E.g.: - * match: test.dispatcher.*.*.* - * name: dispatcher_events_total_${1} - *

- * A metric "test.dispatcher.old.test.yay" will be converted in a new metric with name "dispatcher_events_total_test" - */ - private String name; - - /** - * Labels to be extracted from the metric name. - * They should contain placeholders to be replaced with actual values from the incoming metric name. - * Placeholders are in the ${n} format where n is the zero based index of the group to extract from the original metric name. - * E.g.: - * match: test.dispatcher.*.* - * name: dispatcher_events_total_${0} - * labels: - * label1: ${1}_t - *

- * A metric "test.dispatcher.sp1.yay" will be converted in a new metric with name "dispatcher_events_total_sp1" with label {label1: yay_t} - *

- * Label names have to match the regex ^[a-zA-Z_][a-zA-Z0-9_]+$ - */ - - private Map labels = new HashMap(); - - public MapperConfig() { - // empty constructor + // each part of the metric name between dots + private static final String METRIC_PART_REGEX = "[a-zA-Z_0-9](-?[a-zA-Z0-9_])+"; + // Simplified GLOB: we can have "*." at the beginning and "*" only at the end + static final String METRIC_GLOB_REGEX = + "^(\\*\\.|" + METRIC_PART_REGEX + "\\.)+(\\*|" + METRIC_PART_REGEX + ")$"; + // Labels validation. + private static final String LABEL_REGEX = "^[a-zA-Z_][a-zA-Z0-9_]+$"; + private static final Pattern MATCH_EXPRESSION_PATTERN = Pattern.compile(METRIC_GLOB_REGEX); + private static final Pattern LABEL_PATTERN = Pattern.compile(LABEL_REGEX); + + /** + * Regex used to match incoming metric name. Uses a simplified glob syntax where only '*' are + * allowed. E.g: org.company.controller.*.status.* Will be used to match + * org.company.controller.controller1.status.200 and org.company.controller.controller2.status.400 + */ + private String match; + + /** + * New metric name. Can contain placeholders to be replaced with actual values from the incoming + * metric name. Placeholders are in the ${n} format where n is the zero based index of the group + * to extract from the original metric name. E.g.: match: test.dispatcher.*.*.* name: + * dispatcher_events_total_${1} + * + *

A metric "test.dispatcher.old.test.yay" will be converted in a new metric with name + * "dispatcher_events_total_test" + */ + private String name; + + /** + * Labels to be extracted from the metric name. They should contain placeholders to be replaced + * with actual values from the incoming metric name. Placeholders are in the ${n} format where n + * is the zero based index of the group to extract from the original metric name. E.g.: match: + * test.dispatcher.*.* name: dispatcher_events_total_${0} labels: label1: ${1}_t + * + *

A metric "test.dispatcher.sp1.yay" will be converted in a new metric with name + * "dispatcher_events_total_sp1" with label {label1: yay_t} + * + *

Label names have to match the regex ^[a-zA-Z_][a-zA-Z0-9_]+$ + */ + private Map labels = new HashMap(); + + public MapperConfig() { + // empty constructor + } + + // for tests + MapperConfig(final String match) { + validateMatch(match); + this.match = match; + } + + public MapperConfig(final String match, final String name, final Map labels) { + this.name = name; + validateMatch(match); + this.match = match; + validateLabels(labels); + this.labels = labels; + } + + @Override + public String toString() { + return String.format("MapperConfig{match=%s, name=%s, labels=%s}", match, name, labels); + } + + public String getMatch() { + return match; + } + + public void setMatch(final String match) { + validateMatch(match); + this.match = match; + } + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(final Map labels) { + validateLabels(labels); + this.labels = labels; + } + + private void validateMatch(final String match) { + if (!MATCH_EXPRESSION_PATTERN.matcher(match).matches()) { + throw new IllegalArgumentException( + String.format( + "Match expression [%s] does not match required pattern %s", + match, MATCH_EXPRESSION_PATTERN)); } - - // for tests - MapperConfig(final String match) { - validateMatch(match); - this.match = match; - } - - public MapperConfig(final String match, final String name, final Map labels) { - this.name = name; - validateMatch(match); - this.match = match; - validateLabels(labels); - this.labels = labels; - } - - @Override - public String toString() { - return String.format("MapperConfig{match=%s, name=%s, labels=%s}", match, name, labels); - } - - public String getMatch() { - return match; - } - - public void setMatch(final String match) { - validateMatch(match); - this.match = match; - } - - public String getName() { - return name; - } - - public void setName(final String name) { - this.name = name; - - } - - public Map getLabels() { - return labels; - } - - public void setLabels(final Map labels) { - validateLabels(labels); - this.labels = labels; - } - - private void validateMatch(final String match) - { - if (!MATCH_EXPRESSION_PATTERN.matcher(match).matches()) { - throw new IllegalArgumentException(String.format("Match expression [%s] does not match required pattern %s", match, MATCH_EXPRESSION_PATTERN)); + } + + private void validateLabels(final Map labels) { + if (labels != null) { + for (final String key : labels.keySet()) { + if (!LABEL_PATTERN.matcher(key).matches()) { + throw new IllegalArgumentException( + String.format("Label [%s] does not match required pattern %s", match, LABEL_PATTERN)); } + } } - - private void validateLabels(final Map labels) - { - if (labels != null) { - for (final String key : labels.keySet()) { - if (!LABEL_PATTERN.matcher(key).matches()) { - throw new IllegalArgumentException(String.format("Label [%s] does not match required pattern %s", match, LABEL_PATTERN)); - } - } - - } - } - - @Override - public boolean equals(final Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - final MapperConfig that = (MapperConfig) o; - - if (match != null ? !match.equals(that.match) : that.match != null) return false; - if (name != null ? !name.equals(that.name) : that.name != null) return false; - return labels != null ? labels.equals(that.labels) : that.labels == null; - } - - @Override - public int hashCode() { - int result = match != null ? 
match.hashCode() : 0; - result = 31 * result + (name != null ? name.hashCode() : 0); - result = 31 * result + (labels != null ? labels.hashCode() : 0); - return result; - } + } + + @Override + public boolean equals(final Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + final MapperConfig that = (MapperConfig) o; + + if (match != null ? !match.equals(that.match) : that.match != null) return false; + if (name != null ? !name.equals(that.name) : that.name != null) return false; + return labels != null ? labels.equals(that.labels) : that.labels == null; + } + + @Override + public int hashCode() { + int result = match != null ? match.hashCode() : 0; + result = 31 * result + (name != null ? name.hashCode() : 0); + result = 31 * result + (labels != null ? labels.hashCode() : 0); + return result; + } } diff --git a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExportsTest.java b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExportsTest.java index 6fbbb68b1..0473329ce 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExportsTest.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/DropwizardExportsTest.java @@ -1,298 +1,313 @@ package io.prometheus.metrics.instrumentation.dropwizard5; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + import io.dropwizard.metrics5.*; import io.prometheus.metrics.expositionformats.OpenMetricsTextFormatWriter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; import io.prometheus.metrics.model.snapshots.Quantiles; import io.prometheus.metrics.model.snapshots.SummarySnapshot; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.concurrent.TimeUnit; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; public class DropwizardExportsTest { - private PrometheusRegistry registry = new PrometheusRegistry(); - private MetricRegistry metricRegistry; - - - @Before - public void setUp() { - metricRegistry = new MetricRegistry(); - DropwizardExports.builder().dropwizardRegistry(metricRegistry).register(registry); - } - - - @Test - public void testCounter() { - metricRegistry.counter("foo.bar").inc(1); - String expected = "# TYPE foo_bar counter\n" + - "# HELP foo_bar Generated from Dropwizard metric import (metric=foo.bar, type=io.dropwizard.metrics5.Counter)\n" + - "foo_bar_total 1.0\n" + - "# EOF\n"; - - assertEquals(expected, convertToOpenMetricsFormat()); - } - - @Test - public void testGauge() { - Gauge integerGauge = new Gauge() { - @Override - public Integer getValue() { - return 1234; - } + private PrometheusRegistry registry = new PrometheusRegistry(); + private MetricRegistry metricRegistry; + + @Before + public void setUp() { + metricRegistry = new MetricRegistry(); + DropwizardExports.builder().dropwizardRegistry(metricRegistry).register(registry); + } + + @Test + public void testCounter() { + metricRegistry.counter("foo.bar").inc(1); + 
String expected = + "# TYPE foo_bar counter\n" + + "# HELP foo_bar Generated from Dropwizard metric import (metric=foo.bar, type=io.dropwizard.metrics5.Counter)\n" + + "foo_bar_total 1.0\n" + + "# EOF\n"; + + assertEquals(expected, convertToOpenMetricsFormat()); + } + + @Test + public void testGauge() { + Gauge integerGauge = + new Gauge() { + @Override + public Integer getValue() { + return 1234; + } }; - Gauge doubleGauge = new Gauge() { - @Override - public Double getValue() { - return 1.234D; - } + Gauge doubleGauge = + new Gauge() { + @Override + public Double getValue() { + return 1.234D; + } }; - Gauge longGauge = new Gauge() { - @Override - public Long getValue() { - return 1234L; - } + Gauge longGauge = + new Gauge() { + @Override + public Long getValue() { + return 1234L; + } }; - Gauge floatGauge = new Gauge() { - @Override - public Float getValue() { - return 0.1234F; - } + Gauge floatGauge = + new Gauge() { + @Override + public Float getValue() { + return 0.1234F; + } }; - Gauge booleanGauge = new Gauge() { - @Override - public Boolean getValue() { - return true; - } + Gauge booleanGauge = + new Gauge() { + @Override + public Boolean getValue() { + return true; + } }; - metricRegistry.register("double.gauge", doubleGauge); - metricRegistry.register("long.gauge", longGauge); - metricRegistry.register("integer.gauge", integerGauge); - metricRegistry.register("float.gauge", floatGauge); - metricRegistry.register("boolean.gauge", booleanGauge); - - String expected = "# TYPE boolean_gauge gauge\n" + - "# HELP boolean_gauge Generated from Dropwizard metric import (metric=boolean.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$5)\n" + - "boolean_gauge 1.0\n" + - "# TYPE double_gauge gauge\n" + - "# HELP double_gauge Generated from Dropwizard metric import (metric=double.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$2)\n" + - "double_gauge 1.234\n" + - "# TYPE float_gauge gauge\n" + - "# HELP float_gauge Generated from Dropwizard metric import (metric=float.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$4)\n" + - "float_gauge 0.1234000027179718\n" + - "# TYPE integer_gauge gauge\n" + - "# HELP integer_gauge Generated from Dropwizard metric import (metric=integer.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$1)\n" + - "integer_gauge 1234.0\n" + - "# TYPE long_gauge gauge\n" + - "# HELP long_gauge Generated from Dropwizard metric import (metric=long.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$3)\n" + - "long_gauge 1234.0\n" + - "# EOF\n"; - - assertEquals(expected, convertToOpenMetricsFormat()); - } - - @Test - public void testInvalidGaugeType() { - Gauge invalidGauge = new Gauge() { - @Override - public String getValue() { - return "foobar"; - } + metricRegistry.register("double.gauge", doubleGauge); + metricRegistry.register("long.gauge", longGauge); + metricRegistry.register("integer.gauge", integerGauge); + metricRegistry.register("float.gauge", floatGauge); + metricRegistry.register("boolean.gauge", booleanGauge); + + String expected = + "# TYPE boolean_gauge gauge\n" + + "# HELP boolean_gauge Generated from Dropwizard metric import (metric=boolean.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$5)\n" + + "boolean_gauge 1.0\n" + + "# TYPE double_gauge gauge\n" + + "# HELP double_gauge Generated from Dropwizard metric import (metric=double.gauge, 
type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$2)\n" + + "double_gauge 1.234\n" + + "# TYPE float_gauge gauge\n" + + "# HELP float_gauge Generated from Dropwizard metric import (metric=float.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$4)\n" + + "float_gauge 0.1234000027179718\n" + + "# TYPE integer_gauge gauge\n" + + "# HELP integer_gauge Generated from Dropwizard metric import (metric=integer.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$1)\n" + + "integer_gauge 1234.0\n" + + "# TYPE long_gauge gauge\n" + + "# HELP long_gauge Generated from Dropwizard metric import (metric=long.gauge, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$3)\n" + + "long_gauge 1234.0\n" + + "# EOF\n"; + + assertEquals(expected, convertToOpenMetricsFormat()); + } + + @Test + public void testInvalidGaugeType() { + Gauge invalidGauge = + new Gauge() { + @Override + public String getValue() { + return "foobar"; + } }; - metricRegistry.register("invalid_gauge", invalidGauge); + metricRegistry.register("invalid_gauge", invalidGauge); - String expected = "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat()); - } + String expected = "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat()); + } - @Test - public void testGaugeReturningNullValue() { - Gauge invalidGauge = new Gauge() { - @Override - public String getValue() { - return null; - } + @Test + public void testGaugeReturningNullValue() { + Gauge invalidGauge = + new Gauge() { + @Override + public String getValue() { + return null; + } }; - metricRegistry.register("invalid_gauge", invalidGauge); - String expected = "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat()); + metricRegistry.register("invalid_gauge", invalidGauge); + String expected = "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat()); + } + + @Test + public void testHistogram() throws IOException { + // just test the standard mapper + final MetricRegistry metricRegistry = new MetricRegistry(); + PrometheusRegistry pmRegistry = new PrometheusRegistry(); + DropwizardExports.builder().dropwizardRegistry(metricRegistry).register(pmRegistry); + + Histogram hist = metricRegistry.histogram("hist"); + int i = 0; + while (i < 100) { + hist.update(i); + i += 1; } - @Test - public void testHistogram() throws IOException { - // just test the standard mapper - final MetricRegistry metricRegistry = new MetricRegistry(); - PrometheusRegistry pmRegistry = new PrometheusRegistry(); - DropwizardExports.builder().dropwizardRegistry(metricRegistry).register(pmRegistry); - - Histogram hist = metricRegistry.histogram("hist"); - int i = 0; - while (i < 100) { - hist.update(i); - i += 1; - } - - // The result should look like this - // - // # TYPE hist summary - // # HELP hist Generated from Dropwizard metric import (metric=hist, type=io.dropwizard.metrics5.Histogram) - // hist{quantile="0.5"} 49.0 - // hist{quantile="0.75"} 74.0 - // hist{quantile="0.95"} 94.0 - // hist{quantile="0.98"} 97.0 - // hist{quantile="0.99"} 98.0 - // hist{quantile="0.999"} 99.0 - // hist_count 100 - // # EOF - // - // However, Dropwizard uses a random reservoir sampling algorithm, so the values could as well be off-by-one - // - // # TYPE hist summary - // # HELP hist Generated from Dropwizard metric import (metric=hist, type=io.dropwizard.metrics5.Histogram) - // hist{quantile="0.5"} 50.0 - // hist{quantile="0.75"} 75.0 - // hist{quantile="0.95"} 95.0 - // 
hist{quantile="0.98"} 98.0 - // hist{quantile="0.99"} 99.0 - // hist{quantile="0.999"} 99.0 - // hist_count 100 - // # EOF - // - // The following asserts the values, but allows an error of 1.0 for quantile values. - - MetricSnapshots snapshots = pmRegistry.scrape(name -> name.equals("hist")); - Assert.assertEquals(1, snapshots.size()); - SummarySnapshot snapshot = (SummarySnapshot) snapshots.get(0); - Assert.assertEquals("hist", snapshot.getMetadata().getName()); - Assert.assertEquals("Generated from Dropwizard metric import (metric=hist, type=io.dropwizard.metrics5.Histogram)", snapshot.getMetadata().getHelp()); - Assert.assertEquals(1, snapshot.getDataPoints().size()); - SummarySnapshot.SummaryDataPointSnapshot dataPoint = snapshot.getDataPoints().get(0); - Assert.assertTrue(dataPoint.hasCount()); - Assert.assertEquals(100, dataPoint.getCount()); - Assert.assertFalse(dataPoint.hasSum()); - Quantiles quantiles = dataPoint.getQuantiles(); - Assert.assertEquals(6, quantiles.size()); - Assert.assertEquals(0.5, quantiles.get(0).getQuantile(), 0.0); - Assert.assertEquals(49.0, quantiles.get(0).getValue(), 1.0); - Assert.assertEquals(0.75, quantiles.get(1).getQuantile(), 0.0); - Assert.assertEquals(74.0, quantiles.get(1).getValue(), 1.0); - Assert.assertEquals(0.95, quantiles.get(2).getQuantile(), 0.0); - Assert.assertEquals(94.0, quantiles.get(2).getValue(), 1.0); - Assert.assertEquals(0.98, quantiles.get(3).getQuantile(), 0.0); - Assert.assertEquals(97.0, quantiles.get(3).getValue(), 1.0); - Assert.assertEquals(0.99, quantiles.get(4).getQuantile(), 0.0); - Assert.assertEquals(98.0, quantiles.get(4).getValue(), 1.0); - Assert.assertEquals(0.999, quantiles.get(5).getQuantile(), 0.0); - Assert.assertEquals(99.0, quantiles.get(5).getValue(), 1.0); + // The result should look like this + // + // # TYPE hist summary + // # HELP hist Generated from Dropwizard metric import (metric=hist, + // type=io.dropwizard.metrics5.Histogram) + // hist{quantile="0.5"} 49.0 + // hist{quantile="0.75"} 74.0 + // hist{quantile="0.95"} 94.0 + // hist{quantile="0.98"} 97.0 + // hist{quantile="0.99"} 98.0 + // hist{quantile="0.999"} 99.0 + // hist_count 100 + // # EOF + // + // However, Dropwizard uses a random reservoir sampling algorithm, so the values could as well + // be off-by-one + // + // # TYPE hist summary + // # HELP hist Generated from Dropwizard metric import (metric=hist, + // type=io.dropwizard.metrics5.Histogram) + // hist{quantile="0.5"} 50.0 + // hist{quantile="0.75"} 75.0 + // hist{quantile="0.95"} 95.0 + // hist{quantile="0.98"} 98.0 + // hist{quantile="0.99"} 99.0 + // hist{quantile="0.999"} 99.0 + // hist_count 100 + // # EOF + // + // The following asserts the values, but allows an error of 1.0 for quantile values. 
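    // Note (editorial, not part of the patch): the quantile checks below use JUnit's
    // three-argument Assert.assertEquals(expected, actual, delta) overload with delta 1.0,
    // so a reported 0.5-quantile of either 49.0 or 50.0 passes; that is how the
    // reservoir-sampling off-by-one described above is tolerated.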
+ + MetricSnapshots snapshots = pmRegistry.scrape(name -> name.equals("hist")); + Assert.assertEquals(1, snapshots.size()); + SummarySnapshot snapshot = (SummarySnapshot) snapshots.get(0); + Assert.assertEquals("hist", snapshot.getMetadata().getName()); + Assert.assertEquals( + "Generated from Dropwizard metric import (metric=hist, type=io.dropwizard.metrics5.Histogram)", + snapshot.getMetadata().getHelp()); + Assert.assertEquals(1, snapshot.getDataPoints().size()); + SummarySnapshot.SummaryDataPointSnapshot dataPoint = snapshot.getDataPoints().get(0); + Assert.assertTrue(dataPoint.hasCount()); + Assert.assertEquals(100, dataPoint.getCount()); + Assert.assertFalse(dataPoint.hasSum()); + Quantiles quantiles = dataPoint.getQuantiles(); + Assert.assertEquals(6, quantiles.size()); + Assert.assertEquals(0.5, quantiles.get(0).getQuantile(), 0.0); + Assert.assertEquals(49.0, quantiles.get(0).getValue(), 1.0); + Assert.assertEquals(0.75, quantiles.get(1).getQuantile(), 0.0); + Assert.assertEquals(74.0, quantiles.get(1).getValue(), 1.0); + Assert.assertEquals(0.95, quantiles.get(2).getQuantile(), 0.0); + Assert.assertEquals(94.0, quantiles.get(2).getValue(), 1.0); + Assert.assertEquals(0.98, quantiles.get(3).getQuantile(), 0.0); + Assert.assertEquals(97.0, quantiles.get(3).getValue(), 1.0); + Assert.assertEquals(0.99, quantiles.get(4).getQuantile(), 0.0); + Assert.assertEquals(98.0, quantiles.get(4).getValue(), 1.0); + Assert.assertEquals(0.999, quantiles.get(5).getQuantile(), 0.0); + Assert.assertEquals(99.0, quantiles.get(5).getValue(), 1.0); + } + + @Test + public void testMeter() { + Meter meter = metricRegistry.meter("meter"); + meter.mark(); + meter.mark(); + + String expected = + "# TYPE meter counter\n" + + "# HELP meter Generated from Dropwizard metric import (metric=meter_total, type=io.dropwizard.metrics5.Meter)\n" + + "meter_total 2.0\n" + + "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat()); + } + + @Test + public void testTimer() throws InterruptedException { + final MetricRegistry metricRegistry = new MetricRegistry(); + DropwizardExports exports = new DropwizardExports(metricRegistry); + Timer t = metricRegistry.timer("timer"); + Timer.Context time = t.time(); + Thread.sleep(100L); + long timeSpentNanos = time.stop(); + double timeSpentMillis = TimeUnit.NANOSECONDS.toMillis(timeSpentNanos); + System.out.println(timeSpentMillis); + + SummarySnapshot.SummaryDataPointSnapshot dataPointSnapshot = + (SummarySnapshot.SummaryDataPointSnapshot) + exports.collect().stream().flatMap(i -> i.getDataPoints().stream()).findFirst().get(); + // We slept for 1Ms so we ensure that all timers are above 1ms: + assertTrue(dataPointSnapshot.getQuantiles().size() > 1); + dataPointSnapshot + .getQuantiles() + .forEach( + i -> { + System.out.println(i.getQuantile() + " : " + i.getValue()); + assertTrue(i.getValue() > timeSpentMillis / 1000d); + }); + assertEquals(1, dataPointSnapshot.getCount()); + } + + @Test + public void testThatMetricHelpUsesOriginalDropwizardName() { + + metricRegistry.timer("my.application.namedTimer1"); + metricRegistry.counter("my.application.namedCounter1"); + metricRegistry.meter("my.application.namedMeter1"); + metricRegistry.histogram("my.application.namedHistogram1"); + metricRegistry.register("my.application.namedGauge1", new ExampleDoubleGauge()); + + String expected = + "# TYPE my_application_namedCounter1 counter\n" + + "# HELP my_application_namedCounter1 Generated from Dropwizard metric import (metric=my.application.namedCounter1, 
type=io.dropwizard.metrics5.Counter)\n" + + "my_application_namedCounter1_total 0.0\n" + + "# TYPE my_application_namedGauge1 gauge\n" + + "# HELP my_application_namedGauge1 Generated from Dropwizard metric import (metric=my.application.namedGauge1, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$ExampleDoubleGauge)\n" + + "my_application_namedGauge1 0.0\n" + + "# TYPE my_application_namedHistogram1 summary\n" + + "# HELP my_application_namedHistogram1 Generated from Dropwizard metric import (metric=my.application.namedHistogram1, type=io.dropwizard.metrics5.Histogram)\n" + + "my_application_namedHistogram1{quantile=\"0.5\"} 0.0\n" + + "my_application_namedHistogram1{quantile=\"0.75\"} 0.0\n" + + "my_application_namedHistogram1{quantile=\"0.95\"} 0.0\n" + + "my_application_namedHistogram1{quantile=\"0.98\"} 0.0\n" + + "my_application_namedHistogram1{quantile=\"0.99\"} 0.0\n" + + "my_application_namedHistogram1{quantile=\"0.999\"} 0.0\n" + + "my_application_namedHistogram1_count 0\n" + + "# TYPE my_application_namedMeter1 counter\n" + + "# HELP my_application_namedMeter1 Generated from Dropwizard metric import (metric=my.application.namedMeter1_total, type=io.dropwizard.metrics5.Meter)\n" + + "my_application_namedMeter1_total 0.0\n" + + "# TYPE my_application_namedTimer1 summary\n" + + "# HELP my_application_namedTimer1 Generated from Dropwizard metric import (metric=my.application.namedTimer1, type=io.dropwizard.metrics5.Timer)\n" + + "my_application_namedTimer1{quantile=\"0.5\"} 0.0\n" + + "my_application_namedTimer1{quantile=\"0.75\"} 0.0\n" + + "my_application_namedTimer1{quantile=\"0.95\"} 0.0\n" + + "my_application_namedTimer1{quantile=\"0.98\"} 0.0\n" + + "my_application_namedTimer1{quantile=\"0.99\"} 0.0\n" + + "my_application_namedTimer1{quantile=\"0.999\"} 0.0\n" + + "my_application_namedTimer1_count 0\n" + + "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat()); + } + + private static class ExampleDoubleGauge implements Gauge { + @Override + public Double getValue() { + return 0.0; } - - @Test - public void testMeter() { - Meter meter = metricRegistry.meter("meter"); - meter.mark(); - meter.mark(); - - String expected = "# TYPE meter counter\n" + - "# HELP meter Generated from Dropwizard metric import (metric=meter_total, type=io.dropwizard.metrics5.Meter)\n" + - "meter_total 2.0\n" + - "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat()); - + } + + private String convertToOpenMetricsFormat(PrometheusRegistry _registry) { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + try { + writer.write(out, _registry.scrape()); + return out.toString(StandardCharsets.UTF_8.name()); + } catch (IOException e) { + throw new RuntimeException(e); } + } - @Test - public void testTimer() throws InterruptedException { - final MetricRegistry metricRegistry = new MetricRegistry(); - DropwizardExports exports = new DropwizardExports(metricRegistry); - Timer t = metricRegistry.timer("timer"); - Timer.Context time = t.time(); - Thread.sleep(100L); - long timeSpentNanos = time.stop(); - double timeSpentMillis = TimeUnit.NANOSECONDS.toMillis(timeSpentNanos); - System.out.println(timeSpentMillis); - - SummarySnapshot.SummaryDataPointSnapshot dataPointSnapshot = (SummarySnapshot.SummaryDataPointSnapshot) exports.collect().stream().flatMap(i -> i.getDataPoints().stream()).findFirst().get(); - // We slept for 1Ms so we ensure that all timers are above 1ms: - 
assertTrue(dataPointSnapshot.getQuantiles().size() > 1); - dataPointSnapshot.getQuantiles().forEach( i-> { - System.out.println(i.getQuantile() + " : " + i.getValue()); - assertTrue(i.getValue() > timeSpentMillis/1000d); - }); - assertEquals(1, dataPointSnapshot.getCount()); - } - - @Test - public void testThatMetricHelpUsesOriginalDropwizardName() { - - metricRegistry.timer("my.application.namedTimer1"); - metricRegistry.counter("my.application.namedCounter1"); - metricRegistry.meter("my.application.namedMeter1"); - metricRegistry.histogram("my.application.namedHistogram1"); - metricRegistry.register("my.application.namedGauge1", new ExampleDoubleGauge()); - - String expected = "# TYPE my_application_namedCounter1 counter\n" + - "# HELP my_application_namedCounter1 Generated from Dropwizard metric import (metric=my.application.namedCounter1, type=io.dropwizard.metrics5.Counter)\n" + - "my_application_namedCounter1_total 0.0\n" + - "# TYPE my_application_namedGauge1 gauge\n" + - "# HELP my_application_namedGauge1 Generated from Dropwizard metric import (metric=my.application.namedGauge1, type=io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExportsTest$ExampleDoubleGauge)\n" + - "my_application_namedGauge1 0.0\n" + - "# TYPE my_application_namedHistogram1 summary\n" + - "# HELP my_application_namedHistogram1 Generated from Dropwizard metric import (metric=my.application.namedHistogram1, type=io.dropwizard.metrics5.Histogram)\n" + - "my_application_namedHistogram1{quantile=\"0.5\"} 0.0\n" + - "my_application_namedHistogram1{quantile=\"0.75\"} 0.0\n" + - "my_application_namedHistogram1{quantile=\"0.95\"} 0.0\n" + - "my_application_namedHistogram1{quantile=\"0.98\"} 0.0\n" + - "my_application_namedHistogram1{quantile=\"0.99\"} 0.0\n" + - "my_application_namedHistogram1{quantile=\"0.999\"} 0.0\n" + - "my_application_namedHistogram1_count 0\n" + - "# TYPE my_application_namedMeter1 counter\n" + - "# HELP my_application_namedMeter1 Generated from Dropwizard metric import (metric=my.application.namedMeter1_total, type=io.dropwizard.metrics5.Meter)\n" + - "my_application_namedMeter1_total 0.0\n" + - "# TYPE my_application_namedTimer1 summary\n" + - "# HELP my_application_namedTimer1 Generated from Dropwizard metric import (metric=my.application.namedTimer1, type=io.dropwizard.metrics5.Timer)\n" + - "my_application_namedTimer1{quantile=\"0.5\"} 0.0\n" + - "my_application_namedTimer1{quantile=\"0.75\"} 0.0\n" + - "my_application_namedTimer1{quantile=\"0.95\"} 0.0\n" + - "my_application_namedTimer1{quantile=\"0.98\"} 0.0\n" + - "my_application_namedTimer1{quantile=\"0.99\"} 0.0\n" + - "my_application_namedTimer1{quantile=\"0.999\"} 0.0\n" + - "my_application_namedTimer1_count 0\n" + - "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat()); - } - - - private static class ExampleDoubleGauge implements Gauge { - @Override - public Double getValue() { - return 0.0; - } - } - - private String convertToOpenMetricsFormat(PrometheusRegistry _registry) { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); - try { - writer.write(out, _registry.scrape()); - return out.toString(StandardCharsets.UTF_8.name()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private String convertToOpenMetricsFormat() { - return convertToOpenMetricsFormat(registry); - } + private String convertToOpenMetricsFormat() { + return convertToOpenMetricsFormat(registry); + } } diff --git 
a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapperTest.java b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapperTest.java index d5003332c..5d1950111 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapperTest.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/CustomLabelMapperTest.java @@ -1,213 +1,214 @@ package io.prometheus.metrics.instrumentation.dropwizard5.labels; +import static org.junit.Assert.assertEquals; + import io.dropwizard.metrics5.MetricFilter; import io.dropwizard.metrics5.MetricRegistry; -import io.prometheus.metrics.core.metrics.Counter; import io.prometheus.metrics.expositionformats.OpenMetricsTextFormatWriter; import io.prometheus.metrics.instrumentation.dropwizard5.DropwizardExports; -import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; -import org.junit.Before; -import org.junit.Test; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.*; - -import static org.junit.Assert.assertEquals; +import org.junit.Before; +import org.junit.Test; public class CustomLabelMapperTest { - private MetricRegistry metricRegistry; - - - @Before - public void setUp() { - metricRegistry = new MetricRegistry(); - } - - @Test(expected = IllegalArgumentException.class) - public void test_WHEN_EmptyConfig_THEN_Fail() { - final CustomLabelMapper converter = new CustomLabelMapper(Collections.emptyList()); - } - - @Test - public void test_WHEN_NoMatches_THEN_ShouldReturnDefaultSample() { - final List mapperConfigs = Arrays.asList( - new MapperConfig("client-nope.*.*.*"), - new MapperConfig("*.client-nope.*.*.*"), - new MapperConfig("not.even.this.*.*.*") - ); - final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); - DropwizardExports dropwizardExports = new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); - - metricRegistry.counter("app.okhttpclient.client.HttpClient.service.total").inc(1); - System.out.println(convertToOpenMetricsFormat(dropwizardExports.collect())); - - String expected = "# TYPE app_okhttpclient_client_HttpClient_service counter\n" + - "# HELP app_okhttpclient_client_HttpClient_service Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.service.total, type=io.dropwizard.metrics5.Counter)\n" + - "app_okhttpclient_client_HttpClient_service_total 1.0\n" + - "# EOF\n"; - - assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); + private MetricRegistry metricRegistry; + + @Before + public void setUp() { + metricRegistry = new MetricRegistry(); + } + + @Test(expected = IllegalArgumentException.class) + public void test_WHEN_EmptyConfig_THEN_Fail() { + final CustomLabelMapper converter = + new CustomLabelMapper(Collections.emptyList()); + } + + @Test + public void test_WHEN_NoMatches_THEN_ShouldReturnDefaultSample() { + final List mapperConfigs = + Arrays.asList( + new MapperConfig("client-nope.*.*.*"), + new MapperConfig("*.client-nope.*.*.*"), + new MapperConfig("not.even.this.*.*.*")); + final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); + DropwizardExports dropwizardExports = + 
new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); + + metricRegistry.counter("app.okhttpclient.client.HttpClient.service.total").inc(1); + System.out.println(convertToOpenMetricsFormat(dropwizardExports.collect())); + + String expected = + "# TYPE app_okhttpclient_client_HttpClient_service counter\n" + + "# HELP app_okhttpclient_client_HttpClient_service Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.service.total, type=io.dropwizard.metrics5.Counter)\n" + + "app_okhttpclient_client_HttpClient_service_total 1.0\n" + + "# EOF\n"; + + assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); + } + + @Test + public void test_WHEN_OneMatch_THEN_ShouldReturnConverted() { + final Map labels = new HashMap(); + labels.put("service", "${0}"); + final MapperConfig mapperConfig = + new MapperConfig( + "app.okhttpclient.client.HttpClient.*.total", + "app.okhttpclient.client.HttpClient.total", + labels); + final List mapperConfigs = + Arrays.asList( + new MapperConfig("client-nope.*.*.*"), + mapperConfig, + new MapperConfig("not.even.this.*.*.*")); + final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); + DropwizardExports dropwizardExports = + new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); + + metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.total").inc(1); + + String expected = + "# TYPE app_okhttpclient_client_HttpClient counter\n" + + "# HELP app_okhttpclient_client_HttpClient Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.total, type=io.dropwizard.metrics5.Counter)\n" + + "app_okhttpclient_client_HttpClient_total{service=\"greatService\"} 1.0\n" + + "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); + } + + @Test + public void test_WHEN_MoreMatches_THEN_ShouldReturnFirstOne() { + final Map labels = new HashMap(); + labels.put("service", "${0}"); + final MapperConfig mapperConfig = + new MapperConfig( + "app.okhttpclient.client.HttpClient.*.total", + "app.okhttpclient.client.HttpClient.total", + labels); + final List mapperConfigs = + Arrays.asList( + new MapperConfig("client-nope.*.*.*"), + mapperConfig, + new MapperConfig("app.okhttpclient.client.HttpClient.*.*") // this matches as well + ); + final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); + DropwizardExports dropwizardExports = + new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); + + metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.total").inc(1); + + String expected = + "# TYPE app_okhttpclient_client_HttpClient counter\n" + + "# HELP app_okhttpclient_client_HttpClient Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.total, type=io.dropwizard.metrics5.Counter)\n" + + "app_okhttpclient_client_HttpClient_total{service=\"greatService\"} 1.0\n" + + "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); + } + + @Test + public void test_WHEN_MoreMatchesReverseOrder_THEN_ShouldReturnFirstOne() { + final Map labels = new LinkedHashMap(); + labels.put("service", "${0}"); + labels.put("status", "${1}"); + final MapperConfig mapperConfig = + new MapperConfig( + "app.okhttpclient.client.HttpClient.*.*", "app.okhttpclient.client.HttpClient", labels); + + final MapperConfig mapperConfig2 = + new MapperConfig( + "app.okhttpclient.client.HttpClient.*.*", + 
"app.okhttpclient.client.HttpClient2", + labels); + + final List mapperConfigs = + Arrays.asList( + new MapperConfig("client-nope.*.*.*"), + mapperConfig, + mapperConfig2 // this matches as well + ); + + final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); + DropwizardExports dropwizardExports = + new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); + metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.400").inc(1); + + String expected = + "# TYPE app_okhttpclient_client_HttpClient counter\n" + + "# HELP app_okhttpclient_client_HttpClient Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.400, type=io.dropwizard.metrics5.Counter)\n" + + "app_okhttpclient_client_HttpClient_total{service=\"greatService\",status=\"400\"} 1.0\n" + + "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); + } + + @Test + public void test_WHEN_MoreToFormatInLabelsAndName_THEN_ShouldReturnCorrectSample() { + final Map labels = new LinkedHashMap(); + labels.put("service", "${0}_${1}"); + labels.put("status", "s_${1}"); + final MapperConfig mapperConfig = + new MapperConfig( + "app.okhttpclient.client.HttpClient.*.*", + "app.okhttpclient.client.HttpClient.${0}", + labels); + final List mapperConfigs = + Arrays.asList( + new MapperConfig("client-nope.*.*.*"), + mapperConfig, + new MapperConfig("app.okhttpclient.client.HttpClient.*.*") // this matches as well + ); + + final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); + DropwizardExports dropwizardExports = + new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); + metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.400").inc(1); + System.out.println(convertToOpenMetricsFormat(dropwizardExports.collect())); + + String expected = + "# TYPE app_okhttpclient_client_HttpClient_greatService counter\n" + + "# HELP app_okhttpclient_client_HttpClient_greatService Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.400, type=io.dropwizard.metrics5.Counter)\n" + + "app_okhttpclient_client_HttpClient_greatService_total{service=\"greatService_400\",status=\"s_400\"} 1.0\n" + + "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); + } + + @Test + public void test_WHEN_AdditionalLabels_THEN_ShouldReturnCorrectSample() { + final Map labels = new LinkedHashMap(); + labels.put("service", "${0}"); + labels.put("status", "s_${1}"); + labels.put("client", "sampleClient"); + final MapperConfig mapperConfig = + new MapperConfig( + "app.okhttpclient.client.HttpClient.*.*", + "app.okhttpclient.client.HttpClient.${0}", + labels); + final List mapperConfigs = + Arrays.asList(new MapperConfig("client-nope.*.*.*"), mapperConfig); + + final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); + DropwizardExports dropwizardExports = + new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); + metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.400").inc(1); + + String expected = + "# TYPE app_okhttpclient_client_HttpClient_greatService counter\n" + + "# HELP app_okhttpclient_client_HttpClient_greatService Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.400, type=io.dropwizard.metrics5.Counter)\n" + + 
"app_okhttpclient_client_HttpClient_greatService_total{client=\"sampleClient\",service=\"greatService\",status=\"s_400\"} 1.0\n" + + "# EOF\n"; + assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); + } + + private String convertToOpenMetricsFormat(MetricSnapshots snapshots) { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + try { + writer.write(out, snapshots); + return out.toString(StandardCharsets.UTF_8.name()); + } catch (IOException e) { + throw new RuntimeException(e); } - - @Test - public void test_WHEN_OneMatch_THEN_ShouldReturnConverted() { - final Map labels = new HashMap(); - labels.put("service", "${0}"); - final MapperConfig mapperConfig = new MapperConfig( - "app.okhttpclient.client.HttpClient.*.total", - "app.okhttpclient.client.HttpClient.total", - labels - ); - final List mapperConfigs = Arrays.asList( - new MapperConfig("client-nope.*.*.*"), - mapperConfig, - new MapperConfig("not.even.this.*.*.*") - ); - final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); - DropwizardExports dropwizardExports = new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); - - metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.total").inc(1); - - String expected = "# TYPE app_okhttpclient_client_HttpClient counter\n" + - "# HELP app_okhttpclient_client_HttpClient Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.total, type=io.dropwizard.metrics5.Counter)\n" + - "app_okhttpclient_client_HttpClient_total{service=\"greatService\"} 1.0\n" + - "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); - } - - @Test - public void test_WHEN_MoreMatches_THEN_ShouldReturnFirstOne() { - final Map labels = new HashMap(); - labels.put("service", "${0}"); - final MapperConfig mapperConfig = new MapperConfig( - "app.okhttpclient.client.HttpClient.*.total", - "app.okhttpclient.client.HttpClient.total", - labels - ); - final List mapperConfigs = Arrays.asList( - new MapperConfig("client-nope.*.*.*"), - mapperConfig, - new MapperConfig("app.okhttpclient.client.HttpClient.*.*") // this matches as well - ); - final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); - DropwizardExports dropwizardExports = new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); - - metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.total").inc(1); - - - String expected = "# TYPE app_okhttpclient_client_HttpClient counter\n" + - "# HELP app_okhttpclient_client_HttpClient Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.total, type=io.dropwizard.metrics5.Counter)\n" + - "app_okhttpclient_client_HttpClient_total{service=\"greatService\"} 1.0\n" + - "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); - } - - @Test - public void test_WHEN_MoreMatchesReverseOrder_THEN_ShouldReturnFirstOne() { - final Map labels = new LinkedHashMap(); - labels.put("service", "${0}"); - labels.put("status", "${1}"); - final MapperConfig mapperConfig = new MapperConfig( - "app.okhttpclient.client.HttpClient.*.*", - "app.okhttpclient.client.HttpClient", - labels - ); - - final MapperConfig mapperConfig2 = new MapperConfig( - "app.okhttpclient.client.HttpClient.*.*", - "app.okhttpclient.client.HttpClient2", - labels - ); - - final List mapperConfigs = 
Arrays.asList( - new MapperConfig("client-nope.*.*.*"), - mapperConfig, - mapperConfig2 // this matches as well - ); - - final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); - DropwizardExports dropwizardExports = new DropwizardExports(metricRegistry, MetricFilter.ALL, labelMapper); - metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.400").inc(1); - - String expected = "# TYPE app_okhttpclient_client_HttpClient counter\n" + - "# HELP app_okhttpclient_client_HttpClient Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.400, type=io.dropwizard.metrics5.Counter)\n" + - "app_okhttpclient_client_HttpClient_total{service=\"greatService\",status=\"400\"} 1.0\n" + - "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); - - } - - @Test - public void test_WHEN_MoreToFormatInLabelsAndName_THEN_ShouldReturnCorrectSample() { - final Map labels = new LinkedHashMap(); - labels.put("service", "${0}_${1}"); - labels.put("status", "s_${1}"); - final MapperConfig mapperConfig = new MapperConfig( - "app.okhttpclient.client.HttpClient.*.*", - "app.okhttpclient.client.HttpClient.${0}", - labels - ); - final List mapperConfigs = Arrays.asList( - new MapperConfig("client-nope.*.*.*"), - mapperConfig, - new MapperConfig("app.okhttpclient.client.HttpClient.*.*") // this matches as well - ); - - - final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); - DropwizardExports dropwizardExports = new DropwizardExports(metricRegistry,MetricFilter.ALL, labelMapper); - metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.400").inc(1); - System.out.println(convertToOpenMetricsFormat(dropwizardExports.collect())); - - - String expected = "# TYPE app_okhttpclient_client_HttpClient_greatService counter\n" + - "# HELP app_okhttpclient_client_HttpClient_greatService Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.400, type=io.dropwizard.metrics5.Counter)\n" + - "app_okhttpclient_client_HttpClient_greatService_total{service=\"greatService_400\",status=\"s_400\"} 1.0\n" + - "# EOF\n"; - assertEquals(expected, convertToOpenMetricsFormat(dropwizardExports.collect())); - } - - - @Test - public void test_WHEN_AdditionalLabels_THEN_ShouldReturnCorrectSample() { - final Map labels = new LinkedHashMap(); - labels.put("service", "${0}"); - labels.put("status", "s_${1}"); - labels.put("client", "sampleClient"); - final MapperConfig mapperConfig = new MapperConfig( - "app.okhttpclient.client.HttpClient.*.*", - "app.okhttpclient.client.HttpClient.${0}", - labels - ); - final List mapperConfigs = Arrays.asList( - new MapperConfig("client-nope.*.*.*"), - mapperConfig - ); - - final CustomLabelMapper labelMapper = new CustomLabelMapper(mapperConfigs); - DropwizardExports dropwizardExports = new DropwizardExports(metricRegistry,MetricFilter.ALL, labelMapper); - metricRegistry.counter("app.okhttpclient.client.HttpClient.greatService.400").inc(1); - - String expected = "# TYPE app_okhttpclient_client_HttpClient_greatService counter\n" + - "# HELP app_okhttpclient_client_HttpClient_greatService Generated from Dropwizard metric import (metric=app.okhttpclient.client.HttpClient.greatService.400, type=io.dropwizard.metrics5.Counter)\n" + - "app_okhttpclient_client_HttpClient_greatService_total{client=\"sampleClient\",service=\"greatService\",status=\"s_400\"} 1.0\n" + - "# EOF\n"; - assertEquals(expected, 
convertToOpenMetricsFormat(dropwizardExports.collect())); - } - - - private String convertToOpenMetricsFormat(MetricSnapshots snapshots) { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); - try { - writer.write(out, snapshots); - return out.toString(StandardCharsets.UTF_8.name()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - + } } diff --git a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePatternTest.java b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePatternTest.java index 5b704e6bd..9e8316d0b 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePatternTest.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/GraphiteNamePatternTest.java @@ -1,144 +1,147 @@ package io.prometheus.metrics.instrumentation.dropwizard5.labels; -import org.assertj.core.api.Assertions; -import org.junit.Test; - import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import org.assertj.core.api.Assertions; +import org.junit.Test; public class GraphiteNamePatternTest { - @Test(expected = IllegalArgumentException.class) - public void createNew_WHEN_InvalidPattern_THEN_ShouldThrowException() { - final List invalidPatterns = Arrays.asList( - "", - "a", - "1org", - "1org.", - "org.", - "org.**", - "org.**", - "org.company-", - "org.company-.", - "org.company-*", - "org.company.**", - "org.company.**-", - "org.com*pany.*", - "org.test.contr.oller.gather.status..400", - "org.test.controller.gather.status..400" - ); - final GraphiteNamePattern graphiteNamePattern = new GraphiteNamePattern(""); - for (String pattern : invalidPatterns) { - try { - new GraphiteNamePattern(pattern); - - Assertions.failBecauseExceptionWasNotThrown(IllegalArgumentException.class); - } catch (IllegalArgumentException e) { - Assertions.assertThat(e).hasMessageContaining(pattern); - } - } + @Test(expected = IllegalArgumentException.class) + public void createNew_WHEN_InvalidPattern_THEN_ShouldThrowException() { + final List invalidPatterns = + Arrays.asList( + "", + "a", + "1org", + "1org.", + "org.", + "org.**", + "org.**", + "org.company-", + "org.company-.", + "org.company-*", + "org.company.**", + "org.company.**-", + "org.com*pany.*", + "org.test.contr.oller.gather.status..400", + "org.test.controller.gather.status..400"); + final GraphiteNamePattern graphiteNamePattern = new GraphiteNamePattern(""); + for (String pattern : invalidPatterns) { + try { + new GraphiteNamePattern(pattern); + + Assertions.failBecauseExceptionWasNotThrown(IllegalArgumentException.class); + } catch (IllegalArgumentException e) { + Assertions.assertThat(e).hasMessageContaining(pattern); + } } - - @Test - public void createNew_WHEN_ValidPattern_THEN_ShouldCreateThePatternSuccessfully() { - final List validPatterns = Arrays.asList( - "org.test.controller.gather.status.400", - "org.test.controller.*.status.400", - "org.test.controller.*.status.*", - "*.test.controller.*.status.*", - "*.test.controller-1.*.status.*", - "*.amazing-test.controller-1.*.status.*" - - ); - for (String pattern : validPatterns) { - new GraphiteNamePattern(pattern); - } + } + + 
@Test + public void createNew_WHEN_ValidPattern_THEN_ShouldCreateThePatternSuccessfully() { + final List validPatterns = + Arrays.asList( + "org.test.controller.gather.status.400", + "org.test.controller.*.status.400", + "org.test.controller.*.status.*", + "*.test.controller.*.status.*", + "*.test.controller-1.*.status.*", + "*.amazing-test.controller-1.*.status.*"); + for (String pattern : validPatterns) { + new GraphiteNamePattern(pattern); } - - @Test - public void createNew_WHEN_ValidPattern_THEN_ShouldInitInternalPatternSuccessfully() { - final Map validPatterns = new HashMap(); - validPatterns.put("org.test.controller.gather.status.400", "^\\Qorg.test.controller.gather.status.400\\E$"); - validPatterns.put("org.test.controller.*.status.400", "^\\Qorg.test.controller.\\E([^.]*)\\Q.status.400\\E$"); - validPatterns.put("org.test.controller.*.status.*", "^\\Qorg.test.controller.\\E([^.]*)\\Q.status.\\E([^.]*)\\Q\\E$"); - validPatterns.put("*.test.controller.*.status.*", "^\\Q\\E([^.]*)\\Q.test.controller.\\E([^.]*)\\Q.status.\\E([^.]*)\\Q\\E$"); - - for (Map.Entry expected : validPatterns.entrySet()) { - final GraphiteNamePattern pattern = new GraphiteNamePattern(expected.getKey()); - Assertions.assertThat(pattern.getPatternString()).isEqualTo(expected.getValue()); - } + } + + @Test + public void createNew_WHEN_ValidPattern_THEN_ShouldInitInternalPatternSuccessfully() { + final Map validPatterns = new HashMap(); + validPatterns.put( + "org.test.controller.gather.status.400", "^\\Qorg.test.controller.gather.status.400\\E$"); + validPatterns.put( + "org.test.controller.*.status.400", "^\\Qorg.test.controller.\\E([^.]*)\\Q.status.400\\E$"); + validPatterns.put( + "org.test.controller.*.status.*", + "^\\Qorg.test.controller.\\E([^.]*)\\Q.status.\\E([^.]*)\\Q\\E$"); + validPatterns.put( + "*.test.controller.*.status.*", + "^\\Q\\E([^.]*)\\Q.test.controller.\\E([^.]*)\\Q.status.\\E([^.]*)\\Q\\E$"); + + for (Map.Entry expected : validPatterns.entrySet()) { + final GraphiteNamePattern pattern = new GraphiteNamePattern(expected.getKey()); + Assertions.assertThat(pattern.getPatternString()).isEqualTo(expected.getValue()); } - - @Test - public void match_WHEN_NotMatchingMetricNameProvided_THEN_ShouldNotMatch() { - final GraphiteNamePattern pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); - final List notMatchingMetricNamed = Arrays.asList( - "org.test.controller.status.400", - "", - null - ); - - for (String metricName : notMatchingMetricNamed) { - Assertions.assertThat(pattern.matches(metricName)).as("Matching [%s] against [%s]", metricName, pattern.getPatternString()).isFalse(); - } + } + + @Test + public void match_WHEN_NotMatchingMetricNameProvided_THEN_ShouldNotMatch() { + final GraphiteNamePattern pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); + final List notMatchingMetricNamed = + Arrays.asList("org.test.controller.status.400", "", null); + + for (String metricName : notMatchingMetricNamed) { + Assertions.assertThat(pattern.matches(metricName)) + .as("Matching [%s] against [%s]", metricName, pattern.getPatternString()) + .isFalse(); } - - @Test - public void match_WHEN_MatchingMetricNameProvided_THEN_ShouldMatch() { - final GraphiteNamePattern pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); - final List matchingMetricNamed = Arrays.asList( - "org.test.controller.gather.status.400", - "org.test.controller.gather2.status.500", - "org.test.controller.gather1.status.", - "org.test.controller.*.status.*", - 
"org.test.controller..status.*" - ); - - for (String metricName : matchingMetricNamed) { - Assertions.assertThat(pattern.matches(metricName)).as("Matching [%s] against [%s]", metricName, pattern.getPatternString()).isTrue(); - } - } - - @Test - public void extractParameters() { - GraphiteNamePattern pattern; - Map expected = new HashMap(); - expected.put("${0}", "gather"); - expected.put("${1}", "400"); - pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); - Assertions.assertThat(pattern.extractParameters("org.test.controller.gather.status.400")) - .isEqualTo(expected); - - expected = new HashMap(); - expected.put("${0}", "org"); - expected.put("${1}", "gather"); - expected.put("${2}", "400"); - pattern = new GraphiteNamePattern("*.test.controller.*.status.*"); - Assertions.assertThat(pattern.extractParameters("org.test.controller.gather.status.400")) - .isEqualTo(expected); - } - - @Test - public void extractParameters_WHEN_emptyStringInDottedMetricsName_THEN_ShouldReturnEmptyString() { - GraphiteNamePattern pattern; - Map expected = new HashMap(); - expected.put("${0}", ""); - expected.put("${1}", "400"); - pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); - Assertions.assertThat(pattern.extractParameters("org.test.controller..status.400")) - .isEqualTo(expected); - - } - - @Test - public void extractParameters_WHEN_moreDots_THEN_ShouldReturnNoMatches() { - GraphiteNamePattern pattern; - pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); - Assertions.assertThat(pattern.extractParameters("org.test.controller...status.400")) - .isEqualTo(Collections.emptyMap()); - + } + + @Test + public void match_WHEN_MatchingMetricNameProvided_THEN_ShouldMatch() { + final GraphiteNamePattern pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); + final List matchingMetricNamed = + Arrays.asList( + "org.test.controller.gather.status.400", + "org.test.controller.gather2.status.500", + "org.test.controller.gather1.status.", + "org.test.controller.*.status.*", + "org.test.controller..status.*"); + + for (String metricName : matchingMetricNamed) { + Assertions.assertThat(pattern.matches(metricName)) + .as("Matching [%s] against [%s]", metricName, pattern.getPatternString()) + .isTrue(); } -} \ No newline at end of file + } + + @Test + public void extractParameters() { + GraphiteNamePattern pattern; + Map expected = new HashMap(); + expected.put("${0}", "gather"); + expected.put("${1}", "400"); + pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); + Assertions.assertThat(pattern.extractParameters("org.test.controller.gather.status.400")) + .isEqualTo(expected); + + expected = new HashMap(); + expected.put("${0}", "org"); + expected.put("${1}", "gather"); + expected.put("${2}", "400"); + pattern = new GraphiteNamePattern("*.test.controller.*.status.*"); + Assertions.assertThat(pattern.extractParameters("org.test.controller.gather.status.400")) + .isEqualTo(expected); + } + + @Test + public void extractParameters_WHEN_emptyStringInDottedMetricsName_THEN_ShouldReturnEmptyString() { + GraphiteNamePattern pattern; + Map expected = new HashMap(); + expected.put("${0}", ""); + expected.put("${1}", "400"); + pattern = new GraphiteNamePattern("org.test.controller.*.status.*"); + Assertions.assertThat(pattern.extractParameters("org.test.controller..status.400")) + .isEqualTo(expected); + } + + @Test + public void extractParameters_WHEN_moreDots_THEN_ShouldReturnNoMatches() { + GraphiteNamePattern pattern; + pattern = new 
GraphiteNamePattern("org.test.controller.*.status.*"); + Assertions.assertThat(pattern.extractParameters("org.test.controller...status.400")) + .isEqualTo(Collections.emptyMap()); + } +} diff --git a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfigTest.java b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfigTest.java index 03d244b89..7bf7b6520 100644 --- a/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfigTest.java +++ b/prometheus-metrics-instrumentation-dropwizard5/src/test/java/io/prometheus/metrics/instrumentation/dropwizard5/labels/MapperConfigTest.java @@ -1,57 +1,58 @@ package io.prometheus.metrics.instrumentation.dropwizard5.labels; -import org.junit.Test; +import static org.junit.Assert.assertEquals; import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static org.junit.Assert.assertEquals; +import org.junit.Test; public class MapperConfigTest { - @Test - public void setMatch_WHEN_ExpressionMatchesPattern_AllGood() { - final MapperConfig mapperConfig = new MapperConfig(); - mapperConfig.setMatch("com.company.meter.*"); - assertEquals("com.company.meter.*", mapperConfig.getMatch()); - } - - @Test(expected = IllegalArgumentException.class) - public void setMatch_WHEN_ExpressionDoesnNotMatchPattern_ThrowException() { - final MapperConfig mapperConfig = new MapperConfig(); - mapperConfig.setMatch("com.company.meter.**.yay"); - } - - @Test - public void setLabels_WHEN_ExpressionMatchesPattern_AllGood() { - final MapperConfig mapperConfig = new MapperConfig(); - final Map labels = new HashMap(); - labels.put("valid", "${0}"); - mapperConfig.setLabels(labels); - assertEquals(labels, mapperConfig.getLabels()); - } - - @Test(expected = IllegalArgumentException.class) - public void setLabels_WHEN_ExpressionDoesnNotMatchPattern_ThrowException() { - final MapperConfig mapperConfig = new MapperConfig(); - final Map labels = new HashMap(); - labels.put("valid", "${0}"); - labels.put("not valid", "${0}"); - mapperConfig.setLabels(labels); - } - - @Test - public void toString_WHEN_EmptyConfig_AllGood() { - final MapperConfig mapperConfig = new MapperConfig(); - assertEquals("MapperConfig{match=null, name=null, labels={}}", mapperConfig.toString()); - } - - @Test - public void toString_WHEN_FullyConfigured_AllGood() { - final MapperConfig mapperConfig = new MapperConfig(); - mapperConfig.setMatch("com.company.meter.*.foo"); - mapperConfig.setName("foo"); - mapperConfig.setLabels(Collections.singletonMap("type", "${0}")); - assertEquals("MapperConfig{match=com.company.meter.*.foo, name=foo, labels={type=${0}}}", mapperConfig.toString()); - } + @Test + public void setMatch_WHEN_ExpressionMatchesPattern_AllGood() { + final MapperConfig mapperConfig = new MapperConfig(); + mapperConfig.setMatch("com.company.meter.*"); + assertEquals("com.company.meter.*", mapperConfig.getMatch()); + } + + @Test(expected = IllegalArgumentException.class) + public void setMatch_WHEN_ExpressionDoesnNotMatchPattern_ThrowException() { + final MapperConfig mapperConfig = new MapperConfig(); + mapperConfig.setMatch("com.company.meter.**.yay"); + } + + @Test + public void setLabels_WHEN_ExpressionMatchesPattern_AllGood() { + final MapperConfig mapperConfig = new MapperConfig(); + final Map labels = new HashMap(); + labels.put("valid", "${0}"); + 
mapperConfig.setLabels(labels); + assertEquals(labels, mapperConfig.getLabels()); + } + + @Test(expected = IllegalArgumentException.class) + public void setLabels_WHEN_ExpressionDoesnNotMatchPattern_ThrowException() { + final MapperConfig mapperConfig = new MapperConfig(); + final Map labels = new HashMap(); + labels.put("valid", "${0}"); + labels.put("not valid", "${0}"); + mapperConfig.setLabels(labels); + } + + @Test + public void toString_WHEN_EmptyConfig_AllGood() { + final MapperConfig mapperConfig = new MapperConfig(); + assertEquals("MapperConfig{match=null, name=null, labels={}}", mapperConfig.toString()); + } + + @Test + public void toString_WHEN_FullyConfigured_AllGood() { + final MapperConfig mapperConfig = new MapperConfig(); + mapperConfig.setMatch("com.company.meter.*.foo"); + mapperConfig.setName("foo"); + mapperConfig.setLabels(Collections.singletonMap("type", "${0}")); + assertEquals( + "MapperConfig{match=com.company.meter.*.foo, name=foo, labels={type=${0}}}", + mapperConfig.toString()); + } } diff --git a/prometheus-metrics-instrumentation-guava/src/main/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollector.java b/prometheus-metrics-instrumentation-guava/src/main/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollector.java index 71ffea01c..5e17338b7 100644 --- a/prometheus-metrics-instrumentation-guava/src/main/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollector.java +++ b/prometheus-metrics-instrumentation-guava/src/main/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollector.java @@ -9,7 +9,6 @@ import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.MetricSnapshots; import io.prometheus.metrics.model.snapshots.SummarySnapshot; - import java.util.Arrays; import java.util.List; import java.util.Map; @@ -18,9 +17,10 @@ /** * Collect metrics from Guava's com.google.common.cache.Cache. + * *

- *

{@code
  *
+ * 
{@code
  * // Note that `recordStats()` is required to gather non-zero statistics
  * Cache cache = CacheBuilder.newBuilder().recordStats().build();
  * CacheMetricsCollector cacheMetrics = new CacheMetricsCollector().register();
@@ -30,7 +30,8 @@
  *
  * Exposed metrics are labeled with the provided cache name.
  *
- * With the example above, sample metric names would be:
+ * 

With the example above, sample metric names would be:
+ *
  * 
  *     guava_cache_hit_total{cache="mycache"} 10.0
  *     guava_cache_miss_total{cache="mycache"} 3.0
@@ -40,164 +41,154 @@
  * 
  *
  * Additionally, if the cache includes a loader, the following metrics would be provided:
+ *
  * 
  *     guava_cache_load_failure_total{cache="mycache"} 2.0
  *     guava_cache_loads_total{cache="mycache"} 7.0
  *     guava_cache_load_duration_seconds_count{cache="mycache"} 7.0
  *     guava_cache_load_duration_seconds_sum{cache="mycache"} 0.0034
  * 
- * */ public class CacheMetricsCollector implements MultiCollector { - private static final double NANOSECONDS_PER_SECOND = 1_000_000_000.0; - - protected final ConcurrentMap children = new ConcurrentHashMap<>(); - - /** - * Add or replace the cache with the given name. - *

- * Any references any previous cache with this name is invalidated. - * - * @param cacheName The name of the cache, will be the metrics label value - * @param cache The cache being monitored - */ - public void addCache(String cacheName, Cache cache) { - children.put(cacheName, cache); - } - - /** - * Remove the cache with the given name. - *

- * Any references to the cache are invalidated. - * - * @param cacheName cache to be removed - */ - public Cache removeCache(String cacheName) { - return children.remove(cacheName); - } - - /** - * Remove all caches. - *

- * Any references to all caches are invalidated. - */ - public void clear(){ - children.clear(); - } - - @Override - public MetricSnapshots collect() { - final MetricSnapshots.Builder metricSnapshotsBuilder = MetricSnapshots.builder(); - final List labelNames = Arrays.asList("cache"); - - final CounterSnapshot.Builder cacheHitTotal = CounterSnapshot.builder() - .name("guava_cache_hit") - .help("Cache hit totals"); - - final CounterSnapshot.Builder cacheMissTotal = CounterSnapshot.builder() - .name("guava_cache_miss") - .help("Cache miss totals"); - - final CounterSnapshot.Builder cacheRequestsTotal = CounterSnapshot.builder() - .name("guava_cache_requests") - .help("Cache request totals"); - - final CounterSnapshot.Builder cacheEvictionTotal = CounterSnapshot.builder() + private static final double NANOSECONDS_PER_SECOND = 1_000_000_000.0; + + protected final ConcurrentMap children = new ConcurrentHashMap<>(); + + /** + * Add or replace the cache with the given name. + * + *

Any references any previous cache with this name is invalidated. + * + * @param cacheName The name of the cache, will be the metrics label value + * @param cache The cache being monitored + */ + public void addCache(String cacheName, Cache cache) { + children.put(cacheName, cache); + } + + /** + * Remove the cache with the given name. + * + *

Any references to the cache are invalidated. + * + * @param cacheName cache to be removed + */ + public Cache removeCache(String cacheName) { + return children.remove(cacheName); + } + + /** + * Remove all caches. + * + *

Any references to all caches are invalidated. + */ + public void clear() { + children.clear(); + } + + @Override + public MetricSnapshots collect() { + final MetricSnapshots.Builder metricSnapshotsBuilder = MetricSnapshots.builder(); + final List labelNames = Arrays.asList("cache"); + + final CounterSnapshot.Builder cacheHitTotal = + CounterSnapshot.builder().name("guava_cache_hit").help("Cache hit totals"); + + final CounterSnapshot.Builder cacheMissTotal = + CounterSnapshot.builder().name("guava_cache_miss").help("Cache miss totals"); + + final CounterSnapshot.Builder cacheRequestsTotal = + CounterSnapshot.builder().name("guava_cache_requests").help("Cache request totals"); + + final CounterSnapshot.Builder cacheEvictionTotal = + CounterSnapshot.builder() .name("guava_cache_eviction") .help("Cache eviction totals, doesn't include manually removed entries"); - final CounterSnapshot.Builder cacheLoadFailure = CounterSnapshot.builder() - .name("guava_cache_load_failure") - .help("Cache load failures"); + final CounterSnapshot.Builder cacheLoadFailure = + CounterSnapshot.builder().name("guava_cache_load_failure").help("Cache load failures"); - final CounterSnapshot.Builder cacheLoadTotal = CounterSnapshot.builder() + final CounterSnapshot.Builder cacheLoadTotal = + CounterSnapshot.builder() .name("guava_cache_loads") .help("Cache loads: both success and failures"); - final GaugeSnapshot.Builder cacheSize = GaugeSnapshot.builder() - .name("guava_cache_size") - .help("Cache size"); + final GaugeSnapshot.Builder cacheSize = + GaugeSnapshot.builder().name("guava_cache_size").help("Cache size"); - final SummarySnapshot.Builder cacheLoadSummary = SummarySnapshot.builder() + final SummarySnapshot.Builder cacheLoadSummary = + SummarySnapshot.builder() .name("guava_cache_load_duration_seconds") .help("Cache load duration: both success and failures"); - for (final Map.Entry c: children.entrySet()) { - final List cacheName = Arrays.asList(c.getKey()); - final Labels labels = Labels.of(labelNames, cacheName); - - final CacheStats stats = c.getValue().stats(); - - cacheHitTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.hitCount()) - .build() - ); - - cacheMissTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.missCount()) - .build() - ); - - cacheRequestsTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.requestCount()) - .build() - ); - - cacheEvictionTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.evictionCount()) - .build() - ); - - cacheSize.dataPoint( - GaugeSnapshot.GaugeDataPointSnapshot.builder() - .labels(labels) - .value(c.getValue().size()) - .build() - ); - - if (c.getValue() instanceof LoadingCache) { - cacheLoadFailure.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.loadExceptionCount()) - .build() - ); - - cacheLoadTotal.dataPoint( - CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(labels) - .value(stats.loadCount()) - .build() - ); - - cacheLoadSummary.dataPoint( - SummarySnapshot.SummaryDataPointSnapshot.builder() - .labels(labels) - .count(stats.loadCount()) - .sum(stats.totalLoadTime() / NANOSECONDS_PER_SECOND) - .build() - ); - } - } - - metricSnapshotsBuilder.metricSnapshot(cacheHitTotal.build()); - metricSnapshotsBuilder.metricSnapshot(cacheMissTotal.build()); - 
metricSnapshotsBuilder.metricSnapshot(cacheRequestsTotal.build()); - metricSnapshotsBuilder.metricSnapshot(cacheEvictionTotal.build()); - metricSnapshotsBuilder.metricSnapshot(cacheLoadFailure.build()); - metricSnapshotsBuilder.metricSnapshot(cacheLoadTotal.build()); - metricSnapshotsBuilder.metricSnapshot(cacheSize.build()); - metricSnapshotsBuilder.metricSnapshot(cacheLoadSummary.build()); - - return metricSnapshotsBuilder.build(); + for (final Map.Entry c : children.entrySet()) { + final List cacheName = Arrays.asList(c.getKey()); + final Labels labels = Labels.of(labelNames, cacheName); + + final CacheStats stats = c.getValue().stats(); + + cacheHitTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.hitCount()) + .build()); + + cacheMissTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.missCount()) + .build()); + + cacheRequestsTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.requestCount()) + .build()); + + cacheEvictionTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.evictionCount()) + .build()); + + cacheSize.dataPoint( + GaugeSnapshot.GaugeDataPointSnapshot.builder() + .labels(labels) + .value(c.getValue().size()) + .build()); + + if (c.getValue() instanceof LoadingCache) { + cacheLoadFailure.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.loadExceptionCount()) + .build()); + + cacheLoadTotal.dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(labels) + .value(stats.loadCount()) + .build()); + + cacheLoadSummary.dataPoint( + SummarySnapshot.SummaryDataPointSnapshot.builder() + .labels(labels) + .count(stats.loadCount()) + .sum(stats.totalLoadTime() / NANOSECONDS_PER_SECOND) + .build()); + } } + + metricSnapshotsBuilder.metricSnapshot(cacheHitTotal.build()); + metricSnapshotsBuilder.metricSnapshot(cacheMissTotal.build()); + metricSnapshotsBuilder.metricSnapshot(cacheRequestsTotal.build()); + metricSnapshotsBuilder.metricSnapshot(cacheEvictionTotal.build()); + metricSnapshotsBuilder.metricSnapshot(cacheLoadFailure.build()); + metricSnapshotsBuilder.metricSnapshot(cacheLoadTotal.build()); + metricSnapshotsBuilder.metricSnapshot(cacheSize.build()); + metricSnapshotsBuilder.metricSnapshot(cacheLoadSummary.build()); + + return metricSnapshotsBuilder.build(); + } } diff --git a/prometheus-metrics-instrumentation-guava/src/test/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollectorTest.java b/prometheus-metrics-instrumentation-guava/src/test/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollectorTest.java index 41b3bba8d..96de845a8 100644 --- a/prometheus-metrics-instrumentation-guava/src/test/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollectorTest.java +++ b/prometheus-metrics-instrumentation-guava/src/test/java/io/prometheus/metrics/instrumentation/guava/CacheMetricsCollectorTest.java @@ -1,5 +1,11 @@ package io.prometheus.metrics.instrumentation.guava; +import static org.assertj.core.api.Java6Assertions.assertThat; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; @@ -10,133 +16,128 @@ import 
io.prometheus.metrics.model.snapshots.DataPointSnapshot; import io.prometheus.metrics.model.snapshots.Labels; import io.prometheus.metrics.model.snapshots.SummarySnapshot; -import org.junit.Test; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; - -import static org.assertj.core.api.Java6Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import org.junit.Test; public class CacheMetricsCollectorTest { - @Test - public void cacheExposesMetricsForHitMissAndEviction() { - final Cache cache = CacheBuilder.newBuilder().maximumSize(2).recordStats().build(); - - final CacheMetricsCollector collector = new CacheMetricsCollector(); - collector.addCache("users", cache); - - final PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - - cache.getIfPresent("user1"); - cache.getIfPresent("user1"); - cache.put("user1", "First User"); - cache.getIfPresent("user1"); - - // Add to cache to trigger eviction. - cache.put("user2", "Second User"); - cache.put("user3", "Third User"); - cache.put("user4", "Fourth User"); - - assertCounterMetric(registry, "guava_cache_hit", "users", 1.0); - assertCounterMetric(registry, "guava_cache_miss", "users", 2.0); - assertCounterMetric(registry, "guava_cache_requests", "users", 3.0); - assertCounterMetric(registry, "guava_cache_eviction", "users", 2.0); - - final String expected = "# TYPE guava_cache_eviction counter\n" + - "# HELP guava_cache_eviction Cache eviction totals, doesn't include manually removed entries\n" + - "guava_cache_eviction_total{cache=\"users\"} 2.0\n" + - "# TYPE guava_cache_hit counter\n" + - "# HELP guava_cache_hit Cache hit totals\n" + - "guava_cache_hit_total{cache=\"users\"} 1.0\n" + - "# TYPE guava_cache_miss counter\n" + - "# HELP guava_cache_miss Cache miss totals\n" + - "guava_cache_miss_total{cache=\"users\"} 2.0\n" + - "# TYPE guava_cache_requests counter\n" + - "# HELP guava_cache_requests Cache request totals\n" + - "guava_cache_requests_total{cache=\"users\"} 3.0\n" + - "# TYPE guava_cache_size gauge\n" + - "# HELP guava_cache_size Cache size\n" + - "guava_cache_size{cache=\"users\"} 2.0\n" + - "# EOF\n"; - - assertEquals(expected, convertToOpenMetricsFormat(registry)); - } - - @SuppressWarnings("unchecked") - @Test - public void loadingCacheExposesMetricsForLoadsAndExceptions() throws Exception { - final CacheLoader loader = mock(CacheLoader.class); - when(loader.load(anyString())) - .thenReturn("First User") - .thenThrow(new RuntimeException("Seconds time fails")) - .thenReturn("Third User"); - - final LoadingCache cache = CacheBuilder.newBuilder().recordStats().build(loader); - final CacheMetricsCollector collector = new CacheMetricsCollector(); - collector.addCache("loadingusers", cache); - - final PrometheusRegistry registry = new PrometheusRegistry(); - registry.register(collector); - - cache.get("user1"); - cache.get("user1"); - try { - cache.get("user2"); - } catch (Exception e) { - // ignoring. 
- } - cache.get("user3"); - - assertCounterMetric(registry, "guava_cache_hit", "loadingusers", 1.0); - assertCounterMetric(registry, "guava_cache_miss", "loadingusers", 3.0); - - assertCounterMetric(registry, "guava_cache_load_failure", "loadingusers", 1.0); - assertCounterMetric(registry, "guava_cache_loads", "loadingusers", 3.0); - - final SummarySnapshot.SummaryDataPointSnapshot loadDuration = (SummarySnapshot.SummaryDataPointSnapshot) getDataPointSnapshot( - registry, - "guava_cache_load_duration_seconds", - "loadingusers" - ); - - assertEquals(3, loadDuration.getCount()); - assertThat(loadDuration.getSum()).isGreaterThan(0); - } - - private void assertCounterMetric(PrometheusRegistry registry, String name, String cacheName, double value) { - final CounterSnapshot.CounterDataPointSnapshot dataPointSnapshot = - (CounterSnapshot.CounterDataPointSnapshot) getDataPointSnapshot(registry, name, cacheName); - - assertEquals(value, dataPointSnapshot.getValue(), 0); - } - - private DataPointSnapshot getDataPointSnapshot(PrometheusRegistry registry, String name, String cacheName) - { - final Labels labels = Labels.of(new String[]{"cache"}, new String[]{cacheName}); - - return registry.scrape(name::equals).stream() - .flatMap(metricSnapshot -> metricSnapshot.getDataPoints().stream()) - .filter(dataPoint -> dataPoint.getLabels().equals(labels)) - .findFirst() - .get(); + @Test + public void cacheExposesMetricsForHitMissAndEviction() { + final Cache cache = + CacheBuilder.newBuilder().maximumSize(2).recordStats().build(); + + final CacheMetricsCollector collector = new CacheMetricsCollector(); + collector.addCache("users", cache); + + final PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + + cache.getIfPresent("user1"); + cache.getIfPresent("user1"); + cache.put("user1", "First User"); + cache.getIfPresent("user1"); + + // Add to cache to trigger eviction. 
+ cache.put("user2", "Second User"); + cache.put("user3", "Third User"); + cache.put("user4", "Fourth User"); + + assertCounterMetric(registry, "guava_cache_hit", "users", 1.0); + assertCounterMetric(registry, "guava_cache_miss", "users", 2.0); + assertCounterMetric(registry, "guava_cache_requests", "users", 3.0); + assertCounterMetric(registry, "guava_cache_eviction", "users", 2.0); + + final String expected = + "# TYPE guava_cache_eviction counter\n" + + "# HELP guava_cache_eviction Cache eviction totals, doesn't include manually removed entries\n" + + "guava_cache_eviction_total{cache=\"users\"} 2.0\n" + + "# TYPE guava_cache_hit counter\n" + + "# HELP guava_cache_hit Cache hit totals\n" + + "guava_cache_hit_total{cache=\"users\"} 1.0\n" + + "# TYPE guava_cache_miss counter\n" + + "# HELP guava_cache_miss Cache miss totals\n" + + "guava_cache_miss_total{cache=\"users\"} 2.0\n" + + "# TYPE guava_cache_requests counter\n" + + "# HELP guava_cache_requests Cache request totals\n" + + "guava_cache_requests_total{cache=\"users\"} 3.0\n" + + "# TYPE guava_cache_size gauge\n" + + "# HELP guava_cache_size Cache size\n" + + "guava_cache_size{cache=\"users\"} 2.0\n" + + "# EOF\n"; + + assertEquals(expected, convertToOpenMetricsFormat(registry)); + } + + @SuppressWarnings("unchecked") + @Test + public void loadingCacheExposesMetricsForLoadsAndExceptions() throws Exception { + final CacheLoader loader = mock(CacheLoader.class); + when(loader.load(anyString())) + .thenReturn("First User") + .thenThrow(new RuntimeException("Seconds time fails")) + .thenReturn("Third User"); + + final LoadingCache cache = + CacheBuilder.newBuilder().recordStats().build(loader); + final CacheMetricsCollector collector = new CacheMetricsCollector(); + collector.addCache("loadingusers", cache); + + final PrometheusRegistry registry = new PrometheusRegistry(); + registry.register(collector); + + cache.get("user1"); + cache.get("user1"); + try { + cache.get("user2"); + } catch (Exception e) { + // ignoring. 
} - - private String convertToOpenMetricsFormat(PrometheusRegistry registry) { - final ByteArrayOutputStream out = new ByteArrayOutputStream(); - final OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); - try { - writer.write(out, registry.scrape()); - return out.toString(StandardCharsets.UTF_8.name()); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + cache.get("user3"); + + assertCounterMetric(registry, "guava_cache_hit", "loadingusers", 1.0); + assertCounterMetric(registry, "guava_cache_miss", "loadingusers", 3.0); + + assertCounterMetric(registry, "guava_cache_load_failure", "loadingusers", 1.0); + assertCounterMetric(registry, "guava_cache_loads", "loadingusers", 3.0); + + final SummarySnapshot.SummaryDataPointSnapshot loadDuration = + (SummarySnapshot.SummaryDataPointSnapshot) + getDataPointSnapshot(registry, "guava_cache_load_duration_seconds", "loadingusers"); + + assertEquals(3, loadDuration.getCount()); + assertThat(loadDuration.getSum()).isGreaterThan(0); + } + + private void assertCounterMetric( + PrometheusRegistry registry, String name, String cacheName, double value) { + final CounterSnapshot.CounterDataPointSnapshot dataPointSnapshot = + (CounterSnapshot.CounterDataPointSnapshot) getDataPointSnapshot(registry, name, cacheName); + + assertEquals(value, dataPointSnapshot.getValue(), 0); + } + + private DataPointSnapshot getDataPointSnapshot( + PrometheusRegistry registry, String name, String cacheName) { + final Labels labels = Labels.of(new String[] {"cache"}, new String[] {cacheName}); + + return registry.scrape(name::equals).stream() + .flatMap(metricSnapshot -> metricSnapshot.getDataPoints().stream()) + .filter(dataPoint -> dataPoint.getLabels().equals(labels)) + .findFirst() + .get(); + } + + private String convertToOpenMetricsFormat(PrometheusRegistry registry) { + final ByteArrayOutputStream out = new ByteArrayOutputStream(); + final OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + try { + writer.write(out, registry.scrape()); + return out.toString(StandardCharsets.UTF_8.name()); + } catch (IOException e) { + throw new UncheckedIOException(e); } + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetrics.java index 9c8cddb46..ad047f1ba 100644 --- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetrics.java +++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetrics.java @@ -4,21 +4,26 @@ import io.prometheus.metrics.core.metrics.GaugeWithCallback; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.Unit; - import java.lang.management.BufferPoolMXBean; import java.lang.management.ManagementFactory; import java.util.List; /** - * JVM Buffer Pool metrics. The {@link JvmBufferPoolMetrics} are registered as part of the {@link JvmMetrics} like this: + * JVM Buffer Pool metrics. The {@link JvmBufferPoolMetrics} are registered as part of the {@link + * JvmMetrics} like this: + * *

{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
+ * * However, if you want only the {@link JvmBufferPoolMetrics} you can also register them directly: + * *
{@code
- *   JvmBufferPoolMetrics.builder().register();
+ * JvmBufferPoolMetrics.builder().register();
  * }
+ * * Example metrics being exported: + * *
  * # HELP jvm_buffer_pool_capacity_bytes Bytes capacity of a given JVM buffer pool.
  * # TYPE jvm_buffer_pool_capacity_bytes gauge
@@ -36,91 +41,93 @@
  */
 public class JvmBufferPoolMetrics {
 
-    private static final String JVM_BUFFER_POOL_USED_BYTES = "jvm_buffer_pool_used_bytes";
-    private static final String JVM_BUFFER_POOL_CAPACITY_BYTES = "jvm_buffer_pool_capacity_bytes";
-    private static final String JVM_BUFFER_POOL_USED_BUFFERS = "jvm_buffer_pool_used_buffers";
+  private static final String JVM_BUFFER_POOL_USED_BYTES = "jvm_buffer_pool_used_bytes";
+  private static final String JVM_BUFFER_POOL_CAPACITY_BYTES = "jvm_buffer_pool_capacity_bytes";
+  private static final String JVM_BUFFER_POOL_USED_BUFFERS = "jvm_buffer_pool_used_buffers";
+
+  private final PrometheusProperties config;
+  private final List bufferPoolBeans;
+
+  private JvmBufferPoolMetrics(
+      List bufferPoolBeans, PrometheusProperties config) {
+    this.config = config;
+    this.bufferPoolBeans = bufferPoolBeans;
+  }
+
+  private void register(PrometheusRegistry registry) {
+
+    GaugeWithCallback.builder(config)
+        .name(JVM_BUFFER_POOL_USED_BYTES)
+        .help("Used bytes of a given JVM buffer pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(
+            callback -> {
+              for (BufferPoolMXBean pool : bufferPoolBeans) {
+                callback.call(pool.getMemoryUsed(), pool.getName());
+              }
+            })
+        .register(registry);
+
+    GaugeWithCallback.builder(config)
+        .name(JVM_BUFFER_POOL_CAPACITY_BYTES)
+        .help("Bytes capacity of a given JVM buffer pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(
+            callback -> {
+              for (BufferPoolMXBean pool : bufferPoolBeans) {
+                callback.call(pool.getTotalCapacity(), pool.getName());
+              }
+            })
+        .register(registry);
+
+    GaugeWithCallback.builder(config)
+        .name(JVM_BUFFER_POOL_USED_BUFFERS)
+        .help("Used buffers of a given JVM buffer pool.")
+        .labelNames("pool")
+        .callback(
+            callback -> {
+              for (BufferPoolMXBean pool : bufferPoolBeans) {
+                callback.call(pool.getCount(), pool.getName());
+              }
+            })
+        .register(registry);
+  }
+
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
+
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
+
+  public static class Builder {
 
     private final PrometheusProperties config;
-    private final List bufferPoolBeans;
-
-    private JvmBufferPoolMetrics(List bufferPoolBeans, PrometheusProperties config) {
-        this.config = config;
-        this.bufferPoolBeans = bufferPoolBeans;
-    }
+    private List bufferPoolBeans;
 
-    private void register(PrometheusRegistry registry) {
-
-        GaugeWithCallback.builder(config)
-                .name(JVM_BUFFER_POOL_USED_BYTES)
-                .help("Used bytes of a given JVM buffer pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(callback -> {
-                    for (BufferPoolMXBean pool : bufferPoolBeans) {
-                        callback.call(pool.getMemoryUsed(), pool.getName());
-                    }
-                })
-                .register(registry);
-
-        GaugeWithCallback.builder(config)
-                .name(JVM_BUFFER_POOL_CAPACITY_BYTES)
-                .help("Bytes capacity of a given JVM buffer pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(callback -> {
-                    for (BufferPoolMXBean pool : bufferPoolBeans) {
-                        callback.call(pool.getTotalCapacity(), pool.getName());
-                    }
-                })
-                .register(registry);
-
-        GaugeWithCallback.builder(config)
-                .name(JVM_BUFFER_POOL_USED_BUFFERS)
-                .help("Used buffers of a given JVM buffer pool.")
-                .labelNames("pool")
-                .callback(callback -> {
-                    for (BufferPoolMXBean pool : bufferPoolBeans) {
-                        callback.call(pool.getCount(), pool.getName());
-                    }
-                })
-                .register(registry);
+    private Builder(PrometheusProperties config) {
+      this.config = config;
     }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
+    /** Package private. For testing only. */
+    Builder bufferPoolBeans(List bufferPoolBeans) {
+      this.bufferPoolBeans = bufferPoolBeans;
+      return this;
     }
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
     }
 
-    public static class Builder {
-
-        private final PrometheusProperties config;
-        private List bufferPoolBeans;
-
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder bufferPoolBeans(List bufferPoolBeans) {
-            this.bufferPoolBeans = bufferPoolBeans;
-            return this;
-        }
-
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
-
-        public void register(PrometheusRegistry registry) {
-            List bufferPoolBeans = this.bufferPoolBeans;
-            if (bufferPoolBeans == null) {
-                bufferPoolBeans = ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class);
-            }
-            new JvmBufferPoolMetrics(bufferPoolBeans, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      List bufferPoolBeans = this.bufferPoolBeans;
+      if (bufferPoolBeans == null) {
+        bufferPoolBeans = ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class);
+      }
+      new JvmBufferPoolMetrics(bufferPoolBeans, config).register(registry);
     }
+  }
 }
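Aside: a minimal usage sketch of the two registration paths described in the reformatted JvmBufferPoolMetrics Javadoc above, assuming a hypothetical application entry point; the builder(), register(), and register(PrometheusRegistry) calls are the ones visible in the hunk.

import io.prometheus.metrics.instrumentation.jvm.JvmBufferPoolMetrics;
import io.prometheus.metrics.model.registry.PrometheusRegistry;

// Hypothetical example class, not part of this diff.
public class BufferPoolMetricsExample {
  public static void main(String[] args) {
    // register() without arguments uses PrometheusRegistry.defaultRegistry.
    JvmBufferPoolMetrics.builder().register();

    // Alternatively, register against a registry managed by the application.
    PrometheusRegistry registry = new PrometheusRegistry();
    JvmBufferPoolMetrics.builder().register(registry);
  }
}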
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetrics.java
index 69d3c4ecd..f87618f63 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetrics.java
@@ -4,20 +4,25 @@
 import io.prometheus.metrics.core.metrics.CounterWithCallback;
 import io.prometheus.metrics.core.metrics.GaugeWithCallback;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
-
 import java.lang.management.ClassLoadingMXBean;
 import java.lang.management.ManagementFactory;
 
 /**
- * JVM Class Loading metrics. The {@link JvmClassLoadingMetrics} are registered as part of the {@link JvmMetrics} like this:
+ * JVM Class Loading metrics. The {@link JvmClassLoadingMetrics} are registered as part of the
+ * {@link JvmMetrics} like this:
+ *
  * 
{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
+ *
  * However, if you want only the {@link JvmClassLoadingMetrics} you can also register them directly:
+ *
  * 
{@code
- *   JvmClassLoadingMetrics.builder().register();
+ * JvmClassLoadingMetrics.builder().register();
  * }
+ *
  * Example metrics being exported:
+ *
  * 
  * # HELP jvm_classes_currently_loaded The number of classes that are currently loaded in the JVM
  * # TYPE jvm_classes_currently_loaded gauge
@@ -32,71 +37,74 @@
  */
 public class JvmClassLoadingMetrics {
 
-    private static final String JVM_CLASSES_CURRENTLY_LOADED = "jvm_classes_currently_loaded";
-    private static final String JVM_CLASSES_LOADED_TOTAL = "jvm_classes_loaded_total";
-    private static final String JVM_CLASSES_UNLOADED_TOTAL = "jvm_classes_unloaded_total";
+  private static final String JVM_CLASSES_CURRENTLY_LOADED = "jvm_classes_currently_loaded";
+  private static final String JVM_CLASSES_LOADED_TOTAL = "jvm_classes_loaded_total";
+  private static final String JVM_CLASSES_UNLOADED_TOTAL = "jvm_classes_unloaded_total";
 
-    private final PrometheusProperties config;
-    private final ClassLoadingMXBean classLoadingBean;
+  private final PrometheusProperties config;
+  private final ClassLoadingMXBean classLoadingBean;
 
-    private JvmClassLoadingMetrics(ClassLoadingMXBean classLoadingBean, PrometheusProperties config) {
-        this.classLoadingBean = classLoadingBean;
-        this.config = config;
-    }
+  private JvmClassLoadingMetrics(ClassLoadingMXBean classLoadingBean, PrometheusProperties config) {
+    this.classLoadingBean = classLoadingBean;
+    this.config = config;
+  }
 
-    private void register(PrometheusRegistry registry) {
-
-        GaugeWithCallback.builder(config)
-                .name(JVM_CLASSES_CURRENTLY_LOADED)
-                .help("The number of classes that are currently loaded in the JVM")
-                .callback(callback -> callback.call(classLoadingBean.getLoadedClassCount()))
-                .register(registry);
-
-        CounterWithCallback.builder(config)
-                .name(JVM_CLASSES_LOADED_TOTAL)
-                .help("The total number of classes that have been loaded since the JVM has started execution")
-                .callback(callback -> callback.call(classLoadingBean.getTotalLoadedClassCount()))
-                .register(registry);
-
-        CounterWithCallback.builder(config)
-                .name(JVM_CLASSES_UNLOADED_TOTAL)
-                .help("The total number of classes that have been unloaded since the JVM has started execution")
-                .callback(callback -> callback.call(classLoadingBean.getUnloadedClassCount()))
-                .register(registry);
-    }
+  private void register(PrometheusRegistry registry) {
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
-    }
+    GaugeWithCallback.builder(config)
+        .name(JVM_CLASSES_CURRENTLY_LOADED)
+        .help("The number of classes that are currently loaded in the JVM")
+        .callback(callback -> callback.call(classLoadingBean.getLoadedClassCount()))
+        .register(registry);
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
-    }
+    CounterWithCallback.builder(config)
+        .name(JVM_CLASSES_LOADED_TOTAL)
+        .help(
+            "The total number of classes that have been loaded since the JVM has started execution")
+        .callback(callback -> callback.call(classLoadingBean.getTotalLoadedClassCount()))
+        .register(registry);
 
-    public static class Builder {
+    CounterWithCallback.builder(config)
+        .name(JVM_CLASSES_UNLOADED_TOTAL)
+        .help(
+            "The total number of classes that have been unloaded since the JVM has started execution")
+        .callback(callback -> callback.call(classLoadingBean.getUnloadedClassCount()))
+        .register(registry);
+  }
 
-        private final PrometheusProperties config;
-        private ClassLoadingMXBean classLoadingBean;
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
 
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
 
-        /**
-         * Package private. For testing only.
-         */
-        Builder classLoadingBean(ClassLoadingMXBean classLoadingBean) {
-            this.classLoadingBean = classLoadingBean;
-            return this;
-        }
+  public static class Builder {
 
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
+    private final PrometheusProperties config;
+    private ClassLoadingMXBean classLoadingBean;
+
+    private Builder(PrometheusProperties config) {
+      this.config = config;
+    }
+
+    /** Package private. For testing only. */
+    Builder classLoadingBean(ClassLoadingMXBean classLoadingBean) {
+      this.classLoadingBean = classLoadingBean;
+      return this;
+    }
+
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
+    }
 
-        public void register(PrometheusRegistry registry) {
-            ClassLoadingMXBean classLoadingBean = this.classLoadingBean != null ? this.classLoadingBean : ManagementFactory.getClassLoadingMXBean();
-            new JvmClassLoadingMetrics(classLoadingBean, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      ClassLoadingMXBean classLoadingBean =
+          this.classLoadingBean != null
+              ? this.classLoadingBean
+              : ManagementFactory.getClassLoadingMXBean();
+      new JvmClassLoadingMetrics(classLoadingBean, config).register(registry);
     }
+  }
 }
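For context, a minimal usage sketch of the builder API shown in the diff above: registering only the class loading metrics against a dedicated registry instead of the default one. The class name `ClassLoadingMetricsExample` is made up for illustration; `JvmClassLoadingMetrics` and `PrometheusRegistry` are the APIs visible in this patch.

```java
import io.prometheus.metrics.instrumentation.jvm.JvmClassLoadingMetrics;
import io.prometheus.metrics.model.registry.PrometheusRegistry;

public class ClassLoadingMetricsExample {
  public static void main(String[] args) {
    // Register only the jvm_classes_* metrics with a dedicated registry
    // instead of PrometheusRegistry.defaultRegistry.
    PrometheusRegistry registry = new PrometheusRegistry();
    JvmClassLoadingMetrics.builder().register(registry);
  }
}
```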
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetrics.java
index 0fbebf46e..308b00877 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetrics.java
@@ -1,25 +1,30 @@
 package io.prometheus.metrics.instrumentation.jvm;
 
+import static io.prometheus.metrics.model.snapshots.Unit.millisToSeconds;
+
 import io.prometheus.metrics.config.PrometheusProperties;
 import io.prometheus.metrics.core.metrics.CounterWithCallback;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
 import io.prometheus.metrics.model.snapshots.Unit;
-
 import java.lang.management.CompilationMXBean;
 import java.lang.management.ManagementFactory;
 
-import static io.prometheus.metrics.model.snapshots.Unit.millisToSeconds;
-
 /**
- * JVM Compilation metrics. The {@link JvmCompilationMetrics} are registered as part of the {@link JvmMetrics} like this:
+ * JVM Compilation metrics. The {@link JvmCompilationMetrics} are registered as part of the {@link
+ * JvmMetrics} like this:
+ *
  * <pre>{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
+ *
  * However, if you want only the {@link JvmCompilationMetrics} you can also register them directly:
+ *
  * <pre>{@code
- *   JvmCompilationMetrics.builder().register();
+ * JvmCompilationMetrics.builder().register();
  * }
+ *
  * Example metrics being exported:
+ *
  * <pre>
  * # HELP jvm_compilation_time_seconds_total The total time in seconds taken for HotSpot class compilation
  * # TYPE jvm_compilation_time_seconds_total counter
@@ -28,62 +33,65 @@
  */
 public class JvmCompilationMetrics {
 
-    private static final String JVM_COMPILATION_TIME_SECONDS_TOTAL = "jvm_compilation_time_seconds_total";
-
-    private final PrometheusProperties config;
-    private final CompilationMXBean compilationBean;
+  private static final String JVM_COMPILATION_TIME_SECONDS_TOTAL =
+      "jvm_compilation_time_seconds_total";
 
-    private JvmCompilationMetrics(CompilationMXBean compilationBean, PrometheusProperties config) {
-        this.compilationBean = compilationBean;
-        this.config = config;
-    }
+  private final PrometheusProperties config;
+  private final CompilationMXBean compilationBean;
 
-    private void register(PrometheusRegistry registry) {
+  private JvmCompilationMetrics(CompilationMXBean compilationBean, PrometheusProperties config) {
+    this.compilationBean = compilationBean;
+    this.config = config;
+  }
 
-        if (compilationBean == null || !compilationBean.isCompilationTimeMonitoringSupported()) {
-            return;
-        }
+  private void register(PrometheusRegistry registry) {
 
-        CounterWithCallback.builder(config)
-                .name(JVM_COMPILATION_TIME_SECONDS_TOTAL)
-                .help("The total time in seconds taken for HotSpot class compilation")
-                .unit(Unit.SECONDS)
-                .callback(callback -> callback.call(millisToSeconds(compilationBean.getTotalCompilationTime())))
-                .register(registry);
+    if (compilationBean == null || !compilationBean.isCompilationTimeMonitoringSupported()) {
+      return;
     }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
-    }
+    CounterWithCallback.builder(config)
+        .name(JVM_COMPILATION_TIME_SECONDS_TOTAL)
+        .help("The total time in seconds taken for HotSpot class compilation")
+        .unit(Unit.SECONDS)
+        .callback(
+            callback -> callback.call(millisToSeconds(compilationBean.getTotalCompilationTime())))
+        .register(registry);
+  }
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
-    }
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
 
-    public static class Builder {
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
 
-        private final PrometheusProperties config;
-        private CompilationMXBean compilationBean;
+  public static class Builder {
 
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
+    private final PrometheusProperties config;
+    private CompilationMXBean compilationBean;
 
-        /**
-         * Package private. For testing only.
-         */
-        Builder compilationBean(CompilationMXBean compilationBean) {
-            this.compilationBean = compilationBean;
-            return this;
-        }
+    private Builder(PrometheusProperties config) {
+      this.config = config;
+    }
 
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
+    /** Package private. For testing only. */
+    Builder compilationBean(CompilationMXBean compilationBean) {
+      this.compilationBean = compilationBean;
+      return this;
+    }
+
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
+    }
 
-        public void register(PrometheusRegistry registry) {
-            CompilationMXBean compilationBean = this.compilationBean != null ? this.compilationBean : ManagementFactory.getCompilationMXBean();
-            new JvmCompilationMetrics(compilationBean, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      CompilationMXBean compilationBean =
+          this.compilationBean != null
+              ? this.compilationBean
+              : ManagementFactory.getCompilationMXBean();
+      new JvmCompilationMetrics(compilationBean, config).register(registry);
     }
+  }
 }
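As a side note, the callback above converts the MXBean's cumulative compilation time from milliseconds to seconds before exporting it. A small sketch of that conversion under the same guard used by register(...); the class name is hypothetical, the `Unit.millisToSeconds` helper is the one statically imported in this diff.

```java
import io.prometheus.metrics.model.snapshots.Unit;
import java.lang.management.CompilationMXBean;
import java.lang.management.ManagementFactory;

public class CompilationTimeExample {
  public static void main(String[] args) {
    CompilationMXBean bean = ManagementFactory.getCompilationMXBean();
    // register(...) above is a no-op when the bean is null or monitoring is unsupported.
    if (bean != null && bean.isCompilationTimeMonitoringSupported()) {
      double seconds = Unit.millisToSeconds(bean.getTotalCompilationTime());
      System.out.println("jvm_compilation_time_seconds_total would report ~" + seconds);
    }
  }
}
```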
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetrics.java
index ab9877a85..e8b9aaa70 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetrics.java
@@ -5,21 +5,27 @@
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
 import io.prometheus.metrics.model.snapshots.Quantiles;
 import io.prometheus.metrics.model.snapshots.Unit;
-
 import java.lang.management.GarbageCollectorMXBean;
 import java.lang.management.ManagementFactory;
 import java.util.List;
 
 /**
- * JVM Garbage Collector metrics. The {@link JvmGarbageCollectorMetrics} are registered as part of the {@link JvmMetrics} like this:
+ * JVM Garbage Collector metrics. The {@link JvmGarbageCollectorMetrics} are registered as part of
+ * the {@link JvmMetrics} like this:
+ *
  * <pre>{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
- * However, if you want only the {@link JvmGarbageCollectorMetrics} you can also register them directly:
+ *
+ * However, if you want only the {@link JvmGarbageCollectorMetrics} you can also register them
+ * directly:
+ *
  * <pre>{@code
- *   JvmGarbageCollectorMetrics.builder().register();
+ * JvmGarbageCollectorMetrics.builder().register();
  * }
+ *
  * Example metrics being exported:
+ *
  * <pre>
  * # HELP jvm_gc_collection_seconds Time spent in a given JVM garbage collector in seconds.
  * # TYPE jvm_gc_collection_seconds summary
@@ -31,66 +37,70 @@
  */
 public class JvmGarbageCollectorMetrics {
 
-    private static final String JVM_GC_COLLECTION_SECONDS = "jvm_gc_collection_seconds";
+  private static final String JVM_GC_COLLECTION_SECONDS = "jvm_gc_collection_seconds";
+
+  private final PrometheusProperties config;
+  private final List<GarbageCollectorMXBean> garbageCollectorBeans;
+
+  private JvmGarbageCollectorMetrics(
+      List<GarbageCollectorMXBean> garbageCollectorBeans, PrometheusProperties config) {
+    this.config = config;
+    this.garbageCollectorBeans = garbageCollectorBeans;
+  }
+
+  private void register(PrometheusRegistry registry) {
+
+    SummaryWithCallback.builder(config)
+        .name(JVM_GC_COLLECTION_SECONDS)
+        .help("Time spent in a given JVM garbage collector in seconds.")
+        .unit(Unit.SECONDS)
+        .labelNames("gc")
+        .callback(
+            callback -> {
+              for (GarbageCollectorMXBean gc : garbageCollectorBeans) {
+                callback.call(
+                    gc.getCollectionCount(),
+                    Unit.millisToSeconds(gc.getCollectionTime()),
+                    Quantiles.EMPTY,
+                    gc.getName());
+              }
+            })
+        .register(registry);
+  }
+
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
+
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
+
+  public static class Builder {
 
     private final PrometheusProperties config;
-    private final List<GarbageCollectorMXBean> garbageCollectorBeans;
+    private List<GarbageCollectorMXBean> garbageCollectorBeans;
 
-    private JvmGarbageCollectorMetrics(List<GarbageCollectorMXBean> garbageCollectorBeans, PrometheusProperties config) {
-        this.config = config;
-        this.garbageCollectorBeans = garbageCollectorBeans;
+    private Builder(PrometheusProperties config) {
+      this.config = config;
     }
 
-    private void register(PrometheusRegistry registry) {
-
-        SummaryWithCallback.builder(config)
-                .name(JVM_GC_COLLECTION_SECONDS)
-                .help("Time spent in a given JVM garbage collector in seconds.")
-                .unit(Unit.SECONDS)
-                .labelNames("gc")
-                .callback(callback -> {
-                    for (GarbageCollectorMXBean gc : garbageCollectorBeans) {
-                        callback.call(gc.getCollectionCount(), Unit.millisToSeconds(gc.getCollectionTime()), Quantiles.EMPTY, gc.getName());
-                    }
-                })
-                .register(registry);
+    /** Package private. For testing only. */
+    Builder garbageCollectorBeans(List<GarbageCollectorMXBean> garbageCollectorBeans) {
+      this.garbageCollectorBeans = garbageCollectorBeans;
+      return this;
     }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
     }
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
-    }
-
-    public static class Builder {
-
-        private final PrometheusProperties config;
-        private List<GarbageCollectorMXBean> garbageCollectorBeans;
-
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder garbageCollectorBeans(List<GarbageCollectorMXBean> garbageCollectorBeans) {
-            this.garbageCollectorBeans = garbageCollectorBeans;
-            return this;
-        }
-
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
-
-        public void register(PrometheusRegistry registry) {
-            List<GarbageCollectorMXBean> garbageCollectorBeans = this.garbageCollectorBeans;
-            if (garbageCollectorBeans == null) {
-                garbageCollectorBeans = ManagementFactory.getGarbageCollectorMXBeans();
-            }
-            new JvmGarbageCollectorMetrics(garbageCollectorBeans, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      List<GarbageCollectorMXBean> garbageCollectorBeans = this.garbageCollectorBeans;
+      if (garbageCollectorBeans == null) {
+        garbageCollectorBeans = ManagementFactory.getGarbageCollectorMXBeans();
+      }
+      new JvmGarbageCollectorMetrics(garbageCollectorBeans, config).register(registry);
     }
+  }
 }
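For reference, the summary callback above reports, per collector, the cumulative collection count and the collection time converted from milliseconds to seconds. A plain-JDK sketch that prints the same numbers (the class name is made up for illustration):

```java
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;

public class GcSummaryExample {
  public static void main(String[] args) {
    for (GarbageCollectorMXBean gc : ManagementFactory.getGarbageCollectorMXBeans()) {
      // getCollectionCount() maps to the summary's count,
      // getCollectionTime() (ms -> s) to its sum.
      double seconds = gc.getCollectionTime() / 1000.0;
      System.out.printf("gc=%s count=%d sum=%.3fs%n", gc.getName(), gc.getCollectionCount(), seconds);
    }
  }
}
```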
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetrics.java
index 3e3b95efa..b0672aaf3 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetrics.java
@@ -4,7 +4,6 @@
 import io.prometheus.metrics.core.metrics.GaugeWithCallback;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
 import io.prometheus.metrics.model.snapshots.Unit;
-
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.lang.management.MemoryPoolMXBean;
@@ -14,15 +13,21 @@
 import java.util.function.Function;
 
 /**
- * JVM memory metrics. The {@link JvmMemoryMetrics} are registered as part of the {@link JvmMetrics} like this:
+ * JVM memory metrics. The {@link JvmMemoryMetrics} are registered as part of the {@link JvmMetrics}
+ * like this:
+ *
  * <pre>{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
+ *
  * However, if you want only the {@link JvmMemoryMetrics} you can also register them directly:
+ *
  * <pre>{@code
- *   JvmMemoryMetrics.builder().register();
+ * JvmMemoryMetrics.builder().register();
  * }
+ *
  * Example metrics being exported:
+ *
  * <pre>
  * # HELP jvm_memory_committed_bytes Committed (bytes) of a given JVM memory area.
  * # TYPE jvm_memory_committed_bytes gauge
@@ -99,200 +104,216 @@
  */
 public class JvmMemoryMetrics {
 
-    private static final String JVM_MEMORY_OBJECTS_PENDING_FINALIZATION = "jvm_memory_objects_pending_finalization";
-    private static final String JVM_MEMORY_USED_BYTES = "jvm_memory_used_bytes";
-    private static final String JVM_MEMORY_COMMITTED_BYTES = "jvm_memory_committed_bytes";
-    private static final String JVM_MEMORY_MAX_BYTES = "jvm_memory_max_bytes";
-    private static final String JVM_MEMORY_INIT_BYTES = "jvm_memory_init_bytes";
-    private static final String JVM_MEMORY_POOL_USED_BYTES = "jvm_memory_pool_used_bytes";
-    private static final String JVM_MEMORY_POOL_COMMITTED_BYTES = "jvm_memory_pool_committed_bytes";
-    private static final String JVM_MEMORY_POOL_MAX_BYTES = "jvm_memory_pool_max_bytes";
-    private static final String JVM_MEMORY_POOL_INIT_BYTES = "jvm_memory_pool_init_bytes";
-    private static final String JVM_MEMORY_POOL_COLLECTION_USED_BYTES = "jvm_memory_pool_collection_used_bytes";
-    private static final String JVM_MEMORY_POOL_COLLECTION_COMMITTED_BYTES = "jvm_memory_pool_collection_committed_bytes";
-    private static final String JVM_MEMORY_POOL_COLLECTION_MAX_BYTES = "jvm_memory_pool_collection_max_bytes";
-    private static final String JVM_MEMORY_POOL_COLLECTION_INIT_BYTES = "jvm_memory_pool_collection_init_bytes";
+  private static final String JVM_MEMORY_OBJECTS_PENDING_FINALIZATION =
+      "jvm_memory_objects_pending_finalization";
+  private static final String JVM_MEMORY_USED_BYTES = "jvm_memory_used_bytes";
+  private static final String JVM_MEMORY_COMMITTED_BYTES = "jvm_memory_committed_bytes";
+  private static final String JVM_MEMORY_MAX_BYTES = "jvm_memory_max_bytes";
+  private static final String JVM_MEMORY_INIT_BYTES = "jvm_memory_init_bytes";
+  private static final String JVM_MEMORY_POOL_USED_BYTES = "jvm_memory_pool_used_bytes";
+  private static final String JVM_MEMORY_POOL_COMMITTED_BYTES = "jvm_memory_pool_committed_bytes";
+  private static final String JVM_MEMORY_POOL_MAX_BYTES = "jvm_memory_pool_max_bytes";
+  private static final String JVM_MEMORY_POOL_INIT_BYTES = "jvm_memory_pool_init_bytes";
+  private static final String JVM_MEMORY_POOL_COLLECTION_USED_BYTES =
+      "jvm_memory_pool_collection_used_bytes";
+  private static final String JVM_MEMORY_POOL_COLLECTION_COMMITTED_BYTES =
+      "jvm_memory_pool_collection_committed_bytes";
+  private static final String JVM_MEMORY_POOL_COLLECTION_MAX_BYTES =
+      "jvm_memory_pool_collection_max_bytes";
+  private static final String JVM_MEMORY_POOL_COLLECTION_INIT_BYTES =
+      "jvm_memory_pool_collection_init_bytes";
 
-    private final PrometheusProperties config;
-    private final MemoryMXBean memoryBean;
-    private final List<MemoryPoolMXBean> poolBeans;
+  private final PrometheusProperties config;
+  private final MemoryMXBean memoryBean;
+  private final List<MemoryPoolMXBean> poolBeans;
 
-    private JvmMemoryMetrics(List<MemoryPoolMXBean> poolBeans, MemoryMXBean memoryBean, PrometheusProperties config) {
-        this.config = config;
-        this.poolBeans = poolBeans;
-        this.memoryBean = memoryBean;
-    }
+  private JvmMemoryMetrics(
+      List<MemoryPoolMXBean> poolBeans, MemoryMXBean memoryBean, PrometheusProperties config) {
+    this.config = config;
+    this.poolBeans = poolBeans;
+    this.memoryBean = memoryBean;
+  }
 
-    private void register(PrometheusRegistry registry) {
+  private void register(PrometheusRegistry registry) {
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_OBJECTS_PENDING_FINALIZATION)
-                .help("The number of objects waiting in the finalizer queue.")
-                .callback(callback -> callback.call(memoryBean.getObjectPendingFinalizationCount()))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_OBJECTS_PENDING_FINALIZATION)
+        .help("The number of objects waiting in the finalizer queue.")
+        .callback(callback -> callback.call(memoryBean.getObjectPendingFinalizationCount()))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_USED_BYTES)
-                .help("Used bytes of a given JVM memory area.")
-                .unit(Unit.BYTES)
-                .labelNames("area")
-                .callback(callback -> {
-                    callback.call(memoryBean.getHeapMemoryUsage().getUsed(), "heap");
-                    callback.call(memoryBean.getNonHeapMemoryUsage().getUsed(), "nonheap");
-                })
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_USED_BYTES)
+        .help("Used bytes of a given JVM memory area.")
+        .unit(Unit.BYTES)
+        .labelNames("area")
+        .callback(
+            callback -> {
+              callback.call(memoryBean.getHeapMemoryUsage().getUsed(), "heap");
+              callback.call(memoryBean.getNonHeapMemoryUsage().getUsed(), "nonheap");
+            })
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_COMMITTED_BYTES)
-                .help("Committed (bytes) of a given JVM memory area.")
-                .unit(Unit.BYTES)
-                .labelNames("area")
-                .callback(callback -> {
-                    callback.call(memoryBean.getHeapMemoryUsage().getCommitted(), "heap");
-                    callback.call(memoryBean.getNonHeapMemoryUsage().getCommitted(), "nonheap");
-                })
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_COMMITTED_BYTES)
+        .help("Committed (bytes) of a given JVM memory area.")
+        .unit(Unit.BYTES)
+        .labelNames("area")
+        .callback(
+            callback -> {
+              callback.call(memoryBean.getHeapMemoryUsage().getCommitted(), "heap");
+              callback.call(memoryBean.getNonHeapMemoryUsage().getCommitted(), "nonheap");
+            })
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_MAX_BYTES)
-                .help("Max (bytes) of a given JVM memory area.")
-                .unit(Unit.BYTES)
-                .labelNames("area")
-                .callback(callback -> {
-                    callback.call(memoryBean.getHeapMemoryUsage().getMax(), "heap");
-                    callback.call(memoryBean.getNonHeapMemoryUsage().getMax(), "nonheap");
-                })
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_MAX_BYTES)
+        .help("Max (bytes) of a given JVM memory area.")
+        .unit(Unit.BYTES)
+        .labelNames("area")
+        .callback(
+            callback -> {
+              callback.call(memoryBean.getHeapMemoryUsage().getMax(), "heap");
+              callback.call(memoryBean.getNonHeapMemoryUsage().getMax(), "nonheap");
+            })
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_INIT_BYTES)
-                .help("Initial bytes of a given JVM memory area.")
-                .unit(Unit.BYTES)
-                .labelNames("area")
-                .callback(callback -> {
-                    callback.call(memoryBean.getHeapMemoryUsage().getInit(), "heap");
-                    callback.call(memoryBean.getNonHeapMemoryUsage().getInit(), "nonheap");
-                })
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_INIT_BYTES)
+        .help("Initial bytes of a given JVM memory area.")
+        .unit(Unit.BYTES)
+        .labelNames("area")
+        .callback(
+            callback -> {
+              callback.call(memoryBean.getHeapMemoryUsage().getInit(), "heap");
+              callback.call(memoryBean.getNonHeapMemoryUsage().getInit(), "nonheap");
+            })
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_USED_BYTES)
-                .help("Used bytes of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getUsed))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_USED_BYTES)
+        .help("Used bytes of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getUsed))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_COMMITTED_BYTES)
-                .help("Committed bytes of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getCommitted))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_COMMITTED_BYTES)
+        .help("Committed bytes of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getCommitted))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_MAX_BYTES)
-                .help("Max bytes of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getMax))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_MAX_BYTES)
+        .help("Max bytes of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getMax))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_INIT_BYTES)
-                .help("Initial bytes of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getInit))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_INIT_BYTES)
+        .help("Initial bytes of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(makeCallback(poolBeans, MemoryPoolMXBean::getUsage, MemoryUsage::getInit))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_COLLECTION_USED_BYTES)
-                .help("Used bytes after last collection of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getUsed))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_COLLECTION_USED_BYTES)
+        .help("Used bytes after last collection of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(
+            makeCallback(poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getUsed))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_COLLECTION_COMMITTED_BYTES)
-                .help("Committed after last collection bytes of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getCommitted))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_COLLECTION_COMMITTED_BYTES)
+        .help("Committed after last collection bytes of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(
+            makeCallback(
+                poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getCommitted))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_COLLECTION_MAX_BYTES)
-                .help("Max bytes after last collection of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getMax))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_COLLECTION_MAX_BYTES)
+        .help("Max bytes after last collection of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(
+            makeCallback(poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getMax))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(JVM_MEMORY_POOL_COLLECTION_INIT_BYTES)
-                .help("Initial after last collection bytes of a given JVM memory pool.")
-                .unit(Unit.BYTES)
-                .labelNames("pool")
-                .callback(makeCallback(poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getInit))
-                .register(registry);
-    }
+    GaugeWithCallback.builder(config)
+        .name(JVM_MEMORY_POOL_COLLECTION_INIT_BYTES)
+        .help("Initial after last collection bytes of a given JVM memory pool.")
+        .unit(Unit.BYTES)
+        .labelNames("pool")
+        .callback(
+            makeCallback(poolBeans, MemoryPoolMXBean::getCollectionUsage, MemoryUsage::getInit))
+        .register(registry);
+  }
 
-    private Consumer<GaugeWithCallback.Callback> makeCallback(List<MemoryPoolMXBean> poolBeans, Function<MemoryPoolMXBean, MemoryUsage> memoryUsageFunc, Function<MemoryUsage, Long> valueFunc) {
-        return callback -> {
-            for (MemoryPoolMXBean pool : poolBeans) {
-                MemoryUsage poolUsage = memoryUsageFunc.apply(pool);
-                if (poolUsage != null) {
-                    callback.call(valueFunc.apply(poolUsage), pool.getName());
-                }
-            }
-        };
-    }
+  private Consumer<GaugeWithCallback.Callback> makeCallback(
+      List<MemoryPoolMXBean> poolBeans,
+      Function<MemoryPoolMXBean, MemoryUsage> memoryUsageFunc,
+      Function<MemoryUsage, Long> valueFunc) {
+    return callback -> {
+      for (MemoryPoolMXBean pool : poolBeans) {
+        MemoryUsage poolUsage = memoryUsageFunc.apply(pool);
+        if (poolUsage != null) {
+          callback.call(valueFunc.apply(poolUsage), pool.getName());
+        }
+      }
+    };
+  }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
-    }
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
-    }
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
 
-    public static class Builder {
+  public static class Builder {
 
-        private final PrometheusProperties config;
-        private MemoryMXBean memoryBean;
-        private List<MemoryPoolMXBean> poolBeans;
+    private final PrometheusProperties config;
+    private MemoryMXBean memoryBean;
+    private List<MemoryPoolMXBean> poolBeans;
 
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
+    private Builder(PrometheusProperties config) {
+      this.config = config;
+    }
 
-        /**
-         * Package private. For testing only.
-         */
-        Builder withMemoryBean(MemoryMXBean memoryBean) {
-            this.memoryBean = memoryBean;
-            return this;
-        }
+    /** Package private. For testing only. */
+    Builder withMemoryBean(MemoryMXBean memoryBean) {
+      this.memoryBean = memoryBean;
+      return this;
+    }
 
-        /**
-         * Package private. For testing only.
-         */
-        Builder withMemoryPoolBeans(List<MemoryPoolMXBean> memoryPoolBeans) {
-            this.poolBeans = memoryPoolBeans;
-            return this;
-        }
+    /** Package private. For testing only. */
+    Builder withMemoryPoolBeans(List<MemoryPoolMXBean> memoryPoolBeans) {
+      this.poolBeans = memoryPoolBeans;
+      return this;
+    }
 
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
+    }
 
-        public void register(PrometheusRegistry registry) {
-            MemoryMXBean memoryMXBean = this.memoryBean != null ? this.memoryBean : ManagementFactory.getMemoryMXBean();
-            List<MemoryPoolMXBean> poolBeans = this.poolBeans != null ? this.poolBeans : ManagementFactory.getMemoryPoolMXBeans();
-            new JvmMemoryMetrics(poolBeans, memoryMXBean, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      MemoryMXBean memoryMXBean =
+          this.memoryBean != null ? this.memoryBean : ManagementFactory.getMemoryMXBean();
+      List<MemoryPoolMXBean> poolBeans =
+          this.poolBeans != null ? this.poolBeans : ManagementFactory.getMemoryPoolMXBeans();
+      new JvmMemoryMetrics(poolBeans, memoryMXBean, config).register(registry);
     }
+  }
 }
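One detail worth noting in makeCallback above: MemoryPoolMXBean.getCollectionUsage() can return null for pools that do not support collection usage, which is why the callback skips null MemoryUsage values. A small plain-JDK sketch of the same guard (the class name is made up):

```java
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryUsage;

public class MemoryPoolUsageExample {
  public static void main(String[] args) {
    for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
      MemoryUsage usage = pool.getCollectionUsage();
      if (usage != null) { // same null guard as makeCallback(...) above
        System.out.printf("pool=%s collection_used=%d bytes%n", pool.getName(), usage.getUsed());
      }
    }
  }
}
```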
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetrics.java
index 522a2ef81..09aab0659 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetrics.java
@@ -5,28 +5,34 @@
 import io.prometheus.metrics.config.PrometheusProperties;
 import io.prometheus.metrics.core.metrics.Counter;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
-
-import javax.management.Notification;
-import javax.management.NotificationEmitter;
-import javax.management.NotificationListener;
-import javax.management.openmbean.CompositeData;
 import java.lang.management.GarbageCollectorMXBean;
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryUsage;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import javax.management.Notification;
+import javax.management.NotificationEmitter;
+import javax.management.NotificationListener;
+import javax.management.openmbean.CompositeData;
 
 /**
- * JVM memory allocation metrics. The {@link JvmMemoryPoolAllocationMetrics} are registered as part of the {@link JvmMetrics} like this:
+ * JVM memory allocation metrics. The {@link JvmMemoryPoolAllocationMetrics} are registered as part
+ * of the {@link JvmMetrics} like this:
+ *
  * <pre>{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
- * However, if you want only the {@link JvmMemoryPoolAllocationMetrics} you can also register them directly:
+ *
+ * However, if you want only the {@link JvmMemoryPoolAllocationMetrics} you can also register them
+ * directly:
+ *
  * <pre>{@code
- *   JvmMemoryAllocationMetrics.builder().register();
+ * JvmMemoryAllocationMetrics.builder().register();
  * }
+ *
  * Example metrics being exported:
+ *
  * <pre>
  * # HELP jvm_memory_pool_allocated_bytes_total Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.
  * # TYPE jvm_memory_pool_allocated_bytes_total counter
@@ -40,131 +46,136 @@
  */
 public class JvmMemoryPoolAllocationMetrics {
 
-    private static final String JVM_MEMORY_POOL_ALLOCATED_BYTES_TOTAL = "jvm_memory_pool_allocated_bytes_total";
+  private static final String JVM_MEMORY_POOL_ALLOCATED_BYTES_TOTAL =
+      "jvm_memory_pool_allocated_bytes_total";
+
+  private final PrometheusProperties config;
+  private final List<GarbageCollectorMXBean> garbageCollectorBeans;
+
+  private JvmMemoryPoolAllocationMetrics(
+      List<GarbageCollectorMXBean> garbageCollectorBeans, PrometheusProperties config) {
+    this.garbageCollectorBeans = garbageCollectorBeans;
+    this.config = config;
+  }
+
+  private void register(PrometheusRegistry registry) {
+
+    Counter allocatedCounter =
+        Counter.builder()
+            .name(JVM_MEMORY_POOL_ALLOCATED_BYTES_TOTAL)
+            .help(
+                "Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.")
+            .labelNames("pool")
+            .register(registry);
+
+    AllocationCountingNotificationListener listener =
+        new AllocationCountingNotificationListener(allocatedCounter);
+    for (GarbageCollectorMXBean garbageCollectorMXBean : garbageCollectorBeans) {
+      if (garbageCollectorMXBean instanceof NotificationEmitter) {
+        ((NotificationEmitter) garbageCollectorMXBean)
+            .addNotificationListener(listener, null, null);
+      }
+    }
+  }
 
-    private final PrometheusProperties config;
-    private final List<GarbageCollectorMXBean> garbageCollectorBeans;
+  static class AllocationCountingNotificationListener implements NotificationListener {
+
+    private final Map<String, Long> lastMemoryUsage = new HashMap<>();
+    private final Counter counter;
 
-    private JvmMemoryPoolAllocationMetrics(List<GarbageCollectorMXBean> garbageCollectorBeans, PrometheusProperties config) {
-        this.garbageCollectorBeans = garbageCollectorBeans;
-        this.config = config;
+    AllocationCountingNotificationListener(Counter counter) {
+      this.counter = counter;
     }
 
-    private void register(PrometheusRegistry registry) {
+    @Override
+    public synchronized void handleNotification(Notification notification, Object handback) {
+      GarbageCollectionNotificationInfo info =
+          GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData());
+      GcInfo gcInfo = info.getGcInfo();
+      Map<String, MemoryUsage> memoryUsageBeforeGc = gcInfo.getMemoryUsageBeforeGc();
+      Map<String, MemoryUsage> memoryUsageAfterGc = gcInfo.getMemoryUsageAfterGc();
+      for (Map.Entry<String, MemoryUsage> entry : memoryUsageBeforeGc.entrySet()) {
+        String memoryPool = entry.getKey();
+        long before = entry.getValue().getUsed();
+        long after = memoryUsageAfterGc.get(memoryPool).getUsed();
+        handleMemoryPool(memoryPool, before, after);
+      }
+    }
 
-        Counter allocatedCounter = Counter.builder()
-                .name(JVM_MEMORY_POOL_ALLOCATED_BYTES_TOTAL)
-                .help("Total bytes allocated in a given JVM memory pool. Only updated after GC, not continuously.")
-                .labelNames("pool")
-                .register(registry);
+    // Visible for testing
+    void handleMemoryPool(String memoryPool, long before, long after) {
+      /*
+       * Calculate increase in the memory pool by comparing memory used
+       * after last GC, before this GC, and after this GC.
+       * See ascii illustration below.
+       * Make sure to count only increases and ignore decreases.
+       * (Typically a pool will only increase between GCs or during GCs, not both.
+       * E.g. eden pools between GCs. Survivor and old generation pools during GCs.)
+       *
+       *                         |<-- diff1 -->|<-- diff2 -->|
+       * Timeline: |-- last GC --|             |---- GC -----|
+       *                      ___^__        ___^____      ___^___
+       * Mem. usage vars:    / last \      / before \    / after \
+       */
+
+      // Get last memory usage after GC and remember memory used after for next time
+      long last = getAndSet(lastMemoryUsage, memoryPool, after);
+      // Difference since last GC
+      long diff1 = before - last;
+      // Difference during this GC
+      long diff2 = after - before;
+      // Make sure to only count increases
+      if (diff1 < 0) {
+        diff1 = 0;
+      }
+      if (diff2 < 0) {
+        diff2 = 0;
+      }
+      long increase = diff1 + diff2;
+      if (increase > 0) {
+        counter.labelValues(memoryPool).inc(increase);
+      }
+    }
 
-        AllocationCountingNotificationListener listener = new AllocationCountingNotificationListener(allocatedCounter);
-        for (GarbageCollectorMXBean garbageCollectorMXBean : garbageCollectorBeans) {
-            if (garbageCollectorMXBean instanceof NotificationEmitter) {
-                ((NotificationEmitter) garbageCollectorMXBean).addNotificationListener(listener, null, null);
-            }
-        }
+    private static long getAndSet(Map<String, Long> map, String key, long value) {
+      Long last = map.put(key, value);
+      return last == null ? 0 : last;
     }
+  }
+
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
+
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
+
+  public static class Builder {
+
+    private final PrometheusProperties config;
+    private List<GarbageCollectorMXBean> garbageCollectorBeans;
 
-    static class AllocationCountingNotificationListener implements NotificationListener {
-
-        private final Map<String, Long> lastMemoryUsage = new HashMap<>();
-        private final Counter counter;
-
-        AllocationCountingNotificationListener(Counter counter) {
-            this.counter = counter;
-        }
-
-        @Override
-        public synchronized void handleNotification(Notification notification, Object handback) {
-            GarbageCollectionNotificationInfo info = GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData());
-            GcInfo gcInfo = info.getGcInfo();
-            Map<String, MemoryUsage> memoryUsageBeforeGc = gcInfo.getMemoryUsageBeforeGc();
-            Map<String, MemoryUsage> memoryUsageAfterGc = gcInfo.getMemoryUsageAfterGc();
-            for (Map.Entry<String, MemoryUsage> entry : memoryUsageBeforeGc.entrySet()) {
-                String memoryPool = entry.getKey();
-                long before = entry.getValue().getUsed();
-                long after = memoryUsageAfterGc.get(memoryPool).getUsed();
-                handleMemoryPool(memoryPool, before, after);
-            }
-        }
-
-        // Visible for testing
-        void handleMemoryPool(String memoryPool, long before, long after) {
-            /*
-             * Calculate increase in the memory pool by comparing memory used
-             * after last GC, before this GC, and after this GC.
-             * See ascii illustration below.
-             * Make sure to count only increases and ignore decreases.
-             * (Typically a pool will only increase between GCs or during GCs, not both.
-             * E.g. eden pools between GCs. Survivor and old generation pools during GCs.)
-             *
-             *                         |<-- diff1 -->|<-- diff2 -->|
-             * Timeline: |-- last GC --|             |---- GC -----|
-             *                      ___^__        ___^____      ___^___
-             * Mem. usage vars:    / last \      / before \    / after \
-             */
-
-            // Get last memory usage after GC and remember memory used after for next time
-            long last = getAndSet(lastMemoryUsage, memoryPool, after);
-            // Difference since last GC
-            long diff1 = before - last;
-            // Difference during this GC
-            long diff2 = after - before;
-            // Make sure to only count increases
-            if (diff1 < 0) {
-                diff1 = 0;
-            }
-            if (diff2 < 0) {
-                diff2 = 0;
-            }
-            long increase = diff1 + diff2;
-            if (increase > 0) {
-                counter.labelValues(memoryPool).inc(increase);
-            }
-        }
-
-        private static long getAndSet(Map<String, Long> map, String key, long value) {
-            Long last = map.put(key, value);
-            return last == null ? 0 : last;
-        }
+    private Builder(PrometheusProperties config) {
+      this.config = config;
     }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
+    /** Package private. For testing only. */
+    Builder withGarbageCollectorBeans(List<GarbageCollectorMXBean> garbageCollectorBeans) {
+      this.garbageCollectorBeans = garbageCollectorBeans;
+      return this;
     }
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
     }
 
-    public static class Builder {
-
-        private final PrometheusProperties config;
-        private List<GarbageCollectorMXBean> garbageCollectorBeans;
-
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder withGarbageCollectorBeans(List<GarbageCollectorMXBean> garbageCollectorBeans) {
-            this.garbageCollectorBeans = garbageCollectorBeans;
-            return this;
-        }
-
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
-
-        public void register(PrometheusRegistry registry) {
-            List<GarbageCollectorMXBean> garbageCollectorBeans = this.garbageCollectorBeans;
-            if (garbageCollectorBeans == null) {
-                garbageCollectorBeans = ManagementFactory.getGarbageCollectorMXBeans();
-            }
-            new JvmMemoryPoolAllocationMetrics(garbageCollectorBeans, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      List<GarbageCollectorMXBean> garbageCollectorBeans = this.garbageCollectorBeans;
+      if (garbageCollectorBeans == null) {
+        garbageCollectorBeans = ManagementFactory.getGarbageCollectorMXBeans();
+      }
+      new JvmMemoryPoolAllocationMetrics(garbageCollectorBeans, config).register(registry);
     }
+  }
 }
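The allocation counter above is driven by handleMemoryPool(...), which only counts increases: growth between GCs (diff1) and growth during the GC itself (diff2). A standalone sketch of that arithmetic with two worked cases; the class and method names are made up and the values are illustrative.

```java
public class AllocationIncreaseExample {

  // Same arithmetic as handleMemoryPool(...): decreases are ignored.
  static long increase(long last, long before, long after) {
    long diff1 = Math.max(before - last, 0); // growth since the previous GC
    long diff2 = Math.max(after - before, 0); // growth during this GC
    return diff1 + diff2;
  }

  public static void main(String[] args) {
    // Eden-like pool: fills up between GCs (0 -> 100), is emptied by the GC (-> 10).
    System.out.println(increase(0, 100, 10)); // prints 100
    // Old-gen-like pool: grows during the GC itself (50 -> 80).
    System.out.println(increase(50, 50, 80)); // prints 30
  }
}
```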
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMetrics.java
index 54d937688..0f5a56eee 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmMetrics.java
@@ -2,67 +2,70 @@
 
 import io.prometheus.metrics.config.PrometheusProperties;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
-
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
 /**
  * Registers all JVM metrics. Example usage:
+ *
  * <pre>{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
  */
 public class JvmMetrics {
 
-    private static final Set<PrometheusRegistry> REGISTERED = ConcurrentHashMap.newKeySet();
-
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
-    }
+  private static final Set<PrometheusRegistry> REGISTERED = ConcurrentHashMap.newKeySet();
 
-    // Note: Currently there is no configuration for JVM metrics, so it doesn't matter whether you pass a config or not.
-    // However, we will add config options in the future, like whether you want to use Prometheus naming conventions
-    // or OpenTelemetry semantic conventions for JVM metrics.
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
-    }
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
 
-    public static class Builder {
+  // Note: Currently there is no configuration for JVM metrics, so it doesn't matter whether you
+  // pass a config or not.
+  // However, we will add config options in the future, like whether you want to use Prometheus
+  // naming conventions
+  // or OpenTelemetry semantic conventions for JVM metrics.
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
 
-        private final PrometheusProperties config;
+  public static class Builder {
 
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
+    private final PrometheusProperties config;
 
-        /**
-         * Register all JVM metrics with the default registry.
-         * <p>
-         * It's safe to call this multiple times, only the first call will register the metrics, all subsequent calls
-         * will be ignored.
-         */
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
+    private Builder(PrometheusProperties config) {
+      this.config = config;
+    }
+
+    /**
+     * Register all JVM metrics with the default registry.
+     *
+     * <p>It's safe to call this multiple times, only the first call will register the metrics, all
+     * subsequent calls will be ignored.
+     */
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
+    }
 
-        /**
-         * Register all JVM metrics with the {@code registry}.
-         * <p>
-         * It's safe to call this multiple times, only the first call will register the metrics, all subsequent calls
-         * will be ignored.
-         */
-        public void register(PrometheusRegistry registry) {
-            if (REGISTERED.add(registry)) {
-                JvmThreadsMetrics.builder(config).register(registry);
-                JvmBufferPoolMetrics.builder(config).register(registry);
-                JvmClassLoadingMetrics.builder(config).register(registry);
-                JvmCompilationMetrics.builder(config).register(registry);
-                JvmGarbageCollectorMetrics.builder(config).register(registry);
-                JvmMemoryPoolAllocationMetrics.builder(config).register(registry);
-                JvmMemoryMetrics.builder(config).register(registry);
-                JvmNativeMemoryMetrics.builder(config).register(registry);
-                JvmRuntimeInfoMetric.builder(config).register(registry);
-                ProcessMetrics.builder(config).register(registry);
-            }
-        }
+    /**
+     * Register all JVM metrics with the {@code registry}.
+     *
+     * <p>It's safe to call this multiple times, only the first call will register the metrics, all
+     * subsequent calls will be ignored.
+     */
+    public void register(PrometheusRegistry registry) {
+      if (REGISTERED.add(registry)) {
+        JvmThreadsMetrics.builder(config).register(registry);
+        JvmBufferPoolMetrics.builder(config).register(registry);
+        JvmClassLoadingMetrics.builder(config).register(registry);
+        JvmCompilationMetrics.builder(config).register(registry);
+        JvmGarbageCollectorMetrics.builder(config).register(registry);
+        JvmMemoryPoolAllocationMetrics.builder(config).register(registry);
+        JvmMemoryMetrics.builder(config).register(registry);
+        JvmNativeMemoryMetrics.builder(config).register(registry);
+        JvmRuntimeInfoMetric.builder(config).register(registry);
+        ProcessMetrics.builder(config).register(registry);
+      }
     }
+  }
 }
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetrics.java
index 8e60a31d9..4f02823f1 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetrics.java
@@ -4,34 +4,41 @@
 import io.prometheus.metrics.core.metrics.GaugeWithCallback;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
 import io.prometheus.metrics.model.snapshots.Unit;
-
-import javax.management.InstanceNotFoundException;
-import javax.management.MBeanException;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.ReflectionException;
 import java.lang.management.ManagementFactory;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.Consumer;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import javax.management.InstanceNotFoundException;
+import javax.management.MBeanException;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
 
 /**
- * JVM native memory. JVM native memory tracking is disabled by default. You need to enable it by starting your JVM with this flag:
+ * JVM native memory. JVM native memory tracking is disabled by default. You need to enable it by
+ * starting your JVM with this flag:
+ *
  * <pre>-XX:NativeMemoryTracking=summary</pre>
- * When native memory tracking is disabled the metrics are not registered either.
- * <p>
- * <p>
- * The {@link JvmNativeMemoryMetrics} are registered as part of the {@link JvmMetrics} like this:
+ *
+ * <p>When native memory tracking is disabled the metrics are not registered either.
+ *
+ * <p>The {@link JvmNativeMemoryMetrics} are registered as part of the {@link JvmMetrics} like this:
+ *
  * <pre>{@code
+ * JvmMetrics.builder().register();
  * }
+ * * However, if you want only the {@link JvmNativeMemoryMetrics} you can also register them directly: + * *
{@code
- *   JvmNativeMemoryMetrics.builder().register();
+ * JvmNativeMemoryMetrics.builder().register();
  * }
+ * * Example metrics being exported: + * *
  * # HELP jvm_native_memory_committed_bytes Committed bytes of a given JVM. Committed memory represents the amount of memory the JVM is using right now.
  * # TYPE jvm_native_memory_committed_bytes gauge
@@ -79,13 +86,13 @@
  */
 public class JvmNativeMemoryMetrics {
   private static final String JVM_NATIVE_MEMORY_RESERVED_BYTES = "jvm_native_memory_reserved_bytes";
-  private static final String JVM_NATIVE_MEMORY_COMMITTED_BYTES = "jvm_native_memory_committed_bytes";
+  private static final String JVM_NATIVE_MEMORY_COMMITTED_BYTES =
+      "jvm_native_memory_committed_bytes";
 
-  private static final Pattern pattern = Pattern.compile("\\s*([A-Z][A-Za-z\\s]*[A-Za-z]+).*reserved=(\\d+), committed=(\\d+)");
+  private static final Pattern pattern =
+      Pattern.compile("\\s*([A-Z][A-Za-z\\s]*[A-Za-z]+).*reserved=(\\d+), committed=(\\d+)");
 
-  /**
-   * Package private. For testing only.
-   */
+  /** Package private. For testing only. */
   static final AtomicBoolean isEnabled = new AtomicBoolean(true);
 
   private final PrometheusProperties config;
@@ -102,7 +109,8 @@ private void register(PrometheusRegistry registry) {
     if (isEnabled.get()) {
       GaugeWithCallback.builder(config)
           .name(JVM_NATIVE_MEMORY_RESERVED_BYTES)
-          .help("Reserved bytes of a given JVM. Reserved memory represents the total amount of memory the JVM can potentially use.")
+          .help(
+              "Reserved bytes of a given JVM. Reserved memory represents the total amount of memory the JVM can potentially use.")
           .unit(Unit.BYTES)
           .labelNames("pool")
           .callback(makeCallback(true))
@@ -110,7 +118,8 @@ private void register(PrometheusRegistry registry) {
 
       GaugeWithCallback.builder(config)
           .name(JVM_NATIVE_MEMORY_COMMITTED_BYTES)
-          .help("Committed bytes of a given JVM. Committed memory represents the amount of memory the JVM is using right now.")
+          .help(
+              "Committed bytes of a given JVM. Committed memory represents the amount of memory the JVM is using right now.")
           .unit(Unit.BYTES)
           .labelNames("pool")
           .callback(makeCallback(false))
@@ -165,12 +174,17 @@ static class DefaultPlatformMBeanServerAdapter implements PlatformMBeanServerAda
     @Override
     public String vmNativeMemorySummaryInBytes() {
       try {
-        return (String) ManagementFactory.getPlatformMBeanServer().invoke(
-            new ObjectName("com.sun.management:type=DiagnosticCommand"),
-            "vmNativeMemory",
-            new Object[]{new String[]{"summary", "scale=B"}},
-            new String[]{"[Ljava.lang.String;"});
-      } catch (ReflectionException | MalformedObjectNameException | InstanceNotFoundException | MBeanException e) {
+        return (String)
+            ManagementFactory.getPlatformMBeanServer()
+                .invoke(
+                    new ObjectName("com.sun.management:type=DiagnosticCommand"),
+                    "vmNativeMemory",
+                    new Object[] {new String[] {"summary", "scale=B"}},
+                    new String[] {"[Ljava.lang.String;"});
+      } catch (ReflectionException
+          | MalformedObjectNameException
+          | InstanceNotFoundException
+          | MBeanException e) {
         throw new IllegalStateException("Native memory tracking is not enabled", e);
       }
     }
@@ -193,9 +207,7 @@ private Builder(PrometheusProperties config) {
       this(config, new DefaultPlatformMBeanServerAdapter());
     }
 
-    /**
-     * Package private. For testing only.
-     */
+    /** Package private. For testing only. */
     Builder(PrometheusProperties config, PlatformMBeanServerAdapter adapter) {
       this.config = config;
       this.adapter = adapter;
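For illustration, the pattern above is applied to the output of the DiagnosticCommand vmNativeMemory call with "summary, scale=B". A sketch showing how one such line would be parsed; the sample line and class name are made up, the regex is the one from this diff.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class NativeMemoryParseExample {
  public static void main(String[] args) {
    Pattern pattern =
        Pattern.compile("\\s*([A-Z][A-Za-z\\s]*[A-Za-z]+).*reserved=(\\d+), committed=(\\d+)");
    // Illustrative line in the "summary, scale=B" format:
    String line = "-                 Java Heap (reserved=4292870144, committed=268435456)";
    Matcher matcher = pattern.matcher(line);
    if (matcher.find()) {
      System.out.println("pool=" + matcher.group(1)
          + " reserved_bytes=" + matcher.group(2)
          + " committed_bytes=" + matcher.group(3));
    }
  }
}
```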
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetric.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetric.java
index c7024b14e..333395425 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetric.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetric.java
@@ -5,13 +5,17 @@
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
 
 /**
- * JVM Runtime Info metric. The {@link JvmRuntimeInfoMetric} is registered as part of the {@link JvmMetrics} like this:
+ * JVM Runtime Info metric. The {@link JvmRuntimeInfoMetric} is registered as part of the {@link
+ * JvmMetrics} like this:
+ *
  * <pre>{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
+ *
  * However, if you want only the {@link JvmRuntimeInfoMetric} you can also register them directly:
+ *
  * <pre>{@code
- *   JvmRuntimeInfoMetric.builder().register();
+ * JvmRuntimeInfoMetric.builder().register();
  * }
+ *
  * <pre>
@@ -22,83 +26,84 @@
  */
 public class JvmRuntimeInfoMetric {
 
-    private static final String JVM_RUNTIME_INFO = "jvm_runtime_info";
+  private static final String JVM_RUNTIME_INFO = "jvm_runtime_info";
+
+  private final PrometheusProperties config;
+  private final String version;
+  private final String vendor;
+  private final String runtime;
+
+  private JvmRuntimeInfoMetric(
+      String version, String vendor, String runtime, PrometheusProperties config) {
+    this.config = config;
+    this.version = version;
+    this.vendor = vendor;
+    this.runtime = runtime;
+  }
+
+  private void register(PrometheusRegistry registry) {
+
+    Info jvmInfo =
+        Info.builder(config)
+            .name(JVM_RUNTIME_INFO)
+            .help("JVM runtime info")
+            .labelNames("version", "vendor", "runtime")
+            .register(registry);
+
+    jvmInfo.setLabelValues(version, vendor, runtime);
+  }
+
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
+
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
+
+  public static class Builder {
 
     private final PrometheusProperties config;
-    private final String version;
-    private final String vendor;
-    private final String runtime;
-
-    private JvmRuntimeInfoMetric(String version, String vendor, String runtime, PrometheusProperties config) {
-        this.config = config;
-        this.version = version;
-        this.vendor = vendor;
-        this.runtime = runtime;
-    }
+    private String version;
+    private String vendor;
+    private String runtime;
 
-    private void register(PrometheusRegistry registry) {
+    private Builder(PrometheusProperties config) {
+      this.config = config;
+    }
 
-        Info jvmInfo = Info.builder(config)
-                .name(JVM_RUNTIME_INFO)
-                .help("JVM runtime info")
-                .labelNames("version", "vendor", "runtime")
-                .register(registry);
+    /** Package private. For testing only. */
+    Builder version(String version) {
+      this.version = version;
+      return this;
+    }
 
-        jvmInfo.setLabelValues(version, vendor, runtime);
+    /** Package private. For testing only. */
+    Builder vendor(String vendor) {
+      this.vendor = vendor;
+      return this;
     }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
+    /** Package private. For testing only. */
+    Builder runtime(String runtime) {
+      this.runtime = runtime;
+      return this;
     }
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
     }
 
-    public static class Builder {
-
-        private final PrometheusProperties config;
-        private String version;
-        private String vendor;
-        private String runtime;
-
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder version(String version) {
-            this.version = version;
-            return this;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder vendor(String vendor) {
-            this.vendor = vendor;
-            return this;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder runtime(String runtime) {
-            this.runtime = runtime;
-            return this;
-        }
-
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
-
-        public void register(PrometheusRegistry registry) {
-            String version = this.version != null ? this.version : System.getProperty("java.runtime.version", "unknown");
-            String vendor = this.vendor != null ? this.vendor : System.getProperty("java.vm.vendor", "unknown");
-            String runtime = this.runtime != null ? this.runtime : System.getProperty("java.runtime.name", "unknown");
-            new JvmRuntimeInfoMetric(version, vendor, runtime, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      String version =
+          this.version != null
+              ? this.version
+              : System.getProperty("java.runtime.version", "unknown");
+      String vendor =
+          this.vendor != null ? this.vendor : System.getProperty("java.vm.vendor", "unknown");
+      String runtime =
+          this.runtime != null ? this.runtime : System.getProperty("java.runtime.name", "unknown");
+      new JvmRuntimeInfoMetric(version, vendor, runtime, config).register(registry);
     }
+  }
 }
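
A minimal usage sketch, assuming only the API that appears in this patch (JvmRuntimeInfoMetric, PrometheusRegistry, MetricSnapshots): the reformatted builder can register the info metric into a dedicated registry instead of PrometheusRegistry.defaultRegistry. The class name RuntimeInfoExample is made up for illustration.

  import io.prometheus.metrics.instrumentation.jvm.JvmRuntimeInfoMetric;
  import io.prometheus.metrics.model.registry.PrometheusRegistry;
  import io.prometheus.metrics.model.snapshots.MetricSnapshots;

  public class RuntimeInfoExample {
    public static void main(String[] args) {
      // Register only jvm_runtime_info into a private registry.
      PrometheusRegistry registry = new PrometheusRegistry();
      JvmRuntimeInfoMetric.builder().register(registry);

      // The info metric carries version, vendor and runtime labels, filled from the
      // java.runtime.version, java.vm.vendor and java.runtime.name system properties.
      MetricSnapshots snapshots = registry.scrape();
      System.out.println(snapshots.size() + " metric snapshot(s) collected");
    }
  }
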
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetrics.java
index d38c50a54..4027551ac 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetrics.java
@@ -4,7 +4,6 @@
 import io.prometheus.metrics.core.metrics.CounterWithCallback;
 import io.prometheus.metrics.core.metrics.GaugeWithCallback;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
-
 import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
@@ -13,15 +12,21 @@
 import java.util.Map;
 
 /**
- * JVM Thread metrics. The {@link JvmThreadsMetrics} are registered as part of the {@link JvmMetrics} like this:
+ * JVM Thread metrics. The {@link JvmThreadsMetrics} are registered as part of the {@link
+ * JvmMetrics} like this:
+ *
  * 
{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
+ *
  * However, if you want only the {@link JvmThreadsMetrics} you can also register them directly:
+ *
  * <pre>{@code
- *   JvmThreadMetrics.builder().register();
+ * JvmThreadMetrics.builder().register();
  * }
+ *
  * Example metrics being exported:
+ *
  * <pre>
  * # HELP jvm_threads_current Current thread count of a JVM
  * # TYPE jvm_threads_current gauge
@@ -54,162 +59,165 @@
  */
 public class JvmThreadsMetrics {
 
-    private static final String UNKNOWN = "UNKNOWN";
-    private static final String JVM_THREADS_STATE = "jvm_threads_state";
-    private static final String JVM_THREADS_CURRENT = "jvm_threads_current";
-    private static final String JVM_THREADS_DAEMON = "jvm_threads_daemon";
-    private static final String JVM_THREADS_PEAK = "jvm_threads_peak";
-    private static final String JVM_THREADS_STARTED_TOTAL = "jvm_threads_started_total";
-    private static final String JVM_THREADS_DEADLOCKED = "jvm_threads_deadlocked";
-    private static final String JVM_THREADS_DEADLOCKED_MONITOR = "jvm_threads_deadlocked_monitor";
+  private static final String UNKNOWN = "UNKNOWN";
+  private static final String JVM_THREADS_STATE = "jvm_threads_state";
+  private static final String JVM_THREADS_CURRENT = "jvm_threads_current";
+  private static final String JVM_THREADS_DAEMON = "jvm_threads_daemon";
+  private static final String JVM_THREADS_PEAK = "jvm_threads_peak";
+  private static final String JVM_THREADS_STARTED_TOTAL = "jvm_threads_started_total";
+  private static final String JVM_THREADS_DEADLOCKED = "jvm_threads_deadlocked";
+  private static final String JVM_THREADS_DEADLOCKED_MONITOR = "jvm_threads_deadlocked_monitor";
+
+  private final PrometheusProperties config;
+  private final ThreadMXBean threadBean;
+  private final boolean isNativeImage;
+
+  private JvmThreadsMetrics(
+      boolean isNativeImage, ThreadMXBean threadBean, PrometheusProperties config) {
+    this.config = config;
+    this.threadBean = threadBean;
+    this.isNativeImage = isNativeImage;
+  }
+
+  private void register(PrometheusRegistry registry) {
+
+    GaugeWithCallback.builder(config)
+        .name(JVM_THREADS_CURRENT)
+        .help("Current thread count of a JVM")
+        .callback(callback -> callback.call(threadBean.getThreadCount()))
+        .register(registry);
+
+    GaugeWithCallback.builder(config)
+        .name(JVM_THREADS_DAEMON)
+        .help("Daemon thread count of a JVM")
+        .callback(callback -> callback.call(threadBean.getDaemonThreadCount()))
+        .register(registry);
+
+    GaugeWithCallback.builder(config)
+        .name(JVM_THREADS_PEAK)
+        .help("Peak thread count of a JVM")
+        .callback(callback -> callback.call(threadBean.getPeakThreadCount()))
+        .register(registry);
+
+    CounterWithCallback.builder(config)
+        .name(JVM_THREADS_STARTED_TOTAL)
+        .help("Started thread count of a JVM")
+        .callback(callback -> callback.call(threadBean.getTotalStartedThreadCount()))
+        .register(registry);
+
+    if (!isNativeImage) {
+      GaugeWithCallback.builder(config)
+          .name(JVM_THREADS_DEADLOCKED)
+          .help(
+              "Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers")
+          .callback(
+              callback -> callback.call(nullSafeArrayLength(threadBean.findDeadlockedThreads())))
+          .register(registry);
+
+      GaugeWithCallback.builder(config)
+          .name(JVM_THREADS_DEADLOCKED_MONITOR)
+          .help("Cycles of JVM-threads that are in deadlock waiting to acquire object monitors")
+          .callback(
+              callback ->
+                  callback.call(nullSafeArrayLength(threadBean.findMonitorDeadlockedThreads())))
+          .register(registry);
+
+      GaugeWithCallback.builder(config)
+          .name(JVM_THREADS_STATE)
+          .help("Current count of threads by state")
+          .labelNames("state")
+          .callback(
+              callback -> {
+                Map<String, Integer> threadStateCounts = getThreadStateCountMap(threadBean);
+                for (Map.Entry<String, Integer> entry : threadStateCounts.entrySet()) {
+                  callback.call(entry.getValue(), entry.getKey());
+                }
+              })
+          .register(registry);
+    }
+  }
+
+  private Map<String, Integer> getThreadStateCountMap(ThreadMXBean threadBean) {
+    long[] threadIds = threadBean.getAllThreadIds();
 
-    private final PrometheusProperties config;
-    private final ThreadMXBean threadBean;
-    private final boolean isNativeImage;
+    // Code to remove any thread id values <= 0
+    int writePos = 0;
+    for (int i = 0; i < threadIds.length; i++) {
+      if (threadIds[i] > 0) {
+        threadIds[writePos++] = threadIds[i];
+      }
+    }
+
+    int numberOfInvalidThreadIds = threadIds.length - writePos;
+    threadIds = Arrays.copyOf(threadIds, writePos);
 
-    private JvmThreadsMetrics(boolean isNativeImage, ThreadMXBean threadBean, PrometheusProperties config) {
-        this.config = config;
-        this.threadBean = threadBean;
-        this.isNativeImage = isNativeImage;
+    // Get thread information without computing any stack traces
+    ThreadInfo[] allThreads = threadBean.getThreadInfo(threadIds, 0);
+
+    // Initialize the map with all thread states
+    HashMap<String, Integer> threadCounts = new HashMap<>();
+    for (Thread.State state : Thread.State.values()) {
+      threadCounts.put(state.name(), 0);
     }
 
-    private void register(PrometheusRegistry registry) {
-
-        GaugeWithCallback.builder(config)
-                .name(JVM_THREADS_CURRENT)
-                .help("Current thread count of a JVM")
-                .callback(callback -> callback.call(threadBean.getThreadCount()))
-                .register(registry);
-
-        GaugeWithCallback.builder(config)
-                .name(JVM_THREADS_DAEMON)
-                .help("Daemon thread count of a JVM")
-                .callback(callback -> callback.call(threadBean.getDaemonThreadCount()))
-                .register(registry);
-
-        GaugeWithCallback.builder(config)
-                .name(JVM_THREADS_PEAK)
-                .help("Peak thread count of a JVM")
-                .callback(callback -> callback.call(threadBean.getPeakThreadCount()))
-                .register(registry);
-
-        CounterWithCallback.builder(config)
-                .name(JVM_THREADS_STARTED_TOTAL)
-                .help("Started thread count of a JVM")
-                .callback(callback -> callback.call(threadBean.getTotalStartedThreadCount()))
-                .register(registry);
-
-        if (!isNativeImage) {
-            GaugeWithCallback.builder(config)
-                    .name(JVM_THREADS_DEADLOCKED)
-                    .help("Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers")
-                    .callback(callback -> callback.call(nullSafeArrayLength(threadBean.findDeadlockedThreads())))
-                    .register(registry);
-
-            GaugeWithCallback.builder(config)
-                    .name(JVM_THREADS_DEADLOCKED_MONITOR)
-                    .help("Cycles of JVM-threads that are in deadlock waiting to acquire object monitors")
-                    .callback(callback -> callback.call(nullSafeArrayLength(threadBean.findMonitorDeadlockedThreads())))
-                    .register(registry);
-
-
-            GaugeWithCallback.builder(config)
-                    .name(JVM_THREADS_STATE)
-                    .help("Current count of threads by state")
-                    .labelNames("state")
-                    .callback(callback -> {
-                        Map<String, Integer> threadStateCounts = getThreadStateCountMap(threadBean);
-                        for (Map.Entry<String, Integer> entry : threadStateCounts.entrySet()) {
-                            callback.call(entry.getValue(), entry.getKey());
-                        }
-                    })
-                    .register(registry);
-        }
+    // Collect the actual thread counts
+    for (ThreadInfo curThread : allThreads) {
+      if (curThread != null) {
+        Thread.State threadState = curThread.getThreadState();
+        threadCounts.put(threadState.name(), threadCounts.get(threadState.name()) + 1);
+      }
     }
 
-    private Map<String, Integer> getThreadStateCountMap(ThreadMXBean threadBean) {
-        long[] threadIds = threadBean.getAllThreadIds();
-
-        // Code to remove any thread id values <= 0
-        int writePos = 0;
-        for (int i = 0; i < threadIds.length; i++) {
-            if (threadIds[i] > 0) {
-                threadIds[writePos++] = threadIds[i];
-            }
-        }
-
-        int numberOfInvalidThreadIds = threadIds.length - writePos;
-        threadIds = Arrays.copyOf(threadIds, writePos);
-
-        // Get thread information without computing any stack traces
-        ThreadInfo[] allThreads = threadBean.getThreadInfo(threadIds, 0);
-
-        // Initialize the map with all thread states
-        HashMap<String, Integer> threadCounts = new HashMap<>();
-        for (Thread.State state : Thread.State.values()) {
-            threadCounts.put(state.name(), 0);
-        }
-
-        // Collect the actual thread counts
-        for (ThreadInfo curThread : allThreads) {
-            if (curThread != null) {
-                Thread.State threadState = curThread.getThreadState();
-                threadCounts.put(threadState.name(), threadCounts.get(threadState.name()) + 1);
-            }
-        }
-
-        // Add the thread count for invalid thread ids
-        threadCounts.put(UNKNOWN, numberOfInvalidThreadIds);
-
-        return threadCounts;
+    // Add the thread count for invalid thread ids
+    threadCounts.put(UNKNOWN, numberOfInvalidThreadIds);
+
+    return threadCounts;
+  }
+
+  private double nullSafeArrayLength(long[] array) {
+    return null == array ? 0 : array.length;
+  }
+
+  public static Builder builder() {
+    return new Builder(PrometheusProperties.get());
+  }
+
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
+
+  public static class Builder {
+
+    private final PrometheusProperties config;
+    private Boolean isNativeImage;
+    private ThreadMXBean threadBean;
+
+    private Builder(PrometheusProperties config) {
+      this.config = config;
     }
 
-    private double nullSafeArrayLength(long[] array) {
-        return null == array ? 0 : array.length;
+    /** Package private. For testing only. */
+    Builder threadBean(ThreadMXBean threadBean) {
+      this.threadBean = threadBean;
+      return this;
     }
 
-    public static Builder builder() {
-        return new Builder(PrometheusProperties.get());
+    /** Package private. For testing only. */
+    Builder isNativeImage(boolean isNativeImage) {
+      this.isNativeImage = isNativeImage;
+      return this;
     }
 
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
+    public void register() {
+      register(PrometheusRegistry.defaultRegistry);
     }
 
-    public static class Builder {
-
-        private final PrometheusProperties config;
-        private Boolean isNativeImage;
-        private ThreadMXBean threadBean;
-
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder threadBean(ThreadMXBean threadBean) {
-            this.threadBean = threadBean;
-            return this;
-        }
-
-        /**
-         * Package private. For testing only.
-         */
-        Builder isNativeImage(boolean isNativeImage) {
-            this.isNativeImage = isNativeImage;
-            return this;
-        }
-
-        public void register() {
-            register(PrometheusRegistry.defaultRegistry);
-        }
-
-        public void register(PrometheusRegistry registry) {
-            ThreadMXBean threadBean = this.threadBean != null ? this.threadBean : ManagementFactory.getThreadMXBean();
-            boolean isNativeImage = this.isNativeImage != null ? this.isNativeImage : NativeImageChecker.isGraalVmNativeImage;
-            new JvmThreadsMetrics(isNativeImage, threadBean, config).register(registry);
-        }
+    public void register(PrometheusRegistry registry) {
+      ThreadMXBean threadBean =
+          this.threadBean != null ? this.threadBean : ManagementFactory.getThreadMXBean();
+      boolean isNativeImage =
+          this.isNativeImage != null ? this.isNativeImage : NativeImageChecker.isGraalVmNativeImage;
+      new JvmThreadsMetrics(isNativeImage, threadBean, config).register(registry);
     }
+  }
 }
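
The getThreadStateCountMap logic above is plain ThreadMXBean usage. Below is a minimal standalone sketch of the same thread-state counting idea, kept outside the metrics API so it can be run directly; the class name is invented for illustration, and it uses an EnumMap instead of the String-keyed HashMap used by the collector.

  import java.lang.management.ManagementFactory;
  import java.lang.management.ThreadInfo;
  import java.lang.management.ThreadMXBean;
  import java.util.EnumMap;
  import java.util.Map;

  public class ThreadStateCountDemo {
    public static void main(String[] args) {
      ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
      // Ask for thread info without stack traces (maxDepth = 0), as the collector above does.
      ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 0);
      Map<Thread.State, Integer> counts = new EnumMap<>(Thread.State.class);
      for (Thread.State state : Thread.State.values()) {
        counts.put(state, 0);
      }
      for (ThreadInfo info : infos) {
        if (info != null) { // null entries correspond to threads that have since terminated
          counts.merge(info.getThreadState(), 1, Integer::sum);
        }
      }
      counts.forEach((state, count) -> System.out.println(state + " " + count));
    }
  }
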
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/NativeImageChecker.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/NativeImageChecker.java
index 11e3efb2d..e75350bce 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/NativeImageChecker.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/NativeImageChecker.java
@@ -1,13 +1,15 @@
 package io.prometheus.metrics.instrumentation.jvm;
 
 /**
- * Contains utilities to check if we are running inside or building for native image. Default behavior is to check
- * if specific for graalvm runtime property is present. For additional optimizations it is possible to do add
- * "--initialize-at-build-time=io.prometheus.client.hotspot.NativeImageChecker" to graalvm native image build command and
- * the native image will be identified during build time.
+ * Contains utilities to check if we are running inside or building for native image. Default
+ * behavior is to check if specific for graalvm runtime property is present. For additional
+ * optimizations it is possible to do add
+ * "--initialize-at-build-time=io.prometheus.client.hotspot.NativeImageChecker" to graalvm native
+ * image build command and the native image will be identified during build time.
  */
 class NativeImageChecker {
-    static final boolean isGraalVmNativeImage = System.getProperty("org.graalvm.nativeimage.imagecode") != null;
+  static final boolean isGraalVmNativeImage =
+      System.getProperty("org.graalvm.nativeimage.imagecode") != null;
 
-    private NativeImageChecker() {}
+  private NativeImageChecker() {}
 }
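
For completeness, the runtime check above boils down to a single system-property lookup; a tiny standalone sketch is shown below (hypothetical class name). As the Javadoc notes, builds can additionally pass --initialize-at-build-time for this class to the native-image command so the flag is resolved at image build time.

  public class NativeImageCheckDemo {
    public static void main(String[] args) {
      // GraalVM sets org.graalvm.nativeimage.imagecode while building or running a native image.
      boolean isNativeImage = System.getProperty("org.graalvm.nativeimage.imagecode") != null;
      System.out.println("GraalVM native image: " + isNativeImage);
    }
  }
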
diff --git a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetrics.java b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetrics.java
index bca58942a..f875c104f 100644
--- a/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetrics.java
+++ b/prometheus-metrics-instrumentation-jvm/src/main/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetrics.java
@@ -5,7 +5,6 @@
 import io.prometheus.metrics.core.metrics.GaugeWithCallback;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
 import io.prometheus.metrics.model.snapshots.Unit;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileReader;
@@ -18,23 +17,31 @@
 
 /**
  * Process metrics.
- * 

- * These metrics are defined in the process metrics - * section of the Prometheus client library documentation, and they are implemented across client libraries in multiple programming languages. - *

- * Technically, some of them are OS-level metrics and not JVM-level metrics. However, I'm still putting them - * in the {@code prometheus-metrics-instrumentation-jvm} module, because first it seems overkill to create a separate - * Maven module just for the {@link ProcessMetrics} class, and seconds some of these metrics are coming from the JVM via JMX anyway. - *

- * The {@link ProcessMetrics} are registered as part of the {@link JvmMetrics} like this: + * + *

These metrics are defined in the process + * metrics section of the Prometheus client library documentation, and they are implemented + * across client libraries in multiple programming languages. + * + *

Technically, some of them are OS-level metrics and not JVM-level metrics. However, I'm still + * putting them in the {@code prometheus-metrics-instrumentation-jvm} module, because first it seems + * overkill to create a separate Maven module just for the {@link ProcessMetrics} class, and seconds + * some of these metrics are coming from the JVM via JMX anyway. + * + *

The {@link ProcessMetrics} are registered as part of the {@link JvmMetrics} like this: + * *

{@code
- *   JvmMetrics.builder().register();
+ * JvmMetrics.builder().register();
  * }
+ *
  * However, if you want only the {@link ProcessMetrics} you can also register them directly:
+ *
  * <pre>{@code
- *   ProcessMetrics.builder().register();
+ * ProcessMetrics.builder().register();
  * }
+ *
  * Example metrics being exported:
+ *
  * <pre>
  * # HELP process_cpu_seconds_total Total user and system CPU time spent in seconds.
  * # TYPE process_cpu_seconds_total counter
@@ -58,228 +65,237 @@
  */
 public class ProcessMetrics {
 
-    private static final String PROCESS_CPU_SECONDS_TOTAL = "process_cpu_seconds_total";
-    private static final String PROCESS_START_TIME_SECONDS = "process_start_time_seconds";
-    private static final String PROCESS_OPEN_FDS = "process_open_fds";
-    private static final String PROCESS_MAX_FDS = "process_max_fds";
-    private static final String PROCESS_VIRTUAL_MEMORY_BYTES = "process_virtual_memory_bytes";
-    private static final String PROCESS_RESIDENT_MEMORY_BYTES = "process_resident_memory_bytes";
+  private static final String PROCESS_CPU_SECONDS_TOTAL = "process_cpu_seconds_total";
+  private static final String PROCESS_START_TIME_SECONDS = "process_start_time_seconds";
+  private static final String PROCESS_OPEN_FDS = "process_open_fds";
+  private static final String PROCESS_MAX_FDS = "process_max_fds";
+  private static final String PROCESS_VIRTUAL_MEMORY_BYTES = "process_virtual_memory_bytes";
+  private static final String PROCESS_RESIDENT_MEMORY_BYTES = "process_resident_memory_bytes";
 
-    private static final File PROC_SELF_STATUS = new File("/proc/self/status");
+  private static final File PROC_SELF_STATUS = new File("/proc/self/status");
 
-    private final PrometheusProperties config;
-    private final OperatingSystemMXBean osBean;
-    private final RuntimeMXBean runtimeBean;
-    private final Grepper grepper;
-    private final boolean linux;
+  private final PrometheusProperties config;
+  private final OperatingSystemMXBean osBean;
+  private final RuntimeMXBean runtimeBean;
+  private final Grepper grepper;
+  private final boolean linux;
 
-    private ProcessMetrics(OperatingSystemMXBean osBean, RuntimeMXBean runtimeBean, Grepper grepper, PrometheusProperties config) {
-        this.osBean = osBean;
-        this.runtimeBean = runtimeBean;
-        this.grepper = grepper;
-        this.config = config;
-        this.linux = PROC_SELF_STATUS.canRead();
-    }
+  private ProcessMetrics(
+      OperatingSystemMXBean osBean,
+      RuntimeMXBean runtimeBean,
+      Grepper grepper,
+      PrometheusProperties config) {
+    this.osBean = osBean;
+    this.runtimeBean = runtimeBean;
+    this.grepper = grepper;
+    this.config = config;
+    this.linux = PROC_SELF_STATUS.canRead();
+  }
 
-    private void register(PrometheusRegistry registry) {
+  private void register(PrometheusRegistry registry) {
 
-        CounterWithCallback.builder(config)
-                .name(PROCESS_CPU_SECONDS_TOTAL)
-                .help("Total user and system CPU time spent in seconds.")
-                .unit(Unit.SECONDS)
-                .callback(callback -> {
-                    try {
-                        // There exist at least 2 similar but unrelated UnixOperatingSystemMXBean interfaces, in
-                        // com.sun.management and com.ibm.lang.management. Hence use reflection and recursively go
-                        // through implemented interfaces until the method can be made accessible and invoked.
-                        Long processCpuTime = callLongGetter("getProcessCpuTime", osBean);
-                        if (processCpuTime != null) {
-                            callback.call(Unit.nanosToSeconds(processCpuTime));
-                        }
-                    } catch (Exception ignored) {
-                    }
-                })
-                .register(registry);
+    CounterWithCallback.builder(config)
+        .name(PROCESS_CPU_SECONDS_TOTAL)
+        .help("Total user and system CPU time spent in seconds.")
+        .unit(Unit.SECONDS)
+        .callback(
+            callback -> {
+              try {
+                // There exist at least 2 similar but unrelated UnixOperatingSystemMXBean
+                // interfaces, in
+                // com.sun.management and com.ibm.lang.management. Hence use reflection and
+                // recursively go
+                // through implemented interfaces until the method can be made accessible and
+                // invoked.
+                Long processCpuTime = callLongGetter("getProcessCpuTime", osBean);
+                if (processCpuTime != null) {
+                  callback.call(Unit.nanosToSeconds(processCpuTime));
+                }
+              } catch (Exception ignored) {
+              }
+            })
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(PROCESS_START_TIME_SECONDS)
-                .help("Start time of the process since unix epoch in seconds.")
-                .unit(Unit.SECONDS)
-                .callback(callback -> callback.call(Unit.millisToSeconds(runtimeBean.getStartTime())))
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(PROCESS_START_TIME_SECONDS)
+        .help("Start time of the process since unix epoch in seconds.")
+        .unit(Unit.SECONDS)
+        .callback(callback -> callback.call(Unit.millisToSeconds(runtimeBean.getStartTime())))
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(PROCESS_OPEN_FDS)
-                .help("Number of open file descriptors.")
-                .callback(callback -> {
-                    try {
-                        Long openFds = callLongGetter("getOpenFileDescriptorCount", osBean);
-                        if (openFds != null) {
-                            callback.call(openFds);
-                        }
-                    } catch (Exception ignored) {
-                    }
-                })
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(PROCESS_OPEN_FDS)
+        .help("Number of open file descriptors.")
+        .callback(
+            callback -> {
+              try {
+                Long openFds = callLongGetter("getOpenFileDescriptorCount", osBean);
+                if (openFds != null) {
+                  callback.call(openFds);
+                }
+              } catch (Exception ignored) {
+              }
+            })
+        .register(registry);
 
-        GaugeWithCallback.builder(config)
-                .name(PROCESS_MAX_FDS)
-                .help("Maximum number of open file descriptors.")
-                .callback(callback -> {
-                    try {
-                        Long maxFds = callLongGetter("getMaxFileDescriptorCount", osBean);
-                        if (maxFds != null) {
-                            callback.call(maxFds);
-                        }
-                    } catch (Exception ignored) {
-                    }
-                })
-                .register(registry);
+    GaugeWithCallback.builder(config)
+        .name(PROCESS_MAX_FDS)
+        .help("Maximum number of open file descriptors.")
+        .callback(
+            callback -> {
+              try {
+                Long maxFds = callLongGetter("getMaxFileDescriptorCount", osBean);
+                if (maxFds != null) {
+                  callback.call(maxFds);
+                }
+              } catch (Exception ignored) {
+              }
+            })
+        .register(registry);
 
-        if (linux) {
+    if (linux) {
 
-            GaugeWithCallback.builder(config)
-                    .name(PROCESS_VIRTUAL_MEMORY_BYTES)
-                    .help("Virtual memory size in bytes.")
-                    .unit(Unit.BYTES)
-                    .callback(callback -> {
-                        try {
-                            String line = grepper.lineStartingWith(PROC_SELF_STATUS, "VmSize:");
-                            callback.call(Unit.kiloBytesToBytes(Double.parseDouble(line.split("\\s+")[1])));
-                        } catch (Exception ignored) {
-                        }
-                    })
-                    .register(registry);
+      GaugeWithCallback.builder(config)
+          .name(PROCESS_VIRTUAL_MEMORY_BYTES)
+          .help("Virtual memory size in bytes.")
+          .unit(Unit.BYTES)
+          .callback(
+              callback -> {
+                try {
+                  String line = grepper.lineStartingWith(PROC_SELF_STATUS, "VmSize:");
+                  callback.call(Unit.kiloBytesToBytes(Double.parseDouble(line.split("\\s+")[1])));
+                } catch (Exception ignored) {
+                }
+              })
+          .register(registry);
 
-            GaugeWithCallback.builder(config)
-                    .name(PROCESS_RESIDENT_MEMORY_BYTES)
-                    .help("Resident memory size in bytes.")
-                    .unit(Unit.BYTES)
-                    .callback(callback -> {
-                        try {
-                            String line = grepper.lineStartingWith(PROC_SELF_STATUS, "VmRSS:");
-                            callback.call(Unit.kiloBytesToBytes(Double.parseDouble(line.split("\\s+")[1])));
-                        } catch (Exception ignored) {
-                        }
-                    })
-                    .register(registry);
-        }
+      GaugeWithCallback.builder(config)
+          .name(PROCESS_RESIDENT_MEMORY_BYTES)
+          .help("Resident memory size in bytes.")
+          .unit(Unit.BYTES)
+          .callback(
+              callback -> {
+                try {
+                  String line = grepper.lineStartingWith(PROC_SELF_STATUS, "VmRSS:");
+                  callback.call(Unit.kiloBytesToBytes(Double.parseDouble(line.split("\\s+")[1])));
+                } catch (Exception ignored) {
+                }
+              })
+          .register(registry);
     }
+  }
+
+  private Long callLongGetter(String getterName, Object obj)
+      throws NoSuchMethodException, InvocationTargetException {
+    return callLongGetter(obj.getClass().getMethod(getterName), obj);
+  }
 
-    private Long callLongGetter(String getterName, Object obj) throws NoSuchMethodException, InvocationTargetException {
-        return callLongGetter(obj.getClass().getMethod(getterName), obj);
+  /**
+   * Attempts to call a method either directly or via one of the implemented interfaces.
+   *
+   * 

A Method object refers to a specific method declared in a specific class. The first + * invocation might happen with method == SomeConcreteClass.publicLongGetter() and will fail if + * SomeConcreteClass is not public. We then recurse over all interfaces implemented by + * SomeConcreteClass (or extended by those interfaces and so on) until we eventually invoke + * callMethod() with method == SomePublicInterface.publicLongGetter(), which will then succeed. + * + *

There is a built-in assumption that the method will never return null (or, equivalently, + * that it returns the primitive data type, i.e. {@code long} rather than {@code Long}). If this + * assumption doesn't hold, the method might be called repeatedly and the returned value will be + * the one produced by the last call. + */ + private Long callLongGetter(Method method, Object obj) throws InvocationTargetException { + try { + return (Long) method.invoke(obj); + } catch (IllegalAccessException e) { + // Expected, the declaring class or interface might not be public. } - /** - * Attempts to call a method either directly or via one of the implemented interfaces. - *

- * A Method object refers to a specific method declared in a specific class. The first invocation - * might happen with method == SomeConcreteClass.publicLongGetter() and will fail if - * SomeConcreteClass is not public. We then recurse over all interfaces implemented by - * SomeConcreteClass (or extended by those interfaces and so on) until we eventually invoke - * callMethod() with method == SomePublicInterface.publicLongGetter(), which will then succeed. - *

- * There is a built-in assumption that the method will never return null (or, equivalently, that - * it returns the primitive data type, i.e. {@code long} rather than {@code Long}). If this - * assumption doesn't hold, the method might be called repeatedly and the returned value will be - * the one produced by the last call. - */ - private Long callLongGetter(Method method, Object obj) throws InvocationTargetException { - try { - return (Long) method.invoke(obj); - } catch (IllegalAccessException e) { - // Expected, the declaring class or interface might not be public. + // Iterate over all implemented/extended interfaces and attempt invoking the method with the + // same name and parameters on each. + for (Class clazz : method.getDeclaringClass().getInterfaces()) { + try { + Method interfaceMethod = clazz.getMethod(method.getName(), method.getParameterTypes()); + Long result = callLongGetter(interfaceMethod, obj); + if (result != null) { + return result; } - - // Iterate over all implemented/extended interfaces and attempt invoking the method with the - // same name and parameters on each. - for (Class clazz : method.getDeclaringClass().getInterfaces()) { - try { - Method interfaceMethod = clazz.getMethod(method.getName(), method.getParameterTypes()); - Long result = callLongGetter(interfaceMethod, obj); - if (result != null) { - return result; - } - } catch (NoSuchMethodException e) { - // Expected, class might implement multiple, unrelated interfaces. - } - } - return null; + } catch (NoSuchMethodException e) { + // Expected, class might implement multiple, unrelated interfaces. + } } + return null; + } - interface Grepper { - String lineStartingWith(File file, String prefix) throws IOException; - } + interface Grepper { + String lineStartingWith(File file, String prefix) throws IOException; + } - private static class FileGrepper implements Grepper { + private static class FileGrepper implements Grepper { - @Override - public String lineStartingWith(File file, String prefix) throws IOException { - try (BufferedReader reader = new BufferedReader(new FileReader(file))) { - String line = reader.readLine(); - while (line != null) { - if (line.startsWith(prefix)) { - return line; - } - line = reader.readLine(); - } - } - return null; + @Override + public String lineStartingWith(File file, String prefix) throws IOException { + try (BufferedReader reader = new BufferedReader(new FileReader(file))) { + String line = reader.readLine(); + while (line != null) { + if (line.startsWith(prefix)) { + return line; + } + line = reader.readLine(); } + } + return null; } + } - public static Builder builder() { - return new Builder(PrometheusProperties.get()); - } + public static Builder builder() { + return new Builder(PrometheusProperties.get()); + } - public static Builder builder(PrometheusProperties config) { - return new Builder(config); - } + public static Builder builder(PrometheusProperties config) { + return new Builder(config); + } - public static class Builder { + public static class Builder { - private final PrometheusProperties config; - private OperatingSystemMXBean osBean; - private RuntimeMXBean runtimeBean; - private Grepper grepper; + private final PrometheusProperties config; + private OperatingSystemMXBean osBean; + private RuntimeMXBean runtimeBean; + private Grepper grepper; - private Builder(PrometheusProperties config) { - this.config = config; - } + private Builder(PrometheusProperties config) { + this.config = config; + } - /** - * Package private. For testing only. 
- */ - Builder osBean(OperatingSystemMXBean osBean) { - this.osBean = osBean; - return this; - } + /** Package private. For testing only. */ + Builder osBean(OperatingSystemMXBean osBean) { + this.osBean = osBean; + return this; + } - /** - * Package private. For testing only. - */ - Builder runtimeBean(RuntimeMXBean runtimeBean) { - this.runtimeBean = runtimeBean; - return this; - } + /** Package private. For testing only. */ + Builder runtimeBean(RuntimeMXBean runtimeBean) { + this.runtimeBean = runtimeBean; + return this; + } - /** - * Package private. For testing only. - */ - Builder grepper(Grepper grepper) { - this.grepper = grepper; - return this; - } + /** Package private. For testing only. */ + Builder grepper(Grepper grepper) { + this.grepper = grepper; + return this; + } - public void register() { - register(PrometheusRegistry.defaultRegistry); - } + public void register() { + register(PrometheusRegistry.defaultRegistry); + } - public void register(PrometheusRegistry registry) { - OperatingSystemMXBean osBean = this.osBean != null ? this.osBean : ManagementFactory.getOperatingSystemMXBean(); - RuntimeMXBean runtimeMXBean = this.runtimeBean != null ? this.runtimeBean : ManagementFactory.getRuntimeMXBean(); - Grepper grepper = this.grepper != null ? this.grepper : new FileGrepper(); - new ProcessMetrics(osBean, runtimeMXBean, grepper, config).register(registry); - } + public void register(PrometheusRegistry registry) { + OperatingSystemMXBean osBean = + this.osBean != null ? this.osBean : ManagementFactory.getOperatingSystemMXBean(); + RuntimeMXBean runtimeMXBean = + this.runtimeBean != null ? this.runtimeBean : ManagementFactory.getRuntimeMXBean(); + Grepper grepper = this.grepper != null ? this.grepper : new FileGrepper(); + new ProcessMetrics(osBean, runtimeMXBean, grepper, config).register(registry); } + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ExampleExporterForManualTesting.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ExampleExporterForManualTesting.java index 16b59b85a..b3d1c8169 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ExampleExporterForManualTesting.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ExampleExporterForManualTesting.java @@ -1,25 +1,22 @@ package io.prometheus.metrics.instrumentation.jvm; import io.prometheus.metrics.exporter.httpserver.HTTPServer; - import java.io.IOException; - public class ExampleExporterForManualTesting { - public static void main(String[] args) throws IOException, InterruptedException { + public static void main(String[] args) throws IOException, InterruptedException { - JvmMetrics.builder().register(); + JvmMetrics.builder().register(); - HTTPServer server = HTTPServer.builder() - .port(9400) - .buildAndStart(); + HTTPServer server = HTTPServer.builder().port(9400).buildAndStart(); - System.out.println("HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); + System.out.println( + "HTTPServer listening on port http://localhost:" + server.getPort() + "/metrics"); - while (true) { - Thread.sleep(100); - Runtime.getRuntime().gc(); // Memory allocation metrics only start after GC run. - } + while (true) { + Thread.sleep(100); + Runtime.getRuntime().gc(); // Memory allocation metrics only start after GC run. 
} + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetricsTest.java index b8aafa0ea..df6dddf99 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmBufferPoolMetricsTest.java @@ -1,82 +1,81 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import java.io.IOException; +import java.lang.management.BufferPoolMXBean; +import java.util.Arrays; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import java.io.IOException; -import java.lang.management.BufferPoolMXBean; -import java.util.Arrays; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - public class JvmBufferPoolMetricsTest { - private final BufferPoolMXBean directBuffer = Mockito.mock(BufferPoolMXBean.class); - private final BufferPoolMXBean mappedBuffer = Mockito.mock(BufferPoolMXBean.class); + private final BufferPoolMXBean directBuffer = Mockito.mock(BufferPoolMXBean.class); + private final BufferPoolMXBean mappedBuffer = Mockito.mock(BufferPoolMXBean.class); - @Before - public void setUp() { - when(directBuffer.getName()).thenReturn("direct"); - when(directBuffer.getCount()).thenReturn(2L); - when(directBuffer.getMemoryUsed()).thenReturn(1234L); - when(directBuffer.getTotalCapacity()).thenReturn(3456L); - when(mappedBuffer.getName()).thenReturn("mapped"); - when(mappedBuffer.getCount()).thenReturn(3L); - when(mappedBuffer.getMemoryUsed()).thenReturn(2345L); - when(mappedBuffer.getTotalCapacity()).thenReturn(4567L); - } + @Before + public void setUp() { + when(directBuffer.getName()).thenReturn("direct"); + when(directBuffer.getCount()).thenReturn(2L); + when(directBuffer.getMemoryUsed()).thenReturn(1234L); + when(directBuffer.getTotalCapacity()).thenReturn(3456L); + when(mappedBuffer.getName()).thenReturn("mapped"); + when(mappedBuffer.getCount()).thenReturn(3L); + when(mappedBuffer.getMemoryUsed()).thenReturn(2345L); + when(mappedBuffer.getTotalCapacity()).thenReturn(4567L); + } - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - JvmBufferPoolMetrics.builder() - .bufferPoolBeans(Arrays.asList(mappedBuffer, directBuffer)) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + JvmBufferPoolMetrics.builder() + .bufferPoolBeans(Arrays.asList(mappedBuffer, directBuffer)) + .register(registry); + MetricSnapshots snapshots = registry.scrape(); - String expected = "" + - "# TYPE jvm_buffer_pool_capacity_bytes gauge\n" + - "# UNIT 
jvm_buffer_pool_capacity_bytes bytes\n" + - "# HELP jvm_buffer_pool_capacity_bytes Bytes capacity of a given JVM buffer pool.\n" + - "jvm_buffer_pool_capacity_bytes{pool=\"direct\"} 3456.0\n" + - "jvm_buffer_pool_capacity_bytes{pool=\"mapped\"} 4567.0\n" + - "# TYPE jvm_buffer_pool_used_buffers gauge\n" + - "# HELP jvm_buffer_pool_used_buffers Used buffers of a given JVM buffer pool.\n" + - "jvm_buffer_pool_used_buffers{pool=\"direct\"} 2.0\n" + - "jvm_buffer_pool_used_buffers{pool=\"mapped\"} 3.0\n" + - "# TYPE jvm_buffer_pool_used_bytes gauge\n" + - "# UNIT jvm_buffer_pool_used_bytes bytes\n" + - "# HELP jvm_buffer_pool_used_bytes Used bytes of a given JVM buffer pool.\n" + - "jvm_buffer_pool_used_bytes{pool=\"direct\"} 1234.0\n" + - "jvm_buffer_pool_used_bytes{pool=\"mapped\"} 2345.0\n" + - "# EOF\n"; + String expected = + "" + + "# TYPE jvm_buffer_pool_capacity_bytes gauge\n" + + "# UNIT jvm_buffer_pool_capacity_bytes bytes\n" + + "# HELP jvm_buffer_pool_capacity_bytes Bytes capacity of a given JVM buffer pool.\n" + + "jvm_buffer_pool_capacity_bytes{pool=\"direct\"} 3456.0\n" + + "jvm_buffer_pool_capacity_bytes{pool=\"mapped\"} 4567.0\n" + + "# TYPE jvm_buffer_pool_used_buffers gauge\n" + + "# HELP jvm_buffer_pool_used_buffers Used buffers of a given JVM buffer pool.\n" + + "jvm_buffer_pool_used_buffers{pool=\"direct\"} 2.0\n" + + "jvm_buffer_pool_used_buffers{pool=\"mapped\"} 3.0\n" + + "# TYPE jvm_buffer_pool_used_bytes gauge\n" + + "# UNIT jvm_buffer_pool_used_bytes bytes\n" + + "# HELP jvm_buffer_pool_used_bytes Used bytes of a given JVM buffer pool.\n" + + "jvm_buffer_pool_used_bytes{pool=\"direct\"} 1234.0\n" + + "jvm_buffer_pool_used_bytes{pool=\"mapped\"} 2345.0\n" + + "# EOF\n"; - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } - @Test - public void testIgnoredMetricNotScraped() { - MetricNameFilter filter = MetricNameFilter.builder() - .nameMustNotBeEqualTo("jvm_buffer_pool_used_bytes") - .build(); + @Test + public void testIgnoredMetricNotScraped() { + MetricNameFilter filter = + MetricNameFilter.builder().nameMustNotBeEqualTo("jvm_buffer_pool_used_bytes").build(); - PrometheusRegistry registry = new PrometheusRegistry(); - JvmBufferPoolMetrics.builder() - .bufferPoolBeans(Arrays.asList(directBuffer, mappedBuffer)) - .register(registry); - registry.scrape(filter); + PrometheusRegistry registry = new PrometheusRegistry(); + JvmBufferPoolMetrics.builder() + .bufferPoolBeans(Arrays.asList(directBuffer, mappedBuffer)) + .register(registry); + registry.scrape(filter); - verify(directBuffer, times(0)).getMemoryUsed(); - verify(mappedBuffer, times(0)).getMemoryUsed(); - verify(directBuffer, times(1)).getTotalCapacity(); - verify(mappedBuffer, times(1)).getTotalCapacity(); - } + verify(directBuffer, times(0)).getMemoryUsed(); + verify(mappedBuffer, times(0)).getMemoryUsed(); + verify(directBuffer, times(1)).getTotalCapacity(); + verify(mappedBuffer, times(1)).getTotalCapacity(); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetricsTest.java index 16413b839..4196031bf 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetricsTest.java +++ 
b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmClassLoadingMetricsTest.java @@ -1,68 +1,63 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import java.io.IOException; +import java.lang.management.ClassLoadingMXBean; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import java.io.IOException; -import java.lang.management.ClassLoadingMXBean; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - public class JvmClassLoadingMetricsTest { - private ClassLoadingMXBean mockClassLoadingBean = Mockito.mock(ClassLoadingMXBean.class); - - @Before - public void setUp() { - when(mockClassLoadingBean.getLoadedClassCount()).thenReturn(1000); - when(mockClassLoadingBean.getTotalLoadedClassCount()).thenReturn(2000L); - when(mockClassLoadingBean.getUnloadedClassCount()).thenReturn(500L); - } - - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - JvmClassLoadingMetrics.builder() - .classLoadingBean(mockClassLoadingBean) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); - - String expected = "" + - "# TYPE jvm_classes_currently_loaded gauge\n" + - "# HELP jvm_classes_currently_loaded The number of classes that are currently loaded in the JVM\n" + - "jvm_classes_currently_loaded 1000.0\n" + - "# TYPE jvm_classes_loaded counter\n" + - "# HELP jvm_classes_loaded The total number of classes that have been loaded since the JVM has started execution\n" + - "jvm_classes_loaded_total 2000.0\n" + - "# TYPE jvm_classes_unloaded counter\n" + - "# HELP jvm_classes_unloaded The total number of classes that have been unloaded since the JVM has started execution\n" + - "jvm_classes_unloaded_total 500.0\n" + - "# EOF\n"; - - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } - - @Test - public void testIgnoredMetricNotScraped() { - MetricNameFilter filter = MetricNameFilter.builder() - .nameMustNotBeEqualTo("jvm_classes_currently_loaded") - .build(); - - PrometheusRegistry registry = new PrometheusRegistry(); - JvmClassLoadingMetrics.builder() - .classLoadingBean(mockClassLoadingBean) - .register(registry); - registry.scrape(filter); - - verify(mockClassLoadingBean, times(0)).getLoadedClassCount(); - verify(mockClassLoadingBean, times(1)).getTotalLoadedClassCount(); - } + private ClassLoadingMXBean mockClassLoadingBean = Mockito.mock(ClassLoadingMXBean.class); + + @Before + public void setUp() { + when(mockClassLoadingBean.getLoadedClassCount()).thenReturn(1000); + when(mockClassLoadingBean.getTotalLoadedClassCount()).thenReturn(2000L); + when(mockClassLoadingBean.getUnloadedClassCount()).thenReturn(500L); + } + + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + JvmClassLoadingMetrics.builder().classLoadingBean(mockClassLoadingBean).register(registry); + MetricSnapshots 
snapshots = registry.scrape(); + + String expected = + "" + + "# TYPE jvm_classes_currently_loaded gauge\n" + + "# HELP jvm_classes_currently_loaded The number of classes that are currently loaded in the JVM\n" + + "jvm_classes_currently_loaded 1000.0\n" + + "# TYPE jvm_classes_loaded counter\n" + + "# HELP jvm_classes_loaded The total number of classes that have been loaded since the JVM has started execution\n" + + "jvm_classes_loaded_total 2000.0\n" + + "# TYPE jvm_classes_unloaded counter\n" + + "# HELP jvm_classes_unloaded The total number of classes that have been unloaded since the JVM has started execution\n" + + "jvm_classes_unloaded_total 500.0\n" + + "# EOF\n"; + + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } + + @Test + public void testIgnoredMetricNotScraped() { + MetricNameFilter filter = + MetricNameFilter.builder().nameMustNotBeEqualTo("jvm_classes_currently_loaded").build(); + + PrometheusRegistry registry = new PrometheusRegistry(); + JvmClassLoadingMetrics.builder().classLoadingBean(mockClassLoadingBean).register(registry); + registry.scrape(filter); + + verify(mockClassLoadingBean, times(0)).getLoadedClassCount(); + verify(mockClassLoadingBean, times(1)).getTotalLoadedClassCount(); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetricsTest.java index 75ecc8370..780d6a949 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmCompilationMetricsTest.java @@ -1,62 +1,59 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.internal.verification.VerificationModeFactory.times; + import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import java.io.IOException; +import java.lang.management.CompilationMXBean; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import java.io.IOException; -import java.lang.management.CompilationMXBean; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.mockito.internal.verification.VerificationModeFactory.times; - public class JvmCompilationMetricsTest { - private CompilationMXBean mockCompilationBean = Mockito.mock(CompilationMXBean.class); - - @Before - public void setUp() { - when(mockCompilationBean.getTotalCompilationTime()).thenReturn(10000l); - when(mockCompilationBean.isCompilationTimeMonitoringSupported()).thenReturn(true); - } - - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - JvmCompilationMetrics.builder() - .compilationBean(mockCompilationBean) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); - - String expected = "" + - "# TYPE jvm_compilation_time_seconds counter\n" + - "# UNIT 
jvm_compilation_time_seconds seconds\n" + - "# HELP jvm_compilation_time_seconds The total time in seconds taken for HotSpot class compilation\n" + - "jvm_compilation_time_seconds_total 10.0\n" + - "# EOF\n"; - - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } - - @Test - public void testIgnoredMetricNotScraped() { - MetricNameFilter filter = MetricNameFilter.builder() - .nameMustNotBeEqualTo("jvm_compilation_time_seconds_total") - .build(); - - PrometheusRegistry registry = new PrometheusRegistry(); - JvmCompilationMetrics.builder() - .compilationBean(mockCompilationBean) - .register(registry); - MetricSnapshots snapshots = registry.scrape(filter); - - verify(mockCompilationBean, times(0)).getTotalCompilationTime(); - Assert.assertEquals(0, snapshots.size()); - } + private CompilationMXBean mockCompilationBean = Mockito.mock(CompilationMXBean.class); + + @Before + public void setUp() { + when(mockCompilationBean.getTotalCompilationTime()).thenReturn(10000l); + when(mockCompilationBean.isCompilationTimeMonitoringSupported()).thenReturn(true); + } + + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + JvmCompilationMetrics.builder().compilationBean(mockCompilationBean).register(registry); + MetricSnapshots snapshots = registry.scrape(); + + String expected = + "" + + "# TYPE jvm_compilation_time_seconds counter\n" + + "# UNIT jvm_compilation_time_seconds seconds\n" + + "# HELP jvm_compilation_time_seconds The total time in seconds taken for HotSpot class compilation\n" + + "jvm_compilation_time_seconds_total 10.0\n" + + "# EOF\n"; + + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } + + @Test + public void testIgnoredMetricNotScraped() { + MetricNameFilter filter = + MetricNameFilter.builder() + .nameMustNotBeEqualTo("jvm_compilation_time_seconds_total") + .build(); + + PrometheusRegistry registry = new PrometheusRegistry(); + JvmCompilationMetrics.builder().compilationBean(mockCompilationBean).register(registry); + MetricSnapshots snapshots = registry.scrape(filter); + + verify(mockCompilationBean, times(0)).getTotalCompilationTime(); + Assert.assertEquals(0, snapshots.size()); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetricsTest.java index d7dad1787..67ba4f064 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmGarbageCollectorMetricsTest.java @@ -1,74 +1,72 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - import java.io.IOException; import java.lang.management.GarbageCollectorMXBean; import java.util.Arrays; import java.util.concurrent.TimeUnit; - 
-import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; public class JvmGarbageCollectorMetricsTest { - private GarbageCollectorMXBean mockGcBean1 = Mockito.mock(GarbageCollectorMXBean.class); - private GarbageCollectorMXBean mockGcBean2 = Mockito.mock(GarbageCollectorMXBean.class); + private GarbageCollectorMXBean mockGcBean1 = Mockito.mock(GarbageCollectorMXBean.class); + private GarbageCollectorMXBean mockGcBean2 = Mockito.mock(GarbageCollectorMXBean.class); - @Before - public void setUp() { - when(mockGcBean1.getName()).thenReturn("MyGC1"); - when(mockGcBean1.getCollectionCount()).thenReturn(100L); - when(mockGcBean1.getCollectionTime()).thenReturn(TimeUnit.SECONDS.toMillis(10)); - when(mockGcBean2.getName()).thenReturn("MyGC2"); - when(mockGcBean2.getCollectionCount()).thenReturn(200L); - when(mockGcBean2.getCollectionTime()).thenReturn(TimeUnit.SECONDS.toMillis(20)); - } + @Before + public void setUp() { + when(mockGcBean1.getName()).thenReturn("MyGC1"); + when(mockGcBean1.getCollectionCount()).thenReturn(100L); + when(mockGcBean1.getCollectionTime()).thenReturn(TimeUnit.SECONDS.toMillis(10)); + when(mockGcBean2.getName()).thenReturn("MyGC2"); + when(mockGcBean2.getCollectionCount()).thenReturn(200L); + when(mockGcBean2.getCollectionTime()).thenReturn(TimeUnit.SECONDS.toMillis(20)); + } - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - JvmGarbageCollectorMetrics.builder() - .garbageCollectorBeans(Arrays.asList(mockGcBean1, mockGcBean2)) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + JvmGarbageCollectorMetrics.builder() + .garbageCollectorBeans(Arrays.asList(mockGcBean1, mockGcBean2)) + .register(registry); + MetricSnapshots snapshots = registry.scrape(); - String expected = "" + - "# TYPE jvm_gc_collection_seconds summary\n" + - "# UNIT jvm_gc_collection_seconds seconds\n" + - "# HELP jvm_gc_collection_seconds Time spent in a given JVM garbage collector in seconds.\n" + - "jvm_gc_collection_seconds_count{gc=\"MyGC1\"} 100\n" + - "jvm_gc_collection_seconds_sum{gc=\"MyGC1\"} 10.0\n" + - "jvm_gc_collection_seconds_count{gc=\"MyGC2\"} 200\n" + - "jvm_gc_collection_seconds_sum{gc=\"MyGC2\"} 20.0\n" + - "# EOF\n"; + String expected = + "" + + "# TYPE jvm_gc_collection_seconds summary\n" + + "# UNIT jvm_gc_collection_seconds seconds\n" + + "# HELP jvm_gc_collection_seconds Time spent in a given JVM garbage collector in seconds.\n" + + "jvm_gc_collection_seconds_count{gc=\"MyGC1\"} 100\n" + + "jvm_gc_collection_seconds_sum{gc=\"MyGC1\"} 10.0\n" + + "jvm_gc_collection_seconds_count{gc=\"MyGC2\"} 200\n" + + "jvm_gc_collection_seconds_sum{gc=\"MyGC2\"} 20.0\n" + + "# EOF\n"; - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } - @Test - public void testIgnoredMetricNotScraped() { - MetricNameFilter filter = MetricNameFilter.builder() - .nameMustNotBeEqualTo("jvm_gc_collection_seconds") - .build(); + @Test + public void testIgnoredMetricNotScraped() { + MetricNameFilter filter = + 
MetricNameFilter.builder().nameMustNotBeEqualTo("jvm_gc_collection_seconds").build(); - PrometheusRegistry registry = new PrometheusRegistry(); - JvmGarbageCollectorMetrics.builder() - .garbageCollectorBeans(Arrays.asList(mockGcBean1, mockGcBean2)) - .register(registry); - MetricSnapshots snapshots = registry.scrape(filter); + PrometheusRegistry registry = new PrometheusRegistry(); + JvmGarbageCollectorMetrics.builder() + .garbageCollectorBeans(Arrays.asList(mockGcBean1, mockGcBean2)) + .register(registry); + MetricSnapshots snapshots = registry.scrape(filter); - verify(mockGcBean1, times(0)).getCollectionTime(); - verify(mockGcBean1, times(0)).getCollectionCount(); - Assert.assertEquals(0, snapshots.size()); - } + verify(mockGcBean1, times(0)).getCollectionTime(); + verify(mockGcBean1, times(0)).getCollectionCount(); + Assert.assertEquals(0, snapshots.size()); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetricsTest.java index b70bb0bba..167299291 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryMetricsTest.java @@ -1,177 +1,177 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - import java.io.IOException; import java.lang.management.MemoryMXBean; import java.lang.management.MemoryPoolMXBean; import java.lang.management.MemoryUsage; import java.util.Arrays; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; public class JvmMemoryMetricsTest { - private MemoryMXBean mockMemoryBean = Mockito.mock(MemoryMXBean.class); - private MemoryPoolMXBean mockPoolsBeanEdenSpace = Mockito.mock(MemoryPoolMXBean.class); - private MemoryPoolMXBean mockPoolsBeanOldGen = Mockito.mock(MemoryPoolMXBean.class); - private MemoryUsage memoryUsageHeap = Mockito.mock(MemoryUsage.class); - private MemoryUsage memoryUsageNonHeap = Mockito.mock(MemoryUsage.class); - private MemoryUsage memoryUsagePoolEdenSpace = Mockito.mock(MemoryUsage.class); - private MemoryUsage memoryUsagePoolOldGen = Mockito.mock(MemoryUsage.class); - private MemoryUsage memoryUsagePoolCollectionEdenSpace = Mockito.mock(MemoryUsage.class); - private MemoryUsage memoryUsagePoolCollectionOldGen = Mockito.mock(MemoryUsage.class); - - @Before - public void setUp() { - when(mockMemoryBean.getHeapMemoryUsage()).thenReturn(memoryUsageHeap); - when(mockMemoryBean.getNonHeapMemoryUsage()).thenReturn(memoryUsageNonHeap); - - long val = 1L; - 
when(mockMemoryBean.getObjectPendingFinalizationCount()).thenReturn((int) val++); - - when(memoryUsageHeap.getUsed()).thenReturn(val++); - when(memoryUsageHeap.getMax()).thenReturn(val++); - when(memoryUsageHeap.getCommitted()).thenReturn(val++); - when(memoryUsageHeap.getInit()).thenReturn(val++); - - when(memoryUsageNonHeap.getUsed()).thenReturn(val++); - when(memoryUsageNonHeap.getMax()).thenReturn(val++); - when(memoryUsageNonHeap.getCommitted()).thenReturn(val++); - when(memoryUsageNonHeap.getInit()).thenReturn(val++); - - when(memoryUsagePoolEdenSpace.getUsed()).thenReturn(val++); - when(memoryUsagePoolEdenSpace.getMax()).thenReturn(val++); - when(memoryUsagePoolEdenSpace.getCommitted()).thenReturn(val++); - when(memoryUsagePoolEdenSpace.getInit()).thenReturn(val++); - - when(memoryUsagePoolOldGen.getUsed()).thenReturn(val++); - when(memoryUsagePoolOldGen.getMax()).thenReturn(val++); - when(memoryUsagePoolOldGen.getCommitted()).thenReturn(val++); - when(memoryUsagePoolOldGen.getInit()).thenReturn(val++); - - when(memoryUsagePoolCollectionEdenSpace.getUsed()).thenReturn(val++); - when(memoryUsagePoolCollectionEdenSpace.getMax()).thenReturn(val++); - when(memoryUsagePoolCollectionEdenSpace.getCommitted()).thenReturn(val++); - when(memoryUsagePoolCollectionEdenSpace.getInit()).thenReturn(val++); - - when(memoryUsagePoolCollectionOldGen.getUsed()).thenReturn(val++); - when(memoryUsagePoolCollectionOldGen.getMax()).thenReturn(val++); - when(memoryUsagePoolCollectionOldGen.getCommitted()).thenReturn(val++); - when(memoryUsagePoolCollectionOldGen.getInit()).thenReturn(val++); - - when(mockPoolsBeanEdenSpace.getName()).thenReturn("PS Eden Space"); - when(mockPoolsBeanEdenSpace.getUsage()).thenReturn(memoryUsagePoolEdenSpace); - when(mockPoolsBeanEdenSpace.getCollectionUsage()).thenReturn(memoryUsagePoolCollectionEdenSpace); - - when(mockPoolsBeanOldGen.getName()).thenReturn("PS Old Gen"); - when(mockPoolsBeanOldGen.getUsage()).thenReturn(memoryUsagePoolOldGen); - when(mockPoolsBeanOldGen.getCollectionUsage()).thenReturn(memoryUsagePoolCollectionOldGen); - } - - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - JvmMemoryMetrics.builder() - .withMemoryBean(mockMemoryBean) - .withMemoryPoolBeans(Arrays.asList(mockPoolsBeanEdenSpace, mockPoolsBeanOldGen)) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); - - String expected = "" + - "# TYPE jvm_memory_committed_bytes gauge\n" + - "# UNIT jvm_memory_committed_bytes bytes\n" + - "# HELP jvm_memory_committed_bytes Committed (bytes) of a given JVM memory area.\n" + - "jvm_memory_committed_bytes{area=\"heap\"} 4.0\n" + - "jvm_memory_committed_bytes{area=\"nonheap\"} 8.0\n" + - "# TYPE jvm_memory_init_bytes gauge\n" + - "# UNIT jvm_memory_init_bytes bytes\n" + - "# HELP jvm_memory_init_bytes Initial bytes of a given JVM memory area.\n" + - "jvm_memory_init_bytes{area=\"heap\"} 5.0\n" + - "jvm_memory_init_bytes{area=\"nonheap\"} 9.0\n" + - "# TYPE jvm_memory_max_bytes gauge\n" + - "# UNIT jvm_memory_max_bytes bytes\n" + - "# HELP jvm_memory_max_bytes Max (bytes) of a given JVM memory area.\n" + - "jvm_memory_max_bytes{area=\"heap\"} 3.0\n" + - "jvm_memory_max_bytes{area=\"nonheap\"} 7.0\n" + - "# TYPE jvm_memory_objects_pending_finalization gauge\n" + - "# HELP jvm_memory_objects_pending_finalization The number of objects waiting in the finalizer queue.\n" + - "jvm_memory_objects_pending_finalization 1.0\n" + - "# TYPE jvm_memory_pool_collection_committed_bytes 
gauge\n" + - "# UNIT jvm_memory_pool_collection_committed_bytes bytes\n" + - "# HELP jvm_memory_pool_collection_committed_bytes Committed after last collection bytes of a given JVM memory pool.\n" + - "jvm_memory_pool_collection_committed_bytes{pool=\"PS Eden Space\"} 20.0\n" + - "jvm_memory_pool_collection_committed_bytes{pool=\"PS Old Gen\"} 24.0\n" + - "# TYPE jvm_memory_pool_collection_init_bytes gauge\n" + - "# UNIT jvm_memory_pool_collection_init_bytes bytes\n" + - "# HELP jvm_memory_pool_collection_init_bytes Initial after last collection bytes of a given JVM memory pool.\n" + - "jvm_memory_pool_collection_init_bytes{pool=\"PS Eden Space\"} 21.0\n" + - "jvm_memory_pool_collection_init_bytes{pool=\"PS Old Gen\"} 25.0\n" + - "# TYPE jvm_memory_pool_collection_max_bytes gauge\n" + - "# UNIT jvm_memory_pool_collection_max_bytes bytes\n" + - "# HELP jvm_memory_pool_collection_max_bytes Max bytes after last collection of a given JVM memory pool.\n" + - "jvm_memory_pool_collection_max_bytes{pool=\"PS Eden Space\"} 19.0\n" + - "jvm_memory_pool_collection_max_bytes{pool=\"PS Old Gen\"} 23.0\n" + - "# TYPE jvm_memory_pool_collection_used_bytes gauge\n" + - "# UNIT jvm_memory_pool_collection_used_bytes bytes\n" + - "# HELP jvm_memory_pool_collection_used_bytes Used bytes after last collection of a given JVM memory pool.\n" + - "jvm_memory_pool_collection_used_bytes{pool=\"PS Eden Space\"} 18.0\n" + - "jvm_memory_pool_collection_used_bytes{pool=\"PS Old Gen\"} 22.0\n" + - "# TYPE jvm_memory_pool_committed_bytes gauge\n" + - "# UNIT jvm_memory_pool_committed_bytes bytes\n" + - "# HELP jvm_memory_pool_committed_bytes Committed bytes of a given JVM memory pool.\n" + - "jvm_memory_pool_committed_bytes{pool=\"PS Eden Space\"} 12.0\n" + - "jvm_memory_pool_committed_bytes{pool=\"PS Old Gen\"} 16.0\n" + - "# TYPE jvm_memory_pool_init_bytes gauge\n" + - "# UNIT jvm_memory_pool_init_bytes bytes\n" + - "# HELP jvm_memory_pool_init_bytes Initial bytes of a given JVM memory pool.\n" + - "jvm_memory_pool_init_bytes{pool=\"PS Eden Space\"} 13.0\n" + - "jvm_memory_pool_init_bytes{pool=\"PS Old Gen\"} 17.0\n" + - "# TYPE jvm_memory_pool_max_bytes gauge\n" + - "# UNIT jvm_memory_pool_max_bytes bytes\n" + - "# HELP jvm_memory_pool_max_bytes Max bytes of a given JVM memory pool.\n" + - "jvm_memory_pool_max_bytes{pool=\"PS Eden Space\"} 11.0\n" + - "jvm_memory_pool_max_bytes{pool=\"PS Old Gen\"} 15.0\n" + - "# TYPE jvm_memory_pool_used_bytes gauge\n" + - "# UNIT jvm_memory_pool_used_bytes bytes\n" + - "# HELP jvm_memory_pool_used_bytes Used bytes of a given JVM memory pool.\n" + - "jvm_memory_pool_used_bytes{pool=\"PS Eden Space\"} 10.0\n" + - "jvm_memory_pool_used_bytes{pool=\"PS Old Gen\"} 14.0\n" + - "# TYPE jvm_memory_used_bytes gauge\n" + - "# UNIT jvm_memory_used_bytes bytes\n" + - "# HELP jvm_memory_used_bytes Used bytes of a given JVM memory area.\n" + - "jvm_memory_used_bytes{area=\"heap\"} 2.0\n" + - "jvm_memory_used_bytes{area=\"nonheap\"} 6.0\n" + - "# EOF\n"; - - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } - - @Test - public void testIgnoredMetricNotScraped() { - MetricNameFilter filter = MetricNameFilter.builder() - .nameMustNotBeEqualTo("jvm_memory_pool_used_bytes") - .build(); - - PrometheusRegistry registry = new PrometheusRegistry(); - JvmMemoryMetrics.builder() - .withMemoryBean(mockMemoryBean) - .withMemoryPoolBeans(Arrays.asList(mockPoolsBeanEdenSpace, mockPoolsBeanOldGen)) - .register(registry); - registry.scrape(filter); - - verify(memoryUsagePoolEdenSpace, 
times(0)).getUsed(); - verify(memoryUsagePoolOldGen, times(0)).getUsed(); - verify(memoryUsagePoolEdenSpace, times(1)).getMax(); - verify(memoryUsagePoolOldGen, times(1)).getMax(); - } + private MemoryMXBean mockMemoryBean = Mockito.mock(MemoryMXBean.class); + private MemoryPoolMXBean mockPoolsBeanEdenSpace = Mockito.mock(MemoryPoolMXBean.class); + private MemoryPoolMXBean mockPoolsBeanOldGen = Mockito.mock(MemoryPoolMXBean.class); + private MemoryUsage memoryUsageHeap = Mockito.mock(MemoryUsage.class); + private MemoryUsage memoryUsageNonHeap = Mockito.mock(MemoryUsage.class); + private MemoryUsage memoryUsagePoolEdenSpace = Mockito.mock(MemoryUsage.class); + private MemoryUsage memoryUsagePoolOldGen = Mockito.mock(MemoryUsage.class); + private MemoryUsage memoryUsagePoolCollectionEdenSpace = Mockito.mock(MemoryUsage.class); + private MemoryUsage memoryUsagePoolCollectionOldGen = Mockito.mock(MemoryUsage.class); + + @Before + public void setUp() { + when(mockMemoryBean.getHeapMemoryUsage()).thenReturn(memoryUsageHeap); + when(mockMemoryBean.getNonHeapMemoryUsage()).thenReturn(memoryUsageNonHeap); + + long val = 1L; + when(mockMemoryBean.getObjectPendingFinalizationCount()).thenReturn((int) val++); + + when(memoryUsageHeap.getUsed()).thenReturn(val++); + when(memoryUsageHeap.getMax()).thenReturn(val++); + when(memoryUsageHeap.getCommitted()).thenReturn(val++); + when(memoryUsageHeap.getInit()).thenReturn(val++); + + when(memoryUsageNonHeap.getUsed()).thenReturn(val++); + when(memoryUsageNonHeap.getMax()).thenReturn(val++); + when(memoryUsageNonHeap.getCommitted()).thenReturn(val++); + when(memoryUsageNonHeap.getInit()).thenReturn(val++); + + when(memoryUsagePoolEdenSpace.getUsed()).thenReturn(val++); + when(memoryUsagePoolEdenSpace.getMax()).thenReturn(val++); + when(memoryUsagePoolEdenSpace.getCommitted()).thenReturn(val++); + when(memoryUsagePoolEdenSpace.getInit()).thenReturn(val++); + + when(memoryUsagePoolOldGen.getUsed()).thenReturn(val++); + when(memoryUsagePoolOldGen.getMax()).thenReturn(val++); + when(memoryUsagePoolOldGen.getCommitted()).thenReturn(val++); + when(memoryUsagePoolOldGen.getInit()).thenReturn(val++); + + when(memoryUsagePoolCollectionEdenSpace.getUsed()).thenReturn(val++); + when(memoryUsagePoolCollectionEdenSpace.getMax()).thenReturn(val++); + when(memoryUsagePoolCollectionEdenSpace.getCommitted()).thenReturn(val++); + when(memoryUsagePoolCollectionEdenSpace.getInit()).thenReturn(val++); + + when(memoryUsagePoolCollectionOldGen.getUsed()).thenReturn(val++); + when(memoryUsagePoolCollectionOldGen.getMax()).thenReturn(val++); + when(memoryUsagePoolCollectionOldGen.getCommitted()).thenReturn(val++); + when(memoryUsagePoolCollectionOldGen.getInit()).thenReturn(val++); + + when(mockPoolsBeanEdenSpace.getName()).thenReturn("PS Eden Space"); + when(mockPoolsBeanEdenSpace.getUsage()).thenReturn(memoryUsagePoolEdenSpace); + when(mockPoolsBeanEdenSpace.getCollectionUsage()) + .thenReturn(memoryUsagePoolCollectionEdenSpace); + + when(mockPoolsBeanOldGen.getName()).thenReturn("PS Old Gen"); + when(mockPoolsBeanOldGen.getUsage()).thenReturn(memoryUsagePoolOldGen); + when(mockPoolsBeanOldGen.getCollectionUsage()).thenReturn(memoryUsagePoolCollectionOldGen); + } + + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + JvmMemoryMetrics.builder() + .withMemoryBean(mockMemoryBean) + .withMemoryPoolBeans(Arrays.asList(mockPoolsBeanEdenSpace, mockPoolsBeanOldGen)) + .register(registry); + MetricSnapshots snapshots = 
registry.scrape(); + + String expected = + "" + + "# TYPE jvm_memory_committed_bytes gauge\n" + + "# UNIT jvm_memory_committed_bytes bytes\n" + + "# HELP jvm_memory_committed_bytes Committed (bytes) of a given JVM memory area.\n" + + "jvm_memory_committed_bytes{area=\"heap\"} 4.0\n" + + "jvm_memory_committed_bytes{area=\"nonheap\"} 8.0\n" + + "# TYPE jvm_memory_init_bytes gauge\n" + + "# UNIT jvm_memory_init_bytes bytes\n" + + "# HELP jvm_memory_init_bytes Initial bytes of a given JVM memory area.\n" + + "jvm_memory_init_bytes{area=\"heap\"} 5.0\n" + + "jvm_memory_init_bytes{area=\"nonheap\"} 9.0\n" + + "# TYPE jvm_memory_max_bytes gauge\n" + + "# UNIT jvm_memory_max_bytes bytes\n" + + "# HELP jvm_memory_max_bytes Max (bytes) of a given JVM memory area.\n" + + "jvm_memory_max_bytes{area=\"heap\"} 3.0\n" + + "jvm_memory_max_bytes{area=\"nonheap\"} 7.0\n" + + "# TYPE jvm_memory_objects_pending_finalization gauge\n" + + "# HELP jvm_memory_objects_pending_finalization The number of objects waiting in the finalizer queue.\n" + + "jvm_memory_objects_pending_finalization 1.0\n" + + "# TYPE jvm_memory_pool_collection_committed_bytes gauge\n" + + "# UNIT jvm_memory_pool_collection_committed_bytes bytes\n" + + "# HELP jvm_memory_pool_collection_committed_bytes Committed after last collection bytes of a given JVM memory pool.\n" + + "jvm_memory_pool_collection_committed_bytes{pool=\"PS Eden Space\"} 20.0\n" + + "jvm_memory_pool_collection_committed_bytes{pool=\"PS Old Gen\"} 24.0\n" + + "# TYPE jvm_memory_pool_collection_init_bytes gauge\n" + + "# UNIT jvm_memory_pool_collection_init_bytes bytes\n" + + "# HELP jvm_memory_pool_collection_init_bytes Initial after last collection bytes of a given JVM memory pool.\n" + + "jvm_memory_pool_collection_init_bytes{pool=\"PS Eden Space\"} 21.0\n" + + "jvm_memory_pool_collection_init_bytes{pool=\"PS Old Gen\"} 25.0\n" + + "# TYPE jvm_memory_pool_collection_max_bytes gauge\n" + + "# UNIT jvm_memory_pool_collection_max_bytes bytes\n" + + "# HELP jvm_memory_pool_collection_max_bytes Max bytes after last collection of a given JVM memory pool.\n" + + "jvm_memory_pool_collection_max_bytes{pool=\"PS Eden Space\"} 19.0\n" + + "jvm_memory_pool_collection_max_bytes{pool=\"PS Old Gen\"} 23.0\n" + + "# TYPE jvm_memory_pool_collection_used_bytes gauge\n" + + "# UNIT jvm_memory_pool_collection_used_bytes bytes\n" + + "# HELP jvm_memory_pool_collection_used_bytes Used bytes after last collection of a given JVM memory pool.\n" + + "jvm_memory_pool_collection_used_bytes{pool=\"PS Eden Space\"} 18.0\n" + + "jvm_memory_pool_collection_used_bytes{pool=\"PS Old Gen\"} 22.0\n" + + "# TYPE jvm_memory_pool_committed_bytes gauge\n" + + "# UNIT jvm_memory_pool_committed_bytes bytes\n" + + "# HELP jvm_memory_pool_committed_bytes Committed bytes of a given JVM memory pool.\n" + + "jvm_memory_pool_committed_bytes{pool=\"PS Eden Space\"} 12.0\n" + + "jvm_memory_pool_committed_bytes{pool=\"PS Old Gen\"} 16.0\n" + + "# TYPE jvm_memory_pool_init_bytes gauge\n" + + "# UNIT jvm_memory_pool_init_bytes bytes\n" + + "# HELP jvm_memory_pool_init_bytes Initial bytes of a given JVM memory pool.\n" + + "jvm_memory_pool_init_bytes{pool=\"PS Eden Space\"} 13.0\n" + + "jvm_memory_pool_init_bytes{pool=\"PS Old Gen\"} 17.0\n" + + "# TYPE jvm_memory_pool_max_bytes gauge\n" + + "# UNIT jvm_memory_pool_max_bytes bytes\n" + + "# HELP jvm_memory_pool_max_bytes Max bytes of a given JVM memory pool.\n" + + "jvm_memory_pool_max_bytes{pool=\"PS Eden Space\"} 11.0\n" + + "jvm_memory_pool_max_bytes{pool=\"PS Old 
Gen\"} 15.0\n" + + "# TYPE jvm_memory_pool_used_bytes gauge\n" + + "# UNIT jvm_memory_pool_used_bytes bytes\n" + + "# HELP jvm_memory_pool_used_bytes Used bytes of a given JVM memory pool.\n" + + "jvm_memory_pool_used_bytes{pool=\"PS Eden Space\"} 10.0\n" + + "jvm_memory_pool_used_bytes{pool=\"PS Old Gen\"} 14.0\n" + + "# TYPE jvm_memory_used_bytes gauge\n" + + "# UNIT jvm_memory_used_bytes bytes\n" + + "# HELP jvm_memory_used_bytes Used bytes of a given JVM memory area.\n" + + "jvm_memory_used_bytes{area=\"heap\"} 2.0\n" + + "jvm_memory_used_bytes{area=\"nonheap\"} 6.0\n" + + "# EOF\n"; + + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } + + @Test + public void testIgnoredMetricNotScraped() { + MetricNameFilter filter = + MetricNameFilter.builder().nameMustNotBeEqualTo("jvm_memory_pool_used_bytes").build(); + + PrometheusRegistry registry = new PrometheusRegistry(); + JvmMemoryMetrics.builder() + .withMemoryBean(mockMemoryBean) + .withMemoryPoolBeans(Arrays.asList(mockPoolsBeanEdenSpace, mockPoolsBeanOldGen)) + .register(registry); + registry.scrape(filter); + + verify(memoryUsagePoolEdenSpace, times(0)).getUsed(); + verify(memoryUsagePoolOldGen, times(0)).getUsed(); + verify(memoryUsagePoolEdenSpace, times(1)).getMax(); + verify(memoryUsagePoolOldGen, times(1)).getMax(); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetricsTest.java index 945ac5e27..8f90a9cb8 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMemoryPoolAllocationMetricsTest.java @@ -1,5 +1,7 @@ package io.prometheus.metrics.instrumentation.jvm; +import static org.junit.Assert.assertEquals; + import io.prometheus.metrics.core.metrics.Counter; import io.prometheus.metrics.instrumentation.jvm.JvmMemoryPoolAllocationMetrics.AllocationCountingNotificationListener; import io.prometheus.metrics.model.registry.PrometheusRegistry; @@ -9,57 +11,56 @@ import org.junit.Assert; import org.junit.Test; -import static org.junit.Assert.assertEquals; - public class JvmMemoryPoolAllocationMetricsTest { - @Test - public void testListenerLogic() { - PrometheusRegistry registry = new PrometheusRegistry(); - Counter counter = Counter.builder().name("test").labelNames("pool").register(registry); - AllocationCountingNotificationListener listener = new AllocationCountingNotificationListener(counter); + @Test + public void testListenerLogic() { + PrometheusRegistry registry = new PrometheusRegistry(); + Counter counter = Counter.builder().name("test").labelNames("pool").register(registry); + AllocationCountingNotificationListener listener = + new AllocationCountingNotificationListener(counter); - // Increase by 123 - listener.handleMemoryPool("TestPool", 0, 123); - assertEquals(123, getCountByPool("test", "TestPool", registry.scrape()), 0.0); + // Increase by 123 + listener.handleMemoryPool("TestPool", 0, 123); + assertEquals(123, getCountByPool("test", "TestPool", registry.scrape()), 0.0); - // No increase - listener.handleMemoryPool("TestPool", 123, 123); - assertEquals(123, getCountByPool("test", "TestPool", registry.scrape()), 0.0); + // No increase + listener.handleMemoryPool("TestPool", 123, 
123); + assertEquals(123, getCountByPool("test", "TestPool", registry.scrape()), 0.0); - // No increase, then decrease to 0 - listener.handleMemoryPool("TestPool", 123, 0); - assertEquals(123, getCountByPool("test", "TestPool", registry.scrape()), 0.0); + // No increase, then decrease to 0 + listener.handleMemoryPool("TestPool", 123, 0); + assertEquals(123, getCountByPool("test", "TestPool", registry.scrape()), 0.0); - // No increase, then increase by 7 - listener.handleMemoryPool("TestPool", 0, 7); - assertEquals(130, getCountByPool("test", "TestPool", registry.scrape()), 0.0); + // No increase, then increase by 7 + listener.handleMemoryPool("TestPool", 0, 7); + assertEquals(130, getCountByPool("test", "TestPool", registry.scrape()), 0.0); - // Increase by 10, then decrease to 10 - listener.handleMemoryPool("TestPool", 17, 10); - assertEquals(140, getCountByPool("test", "TestPool", registry.scrape()), 0.0); + // Increase by 10, then decrease to 10 + listener.handleMemoryPool("TestPool", 17, 10); + assertEquals(140, getCountByPool("test", "TestPool", registry.scrape()), 0.0); - // Increase by 7, then increase by 3 - listener.handleMemoryPool("TestPool", 17, 20); - assertEquals(150, getCountByPool("test", "TestPool", registry.scrape()), 0.0); + // Increase by 7, then increase by 3 + listener.handleMemoryPool("TestPool", 17, 20); + assertEquals(150, getCountByPool("test", "TestPool", registry.scrape()), 0.0); - // Decrease to 17, then increase by 3 - listener.handleMemoryPool("TestPool", 17, 20); - assertEquals(153, getCountByPool("test", "TestPool", registry.scrape()), 0.0); - } + // Decrease to 17, then increase by 3 + listener.handleMemoryPool("TestPool", 17, 20); + assertEquals(153, getCountByPool("test", "TestPool", registry.scrape()), 0.0); + } - private double getCountByPool(String metricName, String poolName, MetricSnapshots snapshots) { - for (MetricSnapshot snapshot : snapshots) { - if (snapshot.getMetadata().getPrometheusName().equals(metricName)) { - for (CounterSnapshot.CounterDataPointSnapshot data : ((CounterSnapshot) snapshot).getDataPoints()) { - if (data.getLabels().get("pool").equals(poolName)) { - return data.getValue(); - } - } - } + private double getCountByPool(String metricName, String poolName, MetricSnapshots snapshots) { + for (MetricSnapshot snapshot : snapshots) { + if (snapshot.getMetadata().getPrometheusName().equals(metricName)) { + for (CounterSnapshot.CounterDataPointSnapshot data : + ((CounterSnapshot) snapshot).getDataPoints()) { + if (data.getLabels().get("pool").equals(poolName)) { + return data.getValue(); + } } - Assert.fail("pool " + poolName + " not found."); - return 0.0; + } } - + Assert.fail("pool " + poolName + " not found."); + return 0.0; + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMetricsTest.java index d93b1682d..f0df40538 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmMetricsTest.java @@ -1,19 +1,19 @@ package io.prometheus.metrics.instrumentation.jvm; -import io.prometheus.metrics.model.registry.PrometheusRegistry; -import org.junit.Test; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import 
io.prometheus.metrics.model.registry.PrometheusRegistry; +import org.junit.Test; + public class JvmMetricsTest { - @Test - public void testRegisterIdempotent() { - PrometheusRegistry registry = new PrometheusRegistry(); - assertEquals(0, registry.scrape().size()); - JvmMetrics.builder().register(registry); - assertTrue(registry.scrape().size() > 0); - JvmMetrics.builder().register(registry); - } + @Test + public void testRegisterIdempotent() { + PrometheusRegistry registry = new PrometheusRegistry(); + assertEquals(0, registry.scrape().size()); + JvmMetrics.builder().register(registry); + assertTrue(registry.scrape().size() > 0); + JvmMetrics.builder().register(registry); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetricsTest.java index 4b9321660..13d1cc99f 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmNativeMemoryMetricsTest.java @@ -1,25 +1,25 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.Mockito.when; + import io.prometheus.metrics.config.PrometheusProperties; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import java.io.IOException; import junit.framework.TestCase; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; -import java.io.IOException; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.Mockito.when; - public class JvmNativeMemoryMetricsTest extends TestCase { @Test public void testNativeMemoryTrackingFail() throws IOException { JvmNativeMemoryMetrics.isEnabled.set(true); - JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); + JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = + Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); when(adapter.vmNativeMemorySummaryInBytes()).thenThrow(new RuntimeException("mock")); PrometheusRegistry registry = new PrometheusRegistry(); @@ -35,7 +35,8 @@ public void testNativeMemoryTrackingFail() throws IOException { public void testNativeMemoryTrackingEmpty() throws IOException { JvmNativeMemoryMetrics.isEnabled.set(true); - JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); + JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = + Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); when(adapter.vmNativeMemorySummaryInBytes()).thenReturn(""); PrometheusRegistry registry = new PrometheusRegistry(); @@ -51,8 +52,10 @@ public void testNativeMemoryTrackingEmpty() throws IOException { public void testNativeMemoryTrackingDisabled() throws IOException { JvmNativeMemoryMetrics.isEnabled.set(true); - JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); - when(adapter.vmNativeMemorySummaryInBytes()).thenReturn("Native memory tracking is not enabled"); + 
JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = + Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); + when(adapter.vmNativeMemorySummaryInBytes()) + .thenReturn("Native memory tracking is not enabled"); PrometheusRegistry registry = new PrometheusRegistry(); new JvmNativeMemoryMetrics.Builder(PrometheusProperties.get(), adapter).register(registry); @@ -67,159 +70,161 @@ public void testNativeMemoryTrackingDisabled() throws IOException { public void testNativeMemoryTrackingEnabled() throws IOException { JvmNativeMemoryMetrics.isEnabled.set(true); - JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); - when(adapter.vmNativeMemorySummaryInBytes()).thenReturn( - "Native Memory Tracking:\n" + - "\n" + - "Total: reserved=10341970661, committed=642716389\n" + - " malloc: 27513573 #22947\n" + - " mmap: reserved=10314457088, committed=615202816\n" + - "\n" + - "- Java Heap (reserved=8531214336, committed=536870912)\n" + - " (mmap: reserved=8531214336, committed=536870912) \n" + - " \n" + - "- Class (reserved=1073899939, committed=616867)\n" + - " (classes #1630)\n" + - " ( instance classes #1462, array classes #168)\n" + - " (malloc=158115 #2350) \n" + - " (mmap: reserved=1073741824, committed=458752) \n" + - " ( Metadata: )\n" + - " ( reserved=67108864, committed=2818048)\n" + - " ( used=2748008)\n" + - " ( waste=70040 =2.49%)\n" + - " ( Class space:)\n" + - " ( reserved=1073741824, committed=458752)\n" + - " ( used=343568)\n" + - " ( waste=115184 =25.11%)\n" + - " \n" + - "- Thread (reserved=21020080, committed=847280)\n" + - " (thread #20)\n" + - " (stack: reserved=20971520, committed=798720)\n" + - " (malloc=27512 #125) \n" + - " (arena=21048 #37)\n" + - " \n" + - "- Code (reserved=253796784, committed=7836080)\n" + - " (malloc=105944 #1403) \n" + - " (mmap: reserved=253689856, committed=7729152) \n" + - " (arena=984 #1)\n" + - " \n" + - "- GC (reserved=373343252, committed=76530708)\n" + - " (malloc=22463508 #720) \n" + - " (mmap: reserved=350879744, committed=54067200) \n" + - " \n" + - "- Compiler (reserved=1926356, committed=1926356)\n" + - " (malloc=20428 #73) \n" + - " (arena=1905928 #20)\n" + - " \n" + - "- Internal (reserved=242257, committed=242257)\n" + - " (malloc=176721 #1808) \n" + - " (mmap: reserved=65536, committed=65536) \n" + - " \n" + - "- Other (reserved=4096, committed=4096)\n" + - " (malloc=4096 #2) \n" + - " \n" + - "- Symbol (reserved=1505072, committed=1505072)\n" + - " (malloc=1136432 #14482) \n" + - " (arena=368640 #1)\n" + - " \n" + - "- Native Memory Tracking (reserved=373448, committed=373448)\n" + - " (malloc=6280 #91) \n" + - " (tracking overhead=367168)\n" + - " \n" + - "- Shared class space (reserved=16777216, committed=12386304)\n" + - " (mmap: reserved=16777216, committed=12386304) \n" + - " \n" + - "- Arena Chunk (reserved=503216, committed=503216)\n" + - " (malloc=503216) \n" + - " \n" + - "- Tracing (reserved=33097, committed=33097)\n" + - " (malloc=369 #10) \n" + - " (arena=32728 #1)\n" + - " \n" + - "- Arguments (reserved=160, committed=160)\n" + - " (malloc=160 #5) \n" + - " \n" + - "- Module (reserved=169168, committed=169168)\n" + - " (malloc=169168 #1266) \n" + - " \n" + - "- Safepoint (reserved=8192, committed=8192)\n" + - " (mmap: reserved=8192, committed=8192) \n" + - " \n" + - "- Synchronization (reserved=31160, committed=31160)\n" + - " (malloc=31160 #452) \n" + - " \n" + - "- Serviceability (reserved=600, committed=600)\n" + - " 
(malloc=600 #6) \n" + - " \n" + - "- Metaspace (reserved=67120768, committed=2829952)\n" + - " (malloc=11904 #12) \n" + - " (mmap: reserved=67108864, committed=2818048) \n" + - " \n" + - "- String Deduplication (reserved=632, committed=632)\n" + - " (malloc=632 #8) \n" + - " \n" + - "- Object Monitors (reserved=832, committed=832)\n" + - " (malloc=832 #4) \n" + - " \n" + - "\n" - ); + JvmNativeMemoryMetrics.PlatformMBeanServerAdapter adapter = + Mockito.mock(JvmNativeMemoryMetrics.PlatformMBeanServerAdapter.class); + when(adapter.vmNativeMemorySummaryInBytes()) + .thenReturn( + "Native Memory Tracking:\n" + + "\n" + + "Total: reserved=10341970661, committed=642716389\n" + + " malloc: 27513573 #22947\n" + + " mmap: reserved=10314457088, committed=615202816\n" + + "\n" + + "- Java Heap (reserved=8531214336, committed=536870912)\n" + + " (mmap: reserved=8531214336, committed=536870912) \n" + + " \n" + + "- Class (reserved=1073899939, committed=616867)\n" + + " (classes #1630)\n" + + " ( instance classes #1462, array classes #168)\n" + + " (malloc=158115 #2350) \n" + + " (mmap: reserved=1073741824, committed=458752) \n" + + " ( Metadata: )\n" + + " ( reserved=67108864, committed=2818048)\n" + + " ( used=2748008)\n" + + " ( waste=70040 =2.49%)\n" + + " ( Class space:)\n" + + " ( reserved=1073741824, committed=458752)\n" + + " ( used=343568)\n" + + " ( waste=115184 =25.11%)\n" + + " \n" + + "- Thread (reserved=21020080, committed=847280)\n" + + " (thread #20)\n" + + " (stack: reserved=20971520, committed=798720)\n" + + " (malloc=27512 #125) \n" + + " (arena=21048 #37)\n" + + " \n" + + "- Code (reserved=253796784, committed=7836080)\n" + + " (malloc=105944 #1403) \n" + + " (mmap: reserved=253689856, committed=7729152) \n" + + " (arena=984 #1)\n" + + " \n" + + "- GC (reserved=373343252, committed=76530708)\n" + + " (malloc=22463508 #720) \n" + + " (mmap: reserved=350879744, committed=54067200) \n" + + " \n" + + "- Compiler (reserved=1926356, committed=1926356)\n" + + " (malloc=20428 #73) \n" + + " (arena=1905928 #20)\n" + + " \n" + + "- Internal (reserved=242257, committed=242257)\n" + + " (malloc=176721 #1808) \n" + + " (mmap: reserved=65536, committed=65536) \n" + + " \n" + + "- Other (reserved=4096, committed=4096)\n" + + " (malloc=4096 #2) \n" + + " \n" + + "- Symbol (reserved=1505072, committed=1505072)\n" + + " (malloc=1136432 #14482) \n" + + " (arena=368640 #1)\n" + + " \n" + + "- Native Memory Tracking (reserved=373448, committed=373448)\n" + + " (malloc=6280 #91) \n" + + " (tracking overhead=367168)\n" + + " \n" + + "- Shared class space (reserved=16777216, committed=12386304)\n" + + " (mmap: reserved=16777216, committed=12386304) \n" + + " \n" + + "- Arena Chunk (reserved=503216, committed=503216)\n" + + " (malloc=503216) \n" + + " \n" + + "- Tracing (reserved=33097, committed=33097)\n" + + " (malloc=369 #10) \n" + + " (arena=32728 #1)\n" + + " \n" + + "- Arguments (reserved=160, committed=160)\n" + + " (malloc=160 #5) \n" + + " \n" + + "- Module (reserved=169168, committed=169168)\n" + + " (malloc=169168 #1266) \n" + + " \n" + + "- Safepoint (reserved=8192, committed=8192)\n" + + " (mmap: reserved=8192, committed=8192) \n" + + " \n" + + "- Synchronization (reserved=31160, committed=31160)\n" + + " (malloc=31160 #452) \n" + + " \n" + + "- Serviceability (reserved=600, committed=600)\n" + + " (malloc=600 #6) \n" + + " \n" + + "- Metaspace (reserved=67120768, committed=2829952)\n" + + " (malloc=11904 #12) \n" + + " (mmap: reserved=67108864, committed=2818048) \n" + + " \n" + + "- String 
Deduplication (reserved=632, committed=632)\n" + + " (malloc=632 #8) \n" + + " \n" + + "- Object Monitors (reserved=832, committed=832)\n" + + " (malloc=832 #4) \n" + + " \n" + + "\n"); PrometheusRegistry registry = new PrometheusRegistry(); new JvmNativeMemoryMetrics.Builder(PrometheusProperties.get(), adapter).register(registry); MetricSnapshots snapshots = registry.scrape(); - String expected = "" + - "# TYPE jvm_native_memory_committed_bytes gauge\n" + - "# UNIT jvm_native_memory_committed_bytes bytes\n" + - "# HELP jvm_native_memory_committed_bytes Committed bytes of a given JVM. Committed memory represents the amount of memory the JVM is using right now.\n" + - "jvm_native_memory_committed_bytes{pool=\"Arena Chunk\"} 503216.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Arguments\"} 160.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Class\"} 616867.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Code\"} 7836080.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Compiler\"} 1926356.0\n" + - "jvm_native_memory_committed_bytes{pool=\"GC\"} 7.6530708E7\n" + - "jvm_native_memory_committed_bytes{pool=\"Internal\"} 242257.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Java Heap\"} 5.36870912E8\n" + - "jvm_native_memory_committed_bytes{pool=\"Metaspace\"} 2829952.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Module\"} 169168.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Native Memory Tracking\"} 373448.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Object Monitors\"} 832.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Other\"} 4096.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Safepoint\"} 8192.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Serviceability\"} 600.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Shared class space\"} 1.2386304E7\n" + - "jvm_native_memory_committed_bytes{pool=\"String Deduplication\"} 632.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Symbol\"} 1505072.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Synchronization\"} 31160.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Thread\"} 847280.0\n" + - "jvm_native_memory_committed_bytes{pool=\"Total\"} 6.42716389E8\n" + - "jvm_native_memory_committed_bytes{pool=\"Tracing\"} 33097.0\n" + - "# TYPE jvm_native_memory_reserved_bytes gauge\n" + - "# UNIT jvm_native_memory_reserved_bytes bytes\n" + - "# HELP jvm_native_memory_reserved_bytes Reserved bytes of a given JVM. 
Reserved memory represents the total amount of memory the JVM can potentially use.\n" + - "jvm_native_memory_reserved_bytes{pool=\"Arena Chunk\"} 503216.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Arguments\"} 160.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Class\"} 1.073899939E9\n" + - "jvm_native_memory_reserved_bytes{pool=\"Code\"} 2.53796784E8\n" + - "jvm_native_memory_reserved_bytes{pool=\"Compiler\"} 1926356.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"GC\"} 3.73343252E8\n" + - "jvm_native_memory_reserved_bytes{pool=\"Internal\"} 242257.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Java Heap\"} 8.531214336E9\n" + - "jvm_native_memory_reserved_bytes{pool=\"Metaspace\"} 6.7120768E7\n" + - "jvm_native_memory_reserved_bytes{pool=\"Module\"} 169168.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Native Memory Tracking\"} 373448.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Object Monitors\"} 832.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Other\"} 4096.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Safepoint\"} 8192.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Serviceability\"} 600.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Shared class space\"} 1.6777216E7\n" + - "jvm_native_memory_reserved_bytes{pool=\"String Deduplication\"} 632.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Symbol\"} 1505072.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Synchronization\"} 31160.0\n" + - "jvm_native_memory_reserved_bytes{pool=\"Thread\"} 2.102008E7\n" + - "jvm_native_memory_reserved_bytes{pool=\"Total\"} 1.0341970661E10\n" + - "jvm_native_memory_reserved_bytes{pool=\"Tracing\"} 33097.0\n" + - "# EOF\n"; + String expected = + "" + + "# TYPE jvm_native_memory_committed_bytes gauge\n" + + "# UNIT jvm_native_memory_committed_bytes bytes\n" + + "# HELP jvm_native_memory_committed_bytes Committed bytes of a given JVM. 
Committed memory represents the amount of memory the JVM is using right now.\n" + + "jvm_native_memory_committed_bytes{pool=\"Arena Chunk\"} 503216.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Arguments\"} 160.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Class\"} 616867.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Code\"} 7836080.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Compiler\"} 1926356.0\n" + + "jvm_native_memory_committed_bytes{pool=\"GC\"} 7.6530708E7\n" + + "jvm_native_memory_committed_bytes{pool=\"Internal\"} 242257.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Java Heap\"} 5.36870912E8\n" + + "jvm_native_memory_committed_bytes{pool=\"Metaspace\"} 2829952.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Module\"} 169168.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Native Memory Tracking\"} 373448.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Object Monitors\"} 832.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Other\"} 4096.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Safepoint\"} 8192.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Serviceability\"} 600.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Shared class space\"} 1.2386304E7\n" + + "jvm_native_memory_committed_bytes{pool=\"String Deduplication\"} 632.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Symbol\"} 1505072.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Synchronization\"} 31160.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Thread\"} 847280.0\n" + + "jvm_native_memory_committed_bytes{pool=\"Total\"} 6.42716389E8\n" + + "jvm_native_memory_committed_bytes{pool=\"Tracing\"} 33097.0\n" + + "# TYPE jvm_native_memory_reserved_bytes gauge\n" + + "# UNIT jvm_native_memory_reserved_bytes bytes\n" + + "# HELP jvm_native_memory_reserved_bytes Reserved bytes of a given JVM. 
Reserved memory represents the total amount of memory the JVM can potentially use.\n" + + "jvm_native_memory_reserved_bytes{pool=\"Arena Chunk\"} 503216.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Arguments\"} 160.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Class\"} 1.073899939E9\n" + + "jvm_native_memory_reserved_bytes{pool=\"Code\"} 2.53796784E8\n" + + "jvm_native_memory_reserved_bytes{pool=\"Compiler\"} 1926356.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"GC\"} 3.73343252E8\n" + + "jvm_native_memory_reserved_bytes{pool=\"Internal\"} 242257.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Java Heap\"} 8.531214336E9\n" + + "jvm_native_memory_reserved_bytes{pool=\"Metaspace\"} 6.7120768E7\n" + + "jvm_native_memory_reserved_bytes{pool=\"Module\"} 169168.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Native Memory Tracking\"} 373448.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Object Monitors\"} 832.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Other\"} 4096.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Safepoint\"} 8192.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Serviceability\"} 600.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Shared class space\"} 1.6777216E7\n" + + "jvm_native_memory_reserved_bytes{pool=\"String Deduplication\"} 632.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Symbol\"} 1505072.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Synchronization\"} 31160.0\n" + + "jvm_native_memory_reserved_bytes{pool=\"Thread\"} 2.102008E7\n" + + "jvm_native_memory_reserved_bytes{pool=\"Total\"} 1.0341970661E10\n" + + "jvm_native_memory_reserved_bytes{pool=\"Tracing\"} 33097.0\n" + + "# EOF\n"; Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetricTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetricTest.java index 0c4dd786a..a99178234 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetricTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmRuntimeInfoMetricTest.java @@ -1,32 +1,32 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; + import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; +import java.io.IOException; import org.junit.Assert; import org.junit.Test; -import java.io.IOException; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; - public class JvmRuntimeInfoMetricTest { - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - JvmRuntimeInfoMetric.builder() - .version("1.8.0_382-b05") - .vendor("Oracle Corporation") - .runtime("OpenJDK Runtime Environment") - .register(registry); - MetricSnapshots snapshots = registry.scrape(); + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + JvmRuntimeInfoMetric.builder() + .version("1.8.0_382-b05") + .vendor("Oracle Corporation") + .runtime("OpenJDK Runtime Environment") + .register(registry); + MetricSnapshots snapshots = registry.scrape(); - String expected = "" + - "# TYPE jvm_runtime info\n" + - "# HELP 
jvm_runtime JVM runtime info\n" + - "jvm_runtime_info{runtime=\"OpenJDK Runtime Environment\",vendor=\"Oracle Corporation\",version=\"1.8.0_382-b05\"} 1\n" + - "# EOF\n"; + String expected = + "" + + "# TYPE jvm_runtime info\n" + + "# HELP jvm_runtime JVM runtime info\n" + + "jvm_runtime_info{runtime=\"OpenJDK Runtime Environment\",vendor=\"Oracle Corporation\",version=\"1.8.0_382-b05\"} 1\n" + + "# EOF\n"; - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetricsTest.java index 94cadc268..e454082b2 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/JvmThreadsMetricsTest.java @@ -1,200 +1,199 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.GaugeSnapshot; import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.MetricSnapshots; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - import java.io.IOException; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; public class JvmThreadsMetricsTest { - private ThreadMXBean mockThreadsBean = Mockito.mock(ThreadMXBean.class); - private ThreadInfo mockThreadInfoBlocked = Mockito.mock(ThreadInfo.class); - private ThreadInfo mockThreadInfoRunnable1 = Mockito.mock(ThreadInfo.class); - private ThreadInfo mockThreadInfoRunnable2 = Mockito.mock(ThreadInfo.class); - - @Before - public void setUp() { - when(mockThreadsBean.getThreadCount()).thenReturn(300); - when(mockThreadsBean.getDaemonThreadCount()).thenReturn(200); - when(mockThreadsBean.getPeakThreadCount()).thenReturn(301); - when(mockThreadsBean.getTotalStartedThreadCount()).thenReturn(503L); - when(mockThreadsBean.findDeadlockedThreads()).thenReturn(new long[]{1L, 2L, 3L}); - when(mockThreadsBean.findMonitorDeadlockedThreads()).thenReturn(new long[]{2L, 3L, 4L}); - when(mockThreadsBean.getAllThreadIds()).thenReturn(new long[]{3L, 4L, 5L}); - when(mockThreadInfoBlocked.getThreadState()).thenReturn(Thread.State.BLOCKED); - when(mockThreadInfoRunnable1.getThreadState()).thenReturn(Thread.State.RUNNABLE); - when(mockThreadInfoRunnable2.getThreadState()).thenReturn(Thread.State.RUNNABLE); - when(mockThreadsBean.getThreadInfo(new long[]{3L, 4L, 5L}, 
0)).thenReturn(new ThreadInfo[]{ - mockThreadInfoBlocked, mockThreadInfoRunnable1, mockThreadInfoRunnable2 - }); + private ThreadMXBean mockThreadsBean = Mockito.mock(ThreadMXBean.class); + private ThreadInfo mockThreadInfoBlocked = Mockito.mock(ThreadInfo.class); + private ThreadInfo mockThreadInfoRunnable1 = Mockito.mock(ThreadInfo.class); + private ThreadInfo mockThreadInfoRunnable2 = Mockito.mock(ThreadInfo.class); + + @Before + public void setUp() { + when(mockThreadsBean.getThreadCount()).thenReturn(300); + when(mockThreadsBean.getDaemonThreadCount()).thenReturn(200); + when(mockThreadsBean.getPeakThreadCount()).thenReturn(301); + when(mockThreadsBean.getTotalStartedThreadCount()).thenReturn(503L); + when(mockThreadsBean.findDeadlockedThreads()).thenReturn(new long[] {1L, 2L, 3L}); + when(mockThreadsBean.findMonitorDeadlockedThreads()).thenReturn(new long[] {2L, 3L, 4L}); + when(mockThreadsBean.getAllThreadIds()).thenReturn(new long[] {3L, 4L, 5L}); + when(mockThreadInfoBlocked.getThreadState()).thenReturn(Thread.State.BLOCKED); + when(mockThreadInfoRunnable1.getThreadState()).thenReturn(Thread.State.RUNNABLE); + when(mockThreadInfoRunnable2.getThreadState()).thenReturn(Thread.State.RUNNABLE); + when(mockThreadsBean.getThreadInfo(new long[] {3L, 4L, 5L}, 0)) + .thenReturn( + new ThreadInfo[] { + mockThreadInfoBlocked, mockThreadInfoRunnable1, mockThreadInfoRunnable2 + }); + } + + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + JvmThreadsMetrics.builder().threadBean(mockThreadsBean).isNativeImage(false).register(registry); + MetricSnapshots snapshots = registry.scrape(); + + String expected = + "" + + "# TYPE jvm_threads_current gauge\n" + + "# HELP jvm_threads_current Current thread count of a JVM\n" + + "jvm_threads_current 300.0\n" + + "# TYPE jvm_threads_daemon gauge\n" + + "# HELP jvm_threads_daemon Daemon thread count of a JVM\n" + + "jvm_threads_daemon 200.0\n" + + "# TYPE jvm_threads_deadlocked gauge\n" + + "# HELP jvm_threads_deadlocked Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers\n" + + "jvm_threads_deadlocked 3.0\n" + + "# TYPE jvm_threads_deadlocked_monitor gauge\n" + + "# HELP jvm_threads_deadlocked_monitor Cycles of JVM-threads that are in deadlock waiting to acquire object monitors\n" + + "jvm_threads_deadlocked_monitor 3.0\n" + + "# TYPE jvm_threads_peak gauge\n" + + "# HELP jvm_threads_peak Peak thread count of a JVM\n" + + "jvm_threads_peak 301.0\n" + + "# TYPE jvm_threads_started counter\n" + + "# HELP jvm_threads_started Started thread count of a JVM\n" + + "jvm_threads_started_total 503.0\n" + + "# TYPE jvm_threads_state gauge\n" + + "# HELP jvm_threads_state Current count of threads by state\n" + + "jvm_threads_state{state=\"BLOCKED\"} 1.0\n" + + "jvm_threads_state{state=\"NEW\"} 0.0\n" + + "jvm_threads_state{state=\"RUNNABLE\"} 2.0\n" + + "jvm_threads_state{state=\"TERMINATED\"} 0.0\n" + + "jvm_threads_state{state=\"TIMED_WAITING\"} 0.0\n" + + "jvm_threads_state{state=\"UNKNOWN\"} 0.0\n" + + "jvm_threads_state{state=\"WAITING\"} 0.0\n" + + "# EOF\n"; + + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } + + @Test + public void testIgnoredMetricNotScraped() { + MetricNameFilter filter = + MetricNameFilter.builder().nameMustNotBeEqualTo("jvm_threads_deadlocked").build(); + + PrometheusRegistry registry = new PrometheusRegistry(); + 
JvmThreadsMetrics.builder().threadBean(mockThreadsBean).isNativeImage(false).register(registry); + registry.scrape(filter); + + verify(mockThreadsBean, times(0)).findDeadlockedThreads(); + verify(mockThreadsBean, times(1)).getThreadCount(); + } + + @Test + public void testInvalidThreadIds() { + try { + String javaVersion = System.getProperty("java.version"); // Example: "21.0.2" + String majorJavaVersion = javaVersion.replaceAll("\\..*", ""); // Example: "21" + if (Integer.parseInt(majorJavaVersion) >= 21) { + // With Java 21 and newer you can no longer have invalid thread ids. + return; + } + } catch (NumberFormatException ignored) { } - - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - JvmThreadsMetrics.builder() - .threadBean(mockThreadsBean) - .isNativeImage(false) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); - - String expected = "" + - "# TYPE jvm_threads_current gauge\n" + - "# HELP jvm_threads_current Current thread count of a JVM\n" + - "jvm_threads_current 300.0\n" + - "# TYPE jvm_threads_daemon gauge\n" + - "# HELP jvm_threads_daemon Daemon thread count of a JVM\n" + - "jvm_threads_daemon 200.0\n" + - "# TYPE jvm_threads_deadlocked gauge\n" + - "# HELP jvm_threads_deadlocked Cycles of JVM-threads that are in deadlock waiting to acquire object monitors or ownable synchronizers\n" + - "jvm_threads_deadlocked 3.0\n" + - "# TYPE jvm_threads_deadlocked_monitor gauge\n" + - "# HELP jvm_threads_deadlocked_monitor Cycles of JVM-threads that are in deadlock waiting to acquire object monitors\n" + - "jvm_threads_deadlocked_monitor 3.0\n" + - "# TYPE jvm_threads_peak gauge\n" + - "# HELP jvm_threads_peak Peak thread count of a JVM\n" + - "jvm_threads_peak 301.0\n" + - "# TYPE jvm_threads_started counter\n" + - "# HELP jvm_threads_started Started thread count of a JVM\n" + - "jvm_threads_started_total 503.0\n" + - "# TYPE jvm_threads_state gauge\n" + - "# HELP jvm_threads_state Current count of threads by state\n" + - "jvm_threads_state{state=\"BLOCKED\"} 1.0\n" + - "jvm_threads_state{state=\"NEW\"} 0.0\n" + - "jvm_threads_state{state=\"RUNNABLE\"} 2.0\n" + - "jvm_threads_state{state=\"TERMINATED\"} 0.0\n" + - "jvm_threads_state{state=\"TIMED_WAITING\"} 0.0\n" + - "jvm_threads_state{state=\"UNKNOWN\"} 0.0\n" + - "jvm_threads_state{state=\"WAITING\"} 0.0\n" + - "# EOF\n"; - - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } - - @Test - public void testIgnoredMetricNotScraped() { - MetricNameFilter filter = MetricNameFilter.builder() - .nameMustNotBeEqualTo("jvm_threads_deadlocked") - .build(); - - PrometheusRegistry registry = new PrometheusRegistry(); - JvmThreadsMetrics.builder() - .threadBean(mockThreadsBean) - .isNativeImage(false) - .register(registry); - registry.scrape(filter); - - verify(mockThreadsBean, times(0)).findDeadlockedThreads(); - verify(mockThreadsBean, times(1)).getThreadCount(); + PrometheusRegistry registry = new PrometheusRegistry(); + JvmThreadsMetrics.builder().register(registry); + + // Number of threads to create with invalid thread ids + int numberOfInvalidThreadIds = 2; + + Map expected = getCountByState(registry.scrape()); + expected.compute( + "UNKNOWN", + (key, oldValue) -> + oldValue == null ? numberOfInvalidThreadIds : oldValue + numberOfInvalidThreadIds); + + final CountDownLatch countDownLatch = new CountDownLatch(numberOfInvalidThreadIds); + + try { + // Create and start threads with invalid thread ids (id=0, id=-1, etc.) 
+ for (int i = 0; i < numberOfInvalidThreadIds; i++) { + new ThreadWithInvalidId(-i, new TestRunnable(countDownLatch)).start(); + } + + Map actual = getCountByState(registry.scrape()); + + Assert.assertEquals(expected.size(), actual.size()); + for (String threadState : expected.keySet()) { + Assert.assertEquals(expected.get(threadState), actual.get(threadState), 0.0); + } + } finally { + for (int i = 0; i < numberOfInvalidThreadIds; i++) { + countDownLatch.countDown(); + } } - - @Test - public void testInvalidThreadIds() { - try { - String javaVersion = System.getProperty("java.version"); // Example: "21.0.2" - String majorJavaVersion = javaVersion.replaceAll("\\..*", ""); // Example: "21" - if (Integer.parseInt(majorJavaVersion) >= 21) { - // With Java 21 and newer you can no longer have invalid thread ids. - return; - } - } catch (NumberFormatException ignored) { + } + + private Map getCountByState(MetricSnapshots snapshots) { + Map result = new HashMap<>(); + for (MetricSnapshot snapshot : snapshots) { + if (snapshot.getMetadata().getName().equals("jvm_threads_state")) { + for (GaugeSnapshot.GaugeDataPointSnapshot data : + ((GaugeSnapshot) snapshot).getDataPoints()) { + String state = data.getLabels().get("state"); + Assert.assertNotNull(state); + result.put(state, data.getValue()); } - PrometheusRegistry registry = new PrometheusRegistry(); - JvmThreadsMetrics.builder().register(registry); - - // Number of threads to create with invalid thread ids - int numberOfInvalidThreadIds = 2; - - Map expected = getCountByState(registry.scrape()); - expected.compute("UNKNOWN", (key, oldValue) -> oldValue == null ? numberOfInvalidThreadIds : oldValue + numberOfInvalidThreadIds); - - final CountDownLatch countDownLatch = new CountDownLatch(numberOfInvalidThreadIds); + } + } + return result; + } - try { - // Create and start threads with invalid thread ids (id=0, id=-1, etc.) - for (int i = 0; i < numberOfInvalidThreadIds; i++) { - new ThreadWithInvalidId(-i, new TestRunnable(countDownLatch)).start(); - } + private static class ThreadWithInvalidId extends Thread { - Map actual = getCountByState(registry.scrape()); + private final long id; - Assert.assertEquals(expected.size(), actual.size()); - for (String threadState : expected.keySet()) { - Assert.assertEquals(expected.get(threadState), actual.get(threadState), 0.0); - } - } finally { - for (int i = 0; i < numberOfInvalidThreadIds; i++) { - countDownLatch.countDown(); - } - } + public ThreadWithInvalidId(long id, Runnable runnable) { + super(runnable); + setDaemon(true); + this.id = id; } - private Map getCountByState(MetricSnapshots snapshots) { - Map result = new HashMap<>(); - for (MetricSnapshot snapshot : snapshots) { - if (snapshot.getMetadata().getName().equals("jvm_threads_state")) { - for (GaugeSnapshot.GaugeDataPointSnapshot data : ((GaugeSnapshot) snapshot).getDataPoints()) { - String state = data.getLabels().get("state"); - Assert.assertNotNull(state); - result.put(state, data.getValue()); - } - } - } - return result; + /** + * Note that only Java versions < 21 call this to get the thread id. With Java 21 and newer it's + * no longer possible to make an invalid thread id. 
+ */ + @Override + public long getId() { + return this.id; } + } - private static class ThreadWithInvalidId extends Thread { - - private final long id; + private static class TestRunnable implements Runnable { - public ThreadWithInvalidId(long id, Runnable runnable) { - super(runnable); - setDaemon(true); - this.id = id; - } + private final CountDownLatch countDownLatch; - /** - * Note that only Java versions < 21 call this to get the thread id. - * With Java 21 and newer it's no longer possible to make an invalid thread id. - */ - @Override - public long getId() { - return this.id; - } + public TestRunnable(CountDownLatch countDownLatch) { + this.countDownLatch = countDownLatch; } - private static class TestRunnable implements Runnable { - - private final CountDownLatch countDownLatch; - - public TestRunnable(CountDownLatch countDownLatch) { - this.countDownLatch = countDownLatch; - } - - @Override - public void run() { - try { - countDownLatch.await(); - } catch (InterruptedException e) { - // DO NOTHING - } - } + @Override + public void run() { + try { + countDownLatch.await(); + } catch (InterruptedException e) { + // DO NOTHING + } } + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetricsTest.java b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetricsTest.java index 92f286e01..41a5b514b 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetricsTest.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/ProcessMetricsTest.java @@ -1,116 +1,120 @@ package io.prometheus.metrics.instrumentation.jvm; +import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import io.prometheus.metrics.model.registry.MetricNameFilter; import io.prometheus.metrics.model.registry.PrometheusRegistry; import io.prometheus.metrics.model.snapshots.MetricSnapshots; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mockito; - import java.io.File; import java.io.IOException; import java.lang.management.RuntimeMXBean; import java.util.concurrent.TimeUnit; - -import static io.prometheus.metrics.instrumentation.jvm.TestUtil.convertToOpenMetricsFormat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; public class ProcessMetricsTest { - private com.sun.management.UnixOperatingSystemMXBean sunOsBean = Mockito.mock(com.sun.management.UnixOperatingSystemMXBean.class); - private java.lang.management.OperatingSystemMXBean javaOsBean = Mockito.mock(java.lang.management.OperatingSystemMXBean.class); - private ProcessMetrics.Grepper linuxGrepper = Mockito.mock(ProcessMetrics.Grepper.class); - private ProcessMetrics.Grepper windowsGrepper = Mockito.mock(ProcessMetrics.Grepper.class); - private RuntimeMXBean runtimeBean = Mockito.mock(RuntimeMXBean.class); + private com.sun.management.UnixOperatingSystemMXBean 
sunOsBean = + Mockito.mock(com.sun.management.UnixOperatingSystemMXBean.class); + private java.lang.management.OperatingSystemMXBean javaOsBean = + Mockito.mock(java.lang.management.OperatingSystemMXBean.class); + private ProcessMetrics.Grepper linuxGrepper = Mockito.mock(ProcessMetrics.Grepper.class); + private ProcessMetrics.Grepper windowsGrepper = Mockito.mock(ProcessMetrics.Grepper.class); + private RuntimeMXBean runtimeBean = Mockito.mock(RuntimeMXBean.class); - @Before - public void setUp() throws IOException { - when(sunOsBean.getProcessCpuTime()).thenReturn(TimeUnit.MILLISECONDS.toNanos(72)); - when(sunOsBean.getOpenFileDescriptorCount()).thenReturn(127L); - when(sunOsBean.getMaxFileDescriptorCount()).thenReturn(244L); - when(runtimeBean.getStartTime()).thenReturn(37100L); - when(linuxGrepper.lineStartingWith(any(File.class), eq("VmSize:"))).thenReturn("VmSize: 6036 kB"); - when(linuxGrepper.lineStartingWith(any(File.class), eq("VmRSS:"))).thenReturn("VmRSS: 1012 kB"); - } + @Before + public void setUp() throws IOException { + when(sunOsBean.getProcessCpuTime()).thenReturn(TimeUnit.MILLISECONDS.toNanos(72)); + when(sunOsBean.getOpenFileDescriptorCount()).thenReturn(127L); + when(sunOsBean.getMaxFileDescriptorCount()).thenReturn(244L); + when(runtimeBean.getStartTime()).thenReturn(37100L); + when(linuxGrepper.lineStartingWith(any(File.class), eq("VmSize:"))) + .thenReturn("VmSize: 6036 kB"); + when(linuxGrepper.lineStartingWith(any(File.class), eq("VmRSS:"))) + .thenReturn("VmRSS: 1012 kB"); + } - @Test - public void testGoodCase() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - ProcessMetrics.builder() - .osBean(sunOsBean) - .runtimeBean(runtimeBean) - .grepper(linuxGrepper) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); + @Test + public void testGoodCase() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + ProcessMetrics.builder() + .osBean(sunOsBean) + .runtimeBean(runtimeBean) + .grepper(linuxGrepper) + .register(registry); + MetricSnapshots snapshots = registry.scrape(); - String expected = "" + - "# TYPE process_cpu_seconds counter\n" + - "# UNIT process_cpu_seconds seconds\n" + - "# HELP process_cpu_seconds Total user and system CPU time spent in seconds.\n" + - "process_cpu_seconds_total 0.072\n" + - "# TYPE process_max_fds gauge\n" + - "# HELP process_max_fds Maximum number of open file descriptors.\n" + - "process_max_fds 244.0\n" + - "# TYPE process_open_fds gauge\n" + - "# HELP process_open_fds Number of open file descriptors.\n" + - "process_open_fds 127.0\n" + - "# TYPE process_resident_memory_bytes gauge\n" + - "# UNIT process_resident_memory_bytes bytes\n" + - "# HELP process_resident_memory_bytes Resident memory size in bytes.\n" + - "process_resident_memory_bytes 1036288.0\n" + - "# TYPE process_start_time_seconds gauge\n" + - "# UNIT process_start_time_seconds seconds\n" + - "# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.\n" + - "process_start_time_seconds 37.1\n" + - "# TYPE process_virtual_memory_bytes gauge\n" + - "# UNIT process_virtual_memory_bytes bytes\n" + - "# HELP process_virtual_memory_bytes Virtual memory size in bytes.\n" + - "process_virtual_memory_bytes 6180864.0\n" + - "# EOF\n"; + String expected = + "" + + "# TYPE process_cpu_seconds counter\n" + + "# UNIT process_cpu_seconds seconds\n" + + "# HELP process_cpu_seconds Total user and system CPU time spent in seconds.\n" + + "process_cpu_seconds_total 0.072\n" + + "# 
TYPE process_max_fds gauge\n" + + "# HELP process_max_fds Maximum number of open file descriptors.\n" + + "process_max_fds 244.0\n" + + "# TYPE process_open_fds gauge\n" + + "# HELP process_open_fds Number of open file descriptors.\n" + + "process_open_fds 127.0\n" + + "# TYPE process_resident_memory_bytes gauge\n" + + "# UNIT process_resident_memory_bytes bytes\n" + + "# HELP process_resident_memory_bytes Resident memory size in bytes.\n" + + "process_resident_memory_bytes 1036288.0\n" + + "# TYPE process_start_time_seconds gauge\n" + + "# UNIT process_start_time_seconds seconds\n" + + "# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.\n" + + "process_start_time_seconds 37.1\n" + + "# TYPE process_virtual_memory_bytes gauge\n" + + "# UNIT process_virtual_memory_bytes bytes\n" + + "# HELP process_virtual_memory_bytes Virtual memory size in bytes.\n" + + "process_virtual_memory_bytes 6180864.0\n" + + "# EOF\n"; - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } - @Test - public void testMinimal() throws IOException { - PrometheusRegistry registry = new PrometheusRegistry(); - ProcessMetrics.builder() - .osBean(javaOsBean) - .runtimeBean(runtimeBean) - .grepper(windowsGrepper) - .register(registry); - MetricSnapshots snapshots = registry.scrape(); + @Test + public void testMinimal() throws IOException { + PrometheusRegistry registry = new PrometheusRegistry(); + ProcessMetrics.builder() + .osBean(javaOsBean) + .runtimeBean(runtimeBean) + .grepper(windowsGrepper) + .register(registry); + MetricSnapshots snapshots = registry.scrape(); - String expected = "" + - "# TYPE process_start_time_seconds gauge\n" + - "# UNIT process_start_time_seconds seconds\n" + - "# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.\n" + - "process_start_time_seconds 37.1\n" + - "# EOF\n"; + String expected = + "" + + "# TYPE process_start_time_seconds gauge\n" + + "# UNIT process_start_time_seconds seconds\n" + + "# HELP process_start_time_seconds Start time of the process since unix epoch in seconds.\n" + + "process_start_time_seconds 37.1\n" + + "# EOF\n"; - Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); - } + Assert.assertEquals(expected, convertToOpenMetricsFormat(snapshots)); + } - @Test - public void testIgnoredMetricNotScraped() { - MetricNameFilter filter = MetricNameFilter.builder() - .nameMustNotBeEqualTo("process_max_fds") - .build(); + @Test + public void testIgnoredMetricNotScraped() { + MetricNameFilter filter = + MetricNameFilter.builder().nameMustNotBeEqualTo("process_max_fds").build(); - PrometheusRegistry registry = new PrometheusRegistry(); - ProcessMetrics.builder() - .osBean(sunOsBean) - .runtimeBean(runtimeBean) - .grepper(linuxGrepper) - .register(registry); - registry.scrape(filter); + PrometheusRegistry registry = new PrometheusRegistry(); + ProcessMetrics.builder() + .osBean(sunOsBean) + .runtimeBean(runtimeBean) + .grepper(linuxGrepper) + .register(registry); + registry.scrape(filter); - verify(sunOsBean, times(0)).getMaxFileDescriptorCount(); - verify(sunOsBean, times(1)).getOpenFileDescriptorCount(); - } + verify(sunOsBean, times(0)).getMaxFileDescriptorCount(); + verify(sunOsBean, times(1)).getOpenFileDescriptorCount(); + } } diff --git a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/TestUtil.java 
b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/TestUtil.java index 2b0d7972c..a86517368 100644 --- a/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/TestUtil.java +++ b/prometheus-metrics-instrumentation-jvm/src/test/java/io/prometheus/metrics/instrumentation/jvm/TestUtil.java @@ -2,17 +2,16 @@ import io.prometheus.metrics.expositionformats.OpenMetricsTextFormatWriter; import io.prometheus.metrics.model.snapshots.MetricSnapshots; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; public class TestUtil { - static String convertToOpenMetricsFormat(MetricSnapshots snapshots) throws IOException { - ByteArrayOutputStream out = new ByteArrayOutputStream(); - OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); - writer.write(out, snapshots); - return out.toString(StandardCharsets.UTF_8.name()); - } + static String convertToOpenMetricsFormat(MetricSnapshots snapshots) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, true); + writer.write(out, snapshots); + return out.toString(StandardCharsets.UTF_8.name()); + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MetricNameFilter.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MetricNameFilter.java index c0c345a1b..59fbe6a13 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MetricNameFilter.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MetricNameFilter.java @@ -1,196 +1,197 @@ package io.prometheus.metrics.model.registry; +import static java.util.Collections.unmodifiableCollection; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.function.Predicate; -import static java.util.Collections.unmodifiableCollection; - -/** - * Filter samples (i.e. time series) by name. - */ +/** Filter samples (i.e. time series) by name. */ public class MetricNameFilter implements Predicate { + /** For convenience, a filter that allows all names. */ + public static final Predicate ALLOW_ALL = name -> true; + + private final Collection nameIsEqualTo; + private final Collection nameIsNotEqualTo; + private final Collection nameStartsWith; + private final Collection nameDoesNotStartWith; + + private MetricNameFilter( + Collection nameIsEqualTo, + Collection nameIsNotEqualTo, + Collection nameStartsWith, + Collection nameDoesNotStartWith) { + this.nameIsEqualTo = unmodifiableCollection(new ArrayList<>(nameIsEqualTo)); + this.nameIsNotEqualTo = unmodifiableCollection(new ArrayList<>(nameIsNotEqualTo)); + this.nameStartsWith = unmodifiableCollection(new ArrayList<>(nameStartsWith)); + this.nameDoesNotStartWith = unmodifiableCollection(new ArrayList<>(nameDoesNotStartWith)); + } + + @Override + public boolean test(String sampleName) { + return matchesNameEqualTo(sampleName) + && !matchesNameNotEqualTo(sampleName) + && matchesNameStartsWith(sampleName) + && !matchesNameDoesNotStartWith(sampleName); + } + + private boolean matchesNameEqualTo(String metricName) { + if (nameIsEqualTo.isEmpty()) { + return true; + } + for (String name : nameIsEqualTo) { + // The following ignores suffixes like _total. + // "request_count" and "request_count_total" both match a metric named "request_count". 
+ if (name.startsWith(metricName)) { + return true; + } + } + return false; + } + + private boolean matchesNameNotEqualTo(String metricName) { + if (nameIsNotEqualTo.isEmpty()) { + return false; + } + for (String name : nameIsNotEqualTo) { + // The following ignores suffixes like _total. + // "request_count" and "request_count_total" both match a metric named "request_count". + if (name.startsWith(metricName)) { + return true; + } + } + return false; + } + + private boolean matchesNameStartsWith(String metricName) { + if (nameStartsWith.isEmpty()) { + return true; + } + for (String prefix : nameStartsWith) { + if (metricName.startsWith(prefix)) { + return true; + } + } + return false; + } + + private boolean matchesNameDoesNotStartWith(String metricName) { + if (nameDoesNotStartWith.isEmpty()) { + return false; + } + for (String prefix : nameDoesNotStartWith) { + if (metricName.startsWith(prefix)) { + return true; + } + } + return false; + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private final Collection nameEqualTo = new ArrayList<>(); + private final Collection nameNotEqualTo = new ArrayList<>(); + private final Collection nameStartsWith = new ArrayList<>(); + private final Collection nameDoesNotStartWith = new ArrayList<>(); + + private Builder() {} + + /** + * @see #nameMustBeEqualTo(Collection) + */ + public Builder nameMustBeEqualTo(String... names) { + return nameMustBeEqualTo(Arrays.asList(names)); + } + + /** + * Only samples with one of the {@code names} will be included. + * + *

<p>Note that the provided {@code names} will be matched against the sample name (i.e. the + * time series name) and not the metric name. For instance, to retrieve all samples from a + * histogram, you must include the '_count', '_sum' and '_bucket' names. + * + *

<p>This method should be used by HTTP exporters to implement the {@code ?name[]=} URL + * parameters. + * + * @param names empty means no restriction. + */ + public Builder nameMustBeEqualTo(Collection<String> names) { + if (names != null) { + nameEqualTo.addAll(names); + } + return this; + } + + /** + * @see #nameMustNotBeEqualTo(Collection) + */ + public Builder nameMustNotBeEqualTo(String... names) { + return nameMustNotBeEqualTo(Arrays.asList(names)); + } + /** - * For convenience, a filter that allows all names. + * All samples that are not in {@code names} will be excluded. + * + *

Note that the provided {@code names} will be matched against the sample name (i.e. the + * time series name) and not the metric name. For instance, to exclude all samples from a + * histogram, you must exclude the '_count', '_sum' and '_bucket' names. + * + * @param names empty means no name will be excluded. */ - public static final Predicate ALLOW_ALL = name -> true; - - private final Collection nameIsEqualTo; - private final Collection nameIsNotEqualTo; - private final Collection nameStartsWith; - private final Collection nameDoesNotStartWith; - - private MetricNameFilter(Collection nameIsEqualTo, Collection nameIsNotEqualTo, Collection nameStartsWith, Collection nameDoesNotStartWith) { - this.nameIsEqualTo = unmodifiableCollection(new ArrayList<>(nameIsEqualTo)); - this.nameIsNotEqualTo = unmodifiableCollection(new ArrayList<>(nameIsNotEqualTo)); - this.nameStartsWith = unmodifiableCollection(new ArrayList<>(nameStartsWith)); - this.nameDoesNotStartWith = unmodifiableCollection(new ArrayList<>(nameDoesNotStartWith)); - } - - @Override - public boolean test(String sampleName) { - return matchesNameEqualTo(sampleName) - && !matchesNameNotEqualTo(sampleName) - && matchesNameStartsWith(sampleName) - && !matchesNameDoesNotStartWith(sampleName); - } - - private boolean matchesNameEqualTo(String metricName) { - if (nameIsEqualTo.isEmpty()) { - return true; - } - for (String name : nameIsEqualTo) { - // The following ignores suffixes like _total. - // "request_count" and "request_count_total" both match a metric named "request_count". - if (name.startsWith(metricName)) { - return true; - } - } - return false; - } - - private boolean matchesNameNotEqualTo(String metricName) { - if (nameIsNotEqualTo.isEmpty()) { - return false; - } - for (String name : nameIsNotEqualTo) { - // The following ignores suffixes like _total. - // "request_count" and "request_count_total" both match a metric named "request_count". - if (name.startsWith(metricName)) { - return true; - } - } - return false; - } - - private boolean matchesNameStartsWith(String metricName) { - if (nameStartsWith.isEmpty()) { - return true; - } - for (String prefix : nameStartsWith) { - if (metricName.startsWith(prefix)) { - return true; - } - } - return false; - } - - private boolean matchesNameDoesNotStartWith(String metricName) { - if (nameDoesNotStartWith.isEmpty()) { - return false; - } - for (String prefix : nameDoesNotStartWith) { - if (metricName.startsWith(prefix)) { - return true; - } - } - return false; - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private final Collection nameEqualTo = new ArrayList<>(); - private final Collection nameNotEqualTo = new ArrayList<>(); - private final Collection nameStartsWith = new ArrayList<>(); - private final Collection nameDoesNotStartWith = new ArrayList<>(); - - private Builder() { - } - - /** - * @see #nameMustBeEqualTo(Collection) - */ - public Builder nameMustBeEqualTo(String... names) { - return nameMustBeEqualTo(Arrays.asList(names)); - } - - /** - * Only samples with one of the {@code names} will be included. - *

- * Note that the provided {@code names} will be matched against the sample name (i.e. the time series name) - * and not the metric name. For instance, to retrieve all samples from a histogram, you must include the - * '_count', '_sum' and '_bucket' names. - *

- * This method should be used by HTTP exporters to implement the {@code ?name[]=} URL parameters. - * - * @param names empty means no restriction. - */ - public Builder nameMustBeEqualTo(Collection names) { - if (names != null) { - nameEqualTo.addAll(names); - } - return this; - } - - /** - * @see #nameMustNotBeEqualTo(Collection) - */ - public Builder nameMustNotBeEqualTo(String... names) { - return nameMustNotBeEqualTo(Arrays.asList(names)); - } - - /** - * All samples that are not in {@code names} will be excluded. - *

- * Note that the provided {@code names} will be matched against the sample name (i.e. the time series name) - * and not the metric name. For instance, to exclude all samples from a histogram, you must exclude the - * '_count', '_sum' and '_bucket' names. - * - * @param names empty means no name will be excluded. - */ - public Builder nameMustNotBeEqualTo(Collection names) { - if (names != null) { - nameNotEqualTo.addAll(names); - } - return this; - } - - /** - * @see #nameMustStartWith(Collection) - */ - public Builder nameMustStartWith(String... prefixes) { - return nameMustStartWith(Arrays.asList(prefixes)); - } - - /** - * Only samples whose name starts with one of the {@code prefixes} will be included. - * - * @param prefixes empty means no restriction. - */ - public Builder nameMustStartWith(Collection prefixes) { - if (prefixes != null) { - nameStartsWith.addAll(prefixes); - } - return this; - } - - /** - * @see #nameMustNotStartWith(Collection) - */ - public Builder nameMustNotStartWith(String... prefixes) { - return nameMustNotStartWith(Arrays.asList(prefixes)); - } - - /** - * Samples with names starting with one of the {@code prefixes} will be excluded. - * - * @param prefixes empty means no time series will be excluded. - */ - public Builder nameMustNotStartWith(Collection prefixes) { - if (prefixes != null) { - nameDoesNotStartWith.addAll(prefixes); - } - return this; - } - - public MetricNameFilter build() { - return new MetricNameFilter(nameEqualTo, nameNotEqualTo, nameStartsWith, nameDoesNotStartWith); - } + public Builder nameMustNotBeEqualTo(Collection names) { + if (names != null) { + nameNotEqualTo.addAll(names); + } + return this; + } + + /** + * @see #nameMustStartWith(Collection) + */ + public Builder nameMustStartWith(String... prefixes) { + return nameMustStartWith(Arrays.asList(prefixes)); + } + + /** + * Only samples whose name starts with one of the {@code prefixes} will be included. + * + * @param prefixes empty means no restriction. + */ + public Builder nameMustStartWith(Collection prefixes) { + if (prefixes != null) { + nameStartsWith.addAll(prefixes); + } + return this; + } + + /** + * @see #nameMustNotStartWith(Collection) + */ + public Builder nameMustNotStartWith(String... prefixes) { + return nameMustNotStartWith(Arrays.asList(prefixes)); + } + + /** + * Samples with names starting with one of the {@code prefixes} will be excluded. + * + * @param prefixes empty means no time series will be excluded. + */ + public Builder nameMustNotStartWith(Collection prefixes) { + if (prefixes != null) { + nameDoesNotStartWith.addAll(prefixes); + } + return this; + } + + public MetricNameFilter build() { + return new MetricNameFilter( + nameEqualTo, nameNotEqualTo, nameStartsWith, nameDoesNotStartWith); } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MultiCollector.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MultiCollector.java index 5434c0ec0..a4f52746e 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MultiCollector.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/MultiCollector.java @@ -2,70 +2,71 @@ import io.prometheus.metrics.model.snapshots.MetricSnapshot; import io.prometheus.metrics.model.snapshots.MetricSnapshots; - import java.util.Collections; import java.util.List; import java.util.function.Predicate; -/** - * Like {@link Collector}, but collecting multiple Snapshots at once. 
- */ +/** Like {@link Collector}, but collecting multiple Snapshots at once. */ @FunctionalInterface public interface MultiCollector { - /** - * Called when the Prometheus server scrapes metrics. - */ - MetricSnapshots collect(); + /** Called when the Prometheus server scrapes metrics. */ + MetricSnapshots collect(); - /** - * Provides Collector with the details of the request issued by Prometheus to allow multi-target pattern implementation - * Override to implement request dependent logic to provide MetricSnapshot - */ - default MetricSnapshots collect(PrometheusScrapeRequest scrapeRequest) { - return collect(); - } - - - /** - * Like {@link #collect()}, but returns only the snapshots where {@code includedNames.test(name)} is {@code true}. - *

- * Override this if there is a more efficient way than first collecting all snapshot and then discarding the excluded ones. - */ - default MetricSnapshots collect(Predicate<String> includedNames) { - return collect(includedNames, null); - } + /** + * Provides Collector with the details of the request issued by Prometheus to allow multi-target + * pattern implementation. Override to implement request dependent logic to provide MetricSnapshot. + */ + default MetricSnapshots collect(PrometheusScrapeRequest scrapeRequest) { + return collect(); + } - /** - * Like {@link #collect(Predicate)}, but with support for multi-target pattern. - *

- * Override this if there is a more efficient way than first collecting the snapshot and then discarding it. - */ - default MetricSnapshots collect(Predicate includedNames, PrometheusScrapeRequest scrapeRequest) { - MetricSnapshots allSnapshots = scrapeRequest == null ? collect(): collect(scrapeRequest); - MetricSnapshots.Builder result = MetricSnapshots.builder(); - for (MetricSnapshot snapshot : allSnapshots) { - if (includedNames.test(snapshot.getMetadata().getPrometheusName())) { - result.metricSnapshot(snapshot); - } - } - return result.build(); - } + /** + * Like {@link #collect()}, but returns only the snapshots where {@code includedNames.test(name)} + * is {@code true}. + * + *

<p>Override this if there is a more efficient way than first collecting all snapshots and then + * discarding the excluded ones. + */ + default MetricSnapshots collect(Predicate<String> includedNames) { + return collect(includedNames, null); + } - - /** - * This is called in two places: - *

- * <ol>
- * <li>During registration to check if a metric with that name already exists.</li>
- * <li>During scrape to check if the collector can be skipped because a name filter is present and all names are excluded.</li>
- * </ol>
- * Returning an empty list means checks are omitted (registration metric always succeeds), - * and the collector is always scraped (if a name filter is present and all names are excluded the result is dropped). - *

- * If your collector returns a constant list of metrics that have names that do not change at runtime - * it is a good idea to overwrite this and return the names. - */ - default List getPrometheusNames() { - return Collections.emptyList(); + /** + * Like {@link #collect(Predicate)}, but with support for multi-target pattern. + * + *

<p>Override this if there is a more efficient way than first collecting the snapshot and then + * discarding it. + */ + default MetricSnapshots collect( + Predicate<String> includedNames, PrometheusScrapeRequest scrapeRequest) { + MetricSnapshots allSnapshots = scrapeRequest == null ? collect() : collect(scrapeRequest); + MetricSnapshots.Builder result = MetricSnapshots.builder(); + for (MetricSnapshot snapshot : allSnapshots) { + if (includedNames.test(snapshot.getMetadata().getPrometheusName())) { + result.metricSnapshot(snapshot); + } } + return result.build(); + } + + /** + * This is called in two places: + * + *

+ * <ol>
+ * <li>During registration to check if a metric with that name already exists.
+ * <li>During scrape to check if the collector can be skipped because a name filter is present
+ * and all names are excluded.
+ * </ol>
+ * + * Returning an empty list means checks are omitted (registering the metric always succeeds), and the + * collector is always scraped (if a name filter is present and all names are excluded the result + * is dropped). + * + *

If your collector returns a constant list of metrics that have names that do not change at + * runtime it is a good idea to overwrite this and return the names. + */ + default List getPrometheusNames() { + return Collections.emptyList(); + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/PrometheusScrapeRequest.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/PrometheusScrapeRequest.java index e8651292e..b1789c3bd 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/PrometheusScrapeRequest.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/registry/PrometheusScrapeRequest.java @@ -1,17 +1,11 @@ package io.prometheus.metrics.model.registry; -/** - * Infos extracted from the request received by the endpoint - */ +/** Infos extracted from the request received by the endpoint */ public interface PrometheusScrapeRequest { - /** - * Absolute path of the HTTP request. - */ - String getRequestPath(); + /** Absolute path of the HTTP request. */ + String getRequestPath(); - /** - * See {@code jakarta.servlet.ServletRequest.getParameterValues(String name)} - */ - String[] getParameterValues(String name); + /** See {@code jakarta.servlet.ServletRequest.getParameterValues(String name)} */ + String[] getParameterValues(String name); } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucket.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucket.java index 6ca2f90ec..ebadd1f16 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucket.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucket.java @@ -1,42 +1,44 @@ package io.prometheus.metrics.model.snapshots; /** - * Helper class for iterating over {@link ClassicHistogramBuckets}. - * Note that the {@code count} is not cumulative. + * Helper class for iterating over {@link ClassicHistogramBuckets}. Note that the {@code count} is + * not cumulative. */ public class ClassicHistogramBucket implements Comparable { - private final long count; // not cumulative - private final double upperBound; + private final long count; // not cumulative + private final double upperBound; - public ClassicHistogramBucket(double upperBound, long count) { - this.count = count; - this.upperBound = upperBound; - if (Double.isNaN(upperBound)) { - throw new IllegalArgumentException("Cannot use NaN as an upper bound for a histogram bucket"); - } - if (count < 0) { - throw new IllegalArgumentException(count + ": " + ClassicHistogramBuckets.class.getSimpleName() + " cannot have a negative count"); - } + public ClassicHistogramBucket(double upperBound, long count) { + this.count = count; + this.upperBound = upperBound; + if (Double.isNaN(upperBound)) { + throw new IllegalArgumentException("Cannot use NaN as an upper bound for a histogram bucket"); } - - public long getCount() { - return count; + if (count < 0) { + throw new IllegalArgumentException( + count + + ": " + + ClassicHistogramBuckets.class.getSimpleName() + + " cannot have a negative count"); } + } - public double getUpperBound() { - return upperBound; - } + public long getCount() { + return count; + } + + public double getUpperBound() { + return upperBound; + } - /** - * For sorting a list of buckets by upper bound. 
- */ - @Override - public int compareTo(ClassicHistogramBucket other) { - int result = Double.compare(upperBound, other.upperBound); - if (result != 0) { - return result; - } - return Long.compare(count, other.count); + /** For sorting a list of buckets by upper bound. */ + @Override + public int compareTo(ClassicHistogramBucket other) { + int result = Double.compare(upperBound, other.upperBound); + if (result != 0) { + return result; } + return Long.compare(count, other.count); + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBuckets.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBuckets.java index 4d7c6d279..6b5d4dea6 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBuckets.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBuckets.java @@ -8,216 +8,220 @@ import java.util.stream.Stream; /** - * Immutable container for histogram buckets with fixed bucket boundaries. - * Note that the counts are not cumulative. + * Immutable container for histogram buckets with fixed bucket boundaries. Note that the counts are + * not cumulative. */ public class ClassicHistogramBuckets implements Iterable { - /** - * Used in native histograms to indicate that no classic histogram buckets are present. - */ - public static final ClassicHistogramBuckets EMPTY = new ClassicHistogramBuckets(new double[]{}, new long[]{}); - - private final double[] upperBounds; - private final long[] counts; // not cumulative - - private ClassicHistogramBuckets(double[] upperBounds, long[] counts) { - this.upperBounds = upperBounds; - this.counts = counts; - } - - /** - * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code of(...)} methods, - * or use {@link ClassicHistogramBuckets#builder()}. - *

- * This method will create a copy of upperBounds and counts. - * - * @param upperBounds must have the same length as counts. Must not contain duplicates. - * Must contain at least {@link Double#POSITIVE_INFINITY} for the {@code +Inf} bucket. - * An upper bound must not be {@link Double#NaN}. - * The upperBounds array does not need to be sorted. - * @param counts must have the same length as {@code upperBounds}. - * The entry at index {@code i} is the count for the {@code upperBound} at index {@code i}. - * For each count, {@link Number#longValue()} is called to get the value. - * Counts are not cumulative. Counts must not be negative. - */ - public static ClassicHistogramBuckets of(List upperBounds, List counts) { - double[] upperBoundsCopy = new double[upperBounds.size()]; - for (int i = 0; i < upperBounds.size(); i++) { - upperBoundsCopy[i] = upperBounds.get(i); + /** Used in native histograms to indicate that no classic histogram buckets are present. */ + public static final ClassicHistogramBuckets EMPTY = + new ClassicHistogramBuckets(new double[] {}, new long[] {}); + + private final double[] upperBounds; + private final long[] counts; // not cumulative + + private ClassicHistogramBuckets(double[] upperBounds, long[] counts) { + this.upperBounds = upperBounds; + this.counts = counts; + } + + /** + * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code + * of(...)} methods, or use {@link ClassicHistogramBuckets#builder()}. + * + *

This method will create a copy of upperBounds and counts. + * + * @param upperBounds must have the same length as counts. Must not contain duplicates. Must + * contain at least {@link Double#POSITIVE_INFINITY} for the {@code +Inf} bucket. An upper + * bound must not be {@link Double#NaN}. The upperBounds array does not need to be sorted. + * @param counts must have the same length as {@code upperBounds}. The entry at index {@code i} is + * the count for the {@code upperBound} at index {@code i}. For each count, {@link + * Number#longValue()} is called to get the value. Counts are not cumulative. Counts + * must not be negative. + */ + public static ClassicHistogramBuckets of( + List upperBounds, List counts) { + double[] upperBoundsCopy = new double[upperBounds.size()]; + for (int i = 0; i < upperBounds.size(); i++) { + upperBoundsCopy[i] = upperBounds.get(i); + } + long[] countsCopy = new long[counts.size()]; + for (int i = 0; i < counts.size(); i++) { + countsCopy[i] = counts.get(i).longValue(); + } + sortAndValidate(upperBoundsCopy, countsCopy); + return new ClassicHistogramBuckets(upperBoundsCopy, countsCopy); + } + + /** + * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code + * of(...)} methods, or use {@link ClassicHistogramBuckets#builder()}. + * + *

This method will create a copy of upperBounds and counts. + * + * @param upperBounds must have the same length as counts. Must not contain duplicates. Must + * contain at least {@link Double#POSITIVE_INFINITY} for the {@code +Inf} bucket. An upper + * bound must not be {@link Double#NaN}. The upperBounds array does not need to be sorted. + * @param counts must have the same length as {@code upperBounds}. The entry at index {@code i} is + * the count for the {@code upperBound} at index {@code i}. For each count, {@link + * Number#longValue()} is called to get the value. Counts are not cumulative. Counts + * must not be negative. + */ + public static ClassicHistogramBuckets of(double[] upperBounds, Number[] counts) { + double[] upperBoundsCopy = Arrays.copyOf(upperBounds, upperBounds.length); + long[] countsCopy = new long[counts.length]; + for (int i = 0; i < counts.length; i++) { + countsCopy[i] = counts[i].longValue(); + } + sortAndValidate(upperBoundsCopy, countsCopy); + return new ClassicHistogramBuckets(upperBoundsCopy, countsCopy); + } + + /** + * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code + * of(...)} methods, or use {@link ClassicHistogramBuckets#builder()}. + * + *

This method will create a copy of upperBounds and counts. + * + * @param upperBounds must have the same length as counts. Must not contain duplicates. Must + * contain at least {@link Double#POSITIVE_INFINITY} for the {@code +Inf} bucket. An upper + * bound must not be {@link Double#NaN}. The upperBounds array does not need to be sorted. + * @param counts must have the same length as {@code upperBounds}. The entry at index {@code i} is + * the count for the {@code upperBound} at index {@code i}. Counts are not cumulative. + * Counts must not be negative. + */ + public static ClassicHistogramBuckets of(double[] upperBounds, long[] counts) { + double[] upperBoundsCopy = Arrays.copyOf(upperBounds, upperBounds.length); + long[] countsCopy = Arrays.copyOf(counts, counts.length); + sortAndValidate(upperBoundsCopy, countsCopy); + return new ClassicHistogramBuckets(upperBoundsCopy, countsCopy); + } + + private static void sortAndValidate(double[] upperBounds, long[] counts) { + if (upperBounds.length != counts.length) { + throw new IllegalArgumentException( + "upperBounds.length == " + + upperBounds.length + + " but counts.length == " + + counts.length + + ". Expected the same length."); + } + sort(upperBounds, counts); + validate(upperBounds, counts); + } + + private static void sort(double[] upperBounds, long[] counts) { + // Bubblesort. Should be efficient here as in most cases upperBounds is already sorted. + int n = upperBounds.length; + for (int i = 0; i < n - 1; i++) { + for (int j = 0; j < n - i - 1; j++) { + if (upperBounds[j] > upperBounds[j + 1]) { + swap(j, j + 1, upperBounds, counts); } - long[] countsCopy = new long[counts.size()]; - for (int i = 0; i < counts.size(); i++) { - countsCopy[i] = counts.get(i).longValue(); + } + } + } + + private static void swap(int i, int j, double[] upperBounds, long[] counts) { + double tmpDouble = upperBounds[j]; + upperBounds[j] = upperBounds[i]; + upperBounds[i] = tmpDouble; + long tmpLong = counts[j]; + counts[j] = counts[i]; + counts[i] = tmpLong; + } + + private static void validate(double[] upperBounds, long[] counts) { + // Preconditions: + // * upperBounds sorted + // * upperBounds and counts have the same length + if (upperBounds.length == 0) { + throw new IllegalArgumentException( + ClassicHistogramBuckets.class.getSimpleName() + + " cannot be empty. They must contain at least the +Inf bucket."); + } + if (upperBounds[upperBounds.length - 1] != Double.POSITIVE_INFINITY) { + throw new IllegalArgumentException( + ClassicHistogramBuckets.class.getSimpleName() + " must contain the +Inf bucket."); + } + for (int i = 0; i < upperBounds.length; i++) { + if (Double.isNaN(upperBounds[i])) { + throw new IllegalArgumentException( + "Cannot use NaN as an upper bound in " + ClassicHistogramBuckets.class.getSimpleName()); + } + if (counts[i] < 0) { + throw new IllegalArgumentException( + "Counts in " + ClassicHistogramBuckets.class.getSimpleName() + " cannot be negative."); + } + if (i > 0) { + if (upperBounds[i - 1] == upperBounds[i]) { + throw new IllegalArgumentException("Duplicate upper bound " + upperBounds[i]); } - sortAndValidate(upperBoundsCopy, countsCopy); - return new ClassicHistogramBuckets(upperBoundsCopy, countsCopy); + } } + } - /** - * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code of(...)} methods, - * or use {@link ClassicHistogramBuckets#builder()}. - *

- * This method will create a copy of upperBounds and counts. - * - * @param upperBounds must have the same length as counts. Must not contain duplicates. - * Must contain at least {@link Double#POSITIVE_INFINITY} for the {@code +Inf} bucket. - * An upper bound must not be {@link Double#NaN}. - * The upperBounds array does not need to be sorted. - * @param counts must have the same length as {@code upperBounds}. - * The entry at index {@code i} is the count for the {@code upperBound} at index {@code i}. - * For each count, {@link Number#longValue()} is called to get the value. - * Counts are not cumulative. Counts must not be negative. - */ - public static ClassicHistogramBuckets of(double[] upperBounds, Number[] counts) { - double[] upperBoundsCopy = Arrays.copyOf(upperBounds, upperBounds.length); - long[] countsCopy = new long[counts.length]; - for (int i = 0; i < counts.length; i++) { - countsCopy[i] = counts[i].longValue(); - } - sortAndValidate(upperBoundsCopy, countsCopy); - return new ClassicHistogramBuckets(upperBoundsCopy, countsCopy); - } + public int size() { + return upperBounds.length; + } - /** - * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code of(...)} methods, - * or use {@link ClassicHistogramBuckets#builder()}. - *

- * This method will create a copy of upperBounds and counts. - * - * @param upperBounds must have the same length as counts. Must not contain duplicates. - * Must contain at least {@link Double#POSITIVE_INFINITY} for the {@code +Inf} bucket. - * An upper bound must not be {@link Double#NaN}. - * The upperBounds array does not need to be sorted. - * @param counts must have the same length as {@code upperBounds}. - * The entry at index {@code i} is the count for the {@code upperBound} at index {@code i}. - * Counts are not cumulative. Counts must not be negative. - */ - public static ClassicHistogramBuckets of(double[] upperBounds, long[] counts) { - double[] upperBoundsCopy = Arrays.copyOf(upperBounds, upperBounds.length); - long[] countsCopy = Arrays.copyOf(counts, counts.length); - sortAndValidate(upperBoundsCopy, countsCopy); - return new ClassicHistogramBuckets(upperBoundsCopy, countsCopy); - } + public double getUpperBound(int i) { + return upperBounds[i]; + } - private static void sortAndValidate(double[] upperBounds, long[] counts) { - if (upperBounds.length != counts.length) { - throw new IllegalArgumentException("upperBounds.length == " + upperBounds.length + " but counts.length == " + counts.length + ". Expected the same length."); - } - sort(upperBounds, counts); - validate(upperBounds, counts); - } + /** The count is not cumulative. */ + public long getCount(int i) { + return counts[i]; + } - private static void sort(double[] upperBounds, long[] counts) { - // Bubblesort. Should be efficient here as in most cases upperBounds is already sorted. - int n = upperBounds.length; - for (int i = 0; i < n - 1; i++) { - for (int j = 0; j < n - i - 1; j++) { - if (upperBounds[j] > upperBounds[j + 1]) { - swap(j, j + 1, upperBounds, counts); - } - } - } - } + public boolean isEmpty() { + return this.upperBounds.length == 0; + } - private static void swap(int i, int j, double[] upperBounds, long[] counts) { - double tmpDouble = upperBounds[j]; - upperBounds[j] = upperBounds[i]; - upperBounds[i] = tmpDouble; - long tmpLong = counts[j]; - counts[j] = counts[i]; - counts[i] = tmpLong; + private List asList() { + List result = new ArrayList<>(size()); + for (int i = 0; i < upperBounds.length; i++) { + result.add(new ClassicHistogramBucket(upperBounds[i], counts[i])); } + return Collections.unmodifiableList(result); + } - private static void validate(double[] upperBounds, long[] counts) { - // Preconditions: - // * upperBounds sorted - // * upperBounds and counts have the same length - if (upperBounds.length == 0) { - throw new IllegalArgumentException(ClassicHistogramBuckets.class.getSimpleName() + " cannot be empty. 
They must contain at least the +Inf bucket."); - } - if (upperBounds[upperBounds.length - 1] != Double.POSITIVE_INFINITY) { - throw new IllegalArgumentException(ClassicHistogramBuckets.class.getSimpleName() + " must contain the +Inf bucket."); - } - for (int i = 0; i < upperBounds.length; i++) { - if (Double.isNaN(upperBounds[i])) { - throw new IllegalArgumentException("Cannot use NaN as an upper bound in " + ClassicHistogramBuckets.class.getSimpleName()); - } - if (counts[i] < 0) { - throw new IllegalArgumentException("Counts in " + ClassicHistogramBuckets.class.getSimpleName() + " cannot be negative."); - } - if (i > 0) { - if (upperBounds[i - 1] == upperBounds[i]) { - throw new IllegalArgumentException("Duplicate upper bound " + upperBounds[i]); - } - } - } - } - - public int size() { - return upperBounds.length; - } - - public double getUpperBound(int i) { - return upperBounds[i]; - } + @Override + public Iterator iterator() { + return asList().iterator(); + } - /** - * The count is not cumulative. - */ - public long getCount(int i) { - return counts[i]; - } + public Stream stream() { + return asList().stream(); + } - public boolean isEmpty() { - return this.upperBounds.length == 0; - } + /** + * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code + * of(...)} methods, or use {@code builder()}. + */ + public static Builder builder() { + return new Builder(); + } - private List asList() { - List result = new ArrayList<>(size()); - for (int i = 0; i < upperBounds.length; i++) { - result.add(new ClassicHistogramBucket(upperBounds[i], counts[i])); - } - return Collections.unmodifiableList(result); - } + public static class Builder { + private final List upperBounds = new ArrayList<>(); + private final List counts = new ArrayList<>(); - @Override - public Iterator iterator() { - return asList().iterator(); - } + private Builder() {} - public Stream stream() { - return asList().stream(); + /** Must be called at least once for the {@link Double#POSITIVE_INFINITY} bucket. */ + public Builder bucket(double upperBound, long count) { + upperBounds.add(upperBound); + counts.add(count); + return this; } /** - * To create new {@link ClassicHistogramBuckets}, you can either use one of the static {@code of(...)} methods, - * or use {@code builder()}. + * Will throw an {@link IllegalArgumentException} if the {@link Double#POSITIVE_INFINITY} bucket + * is missing. */ - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private final List upperBounds = new ArrayList<>(); - private final List counts = new ArrayList<>(); - - private Builder() {} - - /** - * Must be called at least once for the {@link Double#POSITIVE_INFINITY} bucket. - */ - public Builder bucket(double upperBound, long count) { - upperBounds.add(upperBound); - counts.add(count); - return this; - } - - /** - * Will throw an {@link IllegalArgumentException} if the {@link Double#POSITIVE_INFINITY} bucket is missing. 
- */ - public ClassicHistogramBuckets build() { - return ClassicHistogramBuckets.of(upperBounds, counts); - } + public ClassicHistogramBuckets build() { + return ClassicHistogramBuckets.of(upperBounds, counts); } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/CounterSnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/CounterSnapshot.java index 4aceea6bd..fa807af19 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/CounterSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/CounterSnapshot.java @@ -4,149 +4,146 @@ import java.util.Collection; import java.util.List; -/** - * Immutable snapshot of a Counter. - */ +/** Immutable snapshot of a Counter. */ public class CounterSnapshot extends MetricSnapshot { + /** + * To create a new {@link CounterSnapshot}, you can either call the constructor directly or use + * the builder with {@link CounterSnapshot#builder()}. + * + * @param metadata the metric name in metadata must not include the {@code _total} suffix. See + * {@link MetricMetadata} for more naming conventions. + * @param dataPoints the constructor will create a sorted copy of the collection. + */ + public CounterSnapshot(MetricMetadata metadata, Collection dataPoints) { + super(metadata, dataPoints); + } + + @Override + public List getDataPoints() { + return (List) dataPoints; + } + + public static class CounterDataPointSnapshot extends DataPointSnapshot { + + private final double value; + private final Exemplar exemplar; // may be null + /** - * To create a new {@link CounterSnapshot}, you can either call the constructor directly or use - * the builder with {@link CounterSnapshot#builder()}. + * To create a new {@link CounterDataPointSnapshot}, you can either call the constructor + * directly or use the Builder with {@link CounterDataPointSnapshot#builder()}. * - * @param metadata the metric name in metadata must not include the {@code _total} suffix. - * See {@link MetricMetadata} for more naming conventions. - * @param dataPoints the constructor will create a sorted copy of the collection. + * @param value the counter value. Must not be negative. + * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. + * @param exemplar may be null. + * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the + * time series (this specific set of labels) was created (or reset to zero). It's optional. + * Use {@code 0L} if there is no created timestamp. */ - public CounterSnapshot(MetricMetadata metadata, Collection dataPoints) { - super(metadata, dataPoints); + public CounterDataPointSnapshot( + double value, Labels labels, Exemplar exemplar, long createdTimestampMillis) { + this(value, labels, exemplar, createdTimestampMillis, 0); } - @Override - public List getDataPoints() { - return (List) dataPoints; + /** + * Constructor with an additional scrape timestamp. This is only useful in rare cases as the + * scrape timestamp is usually set by the Prometheus server during scraping. Exceptions include + * mirroring metrics with given timestamps from other metric sources. 
+ */ + public CounterDataPointSnapshot( + double value, + Labels labels, + Exemplar exemplar, + long createdTimestampMillis, + long scrapeTimestampMillis) { + super(labels, createdTimestampMillis, scrapeTimestampMillis); + this.value = value; + this.exemplar = exemplar; + validate(); } - public static class CounterDataPointSnapshot extends DataPointSnapshot { - - private final double value; - private final Exemplar exemplar; // may be null - - /** - * To create a new {@link CounterDataPointSnapshot}, you can either call the constructor directly or use the - * Builder with {@link CounterDataPointSnapshot#builder()}. - * - * @param value the counter value. Must not be negative. - * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. - * @param exemplar may be null. - * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the time series - * (this specific set of labels) was created (or reset to zero). - * It's optional. Use {@code 0L} if there is no created timestamp. - */ - public CounterDataPointSnapshot(double value, Labels labels, Exemplar exemplar, long createdTimestampMillis) { - this(value, labels, exemplar, createdTimestampMillis, 0); - } + public double getValue() { + return value; + } - /** - * Constructor with an additional scrape timestamp. - * This is only useful in rare cases as the scrape timestamp is usually set by the Prometheus server - * during scraping. Exceptions include mirroring metrics with given timestamps from other metric sources. - */ - public CounterDataPointSnapshot(double value, Labels labels, Exemplar exemplar, long createdTimestampMillis, long scrapeTimestampMillis) { - super(labels, createdTimestampMillis, scrapeTimestampMillis); - this.value = value; - this.exemplar = exemplar; - validate(); - } + /** May be {@code null}. */ + public Exemplar getExemplar() { + return exemplar; + } - public double getValue() { - return value; - } + protected void validate() { + if (value < 0.0) { + throw new IllegalArgumentException(value + ": counters cannot have a negative value"); + } + } - /** - * May be {@code null}. - */ - public Exemplar getExemplar() { - return exemplar; - } + public static Builder builder() { + return new Builder(); + } - protected void validate() { - if (value < 0.0) { - throw new IllegalArgumentException(value + ": counters cannot have a negative value"); - } - } + public static class Builder extends DataPointSnapshot.Builder { - public static Builder builder() { - return new Builder(); - } + private Exemplar exemplar = null; + private Double value = null; + private long createdTimestampMillis = 0L; + + private Builder() {} + + /** Counter value. This is required. The value must not be negative. */ + public Builder value(double value) { + this.value = value; + return this; + } - public static class Builder extends DataPointSnapshot.Builder { - - private Exemplar exemplar = null; - private Double value = null; - private long createdTimestampMillis = 0L; - - private Builder() { - } - - /** - * Counter value. This is required. The value must not be negative. 
- */ - public Builder value(double value) { - this.value = value; - return this; - } - - public Builder exemplar(Exemplar exemplar) { - this.exemplar = exemplar; - return this; - } - - public Builder createdTimestampMillis(long createdTimestampMillis) { - this.createdTimestampMillis = createdTimestampMillis; - return this; - } - - public CounterDataPointSnapshot build() { - if (value == null) { - throw new IllegalArgumentException("Missing required field: value is null."); - } - return new CounterDataPointSnapshot(value, labels, exemplar, createdTimestampMillis, scrapeTimestampMillis); - } - - @Override - protected Builder self() { - return this; - } + public Builder exemplar(Exemplar exemplar) { + this.exemplar = exemplar; + return this; + } + + public Builder createdTimestampMillis(long createdTimestampMillis) { + this.createdTimestampMillis = createdTimestampMillis; + return this; + } + + public CounterDataPointSnapshot build() { + if (value == null) { + throw new IllegalArgumentException("Missing required field: value is null."); } + return new CounterDataPointSnapshot( + value, labels, exemplar, createdTimestampMillis, scrapeTimestampMillis); + } + + @Override + protected Builder self() { + return this; + } } + } - public static Builder builder() { - return new Builder(); - } + public static Builder builder() { + return new Builder(); + } - public static class Builder extends MetricSnapshot.Builder { + public static class Builder extends MetricSnapshot.Builder { - private final List dataPoints = new ArrayList<>(); + private final List dataPoints = new ArrayList<>(); - private Builder() { - } + private Builder() {} - /** - * Add a data point. Can be called multiple times to add multiple data points. - */ - public Builder dataPoint(CounterDataPointSnapshot dataPoint) { - dataPoints.add(dataPoint); - return this; - } + /** Add a data point. Can be called multiple times to add multiple data points. 
*/ + public Builder dataPoint(CounterDataPointSnapshot dataPoint) { + dataPoints.add(dataPoint); + return this; + } - @Override - public CounterSnapshot build() { - return new CounterSnapshot(buildMetadata(), dataPoints); - } + @Override + public CounterSnapshot build() { + return new CounterSnapshot(buildMetadata(), dataPoints); + } - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DataPointSnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DataPointSnapshot.java index 7710754e2..47ad4486b 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DataPointSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DataPointSnapshot.java @@ -1,82 +1,85 @@ package io.prometheus.metrics.model.snapshots; public abstract class DataPointSnapshot { - private final Labels labels; - private final long createdTimestampMillis; - private final long scrapeTimestampMillis; + private final Labels labels; + private final long createdTimestampMillis; + private final long scrapeTimestampMillis; - protected DataPointSnapshot(Labels labels, long createdTimestampMillis, long scrapeTimestampMillis) { - this.labels = labels; - this.createdTimestampMillis = createdTimestampMillis; - this.scrapeTimestampMillis = scrapeTimestampMillis; - validate(); - } + protected DataPointSnapshot( + Labels labels, long createdTimestampMillis, long scrapeTimestampMillis) { + this.labels = labels; + this.createdTimestampMillis = createdTimestampMillis; + this.scrapeTimestampMillis = scrapeTimestampMillis; + validate(); + } - private void validate() { - if (labels == null) { - throw new IllegalArgumentException("Labels cannot be null. Use Labels.EMPTY if there are no labels."); - } - if (createdTimestampMillis < 0) { - throw new IllegalArgumentException("Created timestamp cannot be negative. Use 0 if the metric doesn't have a created timestamp."); - } - if (scrapeTimestampMillis < 0) { - throw new IllegalArgumentException("Scrape timestamp cannot be negative. Use 0 to indicate that the Prometheus server should set the scrape timestamp."); - } - if (hasCreatedTimestamp() && hasScrapeTimestamp()) { - if (scrapeTimestampMillis < createdTimestampMillis) { - throw new IllegalArgumentException("The scrape timestamp cannot be before the created timestamp"); - } - } + private void validate() { + if (labels == null) { + throw new IllegalArgumentException( + "Labels cannot be null. Use Labels.EMPTY if there are no labels."); } - - public Labels getLabels() { - return labels; + if (createdTimestampMillis < 0) { + throw new IllegalArgumentException( + "Created timestamp cannot be negative. Use 0 if the metric doesn't have a created timestamp."); } - - public boolean hasScrapeTimestamp() { - return scrapeTimestampMillis != 0L; + if (scrapeTimestampMillis < 0) { + throw new IllegalArgumentException( + "Scrape timestamp cannot be negative. Use 0 to indicate that the Prometheus server should set the scrape timestamp."); } - - /** - * This will only return a reasonable value if {@link #hasScrapeTimestamp()} is true. 
- */ - public long getScrapeTimestampMillis() { - return scrapeTimestampMillis; + if (hasCreatedTimestamp() && hasScrapeTimestamp()) { + if (scrapeTimestampMillis < createdTimestampMillis) { + throw new IllegalArgumentException( + "The scrape timestamp cannot be before the created timestamp"); + } } + } - public boolean hasCreatedTimestamp() { - return createdTimestampMillis != 0L; - } + public Labels getLabels() { + return labels; + } - /** - * This will only return a reasonable value if {@link #hasCreatedTimestamp()} is true. - * Some metrics like Gauge don't have created timestamps. For these metrics {@link #hasCreatedTimestamp()} - * is always false. - */ - public long getCreatedTimestampMillis() { - return createdTimestampMillis; - } + public boolean hasScrapeTimestamp() { + return scrapeTimestampMillis != 0L; + } + + /** This will only return a reasonable value if {@link #hasScrapeTimestamp()} is true. */ + public long getScrapeTimestampMillis() { + return scrapeTimestampMillis; + } + + public boolean hasCreatedTimestamp() { + return createdTimestampMillis != 0L; + } - public static abstract class Builder> { + /** + * This will only return a reasonable value if {@link #hasCreatedTimestamp()} is true. Some + * metrics like Gauge don't have created timestamps. For these metrics {@link + * #hasCreatedTimestamp()} is always false. + */ + public long getCreatedTimestampMillis() { + return createdTimestampMillis; + } - protected Labels labels = Labels.EMPTY; - protected long scrapeTimestampMillis = 0L; + public abstract static class Builder> { - public T labels(Labels labels) { - this.labels = labels; - return self(); - } + protected Labels labels = Labels.EMPTY; + protected long scrapeTimestampMillis = 0L; - /** - * In most cases you should not set a scrape timestamp, - * because the scrape timestamp is set by the Prometheus server during scraping. - * Exceptions include mirroring metrics with given timestamps from other metric sources. - */ - public T scrapeTimestampMillis(long scrapeTimestampMillis) { - this.scrapeTimestampMillis = scrapeTimestampMillis; - return self(); - } + public T labels(Labels labels) { + this.labels = labels; + return self(); + } - protected abstract T self(); + /** + * In most cases you should not set a scrape timestamp, because the scrape timestamp is set by + * the Prometheus server during scraping. Exceptions include mirroring metrics with given + * timestamps from other metric sources. + */ + public T scrapeTimestampMillis(long scrapeTimestampMillis) { + this.scrapeTimestampMillis = scrapeTimestampMillis; + return self(); } + + protected abstract T self(); + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DistributionDataPointSnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DistributionDataPointSnapshot.java index fe5f729c1..c8092237c 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DistributionDataPointSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DistributionDataPointSnapshot.java @@ -1,89 +1,86 @@ package io.prometheus.metrics.model.snapshots; /** - * Common base class for histogram and summary data. - * Histograms and Summaries represent distributions, like a latency distribution or a distribution - * of request sizes in Bytes. + * Common base class for histogram and summary data. 
Histograms and Summaries represent + * distributions, like a latency distribution or a distribution of request sizes in Bytes. */ public abstract class DistributionDataPointSnapshot extends DataPointSnapshot { - private final long count; // optional, negative value means no count. - private final double sum; // optional, Double.NaN means no sum. - private final Exemplars exemplars; // optional, Exemplars.EMPTY means no Exemplars. + private final long count; // optional, negative value means no count. + private final double sum; // optional, Double.NaN means no sum. + private final Exemplars exemplars; // optional, Exemplars.EMPTY means no Exemplars. - /** - * See JavaDoc of the child classes. - */ - protected DistributionDataPointSnapshot(long count, double sum, Exemplars exemplars, Labels labels, long createdTimestampMillis, long scrapeTimestampMillis) { - super(labels, createdTimestampMillis, scrapeTimestampMillis); - this.count = count; - this.sum = sum; - this.exemplars = exemplars == null ? Exemplars.EMPTY : exemplars; - validate(); - } + /** See JavaDoc of the child classes. */ + protected DistributionDataPointSnapshot( + long count, + double sum, + Exemplars exemplars, + Labels labels, + long createdTimestampMillis, + long scrapeTimestampMillis) { + super(labels, createdTimestampMillis, scrapeTimestampMillis); + this.count = count; + this.sum = sum; + this.exemplars = exemplars == null ? Exemplars.EMPTY : exemplars; + validate(); + } - private void validate() { - // If a histogram or summary observes negative values the sum could be negative. - // According to OpenMetrics sum should be omitted in that case, but we don't enforce this here. - } + private void validate() { + // If a histogram or summary observes negative values the sum could be negative. + // According to OpenMetrics sum should be omitted in that case, but we don't enforce this here. + } - public boolean hasCount() { - return count >= 0; - } + public boolean hasCount() { + return count >= 0; + } - public boolean hasSum() { - return !Double.isNaN(sum); - } + public boolean hasSum() { + return !Double.isNaN(sum); + } - /** - * This will return garbage if {@link #hasCount()} is {@code false}. - */ - public long getCount() { - return count; - } + /** This will return garbage if {@link #hasCount()} is {@code false}. */ + public long getCount() { + return count; + } - /** - * This will return garbage if {@link #hasSum()} is {@code false}. - */ - public double getSum() { - return sum; - } + /** This will return garbage if {@link #hasSum()} is {@code false}. */ + public double getSum() { + return sum; + } - /** - * May be {@link Exemplars#EMPTY}, but will never be {@code null}. - */ - public Exemplars getExemplars() { - return exemplars; - } + /** May be {@link Exemplars#EMPTY}, but will never be {@code null}. */ + public Exemplars getExemplars() { + return exemplars; + } - static abstract class Builder> extends DataPointSnapshot.Builder { + abstract static class Builder> extends DataPointSnapshot.Builder { - protected long count = -1; - protected double sum = Double.NaN; - protected long createdTimestampMillis = 0L; - protected Exemplars exemplars = Exemplars.EMPTY; + protected long count = -1; + protected double sum = Double.NaN; + protected long createdTimestampMillis = 0L; + protected Exemplars exemplars = Exemplars.EMPTY; - /** - * Count can be explicitly set on summaries (this is a public method for summary metrics), - * and it is set implicitly on histograms (derived from the bucket counts). 
- */ - protected T count(long count) { - this.count = count; - return self(); - } + /** + * Count can be explicitly set on summaries (this is a public method for summary metrics), and + * it is set implicitly on histograms (derived from the bucket counts). + */ + protected T count(long count) { + this.count = count; + return self(); + } - public T sum(double sum) { - this.sum = sum; - return self(); - } + public T sum(double sum) { + this.sum = sum; + return self(); + } - public T exemplars(Exemplars exemplars) { - this.exemplars = exemplars; - return self(); - } + public T exemplars(Exemplars exemplars) { + this.exemplars = exemplars; + return self(); + } - public T createdTimestampMillis(long createdTimestampMillis) { - this.createdTimestampMillis = createdTimestampMillis; - return self(); - } + public T createdTimestampMillis(long createdTimestampMillis) { + this.createdTimestampMillis = createdTimestampMillis; + return self(); } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DuplicateLabelsException.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DuplicateLabelsException.java index 4588b344d..d1f3f3414 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DuplicateLabelsException.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/DuplicateLabelsException.java @@ -1,25 +1,25 @@ package io.prometheus.metrics.model.snapshots; /** - * Thrown when a collector tries to create a {@link MetricSnapshot} - * where multiple data points have the same labels (same label names and label values). + * Thrown when a collector tries to create a {@link MetricSnapshot} where multiple data points have + * the same labels (same label names and label values). */ public class DuplicateLabelsException extends IllegalArgumentException { - private final MetricMetadata metadata; - private final Labels labels; + private final MetricMetadata metadata; + private final Labels labels; - public DuplicateLabelsException(MetricMetadata metadata, Labels labels) { - super("Duplicate labels for metric \"" + metadata.getName() + "\": " + labels); - this.metadata = metadata; - this.labels = labels; - } + public DuplicateLabelsException(MetricMetadata metadata, Labels labels) { + super("Duplicate labels for metric \"" + metadata.getName() + "\": " + labels); + this.metadata = metadata; + this.labels = labels; + } - public MetricMetadata getMetadata() { - return metadata; - } + public MetricMetadata getMetadata() { + return metadata; + } - public Labels getLabels() { - return labels; - } -} \ No newline at end of file + public Labels getLabels() { + return labels; + } +} diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Exemplar.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Exemplar.java index aa46bd229..e56d9e1c7 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Exemplar.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Exemplar.java @@ -1,129 +1,121 @@ package io.prometheus.metrics.model.snapshots; -/** - * Immutable representation of an Exemplar. - */ +/** Immutable representation of an Exemplar. */ public class Exemplar { - /** - * Label name for trace id. - */ - public static final String TRACE_ID = "trace_id"; - - /** - * Label name for span id. 
- */ - public static final String SPAN_ID = "span_id"; - - private final double value; - private final Labels labels; - private final long timestampMillis; - - /** - * To create a new {@link Exemplar}, you can either call the constructor directly - * or use the Builder with {@link Exemplar#builder()}. - * - * @param value the observed value. This is required. - * @param labels in most cases the labels will contain the {@link #TRACE_ID} and {@link #SPAN_ID}. - * Must not be {@code null}. Use {@link Labels#EMPTY} if no labels are present. - * @param timestampMillis timestamp when the value was observed. Optional. Use 0L if not available. - */ - public Exemplar(double value, Labels labels, long timestampMillis) { - if (labels == null) { - throw new NullPointerException("Labels cannot be null. Use Labels.EMPTY."); - } - this.value = value; - this.labels = labels; - this.timestampMillis = timestampMillis; + /** Label name for trace id. */ + public static final String TRACE_ID = "trace_id"; + + /** Label name for span id. */ + public static final String SPAN_ID = "span_id"; + + private final double value; + private final Labels labels; + private final long timestampMillis; + + /** + * To create a new {@link Exemplar}, you can either call the constructor directly or use the + * Builder with {@link Exemplar#builder()}. + * + * @param value the observed value. This is required. + * @param labels in most cases the labels will contain the {@link #TRACE_ID} and {@link #SPAN_ID}. + * Must not be {@code null}. Use {@link Labels#EMPTY} if no labels are present. + * @param timestampMillis timestamp when the value was observed. Optional. Use 0L if not + * available. + */ + public Exemplar(double value, Labels labels, long timestampMillis) { + if (labels == null) { + throw new NullPointerException("Labels cannot be null. Use Labels.EMPTY."); } - - public double getValue() { - return value; + this.value = value; + this.labels = labels; + this.timestampMillis = timestampMillis; + } + + public double getValue() { + return value; + } + + /** + * In most cases labels will contain {@link #TRACE_ID} and {@link #SPAN_ID}, but this is not + * required. May be {@link Labels#EMPTY}, but may not be {@code null}. + */ + public Labels getLabels() { + return labels; + } + + public boolean hasTimestamp() { + return timestampMillis != 0L; + } + + /** Will return garbage if {@link #hasTimestamp()} is {@code false}. */ + public long getTimestampMillis() { + return timestampMillis; + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private Double value = null; + private Labels labels = Labels.EMPTY; + private String traceId = null; + private String spanId = null; + private long timestampMillis = 0L; + + private Builder() {} + + public Builder value(double value) { + this.value = value; + return this; } - /** - * In most cases labels will contain {@link #TRACE_ID} and {@link #SPAN_ID}, but this is not required. - * May be {@link Labels#EMPTY}, but may not be {@code null}. - */ - public Labels getLabels() { - return labels; + public Builder traceId(String traceId) { + this.traceId = traceId; + return this; } - public boolean hasTimestamp() { - return timestampMillis != 0L; + public Builder spanId(String spanId) { + this.spanId = spanId; + return this; } - /** - * Will return garbage if {@link #hasTimestamp()} is {@code false}. 
- */ - public long getTimestampMillis() { - return timestampMillis; + public Builder labels(Labels labels) { + if (labels == null) { + throw new NullPointerException(); + } + this.labels = labels; + return this; } - public static Builder builder() { - return new Builder(); + public Builder timestampMillis(long timestampMillis) { + this.timestampMillis = timestampMillis; + return this; } - public static class Builder { - - private Double value = null; - private Labels labels = Labels.EMPTY; - private String traceId = null; - private String spanId = null; - private long timestampMillis = 0L; - - private Builder() { - } - - public Builder value(double value) { - this.value = value; - return this; - } - - public Builder traceId(String traceId) { - this.traceId = traceId; - return this; - } - - public Builder spanId(String spanId) { - this.spanId = spanId; - return this; - } - - public Builder labels(Labels labels) { - if (labels == null) { - throw new NullPointerException(); - } - this.labels = labels; - return this; - } - - public Builder timestampMillis(long timestampMillis) { - this.timestampMillis = timestampMillis; - return this; - } - - /** - * @throws IllegalStateException if {@link #value(double)} wasn't called. - */ - public Exemplar build() { - if (value == null) { - throw new IllegalStateException("cannot build an Exemplar without a value"); - } - Labels allLabels; - if (traceId != null && spanId != null) { - allLabels = Labels.of(TRACE_ID, traceId, SPAN_ID, spanId); - } else if (traceId != null) { - allLabels = Labels.of(TRACE_ID, traceId); - } else if (spanId != null) { - allLabels = Labels.of(SPAN_ID, spanId); - } else { - allLabels = Labels.EMPTY; - } - if (!labels.isEmpty()) { - allLabels = allLabels.merge(labels); - } - return new Exemplar(value, allLabels, timestampMillis); - } + /** + * @throws IllegalStateException if {@link #value(double)} wasn't called. + */ + public Exemplar build() { + if (value == null) { + throw new IllegalStateException("cannot build an Exemplar without a value"); + } + Labels allLabels; + if (traceId != null && spanId != null) { + allLabels = Labels.of(TRACE_ID, traceId, SPAN_ID, spanId); + } else if (traceId != null) { + allLabels = Labels.of(TRACE_ID, traceId); + } else if (spanId != null) { + allLabels = Labels.of(SPAN_ID, spanId); + } else { + allLabels = Labels.EMPTY; + } + if (!labels.isEmpty()) { + allLabels = allLabels.merge(labels); + } + return new Exemplar(value, allLabels, timestampMillis); } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/GaugeSnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/GaugeSnapshot.java index 2600214aa..c94faf8dd 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/GaugeSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/GaugeSnapshot.java @@ -4,135 +4,124 @@ import java.util.Collection; import java.util.List; -/** - * Immutable snapshot of a Gauge. - */ +/** Immutable snapshot of a Gauge. */ public final class GaugeSnapshot extends MetricSnapshot { + /** + * To create a new {@link GaugeSnapshot}, you can either call the constructor directly or use the + * builder with {@link GaugeSnapshot#builder()}. + * + * @param metadata see {@link MetricMetadata} for naming conventions. + * @param data the constructor will create a sorted copy of the collection. 
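+   *
+   * <p>A minimal sketch ({@code metadata} is assumed to be a {@link MetricMetadata} instance
+   * created elsewhere; the label name and value are illustrative only):
+   *
+   * <pre>{@code
+   * GaugeDataPointSnapshot dataPoint =
+   *     GaugeDataPointSnapshot.builder()
+   *         .value(0.7)
+   *         .labels(Labels.of("state", "active"))
+   *         .build();
+   * GaugeSnapshot snapshot = new GaugeSnapshot(metadata, Collections.singletonList(dataPoint));
+   * }</pre>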
+ */ + public GaugeSnapshot(MetricMetadata metadata, Collection data) { + super(metadata, data); + } + + @Override + public List getDataPoints() { + return (List) dataPoints; + } + + public static final class GaugeDataPointSnapshot extends DataPointSnapshot { + + private final double value; + private final Exemplar exemplar; // may be null + /** - * To create a new {@link GaugeSnapshot}, you can either call the constructor directly or use - * the builder with {@link GaugeSnapshot#builder()}. + * To create a new {@link GaugeDataPointSnapshot}, you can either call the constructor directly + * or use the Builder with {@link GaugeDataPointSnapshot#builder()}. * - * @param metadata see {@link MetricMetadata} for naming conventions. - * @param data the constructor will create a sorted copy of the collection. + * @param value the gauge value. + * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. + * @param exemplar may be null. */ - public GaugeSnapshot(MetricMetadata metadata, Collection data) { - super(metadata, data); + public GaugeDataPointSnapshot(double value, Labels labels, Exemplar exemplar) { + this(value, labels, exemplar, 0); } - @Override - public List getDataPoints() { - return (List) dataPoints; + /** + * Constructor with an additional scrape timestamp. This is only useful in rare cases as the + * scrape timestamp is usually set by the Prometheus server during scraping. Exceptions include + * mirroring metrics with given timestamps from other metric sources. + */ + public GaugeDataPointSnapshot( + double value, Labels labels, Exemplar exemplar, long scrapeTimestampMillis) { + super(labels, 0L, scrapeTimestampMillis); + this.value = value; + this.exemplar = exemplar; } - public static final class GaugeDataPointSnapshot extends DataPointSnapshot { - - private final double value; - private final Exemplar exemplar; // may be null - - /** - * To create a new {@link GaugeDataPointSnapshot}, you can either call the constructor directly or use the - * Builder with {@link GaugeDataPointSnapshot#builder()}. - * - * @param value the gauge value. - * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. - * @param exemplar may be null. - */ - public GaugeDataPointSnapshot(double value, Labels labels, Exemplar exemplar) { - this(value, labels, exemplar, 0); - } + public double getValue() { + return value; + } - /** - * Constructor with an additional scrape timestamp. - * This is only useful in rare cases as the scrape timestamp is usually set by the Prometheus server - * during scraping. Exceptions include mirroring metrics with given timestamps from other metric sources. - */ - public GaugeDataPointSnapshot(double value, Labels labels, Exemplar exemplar, long scrapeTimestampMillis) { - super(labels, 0L, scrapeTimestampMillis); - this.value = value; - this.exemplar = exemplar; - } + /** May be {@code null}. */ + public Exemplar getExemplar() { + return exemplar; + } - public double getValue() { - return value; - } + public static Builder builder() { + return new Builder(); + } - /** - * May be {@code null}. - */ - public Exemplar getExemplar() { - return exemplar; - } + public static class Builder extends DataPointSnapshot.Builder { - public static Builder builder() { - return new Builder(); - } + private Exemplar exemplar = null; + private Double value = null; + + private Builder() {} + + /** Gauge value. This is required. 
*/ + public Builder value(double value) { + this.value = value; + return this; + } + + /** Optional */ + public Builder exemplar(Exemplar exemplar) { + this.exemplar = exemplar; + return this; + } - public static class Builder extends DataPointSnapshot.Builder { - - private Exemplar exemplar = null; - private Double value = null; - - private Builder() { - } - - /** - * Gauge value. This is required. - */ - public Builder value(double value) { - this.value = value; - return this; - } - - /** - * Optional - */ - public Builder exemplar(Exemplar exemplar) { - this.exemplar = exemplar; - return this; - } - - public GaugeDataPointSnapshot build() { - if (value == null) { - throw new IllegalArgumentException("Missing required field: value is null."); - } - return new GaugeDataPointSnapshot(value, labels, exemplar, scrapeTimestampMillis); - } - - @Override - protected Builder self() { - return this; - } + public GaugeDataPointSnapshot build() { + if (value == null) { + throw new IllegalArgumentException("Missing required field: value is null."); } - } + return new GaugeDataPointSnapshot(value, labels, exemplar, scrapeTimestampMillis); + } - public static Builder builder() { - return new Builder(); + @Override + protected Builder self() { + return this; + } } + } - public static class Builder extends MetricSnapshot.Builder { + public static Builder builder() { + return new Builder(); + } - private final List dataPoints = new ArrayList<>(); + public static class Builder extends MetricSnapshot.Builder { - private Builder() { - } + private final List dataPoints = new ArrayList<>(); - /** - * Add a data point. This can be called multiple times to add multiple data points. - */ - public Builder dataPoint(GaugeDataPointSnapshot dataPoint) { - dataPoints.add(dataPoint); - return this; - } + private Builder() {} - @Override - public GaugeSnapshot build() { - return new GaugeSnapshot(buildMetadata(), dataPoints); - } + /** Add a data point. This can be called multiple times to add multiple data points. */ + public Builder dataPoint(GaugeDataPointSnapshot dataPoint) { + dataPoints.add(dataPoint); + return this; + } - @Override - protected Builder self() { - return this; - } + @Override + public GaugeSnapshot build() { + return new GaugeSnapshot(buildMetadata(), dataPoints); + } + + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/HistogramSnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/HistogramSnapshot.java index 0737f7368..5988cce3d 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/HistogramSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/HistogramSnapshot.java @@ -4,399 +4,487 @@ import java.util.Collection; import java.util.List; -/** - * Immutable snapshot of a Histogram. - */ +/** Immutable snapshot of a Histogram. */ public final class HistogramSnapshot extends MetricSnapshot { - private final boolean gaugeHistogram; - public static final int CLASSIC_HISTOGRAM = Integer.MIN_VALUE; + private final boolean gaugeHistogram; + public static final int CLASSIC_HISTOGRAM = Integer.MIN_VALUE; + + /** + * To create a new {@link HistogramSnapshot}, you can either call the constructor directly or use + * the builder with {@link HistogramSnapshot#builder()}. + * + * @param metadata see {@link MetricMetadata} for naming conventions. 
+ * @param data the constructor will create a sorted copy of the collection. + */ + public HistogramSnapshot(MetricMetadata metadata, Collection data) { + this(false, metadata, data); + } + + /** + * Use this with the first parameter {@code true} to create a snapshot of a Gauge Histogram. The + * data model for Gauge Histograms is the same as for regular histograms, except that bucket + * values are semantically gauges and not counters. See openmetrics.io for more info on Gauge Histograms. + */ + public HistogramSnapshot( + boolean isGaugeHistogram, + MetricMetadata metadata, + Collection data) { + super(metadata, data); + this.gaugeHistogram = isGaugeHistogram; + } + + public boolean isGaugeHistogram() { + return gaugeHistogram; + } + + @Override + public List getDataPoints() { + return (List) dataPoints; + } + + public static final class HistogramDataPointSnapshot extends DistributionDataPointSnapshot { + + // There are two types of histograms: Classic histograms and native histograms. + // Classic histograms have a fixed set of buckets. + // Native histograms have "infinitely many" buckets with exponentially growing boundaries. + // The OpenTelemetry terminology for native histogram is "exponential histogram". + // --- + // A histogram can be a classic histogram (indicated by nativeSchema == CLASSIC_HISTOGRAM), + // or a native histogram (indicated by classicBuckets == ClassicHistogramBuckets.EMPTY), + // or both. + // --- + // A histogram that is both classic and native is great for migrating from classic histograms + // to native histograms: Old Prometheus servers can still scrape the classic histogram, while + // new Prometheus servers can scrape the native histogram. + + private final ClassicHistogramBuckets + classicBuckets; // May be ClassicHistogramBuckets.EMPTY for native histograms. + private final int + nativeSchema; // Number in [-4, 8]. May be CLASSIC_HISTOGRAM for classic histograms. + private final long nativeZeroCount; // only used if nativeSchema != CLASSIC_HISTOGRAM + private final double nativeZeroThreshold; // only used if nativeSchema != CLASSIC_HISTOGRAM + private final NativeHistogramBuckets + nativeBucketsForPositiveValues; // only used if nativeSchema != CLASSIC_HISTOGRAM + private final NativeHistogramBuckets + nativeBucketsForNegativeValues; // only used if nativeSchema != CLASSIC_HISTOGRAM /** - * To create a new {@link HistogramSnapshot}, you can either call the constructor directly or use - * the builder with {@link HistogramSnapshot#builder()}. + * Constructor for classic histograms (as opposed to native histograms). * - * @param metadata see {@link MetricMetadata} for naming conventions. - * @param data the constructor will create a sorted copy of the collection. + *
To create a new {@link HistogramDataPointSnapshot}, you can either call the constructor + * directly or use the Builder with {@link HistogramSnapshot#builder()}. + * + * @param classicBuckets required. Must not be empty. Must at least contain the +Inf bucket. + * @param sum sum of all observed values. Optional, pass {@link Double#NaN} if not available. + * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. + * @param exemplars must not be null. Use {@link Exemplars#EMPTY} if there are no Exemplars. + * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the + * time series (this specific set of labels) was created (or reset to zero). It's optional. + * Use {@code 0L} if there is no created timestamp. */ - public HistogramSnapshot(MetricMetadata metadata, Collection data) { - this(false, metadata, data); + public HistogramDataPointSnapshot( + ClassicHistogramBuckets classicBuckets, + double sum, + Labels labels, + Exemplars exemplars, + long createdTimestampMillis) { + this( + classicBuckets, + CLASSIC_HISTOGRAM, + 0, + 0, + NativeHistogramBuckets.EMPTY, + NativeHistogramBuckets.EMPTY, + sum, + labels, + exemplars, + createdTimestampMillis, + 0L); } /** - * Use this with the first parameter {@code true} to create a snapshot of a Gauge Histogram. - * The data model for Gauge Histograms is the same as for regular histograms, except that bucket values - * are semantically gauges and not counters. - * See openmetrics.io for more info on Gauge Histograms. + * Constructor for native histograms (as opposed to classic histograms). + * + *
To create a new {@link HistogramDataPointSnapshot}, you can either call the constructor + * directly or use the Builder with {@link HistogramSnapshot#builder()}. + * + * @param nativeSchema number in [-4, 8]. See Prometheus + * client_model metrics.proto. + * @param nativeZeroCount number of observed zero values (zero is special because there is no + * histogram bucket for zero values). + * @param nativeZeroThreshold observations in [-zeroThreshold, +zeroThreshold] are treated as + * zero. This is to avoid creating a large number of buckets if observations fluctuate + * around zero. + * @param nativeBucketsForPositiveValues must not be {@code null}. Use {@link + * NativeHistogramBuckets#EMPTY} if empty. + * @param nativeBucketsForNegativeValues must not be {@code null}. Use {@link + * NativeHistogramBuckets#EMPTY} if empty. + * @param sum sum of all observed values. Optional, use {@link Double#NaN} if not available. + * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. + * @param exemplars must not be null. Use {@link Exemplars#EMPTY} if there are no Exemplars. + * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the + * time series (this specific set of labels) was created (or reset to zero). It's optional. + * Use {@code 0L} if there is no created timestamp. */ - public HistogramSnapshot(boolean isGaugeHistogram, MetricMetadata metadata, Collection data) { - super(metadata, data); - this.gaugeHistogram = isGaugeHistogram; + public HistogramDataPointSnapshot( + int nativeSchema, + long nativeZeroCount, + double nativeZeroThreshold, + NativeHistogramBuckets nativeBucketsForPositiveValues, + NativeHistogramBuckets nativeBucketsForNegativeValues, + double sum, + Labels labels, + Exemplars exemplars, + long createdTimestampMillis) { + this( + ClassicHistogramBuckets.EMPTY, + nativeSchema, + nativeZeroCount, + nativeZeroThreshold, + nativeBucketsForPositiveValues, + nativeBucketsForNegativeValues, + sum, + labels, + exemplars, + createdTimestampMillis, + 0L); } - public boolean isGaugeHistogram() { - return gaugeHistogram; + /** + * Constructor for a histogram with both, classic and native data. + * + *
To create a new {@link HistogramDataPointSnapshot}, you can either call the constructor + * directly or use the Builder with {@link HistogramSnapshot#builder()}. + * + * @param classicBuckets required. Must not be empty. Must at least contain the +Inf bucket. + * @param nativeSchema number in [-4, 8]. See Prometheus + * client_model metrics.proto. + * @param nativeZeroCount number of observed zero values (zero is special because there is no + * histogram bucket for zero values). + * @param nativeZeroThreshold observations in [-zeroThreshold, +zeroThreshold] are treated as + * zero. This is to avoid creating a large number of buckets if observations fluctuate + * around zero. + * @param nativeBucketsForPositiveValues must not be {@code null}. Use {@link + * NativeHistogramBuckets#EMPTY} if empty. + * @param nativeBucketsForNegativeValues must not be {@code null}. Use {@link + * NativeHistogramBuckets#EMPTY} if empty. + * @param sum sum of all observed values. Optional, use {@link Double#NaN} if not available. + * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. + * @param exemplars must not be null. Use {@link Exemplars#EMPTY} if there are no Exemplars. + * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the + * time series (this specific set of labels) was created (or reset to zero). It's optional. + * Use {@code 0L} if there is no created timestamp. + */ + public HistogramDataPointSnapshot( + ClassicHistogramBuckets classicBuckets, + int nativeSchema, + long nativeZeroCount, + double nativeZeroThreshold, + NativeHistogramBuckets nativeBucketsForPositiveValues, + NativeHistogramBuckets nativeBucketsForNegativeValues, + double sum, + Labels labels, + Exemplars exemplars, + long createdTimestampMillis) { + this( + classicBuckets, + nativeSchema, + nativeZeroCount, + nativeZeroThreshold, + nativeBucketsForPositiveValues, + nativeBucketsForNegativeValues, + sum, + labels, + exemplars, + createdTimestampMillis, + 0L); } - @Override - public List getDataPoints() { - return (List) dataPoints; + /** + * Constructor with an additional scrape timestamp. This is only useful in rare cases as the + * scrape timestamp is usually set by the Prometheus server during scraping. Exceptions include + * mirroring metrics with given timestamps from other metric sources. + */ + public HistogramDataPointSnapshot( + ClassicHistogramBuckets classicBuckets, + int nativeSchema, + long nativeZeroCount, + double nativeZeroThreshold, + NativeHistogramBuckets nativeBucketsForPositiveValues, + NativeHistogramBuckets nativeBucketsForNegativeValues, + double sum, + Labels labels, + Exemplars exemplars, + long createdTimestampMillis, + long scrapeTimestampMillis) { + super( + calculateCount( + classicBuckets, + nativeSchema, + nativeZeroCount, + nativeBucketsForPositiveValues, + nativeBucketsForNegativeValues), + sum, + exemplars, + labels, + createdTimestampMillis, + scrapeTimestampMillis); + this.classicBuckets = classicBuckets; + this.nativeSchema = nativeSchema; + this.nativeZeroCount = nativeSchema == CLASSIC_HISTOGRAM ? 0 : nativeZeroCount; + this.nativeZeroThreshold = nativeSchema == CLASSIC_HISTOGRAM ? 0 : nativeZeroThreshold; + this.nativeBucketsForPositiveValues = + nativeSchema == CLASSIC_HISTOGRAM + ? NativeHistogramBuckets.EMPTY + : nativeBucketsForPositiveValues; + this.nativeBucketsForNegativeValues = + nativeSchema == CLASSIC_HISTOGRAM + ? 
NativeHistogramBuckets.EMPTY + : nativeBucketsForNegativeValues; + validate(); } - public static final class HistogramDataPointSnapshot extends DistributionDataPointSnapshot { - - // There are two types of histograms: Classic histograms and native histograms. - // Classic histograms have a fixed set of buckets. - // Native histograms have "infinitely many" buckets with exponentially growing boundaries. - // The OpenTelemetry terminology for native histogram is "exponential histogram". - // --- - // A histogram can be a classic histogram (indicated by nativeSchema == CLASSIC_HISTOGRAM), - // or a native histogram (indicated by classicBuckets == ClassicHistogramBuckets.EMPTY), - // or both. - // --- - // A histogram that is both classic and native is great for migrating from classic histograms - // to native histograms: Old Prometheus servers can still scrape the classic histogram, while - // new Prometheus servers can scrape the native histogram. - - private final ClassicHistogramBuckets classicBuckets; // May be ClassicHistogramBuckets.EMPTY for native histograms. - private final int nativeSchema; // Number in [-4, 8]. May be CLASSIC_HISTOGRAM for classic histograms. - private final long nativeZeroCount; // only used if nativeSchema != CLASSIC_HISTOGRAM - private final double nativeZeroThreshold; // only used if nativeSchema != CLASSIC_HISTOGRAM - private final NativeHistogramBuckets nativeBucketsForPositiveValues; // only used if nativeSchema != CLASSIC_HISTOGRAM - private final NativeHistogramBuckets nativeBucketsForNegativeValues; // only used if nativeSchema != CLASSIC_HISTOGRAM - - /** - * Constructor for classic histograms (as opposed to native histograms). - *
- * To create a new {@link HistogramDataPointSnapshot}, you can either call the constructor directly or use the - * Builder with {@link HistogramSnapshot#builder()}. - * - * @param classicBuckets required. Must not be empty. Must at least contain the +Inf bucket. - * @param sum sum of all observed values. Optional, pass {@link Double#NaN} if not available. - * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. - * @param exemplars must not be null. Use {@link Exemplars#EMPTY} if there are no Exemplars. - * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the time series - * (this specific set of labels) was created (or reset to zero). - * It's optional. Use {@code 0L} if there is no created timestamp. - */ - public HistogramDataPointSnapshot( - ClassicHistogramBuckets classicBuckets, - double sum, - Labels labels, - Exemplars exemplars, - long createdTimestampMillis) { - this(classicBuckets, CLASSIC_HISTOGRAM, 0, 0, NativeHistogramBuckets.EMPTY, NativeHistogramBuckets.EMPTY, sum, labels, exemplars, createdTimestampMillis, 0L); - } - - /** - * Constructor for native histograms (as opposed to classic histograms). - *
- * To create a new {@link HistogramDataPointSnapshot}, you can either call the constructor directly or use the - * Builder with {@link HistogramSnapshot#builder()}. - * - * @param nativeSchema number in [-4, 8]. See Prometheus client_model metrics.proto. - * @param nativeZeroCount number of observed zero values (zero is special because there is no - * histogram bucket for zero values). - * @param nativeZeroThreshold observations in [-zeroThreshold, +zeroThreshold] are treated as zero. - * This is to avoid creating a large number of buckets if observations fluctuate around zero. - * @param nativeBucketsForPositiveValues must not be {@code null}. Use {@link NativeHistogramBuckets#EMPTY} if empty. - * @param nativeBucketsForNegativeValues must not be {@code null}. Use {@link NativeHistogramBuckets#EMPTY} if empty. - * @param sum sum of all observed values. Optional, use {@link Double#NaN} if not available. - * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. - * @param exemplars must not be null. Use {@link Exemplars#EMPTY} if there are no Exemplars. - * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the time series - * (this specific set of labels) was created (or reset to zero). - * It's optional. Use {@code 0L} if there is no created timestamp. - */ - public HistogramDataPointSnapshot( - int nativeSchema, - long nativeZeroCount, - double nativeZeroThreshold, - NativeHistogramBuckets nativeBucketsForPositiveValues, - NativeHistogramBuckets nativeBucketsForNegativeValues, - double sum, - Labels labels, - Exemplars exemplars, - long createdTimestampMillis) { - this(ClassicHistogramBuckets.EMPTY, nativeSchema, nativeZeroCount, nativeZeroThreshold, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues, sum, labels, exemplars, createdTimestampMillis, 0L); + private static long calculateCount( + ClassicHistogramBuckets classicBuckets, + int nativeSchema, + long nativeZeroCount, + NativeHistogramBuckets nativeBucketsForPositiveValues, + NativeHistogramBuckets nativeBucketsForNegativeValues) { + if (classicBuckets.isEmpty()) { + // This is a native histogram + return calculateNativeCount( + nativeZeroCount, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues); + } else if (nativeSchema == CLASSIC_HISTOGRAM) { + // This is a classic histogram + return calculateClassicCount(classicBuckets); + } else { + // This is both, a native and a classic histogram. Count should be the same for both. + long classicCount = calculateClassicCount(classicBuckets); + long nativeCount = + calculateNativeCount( + nativeZeroCount, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues); + if (classicCount != nativeCount) { + throw new IllegalArgumentException( + "Inconsistent observation count: If a histogram has both classic and native data the observation count must be the same. Classic count is " + + classicCount + + " but native count is " + + nativeCount + + "."); } + return classicCount; + } + } - /** - * Constructor for a histogram with both, classic and native data. - *
- * To create a new {@link HistogramDataPointSnapshot}, you can either call the constructor directly or use the - * Builder with {@link HistogramSnapshot#builder()}. - * - * @param classicBuckets required. Must not be empty. Must at least contain the +Inf bucket. - * @param nativeSchema number in [-4, 8]. See Prometheus client_model metrics.proto. - * @param nativeZeroCount number of observed zero values (zero is special because there is no - * histogram bucket for zero values). - * @param nativeZeroThreshold observations in [-zeroThreshold, +zeroThreshold] are treated as zero. - * This is to avoid creating a large number of buckets if observations fluctuate around zero. - * @param nativeBucketsForPositiveValues must not be {@code null}. Use {@link NativeHistogramBuckets#EMPTY} if empty. - * @param nativeBucketsForNegativeValues must not be {@code null}. Use {@link NativeHistogramBuckets#EMPTY} if empty. - * @param sum sum of all observed values. Optional, use {@link Double#NaN} if not available. - * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. - * @param exemplars must not be null. Use {@link Exemplars#EMPTY} if there are no Exemplars. - * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when the time series - * (this specific set of labels) was created (or reset to zero). - * It's optional. Use {@code 0L} if there is no created timestamp. - */ - public HistogramDataPointSnapshot( - ClassicHistogramBuckets classicBuckets, - int nativeSchema, - long nativeZeroCount, - double nativeZeroThreshold, - NativeHistogramBuckets nativeBucketsForPositiveValues, - NativeHistogramBuckets nativeBucketsForNegativeValues, - double sum, - Labels labels, - Exemplars exemplars, - long createdTimestampMillis) { - this(classicBuckets, nativeSchema, nativeZeroCount, nativeZeroThreshold, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues, sum, labels, exemplars, createdTimestampMillis, 0L); - } + private static long calculateClassicCount(ClassicHistogramBuckets classicBuckets) { + long count = 0; + for (int i = 0; i < classicBuckets.size(); i++) { + count += classicBuckets.getCount(i); + } + return count; + } - /** - * Constructor with an additional scrape timestamp. - * This is only useful in rare cases as the scrape timestamp is usually set by the Prometheus server - * during scraping. Exceptions include mirroring metrics with given timestamps from other metric sources. - */ - public HistogramDataPointSnapshot( - ClassicHistogramBuckets classicBuckets, - int nativeSchema, - long nativeZeroCount, - double nativeZeroThreshold, - NativeHistogramBuckets nativeBucketsForPositiveValues, - NativeHistogramBuckets nativeBucketsForNegativeValues, - double sum, - Labels labels, - Exemplars exemplars, - long createdTimestampMillis, - long scrapeTimestampMillis) { - super(calculateCount(classicBuckets, nativeSchema, nativeZeroCount, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues), sum, exemplars, labels, createdTimestampMillis, scrapeTimestampMillis); - this.classicBuckets = classicBuckets; - this.nativeSchema = nativeSchema; - this.nativeZeroCount = nativeSchema == CLASSIC_HISTOGRAM ? 0 : nativeZeroCount; - this.nativeZeroThreshold = nativeSchema == CLASSIC_HISTOGRAM ? 0 : nativeZeroThreshold; - this.nativeBucketsForPositiveValues = nativeSchema == CLASSIC_HISTOGRAM ? NativeHistogramBuckets.EMPTY : nativeBucketsForPositiveValues; - this.nativeBucketsForNegativeValues = nativeSchema == CLASSIC_HISTOGRAM ? 
NativeHistogramBuckets.EMPTY : nativeBucketsForNegativeValues; - validate(); - } + private static long calculateNativeCount( + long nativeZeroCount, + NativeHistogramBuckets nativeBucketsForPositiveValues, + NativeHistogramBuckets nativeBucketsForNegativeValues) { + long count = nativeZeroCount; + for (int i = 0; i < nativeBucketsForNegativeValues.size(); i++) { + count += nativeBucketsForNegativeValues.getCount(i); + } + for (int i = 0; i < nativeBucketsForPositiveValues.size(); i++) { + count += nativeBucketsForPositiveValues.getCount(i); + } + return count; + } - private static long calculateCount(ClassicHistogramBuckets classicBuckets, int nativeSchema, long nativeZeroCount, NativeHistogramBuckets nativeBucketsForPositiveValues, NativeHistogramBuckets nativeBucketsForNegativeValues) { - if (classicBuckets.isEmpty()) { - // This is a native histogram - return calculateNativeCount(nativeZeroCount, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues); - } else if (nativeSchema == CLASSIC_HISTOGRAM) { - // This is a classic histogram - return calculateClassicCount(classicBuckets); - } else { - // This is both, a native and a classic histogram. Count should be the same for both. - long classicCount = calculateClassicCount(classicBuckets); - long nativeCount = calculateNativeCount(nativeZeroCount, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues); - if (classicCount != nativeCount) { - throw new IllegalArgumentException("Inconsistent observation count: If a histogram has both classic and native data the observation count must be the same. Classic count is " + classicCount + " but native count is " + nativeCount + "."); - } - return classicCount; - } - } + public boolean hasClassicHistogramData() { + return !classicBuckets.isEmpty(); + } - private static long calculateClassicCount(ClassicHistogramBuckets classicBuckets) { - long count = 0; - for (int i = 0; i < classicBuckets.size(); i++) { - count += classicBuckets.getCount(i); - } - return count; - } + public boolean hasNativeHistogramData() { + return nativeSchema != CLASSIC_HISTOGRAM; + } - private static long calculateNativeCount(long nativeZeroCount, NativeHistogramBuckets nativeBucketsForPositiveValues, NativeHistogramBuckets nativeBucketsForNegativeValues) { - long count = nativeZeroCount; - for (int i = 0; i < nativeBucketsForNegativeValues.size(); i++) { - count += nativeBucketsForNegativeValues.getCount(i); - } - for (int i = 0; i < nativeBucketsForPositiveValues.size(); i++) { - count += nativeBucketsForPositiveValues.getCount(i); - } - return count; - } + /** Will return garbage if {@link #hasClassicHistogramData()} is {@code false}. */ + public ClassicHistogramBuckets getClassicBuckets() { + return classicBuckets; + } - public boolean hasClassicHistogramData() { - return !classicBuckets.isEmpty(); - } + /** + * The schema defines the scale of the native histogram, i.g. the granularity of the buckets. + * Current supported values are -4 <= schema <= 8. See {@link NativeHistogramBuckets} for + * more info. This will return garbage if {@link #hasNativeHistogramData()} is {@code false}. + */ + public int getNativeSchema() { + return nativeSchema; + } - public boolean hasNativeHistogramData() { - return nativeSchema != CLASSIC_HISTOGRAM; - } + /** + * Number of observed zero values. Will return garbage if {@link #hasNativeHistogramData()} is + * {@code false}. 
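+     *
+     * <p>For example, a sketch with made-up numbers ({@code positiveBuckets} is assumed to be a
+     * {@link NativeHistogramBuckets} instance built elsewhere):
+     *
+     * <pre>{@code
+     * HistogramDataPointSnapshot nativeData =
+     *     HistogramDataPointSnapshot.builder()
+     *         .nativeSchema(5)
+     *         .nativeZeroCount(3)
+     *         .nativeZeroThreshold(0.001)
+     *         .nativeBucketsForPositiveValues(positiveBuckets) // assumed: built elsewhere
+     *         .sum(27.0)
+     *         .build();
+     * long zeroCount = nativeData.getNativeZeroCount(); // 3
+     * }</pre>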
+ */ + public long getNativeZeroCount() { + return nativeZeroCount; + } - /** - * Will return garbage if {@link #hasClassicHistogramData()} is {@code false}. - */ - public ClassicHistogramBuckets getClassicBuckets() { - return classicBuckets; - } + /** + * All observations in [-nativeZeroThreshold; +nativeZeroThreshold] are treated as zero. This is + * useful to avoid creation of a large number of buckets if observations fluctuate around zero. + * Will return garbage if {@link #hasNativeHistogramData()} is {@code false}. + */ + public double getNativeZeroThreshold() { + return nativeZeroThreshold; + } - /** - * The schema defines the scale of the native histogram, i.g. the granularity of the buckets. - * Current supported values are -4 <= schema <= 8. - * See {@link NativeHistogramBuckets} for more info. - * This will return garbage if {@link #hasNativeHistogramData()} is {@code false}. - */ - public int getNativeSchema() { - return nativeSchema; - } + /** Will return garbage if {@link #hasNativeHistogramData()} is {@code false}. */ + public NativeHistogramBuckets getNativeBucketsForPositiveValues() { + return nativeBucketsForPositiveValues; + } - /** - * Number of observed zero values. - * Will return garbage if {@link #hasNativeHistogramData()} is {@code false}. - */ - public long getNativeZeroCount() { - return nativeZeroCount; - } + /** Will return garbage if {@link #hasNativeHistogramData()} is {@code false}. */ + public NativeHistogramBuckets getNativeBucketsForNegativeValues() { + return nativeBucketsForNegativeValues; + } - /** - * All observations in [-nativeZeroThreshold; +nativeZeroThreshold] are treated as zero. - * This is useful to avoid creation of a large number of buckets if observations fluctuate around zero. - * Will return garbage if {@link #hasNativeHistogramData()} is {@code false}. - */ - public double getNativeZeroThreshold() { - return nativeZeroThreshold; + private void validate() { + for (Label label : getLabels()) { + if (label.getName().equals("le")) { + throw new IllegalArgumentException("le is a reserved label name for histograms"); } - - /** - * Will return garbage if {@link #hasNativeHistogramData()} is {@code false}. - */ - public NativeHistogramBuckets getNativeBucketsForPositiveValues() { - return nativeBucketsForPositiveValues; + } + if (nativeSchema == CLASSIC_HISTOGRAM && classicBuckets.isEmpty()) { + throw new IllegalArgumentException( + "Histogram buckets cannot be empty, must at least have the +Inf bucket."); + } + if (nativeSchema != CLASSIC_HISTOGRAM) { + if (nativeSchema < -4 || nativeSchema > 8) { + throw new IllegalArgumentException( + nativeSchema + ": illegal schema. Expecting number in [-4, 8]."); } - - /** - * Will return garbage if {@link #hasNativeHistogramData()} is {@code false}. 
- */ - public NativeHistogramBuckets getNativeBucketsForNegativeValues() { - return nativeBucketsForNegativeValues; + if (nativeZeroCount < 0) { + throw new IllegalArgumentException( + nativeZeroCount + ": nativeZeroCount cannot be negative"); } - - private void validate() { - for (Label label : getLabels()) { - if (label.getName().equals("le")) { - throw new IllegalArgumentException("le is a reserved label name for histograms"); - } - } - if (nativeSchema == CLASSIC_HISTOGRAM && classicBuckets.isEmpty()) { - throw new IllegalArgumentException("Histogram buckets cannot be empty, must at least have the +Inf bucket."); - } - if (nativeSchema != CLASSIC_HISTOGRAM) { - if (nativeSchema < -4 || nativeSchema > 8) { - throw new IllegalArgumentException(nativeSchema + ": illegal schema. Expecting number in [-4, 8]."); - } - if (nativeZeroCount < 0) { - throw new IllegalArgumentException(nativeZeroCount + ": nativeZeroCount cannot be negative"); - } - if (Double.isNaN(nativeZeroThreshold) || nativeZeroThreshold < 0) { - throw new IllegalArgumentException(nativeZeroThreshold + ": illegal nativeZeroThreshold. Must be >= 0."); - } - } + if (Double.isNaN(nativeZeroThreshold) || nativeZeroThreshold < 0) { + throw new IllegalArgumentException( + nativeZeroThreshold + ": illegal nativeZeroThreshold. Must be >= 0."); } + } + } - public static Builder builder() { - return new Builder(); - } + public static Builder builder() { + return new Builder(); + } - public static class Builder extends DistributionDataPointSnapshot.Builder { - - private ClassicHistogramBuckets classicHistogramBuckets = ClassicHistogramBuckets.EMPTY; - private int nativeSchema = CLASSIC_HISTOGRAM; - private long nativeZeroCount = 0; - private double nativeZeroThreshold = 0; - private NativeHistogramBuckets nativeBucketsForPositiveValues = NativeHistogramBuckets.EMPTY; - private NativeHistogramBuckets nativeBucketsForNegativeValues = NativeHistogramBuckets.EMPTY; - - private Builder() { - } - - @Override - protected Builder self() { - return this; - } - - public Builder classicHistogramBuckets(ClassicHistogramBuckets classicBuckets) { - this.classicHistogramBuckets = classicBuckets; - return this; - } - - public Builder nativeSchema(int nativeSchema) { - this.nativeSchema = nativeSchema; - return this; - } - - public Builder nativeZeroCount(long zeroCount) { - this.nativeZeroCount = zeroCount; - return this; - } - - public Builder nativeZeroThreshold(double zeroThreshold) { - this.nativeZeroThreshold = zeroThreshold; - return this; - } - - public Builder nativeBucketsForPositiveValues(NativeHistogramBuckets bucketsForPositiveValues) { - this.nativeBucketsForPositiveValues = bucketsForPositiveValues; - return this; - } - - public Builder nativeBucketsForNegativeValues(NativeHistogramBuckets bucketsForNegativeValues) { - this.nativeBucketsForNegativeValues = bucketsForNegativeValues; - return this; - } - - public HistogramDataPointSnapshot build() { - if (nativeSchema == CLASSIC_HISTOGRAM && classicHistogramBuckets.isEmpty()) { - throw new IllegalArgumentException("One of nativeSchema and classicHistogramBuckets is required."); - } - return new HistogramDataPointSnapshot(classicHistogramBuckets, nativeSchema, nativeZeroCount, nativeZeroThreshold, nativeBucketsForPositiveValues, nativeBucketsForNegativeValues, sum, labels, exemplars, createdTimestampMillis, scrapeTimestampMillis); - } + public static class Builder extends DistributionDataPointSnapshot.Builder { + + private ClassicHistogramBuckets classicHistogramBuckets = 
ClassicHistogramBuckets.EMPTY; + private int nativeSchema = CLASSIC_HISTOGRAM; + private long nativeZeroCount = 0; + private double nativeZeroThreshold = 0; + private NativeHistogramBuckets nativeBucketsForPositiveValues = NativeHistogramBuckets.EMPTY; + private NativeHistogramBuckets nativeBucketsForNegativeValues = NativeHistogramBuckets.EMPTY; + + private Builder() {} + + @Override + protected Builder self() { + return this; + } + + public Builder classicHistogramBuckets(ClassicHistogramBuckets classicBuckets) { + this.classicHistogramBuckets = classicBuckets; + return this; + } + + public Builder nativeSchema(int nativeSchema) { + this.nativeSchema = nativeSchema; + return this; + } + + public Builder nativeZeroCount(long zeroCount) { + this.nativeZeroCount = zeroCount; + return this; + } + + public Builder nativeZeroThreshold(double zeroThreshold) { + this.nativeZeroThreshold = zeroThreshold; + return this; + } + + public Builder nativeBucketsForPositiveValues( + NativeHistogramBuckets bucketsForPositiveValues) { + this.nativeBucketsForPositiveValues = bucketsForPositiveValues; + return this; + } + + public Builder nativeBucketsForNegativeValues( + NativeHistogramBuckets bucketsForNegativeValues) { + this.nativeBucketsForNegativeValues = bucketsForNegativeValues; + return this; + } + + public HistogramDataPointSnapshot build() { + if (nativeSchema == CLASSIC_HISTOGRAM && classicHistogramBuckets.isEmpty()) { + throw new IllegalArgumentException( + "One of nativeSchema and classicHistogramBuckets is required."); } + return new HistogramDataPointSnapshot( + classicHistogramBuckets, + nativeSchema, + nativeZeroCount, + nativeZeroThreshold, + nativeBucketsForPositiveValues, + nativeBucketsForNegativeValues, + sum, + labels, + exemplars, + createdTimestampMillis, + scrapeTimestampMillis); + } } + } - public static Builder builder() { - return new Builder(); - } + public static Builder builder() { + return new Builder(); + } - public static class Builder extends MetricSnapshot.Builder { + public static class Builder extends MetricSnapshot.Builder { - private final List dataPoints = new ArrayList<>(); - private boolean isGaugeHistogram = false; + private final List dataPoints = new ArrayList<>(); + private boolean isGaugeHistogram = false; - private Builder() { - } + private Builder() {} - /** - * Add a data point. Call multiple times to add multiple data points. - */ - public Builder dataPoint(HistogramDataPointSnapshot dataPoint) { - dataPoints.add(dataPoint); - return this; - } + /** Add a data point. Call multiple times to add multiple data points. */ + public Builder dataPoint(HistogramDataPointSnapshot dataPoint) { + dataPoints.add(dataPoint); + return this; + } - /** - * {@code true} indicates that this histogram is a gauge histogram. - * The data model for gauge histograms is the same as for regular histograms, - * except that bucket values are semantically gauges and not counters. - * See openmetrics.io for more info on gauge histograms. - */ - public Builder gaugeHistogram(boolean isGaugeHistogram) { - this.isGaugeHistogram = isGaugeHistogram; - return this; - } + /** + * {@code true} indicates that this histogram is a gauge histogram. The data model for gauge + * histograms is the same as for regular histograms, except that bucket values are semantically + * gauges and not counters. See openmetrics.io for more + * info on gauge histograms. 
+ */ + public Builder gaugeHistogram(boolean isGaugeHistogram) { + this.isGaugeHistogram = isGaugeHistogram; + return this; + } - @Override - public HistogramSnapshot build() { - return new HistogramSnapshot(isGaugeHistogram, buildMetadata(), dataPoints); - } + @Override + public HistogramSnapshot build() { + return new HistogramSnapshot(isGaugeHistogram, buildMetadata(), dataPoints); + } - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/InfoSnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/InfoSnapshot.java index 5d3890c0e..c7f180d9f 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/InfoSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/InfoSnapshot.java @@ -4,105 +4,98 @@ import java.util.Collection; import java.util.List; -/** - * Immutable snapshot of an Info metric. - */ +/** Immutable snapshot of an Info metric. */ public final class InfoSnapshot extends MetricSnapshot { + /** + * To create a new {@link InfoSnapshot}, you can either call the constructor directly or use the + * builder with {@link InfoSnapshot#builder()}. + * + * @param metadata the metric name in metadata must not include the {@code _info} suffix. See + * {@link MetricMetadata} for more naming conventions. The metadata must not have a unit. + * @param data the constructor will create a sorted copy of the collection. + */ + public InfoSnapshot(MetricMetadata metadata, Collection data) { + super(metadata, data); + if (metadata.hasUnit()) { + throw new IllegalArgumentException("An Info metric cannot have a unit."); + } + } + + @Override + public List getDataPoints() { + return (List) dataPoints; + } + + public static class InfoDataPointSnapshot extends DataPointSnapshot { + /** - * To create a new {@link InfoSnapshot}, you can either call the constructor directly or use - * the builder with {@link InfoSnapshot#builder()}. + * To create a new {@link InfoDataPointSnapshot}, you can either call the constructor directly + * or use the Builder with {@link InfoDataPointSnapshot#builder()}. * - * @param metadata the metric name in metadata must not include the {@code _info} suffix. - * See {@link MetricMetadata} for more naming conventions. - * The metadata must not have a unit. - * @param data the constructor will create a sorted copy of the collection. + * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. */ - public InfoSnapshot(MetricMetadata metadata, Collection data) { - super(metadata, data); - if (metadata.hasUnit()) { - throw new IllegalArgumentException("An Info metric cannot have a unit."); - } + public InfoDataPointSnapshot(Labels labels) { + this(labels, 0L); } - @Override - public List getDataPoints() { - return (List) dataPoints; + /** + * Constructor with an additional scrape timestamp. This is only useful in rare cases as the + * scrape timestamp is usually set by the Prometheus server during scraping. Exceptions include + * mirroring metrics with given timestamps from other metric sources. 
+ */ + public InfoDataPointSnapshot(Labels labels, long scrapeTimestampMillis) { + super(labels, 0L, scrapeTimestampMillis); } - public static class InfoDataPointSnapshot extends DataPointSnapshot { - - /** - * To create a new {@link InfoDataPointSnapshot}, you can either call the constructor directly or use the - * Builder with {@link InfoDataPointSnapshot#builder()}. - * - * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. - */ - public InfoDataPointSnapshot(Labels labels) { - this(labels, 0L); - } - - /** - * Constructor with an additional scrape timestamp. - * This is only useful in rare cases as the scrape timestamp is usually set by the Prometheus server - * during scraping. Exceptions include mirroring metrics with given timestamps from other metric sources. - */ - public InfoDataPointSnapshot(Labels labels, long scrapeTimestampMillis) { - super(labels, 0L, scrapeTimestampMillis); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder extends DataPointSnapshot.Builder { - - private Builder() { - } - - public InfoDataPointSnapshot build() { - return new InfoDataPointSnapshot(labels, scrapeTimestampMillis); - } - - @Override - protected Builder self() { - return this; - } - } + public static Builder builder() { + return new Builder(); } - public static Builder builder() { - return new Builder(); + public static class Builder extends DataPointSnapshot.Builder { + + private Builder() {} + + public InfoDataPointSnapshot build() { + return new InfoDataPointSnapshot(labels, scrapeTimestampMillis); + } + + @Override + protected Builder self() { + return this; + } } + } - public static class Builder extends MetricSnapshot.Builder { + public static Builder builder() { + return new Builder(); + } - private final List dataPoints = new ArrayList<>(); + public static class Builder extends MetricSnapshot.Builder { - private Builder() { - } + private final List dataPoints = new ArrayList<>(); - /** - * Add a data point. Call multiple times for adding multiple data points. - */ - public Builder dataPoint(InfoDataPointSnapshot dataPoint) { - dataPoints.add(dataPoint); - return this; - } + private Builder() {} - @Override - public Builder unit(Unit unit) { - throw new IllegalArgumentException("Info metric cannot have a unit."); - } + /** Add a data point. Call multiple times for adding multiple data points. */ + public Builder dataPoint(InfoDataPointSnapshot dataPoint) { + dataPoints.add(dataPoint); + return this; + } - @Override - public InfoSnapshot build() { - return new InfoSnapshot(buildMetadata(), dataPoints); - } + @Override + public Builder unit(Unit unit) { + throw new IllegalArgumentException("Info metric cannot have a unit."); + } - @Override - protected Builder self() { - return this; - } + @Override + public InfoSnapshot build() { + return new InfoSnapshot(buildMetadata(), dataPoints); + } + + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Label.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Label.java index 674eafff8..f14af41ce 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Label.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Label.java @@ -2,51 +2,46 @@ import java.util.Objects; -/** - * Utility for iterating over {@link Labels}. - */ +/** Utility for iterating over {@link Labels}. 
*/
 public final class Label implements Comparable<Label> {

- * For example, the name for a counter "http_requests_total" is "http_requests". - * The name of an info called "jvm_info" is "jvm". - *

- * We allow dots in label names. Dots are automatically replaced with underscores in Prometheus - * exposition formats. However, if metrics from this library are exposed in OpenTelemetry - * format dots are retained. - *

- * See {@link #MetricMetadata(String, String, Unit)} for more info on naming conventions. - */ - private final String name; + /** + * Name without suffix. + * + *

For example, the name for a counter "http_requests_total" is "http_requests". The name of an + * info called "jvm_info" is "jvm". + * + *

We allow dots in label names. Dots are automatically replaced with underscores in Prometheus + * exposition formats. However, if metrics from this library are exposed in OpenTelemetry format + * dots are retained. + * + *

See {@link #MetricMetadata(String, String, Unit)} for more info on naming conventions. + */ + private final String name; - /** - * Same as name, except if name contains dots, then the prometheusName is {@code name.replace(".", "_")}. - */ - private final String prometheusName; + /** + * Same as name, except if name contains dots, then the prometheusName is {@code name.replace(".", + * "_")}. + */ + private final String prometheusName; - /** - * optional, may be {@code null}. - */ - private final String help; + /** optional, may be {@code null}. */ + private final String help; - /** - * optional, may be {@code null}. - */ - private final Unit unit; + /** optional, may be {@code null}. */ + private final Unit unit; - /** - * See {@link #MetricMetadata(String, String, Unit)} - */ - public MetricMetadata(String name) { - this(name, null, null); - } + /** See {@link #MetricMetadata(String, String, Unit)} */ + public MetricMetadata(String name) { + this(name, null, null); + } - /** - * See {@link #MetricMetadata(String, String, Unit)} - */ - public MetricMetadata(String name, String help) { - this(name, help, null); - } + /** See {@link #MetricMetadata(String, String, Unit)} */ + public MetricMetadata(String name, String help) { + this(name, help, null); + } - /** - * Constructor. - * @param name must not be {@code null}. {@link PrometheusNaming#isValidMetricName(String) isValidMetricName(name)} - * must be {@code true}. Use {@link PrometheusNaming#sanitizeMetricName(String)} to convert arbitrary - * strings into valid names. - * @param help optional. May be {@code null}. - * @param unit optional. May be {@code null}. - */ - public MetricMetadata(String name, String help, Unit unit) { - this.name = name; - this.help = help; - this.unit = unit; - validate(); - this.prometheusName = name.contains(".") ? PrometheusNaming.prometheusName(name) : name; - } + /** + * Constructor. + * + * @param name must not be {@code null}. {@link PrometheusNaming#isValidMetricName(String) + * isValidMetricName(name)} must be {@code true}. Use {@link + * PrometheusNaming#sanitizeMetricName(String)} to convert arbitrary strings into valid names. + * @param help optional. May be {@code null}. + * @param unit optional. May be {@code null}. + */ + public MetricMetadata(String name, String help, Unit unit) { + this.name = name; + this.help = help; + this.unit = unit; + validate(); + this.prometheusName = name.contains(".") ? PrometheusNaming.prometheusName(name) : name; + } - /** - * The name does not include the {@code _total} suffix for counter metrics - * or the {@code _info} suffix for Info metrics. - *

- * The name may contain dots. Use {@link #getPrometheusName()} to get the name in Prometheus format, - * i.e. with dots replaced by underscores. - */ - public String getName() { - return name; - } + /** + * The name does not include the {@code _total} suffix for counter metrics or the {@code _info} + * suffix for Info metrics. + * + *

The name may contain dots. Use {@link #getPrometheusName()} to get the name in Prometheus + * format, i.e. with dots replaced by underscores. + */ + public String getName() { + return name; + } - /** - * Same as {@link #getName()} but with dots replaced by underscores. - *

- * This is used by Prometheus exposition formats. - */ - public String getPrometheusName() { - return prometheusName; - } + /** + * Same as {@link #getName()} but with dots replaced by underscores. + * + *

This is used by Prometheus exposition formats. + */ + public String getPrometheusName() { + return prometheusName; + } - public String getHelp() { - return help; - } + public String getHelp() { + return help; + } - public boolean hasUnit() { - return unit != null; - } + public boolean hasUnit() { + return unit != null; + } - public Unit getUnit() { - return unit; - } + public Unit getUnit() { + return unit; + } - private void validate() { - if (name == null) { - throw new IllegalArgumentException("Missing required field: name is null"); - } - String error = PrometheusNaming.validateMetricName(name); - if (error != null) { - throw new IllegalArgumentException("'" + name + "': Illegal metric name. " + error - + " Call " + PrometheusNaming.class.getSimpleName() + ".sanitizeMetricName(name) to avoid this error."); - } - if (hasUnit()) { - if (!name.endsWith("_" + unit) && !name.endsWith("." + unit)) { - throw new IllegalArgumentException("'" + name + "': Illegal metric name. If the unit is non-null, the name must end with the unit: _" + unit + "." - + " Call " + PrometheusNaming.class.getSimpleName() + ".sanitizeMetricName(name, unit) to avoid this error."); - } - } + private void validate() { + if (name == null) { + throw new IllegalArgumentException("Missing required field: name is null"); + } + String error = PrometheusNaming.validateMetricName(name); + if (error != null) { + throw new IllegalArgumentException( + "'" + + name + + "': Illegal metric name. " + + error + + " Call " + + PrometheusNaming.class.getSimpleName() + + ".sanitizeMetricName(name) to avoid this error."); + } + if (hasUnit()) { + if (!name.endsWith("_" + unit) && !name.endsWith("." + unit)) { + throw new IllegalArgumentException( + "'" + + name + + "': Illegal metric name. If the unit is non-null, the name must end with the unit: _" + + unit + + "." + + " Call " + + PrometheusNaming.class.getSimpleName() + + ".sanitizeMetricName(name, unit) to avoid this error."); + } } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshot.java index dcf41bc09..d44e54ff9 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshot.java @@ -7,81 +7,81 @@ import java.util.Comparator; import java.util.List; -/** - * Base class for metric snapshots. - */ +/** Base class for metric snapshots. */ public abstract class MetricSnapshot { - private final MetricMetadata metadata; - protected final List dataPoints; + private final MetricMetadata metadata; + protected final List dataPoints; - protected MetricSnapshot(MetricMetadata metadata, DataPointSnapshot... dataPoints) { - this(metadata, Arrays.asList(dataPoints)); - } + protected MetricSnapshot(MetricMetadata metadata, DataPointSnapshot... 
dataPoints) { + this(metadata, Arrays.asList(dataPoints)); + } - protected MetricSnapshot(MetricMetadata metadata, Collection dataPoints) { - if (metadata == null) { - throw new NullPointerException("metadata"); - } - if (dataPoints == null) { - throw new NullPointerException("dataPoints"); - } - this.metadata = metadata; - List dataCopy = new ArrayList<>(dataPoints); - dataCopy.sort(Comparator.comparing(DataPointSnapshot::getLabels)); - this.dataPoints = Collections.unmodifiableList(dataCopy); - validateLabels(); + protected MetricSnapshot( + MetricMetadata metadata, Collection dataPoints) { + if (metadata == null) { + throw new NullPointerException("metadata"); } - - public MetricMetadata getMetadata() { - return metadata; + if (dataPoints == null) { + throw new NullPointerException("dataPoints"); } - - public abstract List getDataPoints(); - - protected void validateLabels() { - // Verify that labels are unique (the same set of names/values must not be used multiple times for the same metric). - for (int i = 0; i < dataPoints.size() - 1; i++) { - if (dataPoints.get(i).getLabels().equals(dataPoints.get(i + 1).getLabels())) { - throw new DuplicateLabelsException(metadata, dataPoints.get(i).getLabels()); - } - } - // Should we verify that all entries in data have the same label names? - // No. They should have the same label names, but according to OpenMetrics this is not a MUST. + this.metadata = metadata; + List dataCopy = new ArrayList<>(dataPoints); + dataCopy.sort(Comparator.comparing(DataPointSnapshot::getLabels)); + this.dataPoints = Collections.unmodifiableList(dataCopy); + validateLabels(); + } + + public MetricMetadata getMetadata() { + return metadata; + } + + public abstract List getDataPoints(); + + protected void validateLabels() { + // Verify that labels are unique (the same set of names/values must not be used multiple times + // for the same metric). + for (int i = 0; i < dataPoints.size() - 1; i++) { + if (dataPoints.get(i).getLabels().equals(dataPoints.get(i + 1).getLabels())) { + throw new DuplicateLabelsException(metadata, dataPoints.get(i).getLabels()); + } + } + // Should we verify that all entries in data have the same label names? + // No. They should have the same label names, but according to OpenMetrics this is not a MUST. + } + + public abstract static class Builder> { + + private String name; + private String help; + private Unit unit; + + /** + * The name is required. If the name is missing or invalid, {@code build()} will throw an {@link + * IllegalArgumentException}. See {@link PrometheusNaming#isValidMetricName(String)} for info on + * valid metric names. + */ + public T name(String name) { + this.name = name; + return self(); } - public static abstract class Builder> { - - private String name; - private String help; - private Unit unit; - - /** - * The name is required. - * If the name is missing or invalid, {@code build()} will throw an {@link IllegalArgumentException}. - * See {@link PrometheusNaming#isValidMetricName(String)} for info on valid metric names. 
- */ - public T name(String name) { - this.name = name; - return self(); - } - - public T help(String help) { - this.help = help; - return self(); - } - - public T unit(Unit unit) { - this.unit = unit; - return self(); - } + public T help(String help) { + this.help = help; + return self(); + } - public abstract MetricSnapshot build(); + public T unit(Unit unit) { + this.unit = unit; + return self(); + } - protected MetricMetadata buildMetadata() { - return new MetricMetadata(name, help, unit); - } + public abstract MetricSnapshot build(); - protected abstract T self(); + protected MetricMetadata buildMetadata() { + return new MetricMetadata(name, help, unit); } + + protected abstract T self(); + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshots.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshots.java index a4865acb9..ecee897e4 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshots.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/MetricSnapshots.java @@ -1,5 +1,9 @@ package io.prometheus.metrics.model.snapshots; +import static io.prometheus.metrics.model.snapshots.PrometheusNaming.prometheusName; +import static java.util.Collections.unmodifiableList; +import static java.util.Comparator.comparing; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -9,97 +13,90 @@ import java.util.Set; import java.util.stream.Stream; -import static io.prometheus.metrics.model.snapshots.PrometheusNaming.prometheusName; -import static java.util.Collections.unmodifiableList; -import static java.util.Comparator.comparing; - -/** - * Immutable list of metric snapshots. - */ +/** Immutable list of metric snapshots. */ public class MetricSnapshots implements Iterable { - private final List snapshots; - - /** - * See {@link #MetricSnapshots(Collection)} - */ - public MetricSnapshots(MetricSnapshot... snapshots) { - this(Arrays.asList(snapshots)); + private final List snapshots; + + /** See {@link #MetricSnapshots(Collection)} */ + public MetricSnapshots(MetricSnapshot... snapshots) { + this(Arrays.asList(snapshots)); + } + + /** + * To create MetricSnapshots, you can either call the constructor directly or use {@link + * #builder()}. + * + * @param snapshots the constructor creates a sorted copy of snapshots. + * @throws IllegalArgumentException if snapshots contains duplicate metric names. To avoid + * duplicate metric names use {@link #builder()} and check {@link + * Builder#containsMetricName(String)} before calling {@link + * Builder#metricSnapshot(MetricSnapshot)}. + */ + public MetricSnapshots(Collection snapshots) { + List list = new ArrayList<>(snapshots); + list.sort(comparing(s -> s.getMetadata().getPrometheusName())); + for (int i = 0; i < snapshots.size() - 1; i++) { + if (list.get(i) + .getMetadata() + .getPrometheusName() + .equals(list.get(i + 1).getMetadata().getPrometheusName())) { + throw new IllegalArgumentException( + list.get(i).getMetadata().getPrometheusName() + ": duplicate metric name"); + } } + this.snapshots = unmodifiableList(list); + } - /** - * To create MetricSnapshots, you can either call the constructor directly - * or use {@link #builder()}. - * - * @param snapshots the constructor creates a sorted copy of snapshots. - * @throws IllegalArgumentException if snapshots contains duplicate metric names. 
- * To avoid duplicate metric names use {@link #builder()} and check - * {@link Builder#containsMetricName(String)} before calling - * {@link Builder#metricSnapshot(MetricSnapshot)}. - */ - public MetricSnapshots(Collection snapshots) { - List list = new ArrayList<>(snapshots); - list.sort(comparing(s -> s.getMetadata().getPrometheusName())); - for (int i = 0; i < snapshots.size() - 1; i++) { - if (list.get(i).getMetadata().getPrometheusName().equals(list.get(i + 1).getMetadata().getPrometheusName())) { - throw new IllegalArgumentException(list.get(i).getMetadata().getPrometheusName() + ": duplicate metric name"); - } - } - this.snapshots = unmodifiableList(list); - } + public static MetricSnapshots of(MetricSnapshot... snapshots) { + return new MetricSnapshots(snapshots); + } - public static MetricSnapshots of(MetricSnapshot... snapshots) { - return new MetricSnapshots(snapshots); - } + @Override + public Iterator iterator() { + return snapshots.iterator(); + } - @Override - public Iterator iterator() { - return snapshots.iterator(); - } + public int size() { + return snapshots.size(); + } - public int size() { - return snapshots.size(); - } + public MetricSnapshot get(int i) { + return snapshots.get(i); + } - public MetricSnapshot get(int i) { - return snapshots.get(i); - } + public Stream stream() { + return snapshots.stream(); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private final List snapshots = new ArrayList<>(); + private final Set prometheusNames = new HashSet<>(); + + private Builder() {} - public Stream stream() { - return snapshots.stream(); + public boolean containsMetricName(String name) { + if (name == null) { + return false; + } + String prometheusName = prometheusName(name); + return prometheusNames.contains(prometheusName); } - public static Builder builder() { - return new Builder(); + /** Add a metric snapshot. Call multiple times to add multiple metric snapshots. */ + public Builder metricSnapshot(MetricSnapshot snapshot) { + snapshots.add(snapshot); + prometheusNames.add(snapshot.getMetadata().getPrometheusName()); + return this; } - public static class Builder { - - private final List snapshots = new ArrayList<>(); - private final Set prometheusNames = new HashSet<>(); - - private Builder() { - } - - public boolean containsMetricName(String name) { - if (name == null) { - return false; - } - String prometheusName = prometheusName(name); - return prometheusNames.contains(prometheusName); - } - - /** - * Add a metric snapshot. Call multiple times to add multiple metric snapshots. 
- */ - public Builder metricSnapshot(MetricSnapshot snapshot) { - snapshots.add(snapshot); - prometheusNames.add(snapshot.getMetadata().getPrometheusName()); - return this; - } - - public MetricSnapshots build() { - return new MetricSnapshots(snapshots); - } + public MetricSnapshots build() { + return new MetricSnapshots(snapshots); } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucket.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucket.java index e7af0e6e8..9e58af18f 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucket.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucket.java @@ -1,26 +1,22 @@ package io.prometheus.metrics.model.snapshots; -/** - * For iterating over {@link NativeHistogramBuckets}. - */ +/** For iterating over {@link NativeHistogramBuckets}. */ public class NativeHistogramBucket { - private final int bucketIndex; - private final long count; + private final int bucketIndex; + private final long count; - public NativeHistogramBucket(int bucketIndex, long count) { - this.bucketIndex = bucketIndex; - this.count = count; - } + public NativeHistogramBucket(int bucketIndex, long count) { + this.bucketIndex = bucketIndex; + this.count = count; + } - /** - * See {@link NativeHistogramBuckets} for info on native bucket indexes. - */ - public int getBucketIndex() { - return bucketIndex; - } + /** See {@link NativeHistogramBuckets} for info on native bucket indexes. */ + public int getBucketIndex() { + return bucketIndex; + } - public long getCount() { - return count; - } + public long getCount() { + return count; + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBuckets.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBuckets.java index e4f722168..3b1214364 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBuckets.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/NativeHistogramBuckets.java @@ -9,9 +9,10 @@ /** * Immutable representation of native histogram buckets. - *

- * The bucket index defines the boundaries of the bucket, - * depending on the histogram's {@link HistogramSnapshot.HistogramDataPointSnapshot#getNativeSchema() schema}. + * + *

The bucket index defines the boundaries of the bucket, depending on the histogram's {@link + * HistogramSnapshot.HistogramDataPointSnapshot#getNativeSchema() schema}. + * *

  *     base = 2^(2^-schema)
  *     lower bound = base^(index - 1)
@@ -20,143 +21,149 @@
  */
 public class NativeHistogramBuckets implements Iterable<NativeHistogramBucket> {
 
-    public static final NativeHistogramBuckets EMPTY = new NativeHistogramBuckets(new int[]{}, new long[]{});
-    private final int[] bucketIndexes; // sorted
-    private final long[] counts;
-
-    private NativeHistogramBuckets(int[] bucketIndexes, long[] counts) {
-        this.bucketIndexes = bucketIndexes;
-        this.counts = counts;
-    }
-
-    /**
-     * To create a new {@link NativeHistogramBuckets} instance, you can either use one of the static {@code of(...)}
-     * methods, or use {@link NativeHistogramBuckets#builder()}.
-     * @param bucketIndexes see class javadoc of {@link NativeHistogramBuckets}. May be empty.
-     * @param counts must have the same length as bucketIndexes
-     */
-    public static NativeHistogramBuckets of(int[] bucketIndexes, long[] counts) {
-        int[] bucketIndexesCopy = Arrays.copyOf(bucketIndexes, bucketIndexes.length);
-        long[] countsCopy = Arrays.copyOf(counts, counts.length);
-        sortAndValidate(bucketIndexesCopy, countsCopy);
-        return new NativeHistogramBuckets(bucketIndexesCopy, countsCopy);
-    }
-
-    /**
-     * To create a new {@link NativeHistogramBuckets} instance, you can either use one of the static {@code of(...)}
-     * methods, or use {@link NativeHistogramBuckets#builder()}.
-     * @param bucketIndexes see class javadoc of {@link NativeHistogramBuckets}. May be empty.
-     * @param counts must have the same size as bucketIndexes
-     */
-    public static NativeHistogramBuckets of(List<Integer> bucketIndexes, List<Long> counts) {
-        int[] bucketIndexesCopy = new int[bucketIndexes.size()];
-        for (int i=0; i<bucketIndexes.size(); i++) {
-            bucketIndexesCopy[i] = bucketIndexes.get(i);
-        }
-        long[] countsCopy = new long[counts.size()];
-        for (int i=0; i<counts.size(); i++) {
-            countsCopy[i] = counts.get(i);
-        }
-        sortAndValidate(bucketIndexesCopy, countsCopy);
-        return new NativeHistogramBuckets(bucketIndexesCopy, countsCopy);
-    }
-
-    public int size() {
-        return bucketIndexes.length;
-    }
-
-    private List<NativeHistogramBucket> asList() {
-        List<NativeHistogramBucket> result = new ArrayList<>(size());
-        for (int i=0; i<bucketIndexes.length; i++) {
-            result.add(new NativeHistogramBucket(bucketIndexes[i], counts[i]));
-        }
-        return Collections.unmodifiableList(result);
-    }
-
-    @Override
-    public Iterator<NativeHistogramBucket> iterator() {
-        return asList().iterator();
+  public static final NativeHistogramBuckets EMPTY =
+      new NativeHistogramBuckets(new int[] {}, new long[] {});
+  private final int[] bucketIndexes; // sorted
+  private final long[] counts;
+
+  private NativeHistogramBuckets(int[] bucketIndexes, long[] counts) {
+    this.bucketIndexes = bucketIndexes;
+    this.counts = counts;
+  }
+
+  /**
+   * To create a new {@link NativeHistogramBuckets} instance, you can either use one of the static
+   * {@code of(...)} methods, or use {@link NativeHistogramBuckets#builder()}.
+   *
+   * @param bucketIndexes see class javadoc of {@link NativeHistogramBuckets}. May be empty.
+   * @param counts must have the same length as bucketIndexes
+   */
+  public static NativeHistogramBuckets of(int[] bucketIndexes, long[] counts) {
+    int[] bucketIndexesCopy = Arrays.copyOf(bucketIndexes, bucketIndexes.length);
+    long[] countsCopy = Arrays.copyOf(counts, counts.length);
+    sortAndValidate(bucketIndexesCopy, countsCopy);
+    return new NativeHistogramBuckets(bucketIndexesCopy, countsCopy);
+  }
+
+  /**
+   * To create a new {@link NativeHistogramBuckets} instance, you can either use one of the static
+   * {@code of(...)} methods, or use {@link NativeHistogramBuckets#builder()}.
+   *
+   * @param bucketIndexes see class javadoc of {@link NativeHistogramBuckets}. May be empty.
+   * @param counts must have the same size as bucketIndexes
+   */
+  public static NativeHistogramBuckets of(List<Integer> bucketIndexes, List<Long> counts) {
+    int[] bucketIndexesCopy = new int[bucketIndexes.size()];
+    for (int i = 0; i < bucketIndexes.size(); i++) {
+      bucketIndexesCopy[i] = bucketIndexes.get(i);
     }
-
-    public Stream<NativeHistogramBucket> stream() {
-        return asList().stream();
+    long[] countsCopy = new long[counts.size()];
+    for (int i = 0; i < counts.size(); i++) {
+      countsCopy[i] = counts.get(i);
     }
-
-    public int getBucketIndex(int i) {
-        return bucketIndexes[i];
+    sortAndValidate(bucketIndexesCopy, countsCopy);
+    return new NativeHistogramBuckets(bucketIndexesCopy, countsCopy);
+  }
+
+  public int size() {
+    return bucketIndexes.length;
+  }
+
+  private List<NativeHistogramBucket> asList() {
+    List<NativeHistogramBucket> result = new ArrayList<>(size());
+    for (int i = 0; i < bucketIndexes.length; i++) {
+      result.add(new NativeHistogramBucket(bucketIndexes[i], counts[i]));
     }
-
-    public long getCount(int i) {
-        return counts[i];
+    return Collections.unmodifiableList(result);
+  }
+
+  @Override
+  public Iterator<NativeHistogramBucket> iterator() {
+    return asList().iterator();
+  }
+
+  public Stream<NativeHistogramBucket> stream() {
+    return asList().stream();
+  }
+
+  public int getBucketIndex(int i) {
+    return bucketIndexes[i];
+  }
+
+  public long getCount(int i) {
+    return counts[i];
+  }
+
+  private static void sortAndValidate(int[] bucketIndexes, long[] counts) {
+    if (bucketIndexes.length != counts.length) {
+      throw new IllegalArgumentException(
+          "bucketIndexes.length == "
+              + bucketIndexes.length
+              + " but counts.length == "
+              + counts.length
+              + ". Expected the same length.");
     }
-
-    private static void sortAndValidate(int[] bucketIndexes, long[] counts) {
-        if (bucketIndexes.length != counts.length) {
-            throw new IllegalArgumentException("bucketIndexes.length == " + bucketIndexes.length + " but counts.length == " + counts.length + ". Expected the same length.");
-        }
-        sort(bucketIndexes, counts);
-        validate(bucketIndexes, counts);
-    }
-
-    private static void sort(int[] bucketIndexes, long[] counts) {
-        // Bubblesort. Should be efficient here as in most cases bucketIndexes is already sorted.
-        int n = bucketIndexes.length;
-        for (int i = 0; i < n - 1; i++) {
-            for (int j = 0; j < n - i - 1; j++) {
-                if (bucketIndexes[j] > bucketIndexes[j + 1]) {
-                    swap(j, j+1, bucketIndexes, counts);
-                }
-            }
+    sort(bucketIndexes, counts);
+    validate(bucketIndexes, counts);
+  }
+
+  private static void sort(int[] bucketIndexes, long[] counts) {
+    // Bubblesort. Should be efficient here as in most cases bucketIndexes is already sorted.
+    int n = bucketIndexes.length;
+    for (int i = 0; i < n - 1; i++) {
+      for (int j = 0; j < n - i - 1; j++) {
+        if (bucketIndexes[j] > bucketIndexes[j + 1]) {
+          swap(j, j + 1, bucketIndexes, counts);
         }
+      }
     }
-
-    private static void swap(int i, int j, int[] bucketIndexes, long[] counts) {
-        int tmpInt = bucketIndexes[j];
-        bucketIndexes[j] = bucketIndexes[i];
-        bucketIndexes[i] = tmpInt;
-        long tmpLong = counts[j];
-        counts[j] = counts[i];
-        counts[i] = tmpLong;
-    }
-
-    private static void validate(int[] bucketIndexes, long[] counts) {
-        // Preconditions:
-        // * bucketIndexes sorted
-        // * bucketIndexes and counts have the same length
-        for (int i=0; i<bucketIndexes.length; i++) {
-            if (counts[i] < 0) {
-                throw new IllegalArgumentException("Bucket counts cannot be negative.");
-            }
-            if (i > 0) {
-                if (bucketIndexes[i-1] == bucketIndexes[i]) {
-                    throw new IllegalArgumentException("Duplicate bucket index " + bucketIndexes[i]);
-                }
-            }
+  }
+
+  private static void swap(int i, int j, int[] bucketIndexes, long[] counts) {
+    int tmpInt = bucketIndexes[j];
+    bucketIndexes[j] = bucketIndexes[i];
+    bucketIndexes[i] = tmpInt;
+    long tmpLong = counts[j];
+    counts[j] = counts[i];
+    counts[i] = tmpLong;
+  }
+
+  private static void validate(int[] bucketIndexes, long[] counts) {
+    // Preconditions:
+    // * bucketIndexes sorted
+    // * bucketIndexes and counts have the same length
+    for (int i = 0; i < bucketIndexes.length; i++) {
+      if (counts[i] < 0) {
+        throw new IllegalArgumentException("Bucket counts cannot be negative.");
+      }
+      if (i > 0) {
+        if (bucketIndexes[i - 1] == bucketIndexes[i]) {
+          throw new IllegalArgumentException("Duplicate bucket index " + bucketIndexes[i]);
         }
+      }
     }
+  }
 
-    public static Builder builder() {
-        return new Builder();
-    }
+  public static Builder builder() {
+    return new Builder();
+  }
 
-    public static class Builder {
+  public static class Builder {
 
-        private final List<Integer> bucketIndexes = new ArrayList<>();
-        private final List<Long> counts = new ArrayList<>();
+    private final List<Integer> bucketIndexes = new ArrayList<>();
+    private final List<Long> counts = new ArrayList<>();
 
-        private Builder() {}
+    private Builder() {}
 
-        /**
-         * Add a native histogram bucket. Call multiple times to add multiple buckets.
-         */
-        public Builder bucket(int bucketIndex, long count) {
-            bucketIndexes.add(bucketIndex);
-            counts.add(count);
-            return this;
-        }
+    /** Add a native histogram bucket. Call multiple times to add multiple buckets. */
+    public Builder bucket(int bucketIndex, long count) {
+      bucketIndexes.add(bucketIndex);
+      counts.add(count);
+      return this;
+    }
 
-        public NativeHistogramBuckets build() {
-            return NativeHistogramBuckets.of(bucketIndexes, counts);
-        }
+    public NativeHistogramBuckets build() {
+      return NativeHistogramBuckets.of(bucketIndexes, counts);
     }
+  }
 }
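
Side note on the NativeHistogramBuckets file above (illustrative only, not part of the patch): the class javadoc defines bucket boundaries as base = 2^(2^-schema), lower bound = base^(index - 1), upper bound = base^index. The sketch below works through that formula and uses the builder API visible in this file; the example class name and the printBoundaries helper are hypothetical, not library API.

// Illustrative sketch, not part of this diff: how a native histogram bucket index
// maps to bucket boundaries, following the formula in the class javadoc above,
// plus the builder API shown in this file.
import io.prometheus.metrics.model.snapshots.NativeHistogramBuckets;

public class NativeBucketBoundariesExample {

  // base = 2^(2^-schema); lower bound = base^(index - 1); upper bound = base^index
  static void printBoundaries(int schema, int bucketIndex) {
    double base = Math.pow(2, Math.pow(2, -schema));
    double lowerBound = Math.pow(base, bucketIndex - 1);
    double upperBound = Math.pow(base, bucketIndex);
    System.out.println("bucket " + bucketIndex + " = (" + lowerBound + ", " + upperBound + "]");
  }

  public static void main(String[] args) {
    printBoundaries(0, 3); // schema 0 -> base 2, so index 3 covers (4.0, 8.0]
    printBoundaries(3, 1); // schema 3 -> base 2^(1/8), so index 1 covers (1.0, ~1.09]

    // Assemble buckets with the builder from this file: indexes may be added in any
    // order, because of(...) sorts and validates them before constructing the instance.
    NativeHistogramBuckets buckets =
        NativeHistogramBuckets.builder().bucket(4, 2).bucket(3, 1).build();
    System.out.println(buckets.size()); // 2
  }
}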
diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/PrometheusNaming.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/PrometheusNaming.java
index 21c162632..d4f9db699 100644
--- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/PrometheusNaming.java
+++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/PrometheusNaming.java
@@ -4,276 +4,280 @@
 
 /**
  * Utility for Prometheus Metric and Label naming.
- * 

- * Note that this library allows dots in metric and label names. Dots will automatically be replaced with underscores - * in Prometheus exposition formats. However, if metrics are exposed in OpenTelemetry format the dots are retained. + * + *

Note that this library allows dots in metric and label names. Dots will automatically be + * replaced with underscores in Prometheus exposition formats. However, if metrics are exposed in + * OpenTelemetry format the dots are retained. */ public class PrometheusNaming { - /** - * Legal characters for metric names, including dot. - */ - private static final Pattern METRIC_NAME_PATTERN = Pattern.compile("^[a-zA-Z_.:][a-zA-Z0-9_.:]*$"); + /** Legal characters for metric names, including dot. */ + private static final Pattern METRIC_NAME_PATTERN = + Pattern.compile("^[a-zA-Z_.:][a-zA-Z0-9_.:]*$"); - /** - * Legal characters for label names, including dot. - */ - private static final Pattern LABEL_NAME_PATTERN = Pattern.compile("^[a-zA-Z_.][a-zA-Z0-9_.]*$"); + /** Legal characters for label names, including dot. */ + private static final Pattern LABEL_NAME_PATTERN = Pattern.compile("^[a-zA-Z_.][a-zA-Z0-9_.]*$"); - /** - * Legal characters for unit names, including dot. - */ - private static final Pattern UNIT_NAME_PATTERN = Pattern.compile("^[a-zA-Z0-9_.:]+$"); + /** Legal characters for unit names, including dot. */ + private static final Pattern UNIT_NAME_PATTERN = Pattern.compile("^[a-zA-Z0-9_.:]+$"); - /** - * According to OpenMetrics {@code _count} and {@code _sum} (and {@code _gcount}, {@code _gsum}) should also be - * reserved metric name suffixes. However, popular instrumentation libraries have Gauges with names - * ending in {@code _count}. - * Examples: - *

    - *
  • Micrometer: {@code jvm_buffer_count}
  • - *
  • OpenTelemetry: {@code process_runtime_jvm_buffer_count}
  • - *
- * We do not treat {@code _count} and {@code _sum} as reserved suffixes here for compatibility with these libraries. - * However, there is a risk of name conflict if someone creates a gauge named {@code my_data_count} and a - * histogram or summary named {@code my_data}, because the histogram or summary will implicitly have a sample - * named {@code my_data_count}. - */ - private static final String[] RESERVED_METRIC_NAME_SUFFIXES = { - "_total", "_created", "_bucket", "_info", - ".total", ".created", ".bucket", ".info" - }; + /** + * According to OpenMetrics {@code _count} and {@code _sum} (and {@code _gcount}, {@code _gsum}) + * should also be reserved metric name suffixes. However, popular instrumentation libraries have + * Gauges with names ending in {@code _count}. Examples: + * + *
    + *
  • Micrometer: {@code jvm_buffer_count} + *
  • OpenTelemetry: {@code process_runtime_jvm_buffer_count} + *
+ * + * We do not treat {@code _count} and {@code _sum} as reserved suffixes here for compatibility + * with these libraries. However, there is a risk of name conflict if someone creates a gauge + * named {@code my_data_count} and a histogram or summary named {@code my_data}, because the + * histogram or summary will implicitly have a sample named {@code my_data_count}. + */ + private static final String[] RESERVED_METRIC_NAME_SUFFIXES = { + "_total", "_created", "_bucket", "_info", + ".total", ".created", ".bucket", ".info" + }; - /** - * Test if a metric name is valid. Rules: - *
    - *
  • The name must match {@link #METRIC_NAME_PATTERN}.
  • - *
  • The name MUST NOT end with one of the {@link #RESERVED_METRIC_NAME_SUFFIXES}.
  • - *
- * If a metric has a {@link Unit}, the metric name SHOULD end with the unit as a suffix. - * Note that OpenMetrics requires metric names to have their unit as suffix, - * and we implement this in {@code prometheus-metrics-core}. However, {@code prometheus-metrics-model} - * does not enforce Unit suffixes. - *

- * Example: If you create a Counter for a processing time with Unit {@link Unit#SECONDS SECONDS}, - * the name should be {@code processing_time_seconds}. When exposed in OpenMetrics Text format, - * this will be represented as two values: {@code processing_time_seconds_total} for the counter value, - * and the optional {@code processing_time_seconds_created} timestamp. - *

- * Use {@link #sanitizeMetricName(String)} to convert arbitrary Strings to valid metric names. - */ - public static boolean isValidMetricName(String name) { - return validateMetricName(name) == null; - } + /** + * Test if a metric name is valid. Rules: + * + *

    + *
  • The name must match {@link #METRIC_NAME_PATTERN}. + *
  • The name MUST NOT end with one of the {@link #RESERVED_METRIC_NAME_SUFFIXES}. + *
+ * + * If a metric has a {@link Unit}, the metric name SHOULD end with the unit as a suffix. Note that + * OpenMetrics requires metric names to have their unit as + * suffix, and we implement this in {@code prometheus-metrics-core}. However, {@code + * prometheus-metrics-model} does not enforce Unit suffixes. + * + *

Example: If you create a Counter for a processing time with Unit {@link Unit#SECONDS + * SECONDS}, the name should be {@code processing_time_seconds}. When exposed in OpenMetrics Text + * format, this will be represented as two values: {@code processing_time_seconds_total} for the + * counter value, and the optional {@code processing_time_seconds_created} timestamp. + * + *

Use {@link #sanitizeMetricName(String)} to convert arbitrary Strings to valid metric names. + */ + public static boolean isValidMetricName(String name) { + return validateMetricName(name) == null; + } - /** - * Same as {@link #isValidMetricName(String)}, but produces an error message. - *

- * The name is valid if the error message is {@code null}. - */ - static String validateMetricName(String name) { - for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { - if (name.endsWith(reservedSuffix)) { - return "The metric name must not include the '" + reservedSuffix + "' suffix."; - } - } - if (!METRIC_NAME_PATTERN.matcher(name).matches()) { - return "The metric name contains unsupported characters"; - } - return null; + /** + * Same as {@link #isValidMetricName(String)}, but produces an error message. + * + *

The name is valid if the error message is {@code null}. + */ + static String validateMetricName(String name) { + for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { + if (name.endsWith(reservedSuffix)) { + return "The metric name must not include the '" + reservedSuffix + "' suffix."; + } } - - public static boolean isValidLabelName(String name) { - return LABEL_NAME_PATTERN.matcher(name).matches() && - !(name.startsWith("__") || name.startsWith("._") || name.startsWith("..") || name.startsWith("_.")); + if (!METRIC_NAME_PATTERN.matcher(name).matches()) { + return "The metric name contains unsupported characters"; } + return null; + } - /** - * Units may not have illegal characters, and they may not end with a reserved suffix like 'total'. - */ - public static boolean isValidUnitName(String name) { - return validateUnitName(name) == null; - } + public static boolean isValidLabelName(String name) { + return LABEL_NAME_PATTERN.matcher(name).matches() + && !(name.startsWith("__") + || name.startsWith("._") + || name.startsWith("..") + || name.startsWith("_.")); + } - /** - * Same as {@link #isValidUnitName(String)} but returns an error message. - */ - public static String validateUnitName(String name) { - if (name.isEmpty()) { - return "The unit name must not be empty."; - } - for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { - String suffixName = reservedSuffix.substring(1); - if (name.endsWith(suffixName)) { - return suffixName + " is a reserved suffix in Prometheus"; - } - } - if (!UNIT_NAME_PATTERN.matcher(name).matches()) { - return "The unit name contains unsupported characters"; - } - return null; - } + /** + * Units may not have illegal characters, and they may not end with a reserved suffix like + * 'total'. + */ + public static boolean isValidUnitName(String name) { + return validateUnitName(name) == null; + } - /** - * Get the metric or label name that is used in Prometheus exposition format. - * - * @param name must be a valid metric or label name, - * i.e. {@link #isValidMetricName(String) isValidMetricName(name)} - * or {@link #isValidLabelName(String) isValidLabelName(name)} must be true. - * @return the name with dots replaced by underscores. - */ - public static String prometheusName(String name) { - return name.replace(".", "_"); + /** Same as {@link #isValidUnitName(String)} but returns an error message. */ + public static String validateUnitName(String name) { + if (name.isEmpty()) { + return "The unit name must not be empty."; } + for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { + String suffixName = reservedSuffix.substring(1); + if (name.endsWith(suffixName)) { + return suffixName + " is a reserved suffix in Prometheus"; + } + } + if (!UNIT_NAME_PATTERN.matcher(name).matches()) { + return "The unit name contains unsupported characters"; + } + return null; + } + + /** + * Get the metric or label name that is used in Prometheus exposition format. + * + * @param name must be a valid metric or label name, i.e. {@link #isValidMetricName(String) + * isValidMetricName(name)} or {@link #isValidLabelName(String) isValidLabelName(name)} must + * be true. + * @return the name with dots replaced by underscores. + */ + public static String prometheusName(String name) { + return name.replace(".", "_"); + } - /** - * Convert an arbitrary string to a name where {@link #isValidMetricName(String) isValidMetricName(name)} is true. 
- */ - public static String sanitizeMetricName(String metricName) { - if (metricName.isEmpty()) { - throw new IllegalArgumentException("Cannot convert an empty string to a valid metric name."); + /** + * Convert an arbitrary string to a name where {@link #isValidMetricName(String) + * isValidMetricName(name)} is true. + */ + public static String sanitizeMetricName(String metricName) { + if (metricName.isEmpty()) { + throw new IllegalArgumentException("Cannot convert an empty string to a valid metric name."); + } + String sanitizedName = replaceIllegalCharsInMetricName(metricName); + boolean modified = true; + while (modified) { + modified = false; + for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { + if (sanitizedName.equals(reservedSuffix)) { + // This is for the corner case when you call sanitizeMetricName("_total"). + // In that case the result will be "total". + return reservedSuffix.substring(1); } - String sanitizedName = replaceIllegalCharsInMetricName(metricName); - boolean modified = true; - while (modified) { - modified = false; - for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { - if (sanitizedName.equals(reservedSuffix)) { - // This is for the corner case when you call sanitizeMetricName("_total"). - // In that case the result will be "total". - return reservedSuffix.substring(1); - } - if (sanitizedName.endsWith(reservedSuffix)) { - sanitizedName = sanitizedName.substring(0, sanitizedName.length() - reservedSuffix.length()); - modified = true; - } - } + if (sanitizedName.endsWith(reservedSuffix)) { + sanitizedName = + sanitizedName.substring(0, sanitizedName.length() - reservedSuffix.length()); + modified = true; } - return sanitizedName; + } } + return sanitizedName; + } - /** - * Like {@link #sanitizeMetricName(String)}, but also makes sure that the unit is appended - * as a suffix if the unit is not {@code null}. - */ - public static String sanitizeMetricName(String metricName, Unit unit) { - String result = sanitizeMetricName(metricName); - if (unit != null) { - if (!result.endsWith("_" + unit) && !result.endsWith("." + unit)) { - result += "_" + unit; - } - } - return result; + /** + * Like {@link #sanitizeMetricName(String)}, but also makes sure that the unit is appended as a + * suffix if the unit is not {@code null}. + */ + public static String sanitizeMetricName(String metricName, Unit unit) { + String result = sanitizeMetricName(metricName); + if (unit != null) { + if (!result.endsWith("_" + unit) && !result.endsWith("." + unit)) { + result += "_" + unit; + } } + return result; + } - /** - * Convert an arbitrary string to a name where {@link #isValidLabelName(String) isValidLabelName(name)} is true. - */ - public static String sanitizeLabelName(String labelName) { - if (labelName.isEmpty()) { - throw new IllegalArgumentException("Cannot convert an empty string to a valid label name."); - } - String sanitizedName = replaceIllegalCharsInLabelName(labelName); - while (sanitizedName.startsWith("__") || sanitizedName.startsWith("_.") || sanitizedName.startsWith("._") || sanitizedName.startsWith("..")) { - sanitizedName = sanitizedName.substring(1); - } - return sanitizedName; + /** + * Convert an arbitrary string to a name where {@link #isValidLabelName(String) + * isValidLabelName(name)} is true. 
+ */ + public static String sanitizeLabelName(String labelName) { + if (labelName.isEmpty()) { + throw new IllegalArgumentException("Cannot convert an empty string to a valid label name."); + } + String sanitizedName = replaceIllegalCharsInLabelName(labelName); + while (sanitizedName.startsWith("__") + || sanitizedName.startsWith("_.") + || sanitizedName.startsWith("._") + || sanitizedName.startsWith("..")) { + sanitizedName = sanitizedName.substring(1); } + return sanitizedName; + } - /** - * Convert an arbitrary string to a name where {@link #isValidUnitName(String) isValidUnitName(name)} is true. - * - * @throws IllegalArgumentException if the {@code unitName} cannot be converted, for example if you call {@code sanitizeUnitName("total")} or {@code sanitizeUnitName("")}. - * @throws NullPointerException if {@code unitName} is null. - */ - public static String sanitizeUnitName(String unitName) { - if (unitName.isEmpty()) { - throw new IllegalArgumentException("Cannot convert an empty string to a valid unit name."); - } - String sanitizedName = replaceIllegalCharsInUnitName(unitName); - boolean modified = true; - while (modified) { - modified = false; - while (sanitizedName.startsWith("_") || sanitizedName.startsWith(".")) { - sanitizedName = sanitizedName.substring(1); - modified = true; - } - while (sanitizedName.endsWith(".") || sanitizedName.endsWith("_")) { - sanitizedName = sanitizedName.substring(0, sanitizedName.length()-1); - modified = true; - } - for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { - String suffixName = reservedSuffix.substring(1); - if (sanitizedName.endsWith(suffixName)) { - sanitizedName = sanitizedName.substring(0, sanitizedName.length() - suffixName.length()); - modified = true; - } - } - } - if (sanitizedName.isEmpty()) { - throw new IllegalArgumentException("Cannot convert '" + unitName + "' into a valid unit name."); + /** + * Convert an arbitrary string to a name where {@link #isValidUnitName(String) + * isValidUnitName(name)} is true. + * + * @throws IllegalArgumentException if the {@code unitName} cannot be converted, for example if + * you call {@code sanitizeUnitName("total")} or {@code sanitizeUnitName("")}. + * @throws NullPointerException if {@code unitName} is null. + */ + public static String sanitizeUnitName(String unitName) { + if (unitName.isEmpty()) { + throw new IllegalArgumentException("Cannot convert an empty string to a valid unit name."); + } + String sanitizedName = replaceIllegalCharsInUnitName(unitName); + boolean modified = true; + while (modified) { + modified = false; + while (sanitizedName.startsWith("_") || sanitizedName.startsWith(".")) { + sanitizedName = sanitizedName.substring(1); + modified = true; + } + while (sanitizedName.endsWith(".") || sanitizedName.endsWith("_")) { + sanitizedName = sanitizedName.substring(0, sanitizedName.length() - 1); + modified = true; + } + for (String reservedSuffix : RESERVED_METRIC_NAME_SUFFIXES) { + String suffixName = reservedSuffix.substring(1); + if (sanitizedName.endsWith(suffixName)) { + sanitizedName = sanitizedName.substring(0, sanitizedName.length() - suffixName.length()); + modified = true; } - return sanitizedName; + } + } + if (sanitizedName.isEmpty()) { + throw new IllegalArgumentException( + "Cannot convert '" + unitName + "' into a valid unit name."); } + return sanitizedName; + } - /** - * Returns a string that matches {@link #METRIC_NAME_PATTERN}. 
- */ - private static String replaceIllegalCharsInMetricName(String name) { - int length = name.length(); - char[] sanitized = new char[length]; - for (int i = 0; i < length; i++) { - char ch = name.charAt(i); - if (ch == '.' || - (ch >= 'a' && ch <= 'z') || - (ch >= 'A' && ch <= 'Z') || - (i > 0 && ch >= '0' && ch <= '9')) { - sanitized[i] = ch; - } else { - sanitized[i] = '_'; - } - } - return new String(sanitized); + /** Returns a string that matches {@link #METRIC_NAME_PATTERN}. */ + private static String replaceIllegalCharsInMetricName(String name) { + int length = name.length(); + char[] sanitized = new char[length]; + for (int i = 0; i < length; i++) { + char ch = name.charAt(i); + if (ch == '.' + || (ch >= 'a' && ch <= 'z') + || (ch >= 'A' && ch <= 'Z') + || (i > 0 && ch >= '0' && ch <= '9')) { + sanitized[i] = ch; + } else { + sanitized[i] = '_'; + } } + return new String(sanitized); + } - /** - * Returns a string that matches {@link #LABEL_NAME_PATTERN}. - */ - private static String replaceIllegalCharsInLabelName(String name) { - int length = name.length(); - char[] sanitized = new char[length]; - for (int i = 0; i < length; i++) { - char ch = name.charAt(i); - if (ch == '.' || - (ch >= 'a' && ch <= 'z') || - (ch >= 'A' && ch <= 'Z') || - (i > 0 && ch >= '0' && ch <= '9')) { - sanitized[i] = ch; - } else { - sanitized[i] = '_'; - } - } - return new String(sanitized); + /** Returns a string that matches {@link #LABEL_NAME_PATTERN}. */ + private static String replaceIllegalCharsInLabelName(String name) { + int length = name.length(); + char[] sanitized = new char[length]; + for (int i = 0; i < length; i++) { + char ch = name.charAt(i); + if (ch == '.' + || (ch >= 'a' && ch <= 'z') + || (ch >= 'A' && ch <= 'Z') + || (i > 0 && ch >= '0' && ch <= '9')) { + sanitized[i] = ch; + } else { + sanitized[i] = '_'; + } } + return new String(sanitized); + } - /** - * Returns a string that matches {@link #UNIT_NAME_PATTERN}. - */ - private static String replaceIllegalCharsInUnitName(String name) { - int length = name.length(); - char[] sanitized = new char[length]; - for (int i = 0; i < length; i++) { - char ch = name.charAt(i); - if (ch == ':' || - ch == '.' || - (ch >= 'a' && ch <= 'z') || - (ch >= 'A' && ch <= 'Z') || - (ch >= '0' && ch <= '9')) { - sanitized[i] = ch; - } else { - sanitized[i] = '_'; - } - } - return new String(sanitized); + /** Returns a string that matches {@link #UNIT_NAME_PATTERN}. */ + private static String replaceIllegalCharsInUnitName(String name) { + int length = name.length(); + char[] sanitized = new char[length]; + for (int i = 0; i < length; i++) { + char ch = name.charAt(i); + if (ch == ':' + || ch == '.' + || (ch >= 'a' && ch <= 'z') + || (ch >= 'A' && ch <= 'Z') + || (ch >= '0' && ch <= '9')) { + sanitized[i] = ch; + } else { + sanitized[i] = '_'; + } } + return new String(sanitized); + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/SummarySnapshot.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/SummarySnapshot.java index 40a31f85a..7bc575ef8 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/SummarySnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/SummarySnapshot.java @@ -4,137 +4,151 @@ import java.util.Collection; import java.util.List; -/** - * Immutable snapshot of a Summary metric. - */ +/** Immutable snapshot of a Summary metric. 
*/ public final class SummarySnapshot extends MetricSnapshot { + /** + * To create a new {@link SummarySnapshot}, you can either call the constructor directly or use + * the builder with {@link SummarySnapshot#builder()}. + * + * @param metadata See {@link MetricMetadata} for more naming conventions. + * @param data the constructor will create a sorted copy of the collection. + */ + public SummarySnapshot(MetricMetadata metadata, Collection data) { + super(metadata, data); + } + + @Override + public List getDataPoints() { + return (List) dataPoints; + } + + public static final class SummaryDataPointSnapshot extends DistributionDataPointSnapshot { + + private final Quantiles quantiles; + /** - * To create a new {@link SummarySnapshot}, you can either call the constructor directly or use - * the builder with {@link SummarySnapshot#builder()}. + * To create a new {@link SummaryDataPointSnapshot}, you can either call the constructor + * directly or use the Builder with {@link SummaryDataPointSnapshot#builder()}. * - * @param metadata See {@link MetricMetadata} for more naming conventions. - * @param data the constructor will create a sorted copy of the collection. + * @param count total number of observations. Optional, pass -1 if not available. + * @param sum sum of all observed values. Optional, pass {@link Double#NaN} if not available. + * @param quantiles must not be {@code null}. Use {@link Quantiles#EMPTY} if there are no + * quantiles. + * @param labels must not be {@code null}. Use {@link Labels#EMPTY} if there are no labels. + * @param exemplars must not be {@code null}. Use {@link Exemplars#EMPTY} if there are no + * exemplars. + * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when this + * summary data (this specific set of labels) was created. Note that this refers to the + * creation of the timeseries, not the creation of the snapshot. The created timestamp + * optional. Use {@code 0L} if there is no created timestamp. */ - public SummarySnapshot(MetricMetadata metadata, Collection data) { - super(metadata, data); + public SummaryDataPointSnapshot( + long count, + double sum, + Quantiles quantiles, + Labels labels, + Exemplars exemplars, + long createdTimestampMillis) { + this(count, sum, quantiles, labels, exemplars, createdTimestampMillis, 0); } - @Override - public List getDataPoints() { - return (List) dataPoints; + /** + * Constructor with an additional scrape timestamp. This is only useful in rare cases as the + * scrape timestamp is usually set by the Prometheus server during scraping. Exceptions include + * mirroring metrics with given timestamps from other metric sources. + */ + public SummaryDataPointSnapshot( + long count, + double sum, + Quantiles quantiles, + Labels labels, + Exemplars exemplars, + long createdTimestampMillis, + long scrapeTimestampMillis) { + super(count, sum, exemplars, labels, createdTimestampMillis, scrapeTimestampMillis); + this.quantiles = quantiles; + validate(); } - public static final class SummaryDataPointSnapshot extends DistributionDataPointSnapshot { - - private final Quantiles quantiles; - - - /** - * To create a new {@link SummaryDataPointSnapshot}, you can either call the constructor directly - * or use the Builder with {@link SummaryDataPointSnapshot#builder()}. - * - * @param count total number of observations. Optional, pass -1 if not available. - * @param sum sum of all observed values. Optional, pass {@link Double#NaN} if not available. - * @param quantiles must not be {@code null}. 
Use {@link Quantiles#EMPTY} if there are no quantiles. - * @param labels must not be {@code null}. Use {@link Labels#EMPTY} if there are no labels. - * @param exemplars must not be {@code null}. Use {@link Exemplars#EMPTY} if there are no exemplars. - * @param createdTimestampMillis timestamp (as in {@link System#currentTimeMillis()}) when this summary - * data (this specific set of labels) was created. - * Note that this refers to the creation of the timeseries, - * not the creation of the snapshot. - * The created timestamp optional. Use {@code 0L} if there is no created timestamp. - */ - public SummaryDataPointSnapshot(long count, double sum, Quantiles quantiles, Labels labels, Exemplars exemplars, long createdTimestampMillis) { - this(count, sum, quantiles, labels, exemplars, createdTimestampMillis, 0); - } - - /** - * Constructor with an additional scrape timestamp. - * This is only useful in rare cases as the scrape timestamp is usually set by the Prometheus server - * during scraping. Exceptions include mirroring metrics with given timestamps from other metric sources. - */ - public SummaryDataPointSnapshot(long count, double sum, Quantiles quantiles, Labels labels, Exemplars exemplars, long createdTimestampMillis, long scrapeTimestampMillis) { - super(count, sum, exemplars, labels, createdTimestampMillis, scrapeTimestampMillis); - this.quantiles = quantiles; - validate(); - } - - public Quantiles getQuantiles() { - return quantiles; - } - - private void validate() { - for (Label label : getLabels()) { - if (label.getName().equals("quantile")) { - throw new IllegalArgumentException("quantile is a reserved label name for summaries"); - } - } - if (quantiles == null) { - throw new NullPointerException(); - } - } + public Quantiles getQuantiles() { + return quantiles; + } - public static Builder builder() { - return new Builder(); + private void validate() { + for (Label label : getLabels()) { + if (label.getName().equals("quantile")) { + throw new IllegalArgumentException("quantile is a reserved label name for summaries"); } + } + if (quantiles == null) { + throw new NullPointerException(); + } + } - public static class Builder extends DistributionDataPointSnapshot.Builder { + public static Builder builder() { + return new Builder(); + } - private Quantiles quantiles = Quantiles.EMPTY; + public static class Builder extends DistributionDataPointSnapshot.Builder { + + private Quantiles quantiles = Quantiles.EMPTY; + + private Builder() {} + + @Override + protected Builder self() { + return this; + } + + public Builder quantiles(Quantiles quantiles) { + this.quantiles = quantiles; + return this; + } + + @Override + public Builder count(long count) { + super.count(count); + return this; + } + + public SummaryDataPointSnapshot build() { + return new SummaryDataPointSnapshot( + count, + sum, + quantiles, + labels, + exemplars, + createdTimestampMillis, + scrapeTimestampMillis); + } + } + } - private Builder() { - } + public static Builder builder() { + return new Builder(); + } - @Override - protected Builder self() { - return this; - } + public static class Builder extends MetricSnapshot.Builder { - public Builder quantiles(Quantiles quantiles) { - this.quantiles = quantiles; - return this; - } + private final List dataPoints = new ArrayList<>(); - @Override - public Builder count(long count) { - super.count(count); - return this; - } + private Builder() {} - public SummaryDataPointSnapshot build() { - return new SummaryDataPointSnapshot(count, sum, quantiles, labels, exemplars, 
createdTimestampMillis, scrapeTimestampMillis); - } - } + /** Add a data point. Call multiple times to add multiple data points. */ + public Builder dataPoint(SummaryDataPointSnapshot data) { + dataPoints.add(data); + return this; } - public static Builder builder() { - return new Builder(); + @Override + public SummarySnapshot build() { + return new SummarySnapshot(buildMetadata(), dataPoints); } - public static class Builder extends MetricSnapshot.Builder { - - private final List dataPoints = new ArrayList<>(); - - private Builder() { - } - - /** - * Add a data point. Call multiple times to add multiple data points. - */ - public Builder dataPoint(SummaryDataPointSnapshot data) { - dataPoints.add(data); - return this; - } - - @Override - public SummarySnapshot build() { - return new SummarySnapshot(buildMetadata(), dataPoints); - } - - @Override - protected Builder self() { - return this; - } + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Unit.java b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Unit.java index 16a6c5941..cb3d93cda 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Unit.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/Unit.java @@ -4,69 +4,71 @@ /** * Some pre-defined units for convenience. You can create your own units with + * *
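To make the SummarySnapshot builders above concrete, here is a small sketch. The metric name, label, and observation values are made up, and it assumes the Quantiles builder from the same snapshots package:

import io.prometheus.metrics.model.snapshots.Labels;
import io.prometheus.metrics.model.snapshots.Quantiles;
import io.prometheus.metrics.model.snapshots.SummarySnapshot;
import io.prometheus.metrics.model.snapshots.SummarySnapshot.SummaryDataPointSnapshot;
import io.prometheus.metrics.model.snapshots.Unit;

class SummarySnapshotSketch { // hypothetical class, for illustration only
  static SummarySnapshot example() {
    return SummarySnapshot.builder()
        .name("http_request_duration_seconds")
        .help("request latency")
        .unit(Unit.SECONDS)
        .dataPoint(
            SummaryDataPointSnapshot.builder()
                .count(42) // optional, pass -1 if not available
                .sum(13.7) // optional, pass Double.NaN if not available
                .quantiles(
                    Quantiles.builder().quantile(0.5, 0.12).quantile(0.99, 0.47).build())
                .labels(Labels.of("path", "/hello"))
                .build())
        .build();
  }
}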

  *     new Unit("myUnit");
  * 
- * Note that in Prometheus, units are largely based on SI base units - * (seconds, bytes, joules, grams, meters, ratio, volts, amperes, and celsius). + * + * Note that in Prometheus, units are largely based on SI base units (seconds, bytes, joules, grams, + * meters, ratio, volts, amperes, and celsius). */ public class Unit { - private final String name; + private final String name; - public static final Unit RATIO = new Unit("ratio"); - public static final Unit SECONDS = new Unit("seconds"); - public static final Unit BYTES = new Unit("bytes"); - public static final Unit CELSIUS = new Unit("celsius"); - public static final Unit JOULES = new Unit("joules"); - public static final Unit GRAMS = new Unit("grams"); - public static final Unit METERS = new Unit("meters"); - public static final Unit VOLTS = new Unit("volts"); - public static final Unit AMPERES = new Unit("amperes"); + public static final Unit RATIO = new Unit("ratio"); + public static final Unit SECONDS = new Unit("seconds"); + public static final Unit BYTES = new Unit("bytes"); + public static final Unit CELSIUS = new Unit("celsius"); + public static final Unit JOULES = new Unit("joules"); + public static final Unit GRAMS = new Unit("grams"); + public static final Unit METERS = new Unit("meters"); + public static final Unit VOLTS = new Unit("volts"); + public static final Unit AMPERES = new Unit("amperes"); - public Unit(String name) { - if (name == null) { - throw new NullPointerException("Unit name cannot be null."); - } - name = name.trim(); - String error = PrometheusNaming.validateUnitName(name); - if (error != null) { - throw new IllegalArgumentException(name + ": Illegal unit name: " + error); - } - this.name = name; + public Unit(String name) { + if (name == null) { + throw new NullPointerException("Unit name cannot be null."); } - - @Override - public String toString() { - return name; + name = name.trim(); + String error = PrometheusNaming.validateUnitName(name); + if (error != null) { + throw new IllegalArgumentException(name + ": Illegal unit name: " + error); } + this.name = name; + } - public static double nanosToSeconds(long nanos) { - return nanos / 1E9; - } + @Override + public String toString() { + return name; + } - public static double millisToSeconds(long millis) { - return millis / 1E3; - } + public static double nanosToSeconds(long nanos) { + return nanos / 1E9; + } - public static double secondsToMillis(double seconds) { - return seconds * 1E3; - } + public static double millisToSeconds(long millis) { + return millis / 1E3; + } - public static double kiloBytesToBytes(double kilobytes) { - return kilobytes * 1024; - } + public static double secondsToMillis(double seconds) { + return seconds * 1E3; + } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Unit unit = (Unit) o; - return Objects.equals(name, unit.name); - } + public static double kiloBytesToBytes(double kilobytes) { + return kilobytes * 1024; + } - @Override - public int hashCode() { - return Objects.hash(name); - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Unit unit = (Unit) o; + return Objects.equals(name, unit.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } } diff --git a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/UnknownSnapshot.java 
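A short sketch of how the Unit class above is used; the custom unit name is made up:

import io.prometheus.metrics.model.snapshots.Unit;

class UnitSketch { // hypothetical class, for illustration only
  public static void main(String[] args) {
    Unit fahrenheit = new Unit("fahrenheit"); // custom unit; the name is trimmed and validated
    System.out.println(fahrenheit); // "fahrenheit" (toString() returns the name)
    System.out.println(Unit.nanosToSeconds(1_500_000_000L)); // 1.5
    System.out.println(Unit.secondsToMillis(0.25)); // 250.0
    System.out.println(Unit.kiloBytesToBytes(4)); // 4096.0
    System.out.println(Unit.SECONDS.equals(new Unit("seconds"))); // true, units compare by name
  }
}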
b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/UnknownSnapshot.java index 70cf07326..3fe01c9df 100644 --- a/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/UnknownSnapshot.java +++ b/prometheus-metrics-model/src/main/java/io/prometheus/metrics/model/snapshots/UnknownSnapshot.java @@ -4,136 +4,125 @@ import java.util.Collection; import java.util.List; -/** - * Immutable snapshot of an Unknown (Untyped) metric. - */ +/** Immutable snapshot of an Unknown (Untyped) metric. */ public final class UnknownSnapshot extends MetricSnapshot { + /** + * To create a new {@link UnknownSnapshot}, you can either call the constructor directly or use + * the builder with {@link UnknownSnapshot#builder()}. + * + * @param metadata required name and optional help and unit. See {@link MetricMetadata} for naming + * conventions. + * @param data the constructor will create a sorted copy of the collection. + */ + public UnknownSnapshot(MetricMetadata metadata, Collection data) { + super(metadata, data); + } + + @Override + public List getDataPoints() { + return (List) dataPoints; + } + + public static final class UnknownDataPointSnapshot extends DataPointSnapshot { + + private final double value; + private final Exemplar exemplar; // may be null + /** - * To create a new {@link UnknownSnapshot}, you can either call the constructor directly or use - * the builder with {@link UnknownSnapshot#builder()}. + * To create a new {@link UnknownDataPointSnapshot}, you can either call the constructor + * directly or use the Builder with {@link UnknownDataPointSnapshot#builder()}. * - * @param metadata required name and optional help and unit. - * See {@link MetricMetadata} for naming conventions. - * @param data the constructor will create a sorted copy of the collection. + * @param value the value. + * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. + * @param exemplar may be null. */ - public UnknownSnapshot(MetricMetadata metadata, Collection data) { - super(metadata, data); + public UnknownDataPointSnapshot(double value, Labels labels, Exemplar exemplar) { + this(value, labels, exemplar, 0); } - @Override - public List getDataPoints() { - return (List) dataPoints; + /** + * Constructor with an additional scrape timestamp. This is only useful in rare cases as the + * scrape timestamp is usually set by the Prometheus server during scraping. Exceptions include + * mirroring metrics with given timestamps from other metric sources. + */ + public UnknownDataPointSnapshot( + double value, Labels labels, Exemplar exemplar, long scrapeTimestampMillis) { + super(labels, 0L, scrapeTimestampMillis); + this.value = value; + this.exemplar = exemplar; } - public static final class UnknownDataPointSnapshot extends DataPointSnapshot { - - private final double value; - private final Exemplar exemplar; // may be null - - /** - * To create a new {@link UnknownDataPointSnapshot}, you can either call the constructor directly or use the - * Builder with {@link UnknownDataPointSnapshot#builder()}. - * - * @param value the value. - * @param labels must not be null. Use {@link Labels#EMPTY} if there are no labels. - * @param exemplar may be null. - */ - public UnknownDataPointSnapshot(double value, Labels labels, Exemplar exemplar) { - this(value, labels, exemplar, 0); - } + public double getValue() { + return value; + } - /** - * Constructor with an additional scrape timestamp. 
- * This is only useful in rare cases as the scrape timestamp is usually set by the Prometheus server - * during scraping. Exceptions include mirroring metrics with given timestamps from other metric sources. - */ - public UnknownDataPointSnapshot(double value, Labels labels, Exemplar exemplar, long scrapeTimestampMillis) { - super(labels, 0L, scrapeTimestampMillis); - this.value = value; - this.exemplar = exemplar; - } + /** May return {@code null}. */ + public Exemplar getExemplar() { + return exemplar; + } - public double getValue() { - return value; - } + public static Builder builder() { + return new Builder(); + } - /** - * May return {@code null}. - */ - public Exemplar getExemplar() { - return exemplar; - } + public static class Builder extends DataPointSnapshot.Builder { - public static Builder builder() { - return new Builder(); - } + private Exemplar exemplar = null; + private Double value = null; + + private Builder() {} + + /** required. */ + public Builder value(double value) { + this.value = value; + return this; + } + + /** Optional */ + public Builder exemplar(Exemplar exemplar) { + this.exemplar = exemplar; + return this; + } - public static class Builder extends DataPointSnapshot.Builder { - - private Exemplar exemplar = null; - private Double value = null; - - private Builder() { - } - - /** - * required. - */ - public Builder value(double value) { - this.value = value; - return this; - } - - /** - * Optional - */ - public Builder exemplar(Exemplar exemplar) { - this.exemplar = exemplar; - return this; - } - - public UnknownDataPointSnapshot build() { - if (value == null) { - throw new IllegalArgumentException("Missing required field: value is null."); - } - return new UnknownDataPointSnapshot(value, labels, exemplar, scrapeTimestampMillis); - } - - @Override - protected Builder self() { - return this; - } + public UnknownDataPointSnapshot build() { + if (value == null) { + throw new IllegalArgumentException("Missing required field: value is null."); } - } + return new UnknownDataPointSnapshot(value, labels, exemplar, scrapeTimestampMillis); + } - public static Builder builder() { - return new Builder(); + @Override + protected Builder self() { + return this; + } } + } - public static class Builder extends MetricSnapshot.Builder { + public static Builder builder() { + return new Builder(); + } - private final List dataPoints = new ArrayList<>(); + public static class Builder extends MetricSnapshot.Builder { - private Builder() { - } + private final List dataPoints = new ArrayList<>(); - /** - * Add a data point. Call multiple times to add multiple data points. - */ - public Builder dataPoint(UnknownDataPointSnapshot data) { - dataPoints.add(data); - return this; - } + private Builder() {} - @Override - public UnknownSnapshot build() { - return new UnknownSnapshot(buildMetadata(), dataPoints); - } + /** Add a data point. Call multiple times to add multiple data points. 
*/ + public Builder dataPoint(UnknownDataPointSnapshot data) { + dataPoints.add(data); + return this; + } - @Override - protected Builder self() { - return this; - } + @Override + public UnknownSnapshot build() { + return new UnknownSnapshot(buildMetadata(), dataPoints); + } + + @Override + protected Builder self() { + return this; } + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucketsTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucketsTest.java index 78bf8c564..082981431 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucketsTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ClassicHistogramBucketsTest.java @@ -1,102 +1,98 @@ package io.prometheus.metrics.model.snapshots; +import java.util.Iterator; import org.junit.Assert; import org.junit.Test; -import java.util.Iterator; - public class ClassicHistogramBucketsTest { - @Test - public void testGoodCase() { - ClassicHistogramBuckets buckets = ClassicHistogramBuckets.builder() - .bucket(Double.NEGATIVE_INFINITY, 0) - .bucket(-10.0, 7) - .bucket(1024, 3) - .bucket(Double.POSITIVE_INFINITY, 8) - .build(); - Assert.assertEquals(4, buckets.size()); - } + @Test + public void testGoodCase() { + ClassicHistogramBuckets buckets = + ClassicHistogramBuckets.builder() + .bucket(Double.NEGATIVE_INFINITY, 0) + .bucket(-10.0, 7) + .bucket(1024, 3) + .bucket(Double.POSITIVE_INFINITY, 8) + .build(); + Assert.assertEquals(4, buckets.size()); + } - @Test - public void testSort() { - ClassicHistogramBuckets buckets = ClassicHistogramBuckets.builder() - .bucket(7, 2) - .bucket(2, 0) - .bucket(Double.POSITIVE_INFINITY, 3) - .build(); - Assert.assertEquals(3, buckets.size()); - Assert.assertEquals(2, buckets.getUpperBound(0), 0.0); - Assert.assertEquals(7, buckets.getUpperBound(1), 0.0); - Assert.assertEquals(Double.POSITIVE_INFINITY, buckets.getUpperBound(2), 0.0); - Assert.assertEquals(0, buckets.getCount(0)); - Assert.assertEquals(2, buckets.getCount(1)); - Assert.assertEquals(3, buckets.getCount(2)); - } + @Test + public void testSort() { + ClassicHistogramBuckets buckets = + ClassicHistogramBuckets.builder() + .bucket(7, 2) + .bucket(2, 0) + .bucket(Double.POSITIVE_INFINITY, 3) + .build(); + Assert.assertEquals(3, buckets.size()); + Assert.assertEquals(2, buckets.getUpperBound(0), 0.0); + Assert.assertEquals(7, buckets.getUpperBound(1), 0.0); + Assert.assertEquals(Double.POSITIVE_INFINITY, buckets.getUpperBound(2), 0.0); + Assert.assertEquals(0, buckets.getCount(0)); + Assert.assertEquals(2, buckets.getCount(1)); + Assert.assertEquals(3, buckets.getCount(2)); + } - @Test - public void testMinimalBuckets() { - ClassicHistogramBuckets buckets = ClassicHistogramBuckets.builder() - .bucket(Double.POSITIVE_INFINITY, 0) - .build(); - Assert.assertEquals(1, buckets.size()); - } + @Test + public void testMinimalBuckets() { + ClassicHistogramBuckets buckets = + ClassicHistogramBuckets.builder().bucket(Double.POSITIVE_INFINITY, 0).build(); + Assert.assertEquals(1, buckets.size()); + } - @Test(expected = IllegalArgumentException.class) - public void testInfBucketMissing() { - ClassicHistogramBuckets.builder() - .bucket(Double.NEGATIVE_INFINITY, 0) - .build(); - } + @Test(expected = IllegalArgumentException.class) + public void testInfBucketMissing() { + ClassicHistogramBuckets.builder().bucket(Double.NEGATIVE_INFINITY, 0).build(); + } - 
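For reference, a minimal sketch of building the UnknownSnapshot shown above; the metric name, label, and value are made up:

import io.prometheus.metrics.model.snapshots.Labels;
import io.prometheus.metrics.model.snapshots.UnknownSnapshot;
import io.prometheus.metrics.model.snapshots.UnknownSnapshot.UnknownDataPointSnapshot;

class UnknownSnapshotSketch { // hypothetical class, for illustration only
  static UnknownSnapshot example() {
    return UnknownSnapshot.builder()
        .name("legacy_queue_depth")
        .dataPoint(
            UnknownDataPointSnapshot.builder()
                .value(17.0) // required; build() throws if the value is missing
                .labels(Labels.of("queue", "default"))
                .build())
        .build();
  }
}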
@Test(expected = IllegalArgumentException.class) - public void testNegativeCount() { - ClassicHistogramBuckets.builder() - .bucket(0.0, 10) - .bucket(Double.POSITIVE_INFINITY, -1) - .build(); - } + @Test(expected = IllegalArgumentException.class) + public void testNegativeCount() { + ClassicHistogramBuckets.builder().bucket(0.0, 10).bucket(Double.POSITIVE_INFINITY, -1).build(); + } - @Test(expected = IllegalArgumentException.class) - public void testNaNBoundary() { - ClassicHistogramBuckets.builder() - .bucket(0.0, 1) - .bucket(Double.NaN, 2) - .bucket(Double.POSITIVE_INFINITY, 0) - .build(); - } + @Test(expected = IllegalArgumentException.class) + public void testNaNBoundary() { + ClassicHistogramBuckets.builder() + .bucket(0.0, 1) + .bucket(Double.NaN, 2) + .bucket(Double.POSITIVE_INFINITY, 0) + .build(); + } - @Test(expected = IllegalArgumentException.class) - public void testDuplicateBoundary() { - ClassicHistogramBuckets.builder() - .bucket(1.0, 1) - .bucket(2.0, 2) - .bucket(1.0, 2) - .bucket(Double.POSITIVE_INFINITY, 0) - .build(); - } + @Test(expected = IllegalArgumentException.class) + public void testDuplicateBoundary() { + ClassicHistogramBuckets.builder() + .bucket(1.0, 1) + .bucket(2.0, 2) + .bucket(1.0, 2) + .bucket(Double.POSITIVE_INFINITY, 0) + .build(); + } - @Test(expected = IllegalArgumentException.class) - public void testEmptyBuckets() { - ClassicHistogramBuckets.builder().build(); - } + @Test(expected = IllegalArgumentException.class) + public void testEmptyBuckets() { + ClassicHistogramBuckets.builder().build(); + } - @Test(expected = IllegalArgumentException.class) - public void testDifferentLength() { - double[] upperBounds = new double[] {0.7, 1.3, Double.POSITIVE_INFINITY}; - long[] counts = new long[] {13, 178, 1024, 3000}; - ClassicHistogramBuckets.of(upperBounds, counts); - } + @Test(expected = IllegalArgumentException.class) + public void testDifferentLength() { + double[] upperBounds = new double[] {0.7, 1.3, Double.POSITIVE_INFINITY}; + long[] counts = new long[] {13, 178, 1024, 3000}; + ClassicHistogramBuckets.of(upperBounds, counts); + } - @Test(expected = UnsupportedOperationException.class) - public void testImmutable() { - ClassicHistogramBuckets buckets = ClassicHistogramBuckets.builder() - .bucket(1.0, 7) - .bucket(2.0, 8) - .bucket(Double.POSITIVE_INFINITY, 0) - .build(); - Iterator iterator = buckets.iterator(); - iterator.next(); - iterator.remove(); - } + @Test(expected = UnsupportedOperationException.class) + public void testImmutable() { + ClassicHistogramBuckets buckets = + ClassicHistogramBuckets.builder() + .bucket(1.0, 7) + .bucket(2.0, 8) + .bucket(Double.POSITIVE_INFINITY, 0) + .build(); + Iterator iterator = buckets.iterator(); + iterator.next(); + iterator.remove(); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/CounterSnapshotTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/CounterSnapshotTest.java index b990de196..96e8df4c9 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/CounterSnapshotTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/CounterSnapshotTest.java @@ -1,108 +1,115 @@ package io.prometheus.metrics.model.snapshots; import io.prometheus.metrics.model.snapshots.CounterSnapshot.CounterDataPointSnapshot; -import org.junit.Assert; -import org.junit.Test; - import java.util.Iterator; import java.util.concurrent.TimeUnit; +import org.junit.Assert; +import 
org.junit.Test; public class CounterSnapshotTest { - @Test - public void testCompleteGoodCase() { - long createdTimestamp1 = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1); - long createdTimestamp2 = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(2); - long exemplarTimestamp = System.currentTimeMillis(); - CounterSnapshot snapshot = CounterSnapshot.builder() - .name("http_server_requests_seconds") - .help("total time spent serving requests") - .unit(Unit.SECONDS) - .dataPoint(CounterDataPointSnapshot.builder() - .value(1.0) - .exemplar(Exemplar.builder() - .value(3.0) - .traceId("abc123") - .spanId("123457") - .timestampMillis(exemplarTimestamp) - .build()) - .labels(Labels.builder() - .label("path", "/world") - .build()) - .createdTimestampMillis(createdTimestamp1) - .build() - ).dataPoint(CounterDataPointSnapshot.builder() - .value(2.0) - .exemplar(Exemplar.builder() - .value(4.0) - .traceId("def456") - .spanId("234567") - .timestampMillis(exemplarTimestamp) - .build()) - .labels(Labels.builder() - .label("path", "/hello") - .build()) - .createdTimestampMillis(createdTimestamp2) - .build() - ) - .build(); - SnapshotTestUtil.assertMetadata(snapshot, "http_server_requests_seconds", "total time spent serving requests", "seconds"); - Assert.assertEquals(2, snapshot.getDataPoints().size()); - CounterDataPointSnapshot data = snapshot.getDataPoints().get(0); // data is sorted by labels, so the first one should be path="/hello" - Assert.assertEquals(Labels.of("path", "/hello"), data.getLabels()); - Assert.assertEquals(2.0, data.getValue(), 0.0); - Assert.assertEquals(4.0, data.getExemplar().getValue(), 0.0); - Assert.assertEquals(createdTimestamp2, data.getCreatedTimestampMillis()); - Assert.assertFalse(data.hasScrapeTimestamp()); - data = snapshot.getDataPoints().get(1); - Assert.assertEquals(Labels.of("path", "/world"), data.getLabels()); - Assert.assertEquals(1.0, data.getValue(), 0.0); - Assert.assertEquals(3.0, data.getExemplar().getValue(), 0.0); - Assert.assertEquals(createdTimestamp1, data.getCreatedTimestampMillis()); - Assert.assertFalse(data.hasScrapeTimestamp()); - } + @Test + public void testCompleteGoodCase() { + long createdTimestamp1 = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1); + long createdTimestamp2 = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(2); + long exemplarTimestamp = System.currentTimeMillis(); + CounterSnapshot snapshot = + CounterSnapshot.builder() + .name("http_server_requests_seconds") + .help("total time spent serving requests") + .unit(Unit.SECONDS) + .dataPoint( + CounterDataPointSnapshot.builder() + .value(1.0) + .exemplar( + Exemplar.builder() + .value(3.0) + .traceId("abc123") + .spanId("123457") + .timestampMillis(exemplarTimestamp) + .build()) + .labels(Labels.builder().label("path", "/world").build()) + .createdTimestampMillis(createdTimestamp1) + .build()) + .dataPoint( + CounterDataPointSnapshot.builder() + .value(2.0) + .exemplar( + Exemplar.builder() + .value(4.0) + .traceId("def456") + .spanId("234567") + .timestampMillis(exemplarTimestamp) + .build()) + .labels(Labels.builder().label("path", "/hello").build()) + .createdTimestampMillis(createdTimestamp2) + .build()) + .build(); + SnapshotTestUtil.assertMetadata( + snapshot, "http_server_requests_seconds", "total time spent serving requests", "seconds"); + Assert.assertEquals(2, snapshot.getDataPoints().size()); + CounterDataPointSnapshot data = + snapshot + .getDataPoints() + .get(0); // data is sorted by labels, so the first one should be path="/hello" + 
Assert.assertEquals(Labels.of("path", "/hello"), data.getLabels()); + Assert.assertEquals(2.0, data.getValue(), 0.0); + Assert.assertEquals(4.0, data.getExemplar().getValue(), 0.0); + Assert.assertEquals(createdTimestamp2, data.getCreatedTimestampMillis()); + Assert.assertFalse(data.hasScrapeTimestamp()); + data = snapshot.getDataPoints().get(1); + Assert.assertEquals(Labels.of("path", "/world"), data.getLabels()); + Assert.assertEquals(1.0, data.getValue(), 0.0); + Assert.assertEquals(3.0, data.getExemplar().getValue(), 0.0); + Assert.assertEquals(createdTimestamp1, data.getCreatedTimestampMillis()); + Assert.assertFalse(data.hasScrapeTimestamp()); + } - @Test - public void testMinimalGoodCase() { - CounterSnapshot snapshot = CounterSnapshot.builder() - .name("events") - .dataPoint(CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - SnapshotTestUtil.assertMetadata(snapshot, "events", null, null); - Assert.assertEquals(1, snapshot.getDataPoints().size()); - CounterDataPointSnapshot data = snapshot.getDataPoints().get(0); - Assert.assertEquals(Labels.EMPTY, data.getLabels()); - Assert.assertEquals(1.0, data.getValue(), 0.0); - Assert.assertNull(data.getExemplar()); - Assert.assertFalse(data.hasCreatedTimestamp()); - Assert.assertFalse(data.hasScrapeTimestamp()); - } + @Test + public void testMinimalGoodCase() { + CounterSnapshot snapshot = + CounterSnapshot.builder() + .name("events") + .dataPoint(CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + SnapshotTestUtil.assertMetadata(snapshot, "events", null, null); + Assert.assertEquals(1, snapshot.getDataPoints().size()); + CounterDataPointSnapshot data = snapshot.getDataPoints().get(0); + Assert.assertEquals(Labels.EMPTY, data.getLabels()); + Assert.assertEquals(1.0, data.getValue(), 0.0); + Assert.assertNull(data.getExemplar()); + Assert.assertFalse(data.hasCreatedTimestamp()); + Assert.assertFalse(data.hasScrapeTimestamp()); + } - @Test - public void testEmptyCounter() { - CounterSnapshot snapshot = CounterSnapshot.builder().name("events").build(); - Assert.assertEquals(0, snapshot.getDataPoints().size()); - } + @Test + public void testEmptyCounter() { + CounterSnapshot snapshot = CounterSnapshot.builder().name("events").build(); + Assert.assertEquals(0, snapshot.getDataPoints().size()); + } - @Test(expected = IllegalArgumentException.class) - public void testTotalSuffixPresent() { - CounterSnapshot.builder().name("test_total").build(); - } + @Test(expected = IllegalArgumentException.class) + public void testTotalSuffixPresent() { + CounterSnapshot.builder().name("test_total").build(); + } - @Test(expected = IllegalArgumentException.class) - public void testValueMissing() { - CounterDataPointSnapshot.builder().build(); - } + @Test(expected = IllegalArgumentException.class) + public void testValueMissing() { + CounterDataPointSnapshot.builder().build(); + } - @Test(expected = UnsupportedOperationException.class) - public void testDataImmutable() { - CounterSnapshot snapshot = CounterSnapshot.builder() - .name("events") - .dataPoint(CounterDataPointSnapshot.builder().labels(Labels.of("a", "a")).value(1.0).build()) - .dataPoint(CounterDataPointSnapshot.builder().labels(Labels.of("a", "b")).value(2.0).build()) - .build(); - Iterator iterator = snapshot.getDataPoints().iterator(); - iterator.next(); - iterator.remove(); - } + @Test(expected = UnsupportedOperationException.class) + public void testDataImmutable() { + CounterSnapshot snapshot = + CounterSnapshot.builder() + .name("events") + .dataPoint( + 
CounterDataPointSnapshot.builder().labels(Labels.of("a", "a")).value(1.0).build()) + .dataPoint( + CounterDataPointSnapshot.builder().labels(Labels.of("a", "b")).value(2.0).build()) + .build(); + Iterator iterator = snapshot.getDataPoints().iterator(); + iterator.next(); + iterator.remove(); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarTest.java index 84134b6b3..1a94b12f1 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarTest.java @@ -5,72 +5,76 @@ public class ExemplarTest { - @Test - public void testGoodCaseComplete() { - long timestamp = System.currentTimeMillis(); - Exemplar exemplar = Exemplar.builder() - .value(2.2) - .traceId("abc123abc123") - .spanId("def456def456") - .timestampMillis(timestamp) - .labels(Labels.of("path", "/", "error", "none")) - .build(); - Assert.assertEquals(2.2, exemplar.getValue(), 0.0); - Assert.assertEquals(Labels.of(Exemplar.TRACE_ID, "abc123abc123", Exemplar.SPAN_ID, "def456def456", "path", "/", "error", "none"), exemplar.getLabels()); - Assert.assertTrue(exemplar.hasTimestamp()); - Assert.assertEquals(timestamp, exemplar.getTimestampMillis()); - } + @Test + public void testGoodCaseComplete() { + long timestamp = System.currentTimeMillis(); + Exemplar exemplar = + Exemplar.builder() + .value(2.2) + .traceId("abc123abc123") + .spanId("def456def456") + .timestampMillis(timestamp) + .labels(Labels.of("path", "/", "error", "none")) + .build(); + Assert.assertEquals(2.2, exemplar.getValue(), 0.0); + Assert.assertEquals( + Labels.of( + Exemplar.TRACE_ID, + "abc123abc123", + Exemplar.SPAN_ID, + "def456def456", + "path", + "/", + "error", + "none"), + exemplar.getLabels()); + Assert.assertTrue(exemplar.hasTimestamp()); + Assert.assertEquals(timestamp, exemplar.getTimestampMillis()); + } - @Test(expected = IllegalStateException.class) - public void testValueMissing() { - Exemplar.builder().build(); - } + @Test(expected = IllegalStateException.class) + public void testValueMissing() { + Exemplar.builder().build(); + } - @Test - public void testMinimal() { - Exemplar exemplar = Exemplar.builder().value(0.0).build(); - Assert.assertEquals(0.0, exemplar.getValue(), 0.0); - Assert.assertEquals(Labels.EMPTY, exemplar.getLabels()); - Assert.assertFalse(exemplar.hasTimestamp()); - } + @Test + public void testMinimal() { + Exemplar exemplar = Exemplar.builder().value(0.0).build(); + Assert.assertEquals(0.0, exemplar.getValue(), 0.0); + Assert.assertEquals(Labels.EMPTY, exemplar.getLabels()); + Assert.assertFalse(exemplar.hasTimestamp()); + } - @Test - public void testLabelsMergeTraceId() { - Exemplar exemplar = Exemplar.builder() - .value(0.0) - .labels(Labels.of("a", "b")) - .traceId("abc") - .build(); - Assert.assertEquals(Labels.of("a", "b", "trace_id", "abc"), exemplar.getLabels()); - } + @Test + public void testLabelsMergeTraceId() { + Exemplar exemplar = + Exemplar.builder().value(0.0).labels(Labels.of("a", "b")).traceId("abc").build(); + Assert.assertEquals(Labels.of("a", "b", "trace_id", "abc"), exemplar.getLabels()); + } - @Test - public void testLabelsMergeSpanId() { - Exemplar exemplar = Exemplar.builder() - .value(0.0) - .labels(Labels.of("a", "b")) - .spanId("abc") - .build(); - Assert.assertEquals(Labels.of("a", "b", "span_id", "abc"), exemplar.getLabels()); - 
} + @Test + public void testLabelsMergeSpanId() { + Exemplar exemplar = + Exemplar.builder().value(0.0).labels(Labels.of("a", "b")).spanId("abc").build(); + Assert.assertEquals(Labels.of("a", "b", "span_id", "abc"), exemplar.getLabels()); + } - @Test - public void testLabelsMergeTraceIdAndSpanId() { - Exemplar exemplar = Exemplar.builder() - .value(0.0) - .labels(Labels.of("a", "b")) - .spanId("abc") - .traceId("def") - .build(); - Assert.assertEquals(Labels.of("span_id", "abc", "a", "b", "trace_id", "def"), exemplar.getLabels()); - } + @Test + public void testLabelsMergeTraceIdAndSpanId() { + Exemplar exemplar = + Exemplar.builder() + .value(0.0) + .labels(Labels.of("a", "b")) + .spanId("abc") + .traceId("def") + .build(); + Assert.assertEquals( + Labels.of("span_id", "abc", "a", "b", "trace_id", "def"), exemplar.getLabels()); + } - @Test - public void testLabelsMergeNone() { - Exemplar exemplar = Exemplar.builder() - .value(0.0) - .labels(Labels.of("a", "b")) - .build(); - Assert.assertEquals(Labels.of("a", "b"), exemplar.getLabels()); - } + @Test + public void testLabelsMergeNone() { + Exemplar exemplar = Exemplar.builder().value(0.0).labels(Labels.of("a", "b")).build(); + Assert.assertEquals(Labels.of("a", "b"), exemplar.getLabels()); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarsTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarsTest.java index 4ca64d6fb..93a1c1efa 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarsTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/ExemplarsTest.java @@ -1,52 +1,54 @@ package io.prometheus.metrics.model.snapshots; +import java.util.Iterator; import org.junit.Assert; import org.junit.Test; -import java.util.Iterator; - public class ExemplarsTest { - @Test - public void testUpperBound() { - Exemplars exemplars = Exemplars.of( - Exemplar.builder().value(1.0).build(), - Exemplar.builder().value(3.0).build(), - Exemplar.builder().value(2.0).build() - ); - Assert.assertEquals(3, exemplars.size()); - Assert.assertEquals(1.0, exemplars.get(0).getValue(), 0.0); - Assert.assertEquals(3.0, exemplars.get(1).getValue(), 0.0); - Assert.assertEquals(2.0, exemplars.get(2).getValue(), 0.0); - Assert.assertEquals(1.0, exemplars.get(0.0, Double.POSITIVE_INFINITY).getValue(), 0.0); - Assert.assertEquals(1.0, exemplars.get(0.0, 1.0).getValue(), 0.0); - Assert.assertEquals(3.0, exemplars.get(1.0, 4.0).getValue(), 0.0); - Assert.assertEquals(3.0, exemplars.get(2.0, 3.0).getValue(), 0.0); - Assert.assertEquals(2.0, exemplars.get(1.0, 2.1).getValue(), 0.0); - Assert.assertNull(exemplars.get(2.0, 2.1)); - } + @Test + public void testUpperBound() { + Exemplars exemplars = + Exemplars.of( + Exemplar.builder().value(1.0).build(), + Exemplar.builder().value(3.0).build(), + Exemplar.builder().value(2.0).build()); + Assert.assertEquals(3, exemplars.size()); + Assert.assertEquals(1.0, exemplars.get(0).getValue(), 0.0); + Assert.assertEquals(3.0, exemplars.get(1).getValue(), 0.0); + Assert.assertEquals(2.0, exemplars.get(2).getValue(), 0.0); + Assert.assertEquals(1.0, exemplars.get(0.0, Double.POSITIVE_INFINITY).getValue(), 0.0); + Assert.assertEquals(1.0, exemplars.get(0.0, 1.0).getValue(), 0.0); + Assert.assertEquals(3.0, exemplars.get(1.0, 4.0).getValue(), 0.0); + Assert.assertEquals(3.0, exemplars.get(2.0, 3.0).getValue(), 0.0); + Assert.assertEquals(2.0, exemplars.get(1.0, 
2.1).getValue(), 0.0); + Assert.assertNull(exemplars.get(2.0, 2.1)); + } - @Test(expected = UnsupportedOperationException.class) - public void testImmutable() { - Exemplars exemplars = Exemplars.of( - Exemplar.builder().value(1.0).build(), - Exemplar.builder().value(3.0).build(), - Exemplar.builder().value(2.0).build() - ); - Iterator iterator = exemplars.iterator(); - iterator.next(); - iterator.remove(); - } + @Test(expected = UnsupportedOperationException.class) + public void testImmutable() { + Exemplars exemplars = + Exemplars.of( + Exemplar.builder().value(1.0).build(), + Exemplar.builder().value(3.0).build(), + Exemplar.builder().value(2.0).build()); + Iterator iterator = exemplars.iterator(); + iterator.next(); + iterator.remove(); + } - @Test - public void testGet() { - Exemplar oldest = Exemplar.builder().timestampMillis(System.currentTimeMillis() - 100).value(1.8).build(); - Exemplar middle = Exemplar.builder().timestampMillis(System.currentTimeMillis() - 50).value(1.2).build(); - Exemplar newest = Exemplar.builder().timestampMillis(System.currentTimeMillis()).value(1.0).build(); - Exemplars exemplars = Exemplars.of(oldest, newest, middle); - Exemplar result = exemplars.get(1.1, 1.9); // newest is not within these bounds - Assert.assertSame(result, middle); - result = exemplars.get(0.9, Double.POSITIVE_INFINITY); - Assert.assertSame(result, newest); - } + @Test + public void testGet() { + Exemplar oldest = + Exemplar.builder().timestampMillis(System.currentTimeMillis() - 100).value(1.8).build(); + Exemplar middle = + Exemplar.builder().timestampMillis(System.currentTimeMillis() - 50).value(1.2).build(); + Exemplar newest = + Exemplar.builder().timestampMillis(System.currentTimeMillis()).value(1.0).build(); + Exemplars exemplars = Exemplars.of(oldest, newest, middle); + Exemplar result = exemplars.get(1.1, 1.9); // newest is not within these bounds + Assert.assertSame(result, middle); + result = exemplars.get(0.9, Double.POSITIVE_INFINITY); + Assert.assertSame(result, newest); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/GaugeSnapshotTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/GaugeSnapshotTest.java index b64298529..fe7f386ba 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/GaugeSnapshotTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/GaugeSnapshotTest.java @@ -2,110 +2,114 @@ import io.prometheus.metrics.model.snapshots.CounterSnapshot.CounterDataPointSnapshot; import io.prometheus.metrics.model.snapshots.GaugeSnapshot.GaugeDataPointSnapshot; +import java.util.Iterator; import org.junit.Assert; import org.junit.Test; -import java.util.Iterator; - public class GaugeSnapshotTest { - @Test - public void testCompleteGoodCase() { - long exemplarTimestamp = System.currentTimeMillis(); - GaugeSnapshot snapshot = GaugeSnapshot.builder() - .name("cache_size_bytes") - .help("cache size in Bytes") - .unit(Unit.BYTES) - .dataPoint(GaugeDataPointSnapshot.builder() - .value(1024.0) - .exemplar(Exemplar.builder() - .value(1024.0) - .traceId("abc123") - .spanId("123457") - .timestampMillis(exemplarTimestamp) - .build()) - .labels(Labels.builder() - .label("env", "prod") - .build()) - .build() - ).dataPoint(GaugeDataPointSnapshot.builder() - .value(128.0) - .exemplar(Exemplar.builder() - .value(128.0) - .traceId("def456") - .spanId("234567") - .timestampMillis(exemplarTimestamp) - .build()) - .labels(Labels.builder() 
- .label("env", "dev") - .build()) - .build() - ) - .build(); - SnapshotTestUtil.assertMetadata(snapshot, "cache_size_bytes", "cache size in Bytes", "bytes"); - Assert.assertEquals(2, snapshot.getDataPoints().size()); - GaugeDataPointSnapshot data = snapshot.getDataPoints().get(0); // data is sorted by labels, so the first one should be path="/hello" - Assert.assertEquals(Labels.of("env", "dev"), data.getLabels()); - Assert.assertEquals(128.0, data.getValue(), 0.0); - Assert.assertEquals(128.0, data.getExemplar().getValue(), 0.0); - Assert.assertFalse(data.hasCreatedTimestamp()); - Assert.assertFalse(data.hasScrapeTimestamp()); - data = snapshot.getDataPoints().get(1); - Assert.assertEquals(Labels.of("env", "prod"), data.getLabels()); - Assert.assertEquals(1024.0, data.getValue(), 0.0); - Assert.assertEquals(1024.0, data.getExemplar().getValue(), 0.0); - Assert.assertFalse(data.hasCreatedTimestamp()); - Assert.assertFalse(data.hasScrapeTimestamp()); - } + @Test + public void testCompleteGoodCase() { + long exemplarTimestamp = System.currentTimeMillis(); + GaugeSnapshot snapshot = + GaugeSnapshot.builder() + .name("cache_size_bytes") + .help("cache size in Bytes") + .unit(Unit.BYTES) + .dataPoint( + GaugeDataPointSnapshot.builder() + .value(1024.0) + .exemplar( + Exemplar.builder() + .value(1024.0) + .traceId("abc123") + .spanId("123457") + .timestampMillis(exemplarTimestamp) + .build()) + .labels(Labels.builder().label("env", "prod").build()) + .build()) + .dataPoint( + GaugeDataPointSnapshot.builder() + .value(128.0) + .exemplar( + Exemplar.builder() + .value(128.0) + .traceId("def456") + .spanId("234567") + .timestampMillis(exemplarTimestamp) + .build()) + .labels(Labels.builder().label("env", "dev").build()) + .build()) + .build(); + SnapshotTestUtil.assertMetadata(snapshot, "cache_size_bytes", "cache size in Bytes", "bytes"); + Assert.assertEquals(2, snapshot.getDataPoints().size()); + GaugeDataPointSnapshot data = + snapshot + .getDataPoints() + .get(0); // data is sorted by labels, so the first one should be path="/hello" + Assert.assertEquals(Labels.of("env", "dev"), data.getLabels()); + Assert.assertEquals(128.0, data.getValue(), 0.0); + Assert.assertEquals(128.0, data.getExemplar().getValue(), 0.0); + Assert.assertFalse(data.hasCreatedTimestamp()); + Assert.assertFalse(data.hasScrapeTimestamp()); + data = snapshot.getDataPoints().get(1); + Assert.assertEquals(Labels.of("env", "prod"), data.getLabels()); + Assert.assertEquals(1024.0, data.getValue(), 0.0); + Assert.assertEquals(1024.0, data.getExemplar().getValue(), 0.0); + Assert.assertFalse(data.hasCreatedTimestamp()); + Assert.assertFalse(data.hasScrapeTimestamp()); + } - @Test - public void testMinimalGoodCase() { - GaugeSnapshot snapshot = GaugeSnapshot.builder() - .name("temperature") - .dataPoint(GaugeDataPointSnapshot.builder().value(23.0).build()) - .build(); - SnapshotTestUtil.assertMetadata(snapshot, "temperature", null, null); - Assert.assertEquals(1, snapshot.getDataPoints().size()); - GaugeDataPointSnapshot data = snapshot.getDataPoints().get(0); - Assert.assertEquals(Labels.EMPTY, data.getLabels()); - Assert.assertEquals(23.0, data.getValue(), 0.0); - Assert.assertNull(data.getExemplar()); - Assert.assertFalse(data.hasCreatedTimestamp()); - Assert.assertFalse(data.hasScrapeTimestamp()); - } + @Test + public void testMinimalGoodCase() { + GaugeSnapshot snapshot = + GaugeSnapshot.builder() + .name("temperature") + .dataPoint(GaugeDataPointSnapshot.builder().value(23.0).build()) + .build(); + 
SnapshotTestUtil.assertMetadata(snapshot, "temperature", null, null); + Assert.assertEquals(1, snapshot.getDataPoints().size()); + GaugeDataPointSnapshot data = snapshot.getDataPoints().get(0); + Assert.assertEquals(Labels.EMPTY, data.getLabels()); + Assert.assertEquals(23.0, data.getValue(), 0.0); + Assert.assertNull(data.getExemplar()); + Assert.assertFalse(data.hasCreatedTimestamp()); + Assert.assertFalse(data.hasScrapeTimestamp()); + } - @Test - public void testEmptyGauge() { - GaugeSnapshot snapshot = GaugeSnapshot.builder() - .name("temperature") - .build(); - Assert.assertEquals(0, snapshot.getDataPoints().size()); - } + @Test + public void testEmptyGauge() { + GaugeSnapshot snapshot = GaugeSnapshot.builder().name("temperature").build(); + Assert.assertEquals(0, snapshot.getDataPoints().size()); + } - @Test(expected = IllegalArgumentException.class) - public void testTotalSuffixPresent() { - CounterSnapshot.builder().name("test_total").build(); - } + @Test(expected = IllegalArgumentException.class) + public void testTotalSuffixPresent() { + CounterSnapshot.builder().name("test_total").build(); + } - @Test(expected = IllegalArgumentException.class) - public void testTotalSuffixPresentDot() { - CounterSnapshot.builder().name("test.total").build(); - } + @Test(expected = IllegalArgumentException.class) + public void testTotalSuffixPresentDot() { + CounterSnapshot.builder().name("test.total").build(); + } - @Test(expected = IllegalArgumentException.class) - public void testValueMissing() { - CounterDataPointSnapshot.builder().build(); - } + @Test(expected = IllegalArgumentException.class) + public void testValueMissing() { + CounterDataPointSnapshot.builder().build(); + } - @Test(expected = UnsupportedOperationException.class) - public void testDataImmutable() { - GaugeSnapshot snapshot = GaugeSnapshot.builder() - .name("gauge") - .dataPoint(GaugeDataPointSnapshot.builder().labels(Labels.of("a", "a")).value(23.0).build()) - .dataPoint(GaugeDataPointSnapshot.builder().labels(Labels.of("a", "b")).value(23.0).build()) - .build(); - Iterator iterator = snapshot.getDataPoints().iterator(); - iterator.next(); - iterator.remove(); - } + @Test(expected = UnsupportedOperationException.class) + public void testDataImmutable() { + GaugeSnapshot snapshot = + GaugeSnapshot.builder() + .name("gauge") + .dataPoint( + GaugeDataPointSnapshot.builder().labels(Labels.of("a", "a")).value(23.0).build()) + .dataPoint( + GaugeDataPointSnapshot.builder().labels(Labels.of("a", "b")).value(23.0).build()) + .build(); + Iterator iterator = snapshot.getDataPoints().iterator(); + iterator.next(); + iterator.remove(); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/InfoSnapshotTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/InfoSnapshotTest.java index dd4f2f0cb..78e26d100 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/InfoSnapshotTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/InfoSnapshotTest.java @@ -1,60 +1,62 @@ package io.prometheus.metrics.model.snapshots; +import java.util.Iterator; import org.junit.Assert; import org.junit.Test; -import java.util.Iterator; - public class InfoSnapshotTest { - @Test - public void testCompleteGoodCase() { - InfoSnapshot snapshot = InfoSnapshot.builder() - .name("target") - .help("Target info") - .dataPoint(InfoSnapshot.InfoDataPointSnapshot.builder() - .labels(Labels.of("instance_id", "127.0.0.1:9100", 
"service_name", "gateway")) - .build()) - .build(); - Assert.assertEquals("target", snapshot.getMetadata().getName()); - Assert.assertEquals("Target info", snapshot.getMetadata().getHelp()); - Assert.assertFalse(snapshot.getMetadata().hasUnit()); - Assert.assertEquals(1, snapshot.getDataPoints().size()); - } - - @Test - public void testEmptyInfo() { - InfoSnapshot snapshot = InfoSnapshot.builder().name("target").build(); - Assert.assertEquals(0, snapshot.getDataPoints().size()); - } - - @Test(expected = UnsupportedOperationException.class) - public void testDataImmutable() { - InfoSnapshot snapshot = InfoSnapshot.builder() - .name("target") - .dataPoint(InfoSnapshot.InfoDataPointSnapshot.builder() - .labels(Labels.of("instance_id", "127.0.0.1:9100", "service_name", "gateway.v1")) - .build()) - .dataPoint(InfoSnapshot.InfoDataPointSnapshot.builder() - .labels(Labels.of("instance_id", "127.0.0.1:9200", "service_name", "gateway.v2")) - .build()) - .build(); - Iterator iterator = snapshot.getDataPoints().iterator(); - iterator.next(); - iterator.remove(); - } - - @Test(expected = IllegalArgumentException.class) - public void testNameMustNotIncludeSuffix() { + @Test + public void testCompleteGoodCase() { + InfoSnapshot snapshot = InfoSnapshot.builder() - .name("jvm_info") - .build(); - } - - @Test(expected = IllegalArgumentException.class) - public void testNameMustNotIncludeSuffixDot() { + .name("target") + .help("Target info") + .dataPoint( + InfoSnapshot.InfoDataPointSnapshot.builder() + .labels(Labels.of("instance_id", "127.0.0.1:9100", "service_name", "gateway")) + .build()) + .build(); + Assert.assertEquals("target", snapshot.getMetadata().getName()); + Assert.assertEquals("Target info", snapshot.getMetadata().getHelp()); + Assert.assertFalse(snapshot.getMetadata().hasUnit()); + Assert.assertEquals(1, snapshot.getDataPoints().size()); + } + + @Test + public void testEmptyInfo() { + InfoSnapshot snapshot = InfoSnapshot.builder().name("target").build(); + Assert.assertEquals(0, snapshot.getDataPoints().size()); + } + + @Test(expected = UnsupportedOperationException.class) + public void testDataImmutable() { + InfoSnapshot snapshot = InfoSnapshot.builder() - .name("jvm.info") - .build(); - } + .name("target") + .dataPoint( + InfoSnapshot.InfoDataPointSnapshot.builder() + .labels( + Labels.of("instance_id", "127.0.0.1:9100", "service_name", "gateway.v1")) + .build()) + .dataPoint( + InfoSnapshot.InfoDataPointSnapshot.builder() + .labels( + Labels.of("instance_id", "127.0.0.1:9200", "service_name", "gateway.v2")) + .build()) + .build(); + Iterator iterator = snapshot.getDataPoints().iterator(); + iterator.next(); + iterator.remove(); + } + + @Test(expected = IllegalArgumentException.class) + public void testNameMustNotIncludeSuffix() { + InfoSnapshot.builder().name("jvm_info").build(); + } + + @Test(expected = IllegalArgumentException.class) + public void testNameMustNotIncludeSuffixDot() { + InfoSnapshot.builder().name("jvm.info").build(); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotTest.java index acd659dbf..e387d7155 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotTest.java @@ -5,33 +5,36 @@ public class MetricSnapshotTest { - @Test(expected = 
IllegalArgumentException.class) - public void testDuplicateLabels() { - CounterSnapshot.builder() - .name("events") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(Labels.of("path", "/hello", "status", "200")) - .value(1.0) - .build()) - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(Labels.of("path", "/world", "status", "200")) - .value(2.0) - .build()) - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder() - .labels(Labels.of("status", "200", "path", "/hello")) - .value(3.0) - .build()) - .build(); - } + @Test(expected = IllegalArgumentException.class) + public void testDuplicateLabels() { + CounterSnapshot.builder() + .name("events") + .dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(Labels.of("path", "/hello", "status", "200")) + .value(1.0) + .build()) + .dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(Labels.of("path", "/world", "status", "200")) + .value(2.0) + .build()) + .dataPoint( + CounterSnapshot.CounterDataPointSnapshot.builder() + .labels(Labels.of("status", "200", "path", "/hello")) + .value(3.0) + .build()) + .build(); + } - @Test - public void testNoData() { - MetricSnapshot snapshot = CounterSnapshot.builder().name("test").build(); - Assert.assertEquals(0, snapshot.getDataPoints().size()); - } + @Test + public void testNoData() { + MetricSnapshot snapshot = CounterSnapshot.builder().name("test").build(); + Assert.assertEquals(0, snapshot.getDataPoints().size()); + } - @Test(expected = NullPointerException.class) - public void testNullData() { - new CounterSnapshot(new MetricMetadata("test"), null); - } + @Test(expected = NullPointerException.class) + public void testNullData() { + new CounterSnapshot(new MetricMetadata("test"), null); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotsTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotsTest.java index 61b54cb3a..b85fe629f 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotsTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/MetricSnapshotsTest.java @@ -1,86 +1,96 @@ package io.prometheus.metrics.model.snapshots; +import java.util.Iterator; import org.junit.Assert; import org.junit.Test; -import java.util.Iterator; - public class MetricSnapshotsTest { - @Test - public void testEmpty() { - MetricSnapshots snapshots = MetricSnapshots.builder().build(); - Assert.assertFalse(snapshots.stream().findAny().isPresent()); - } + @Test + public void testEmpty() { + MetricSnapshots snapshots = MetricSnapshots.builder().build(); + Assert.assertFalse(snapshots.stream().findAny().isPresent()); + } - @Test - public void testSort() { - CounterSnapshot c1 = CounterSnapshot.builder() - .name("counter1") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - CounterSnapshot c2 = CounterSnapshot.builder() - .name("counter2") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - CounterSnapshot c3 = CounterSnapshot.builder() - .name("counter3") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - MetricSnapshots snapshots = new MetricSnapshots(c2, c3, c1); - Assert.assertEquals(3, snapshots.size()); - Assert.assertEquals("counter1", snapshots.get(0).getMetadata().getName()); - 
Assert.assertEquals("counter2", snapshots.get(1).getMetadata().getName()); - Assert.assertEquals("counter3", snapshots.get(2).getMetadata().getName()); - } + @Test + public void testSort() { + CounterSnapshot c1 = + CounterSnapshot.builder() + .name("counter1") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + CounterSnapshot c2 = + CounterSnapshot.builder() + .name("counter2") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + CounterSnapshot c3 = + CounterSnapshot.builder() + .name("counter3") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + MetricSnapshots snapshots = new MetricSnapshots(c2, c3, c1); + Assert.assertEquals(3, snapshots.size()); + Assert.assertEquals("counter1", snapshots.get(0).getMetadata().getName()); + Assert.assertEquals("counter2", snapshots.get(1).getMetadata().getName()); + Assert.assertEquals("counter3", snapshots.get(2).getMetadata().getName()); + } - @Test(expected = IllegalArgumentException.class) - public void testDuplicateName() { - // Q: What if you have a counter named "foo" and a gauge named "foo"? - // A: Great question. You might think this is a valid scenario, because the counter will produce - // the values "foo_total" and "foo_created" while the gauge will produce the value "foo". - // So from that perspective there is no conflict. However, the name for HELP, TYPE, UNIT is the same, - // and that is the conflict. Therefore, you cannot have a counter named "foo" and a gauge named "foo". - CounterSnapshot c = CounterSnapshot.builder() - .name("my_metric") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - GaugeSnapshot g = GaugeSnapshot.builder() - .name("my_metric") - .dataPoint(GaugeSnapshot.GaugeDataPointSnapshot.builder().value(1.0).build()) - .build(); - new MetricSnapshots(c, g); - } + @Test(expected = IllegalArgumentException.class) + public void testDuplicateName() { + // Q: What if you have a counter named "foo" and a gauge named "foo"? + // A: Great question. You might think this is a valid scenario, because the counter will produce + // the values "foo_total" and "foo_created" while the gauge will produce the value "foo". + // So from that perspective there is no conflict. However, the name for HELP, TYPE, UNIT is + // the same, + // and that is the conflict. Therefore, you cannot have a counter named "foo" and a gauge + // named "foo". 
+ CounterSnapshot c = + CounterSnapshot.builder() + .name("my_metric") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + GaugeSnapshot g = + GaugeSnapshot.builder() + .name("my_metric") + .dataPoint(GaugeSnapshot.GaugeDataPointSnapshot.builder().value(1.0).build()) + .build(); + new MetricSnapshots(c, g); + } - @Test - public void testBuilder() { - CounterSnapshot counter = CounterSnapshot.builder() - .name("my_metric") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - MetricSnapshots.Builder builder = MetricSnapshots.builder(); - Assert.assertFalse(builder.containsMetricName("my_metric")); - builder.metricSnapshot(counter); - Assert.assertTrue(builder.containsMetricName("my_metric")); - } + @Test + public void testBuilder() { + CounterSnapshot counter = + CounterSnapshot.builder() + .name("my_metric") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + MetricSnapshots.Builder builder = MetricSnapshots.builder(); + Assert.assertFalse(builder.containsMetricName("my_metric")); + builder.metricSnapshot(counter); + Assert.assertTrue(builder.containsMetricName("my_metric")); + } - @Test(expected = UnsupportedOperationException.class) - public void testImmutable() { - CounterSnapshot c1 = CounterSnapshot.builder() - .name("counter1") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - CounterSnapshot c2 = CounterSnapshot.builder() - .name("counter2") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - CounterSnapshot c3 = CounterSnapshot.builder() - .name("counter3") - .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) - .build(); - MetricSnapshots snapshots = new MetricSnapshots(c2, c3, c1); - Iterator iterator = snapshots.iterator(); - iterator.next(); - iterator.remove(); - } + @Test(expected = UnsupportedOperationException.class) + public void testImmutable() { + CounterSnapshot c1 = + CounterSnapshot.builder() + .name("counter1") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + CounterSnapshot c2 = + CounterSnapshot.builder() + .name("counter2") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + CounterSnapshot c3 = + CounterSnapshot.builder() + .name("counter3") + .dataPoint(CounterSnapshot.CounterDataPointSnapshot.builder().value(1.0).build()) + .build(); + MetricSnapshots snapshots = new MetricSnapshots(c2, c3, c1); + Iterator iterator = snapshots.iterator(); + iterator.next(); + iterator.remove(); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucketsTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucketsTest.java index 3c031d682..1aa7a1023 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucketsTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/NativeHistogramBucketsTest.java @@ -1,63 +1,54 @@ package io.prometheus.metrics.model.snapshots; +import java.util.Iterator; import org.junit.Assert; import org.junit.Test; -import java.util.Iterator; - public class NativeHistogramBucketsTest { - @Test - public void testGoodCase() { - NativeHistogramBuckets buckets = NativeHistogramBuckets.builder() - .bucket(-10, 12) - 
.bucket(120, 17) - .build(); - Assert.assertEquals(2, buckets.size()); - Assert.assertEquals(-10, buckets.getBucketIndex(0)); - Assert.assertEquals(12, buckets.getCount(0)); - Assert.assertEquals(120, buckets.getBucketIndex(1)); - Assert.assertEquals(17, buckets.getCount(1)); - } - - @Test - public void testEmpty() { - NativeHistogramBuckets buckets = NativeHistogramBuckets.builder().build(); - Assert.assertEquals(0, buckets.size()); - } - - @Test - public void testSort() { - NativeHistogramBuckets buckets = NativeHistogramBuckets.builder() - .bucket(7, 4) - .bucket(2, 0) - .bucket(5, 3) - .build(); - Assert.assertEquals(3, buckets.size()); - Assert.assertEquals(2, buckets.getBucketIndex(0)); - Assert.assertEquals(5, buckets.getBucketIndex(1)); - Assert.assertEquals(7, buckets.getBucketIndex(2)); - Assert.assertEquals(0, buckets.getCount(0)); - Assert.assertEquals(3, buckets.getCount(1)); - Assert.assertEquals(4, buckets.getCount(2)); - } - - @Test(expected = IllegalArgumentException.class) - public void testDifferentLength() { - int[] bucketIndexes = new int[] {0, 1, 2}; - long[] cumulativeCounts = new long[] {13, 178, 1024, 3000}; - NativeHistogramBuckets.of(bucketIndexes, cumulativeCounts); - } - - @Test(expected = UnsupportedOperationException.class) - public void testImmutable() { - NativeHistogramBuckets buckets = NativeHistogramBuckets.builder() - .bucket(1, 1) - .bucket(2, 1) - .build(); - Iterator iterator = buckets.iterator(); - iterator.next(); - iterator.remove(); - } - + @Test + public void testGoodCase() { + NativeHistogramBuckets buckets = + NativeHistogramBuckets.builder().bucket(-10, 12).bucket(120, 17).build(); + Assert.assertEquals(2, buckets.size()); + Assert.assertEquals(-10, buckets.getBucketIndex(0)); + Assert.assertEquals(12, buckets.getCount(0)); + Assert.assertEquals(120, buckets.getBucketIndex(1)); + Assert.assertEquals(17, buckets.getCount(1)); + } + + @Test + public void testEmpty() { + NativeHistogramBuckets buckets = NativeHistogramBuckets.builder().build(); + Assert.assertEquals(0, buckets.size()); + } + + @Test + public void testSort() { + NativeHistogramBuckets buckets = + NativeHistogramBuckets.builder().bucket(7, 4).bucket(2, 0).bucket(5, 3).build(); + Assert.assertEquals(3, buckets.size()); + Assert.assertEquals(2, buckets.getBucketIndex(0)); + Assert.assertEquals(5, buckets.getBucketIndex(1)); + Assert.assertEquals(7, buckets.getBucketIndex(2)); + Assert.assertEquals(0, buckets.getCount(0)); + Assert.assertEquals(3, buckets.getCount(1)); + Assert.assertEquals(4, buckets.getCount(2)); + } + + @Test(expected = IllegalArgumentException.class) + public void testDifferentLength() { + int[] bucketIndexes = new int[] {0, 1, 2}; + long[] cumulativeCounts = new long[] {13, 178, 1024, 3000}; + NativeHistogramBuckets.of(bucketIndexes, cumulativeCounts); + } + + @Test(expected = UnsupportedOperationException.class) + public void testImmutable() { + NativeHistogramBuckets buckets = + NativeHistogramBuckets.builder().bucket(1, 1).bucket(2, 1).build(); + Iterator iterator = buckets.iterator(); + iterator.next(); + iterator.remove(); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/PrometheusNamingTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/PrometheusNamingTest.java index 9631f9df3..62f2fc2c8 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/PrometheusNamingTest.java +++ 
b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/PrometheusNamingTest.java @@ -1,90 +1,95 @@ package io.prometheus.metrics.model.snapshots; +import static io.prometheus.metrics.model.snapshots.PrometheusNaming.*; + import org.junit.Assert; import org.junit.Test; -import static io.prometheus.metrics.model.snapshots.PrometheusNaming.*; - public class PrometheusNamingTest { - @Test - public void testSanitizeMetricName() { - Assert.assertEquals("_abc_def", prometheusName(sanitizeMetricName("0abc.def"))); - Assert.assertEquals("___ab__c0", prometheusName(sanitizeMetricName("___ab.:c0"))); - Assert.assertEquals("my_prefix_my_metric", sanitizeMetricName("my_prefix/my_metric")); - Assert.assertEquals("my_counter", prometheusName(sanitizeMetricName("my_counter_total"))); - Assert.assertEquals("jvm", sanitizeMetricName("jvm.info")); - Assert.assertEquals("jvm", sanitizeMetricName("jvm_info")); - Assert.assertEquals("jvm", sanitizeMetricName("jvm.info")); - Assert.assertEquals("a.b", sanitizeMetricName("a.b")); - Assert.assertEquals("total", sanitizeMetricName("_total")); - Assert.assertEquals("total", sanitizeMetricName("total")); - } + @Test + public void testSanitizeMetricName() { + Assert.assertEquals("_abc_def", prometheusName(sanitizeMetricName("0abc.def"))); + Assert.assertEquals("___ab__c0", prometheusName(sanitizeMetricName("___ab.:c0"))); + Assert.assertEquals("my_prefix_my_metric", sanitizeMetricName("my_prefix/my_metric")); + Assert.assertEquals("my_counter", prometheusName(sanitizeMetricName("my_counter_total"))); + Assert.assertEquals("jvm", sanitizeMetricName("jvm.info")); + Assert.assertEquals("jvm", sanitizeMetricName("jvm_info")); + Assert.assertEquals("jvm", sanitizeMetricName("jvm.info")); + Assert.assertEquals("a.b", sanitizeMetricName("a.b")); + Assert.assertEquals("total", sanitizeMetricName("_total")); + Assert.assertEquals("total", sanitizeMetricName("total")); + } - @Test - public void testSanitizeMetricNameWithUnit() { - Assert.assertEquals("_abc_def_" + Unit.RATIO, prometheusName(sanitizeMetricName("0abc.def", Unit.RATIO))); - Assert.assertEquals("___ab__c0_" + Unit.RATIO, prometheusName(sanitizeMetricName("___ab.:c0", Unit.RATIO))); - Assert.assertEquals("my_prefix_my_metric_" + Unit.RATIO, sanitizeMetricName("my_prefix/my_metric", Unit.RATIO)); - Assert.assertEquals("my_counter_" + Unit.RATIO, prometheusName(sanitizeMetricName("my_counter_total", Unit.RATIO))); - Assert.assertEquals("jvm_" + Unit.RATIO, sanitizeMetricName("jvm.info", Unit.RATIO)); - Assert.assertEquals("jvm_" + Unit.RATIO, sanitizeMetricName("jvm_info", Unit.RATIO)); - Assert.assertEquals("jvm_" + Unit.RATIO, sanitizeMetricName("jvm.info", Unit.RATIO)); - Assert.assertEquals("a.b_" + Unit.RATIO, sanitizeMetricName("a.b", Unit.RATIO)); - Assert.assertEquals("total_" + Unit.RATIO, sanitizeMetricName("_total", Unit.RATIO)); - Assert.assertEquals("total_" + Unit.RATIO, sanitizeMetricName("total", Unit.RATIO)); - } + @Test + public void testSanitizeMetricNameWithUnit() { + Assert.assertEquals( + "_abc_def_" + Unit.RATIO, prometheusName(sanitizeMetricName("0abc.def", Unit.RATIO))); + Assert.assertEquals( + "___ab__c0_" + Unit.RATIO, prometheusName(sanitizeMetricName("___ab.:c0", Unit.RATIO))); + Assert.assertEquals( + "my_prefix_my_metric_" + Unit.RATIO, sanitizeMetricName("my_prefix/my_metric", Unit.RATIO)); + Assert.assertEquals( + "my_counter_" + Unit.RATIO, + prometheusName(sanitizeMetricName("my_counter_total", Unit.RATIO))); + Assert.assertEquals("jvm_" + Unit.RATIO, 
sanitizeMetricName("jvm.info", Unit.RATIO)); + Assert.assertEquals("jvm_" + Unit.RATIO, sanitizeMetricName("jvm_info", Unit.RATIO)); + Assert.assertEquals("jvm_" + Unit.RATIO, sanitizeMetricName("jvm.info", Unit.RATIO)); + Assert.assertEquals("a.b_" + Unit.RATIO, sanitizeMetricName("a.b", Unit.RATIO)); + Assert.assertEquals("total_" + Unit.RATIO, sanitizeMetricName("_total", Unit.RATIO)); + Assert.assertEquals("total_" + Unit.RATIO, sanitizeMetricName("total", Unit.RATIO)); + } - @Test - public void testSanitizeLabelName() { - Assert.assertEquals("_abc_def", prometheusName(sanitizeLabelName("0abc.def"))); - Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("_abc"))); - Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("__abc"))); - Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("___abc"))); - Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("_.abc"))); - Assert.assertEquals("abc.def", sanitizeLabelName("abc.def")); - Assert.assertEquals("abc.def2", sanitizeLabelName("abc.def2")); - } + @Test + public void testSanitizeLabelName() { + Assert.assertEquals("_abc_def", prometheusName(sanitizeLabelName("0abc.def"))); + Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("_abc"))); + Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("__abc"))); + Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("___abc"))); + Assert.assertEquals("_abc", prometheusName(sanitizeLabelName("_.abc"))); + Assert.assertEquals("abc.def", sanitizeLabelName("abc.def")); + Assert.assertEquals("abc.def2", sanitizeLabelName("abc.def2")); + } - @Test - public void testValidateUnitName() { - Assert.assertNotNull(validateUnitName("secondstotal")); - Assert.assertNotNull(validateUnitName("total")); - Assert.assertNotNull(validateUnitName("seconds_total")); - Assert.assertNotNull(validateUnitName("_total")); - Assert.assertNotNull(validateUnitName("")); + @Test + public void testValidateUnitName() { + Assert.assertNotNull(validateUnitName("secondstotal")); + Assert.assertNotNull(validateUnitName("total")); + Assert.assertNotNull(validateUnitName("seconds_total")); + Assert.assertNotNull(validateUnitName("_total")); + Assert.assertNotNull(validateUnitName("")); - Assert.assertNull(validateUnitName("seconds")); - Assert.assertNull(validateUnitName("2")); - } + Assert.assertNull(validateUnitName("seconds")); + Assert.assertNull(validateUnitName("2")); + } - @Test - public void testSanitizeUnitName() { - Assert.assertEquals("seconds", sanitizeUnitName("seconds")); - Assert.assertEquals("seconds", sanitizeUnitName("seconds_total")); - Assert.assertEquals("seconds", sanitizeUnitName("seconds_total_total")); - Assert.assertEquals("m_s", sanitizeUnitName("m/s")); - Assert.assertEquals("seconds", sanitizeUnitName("secondstotal")); - Assert.assertEquals("2", sanitizeUnitName("2")); - } + @Test + public void testSanitizeUnitName() { + Assert.assertEquals("seconds", sanitizeUnitName("seconds")); + Assert.assertEquals("seconds", sanitizeUnitName("seconds_total")); + Assert.assertEquals("seconds", sanitizeUnitName("seconds_total_total")); + Assert.assertEquals("m_s", sanitizeUnitName("m/s")); + Assert.assertEquals("seconds", sanitizeUnitName("secondstotal")); + Assert.assertEquals("2", sanitizeUnitName("2")); + } - @Test(expected = IllegalArgumentException.class) - public void testInvalidUnitName1() { - sanitizeUnitName("total"); - } + @Test(expected = IllegalArgumentException.class) + public void testInvalidUnitName1() { + sanitizeUnitName("total"); + } - 
@Test(expected = IllegalArgumentException.class) - public void testInvalidUnitName2() { - sanitizeUnitName("_total"); - } + @Test(expected = IllegalArgumentException.class) + public void testInvalidUnitName2() { + sanitizeUnitName("_total"); + } - @Test(expected = IllegalArgumentException.class) - public void testInvalidUnitName3() { - sanitizeUnitName("%"); - } + @Test(expected = IllegalArgumentException.class) + public void testInvalidUnitName3() { + sanitizeUnitName("%"); + } - @Test(expected = IllegalArgumentException.class) - public void testEmptyUnitName() { - sanitizeUnitName(""); - } + @Test(expected = IllegalArgumentException.class) + public void testEmptyUnitName() { + sanitizeUnitName(""); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/QuantilesTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/QuantilesTest.java index c0956361d..0d66d5632 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/QuantilesTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/QuantilesTest.java @@ -1,51 +1,40 @@ package io.prometheus.metrics.model.snapshots; +import java.util.Iterator; import org.junit.Assert; import org.junit.Test; -import java.util.Iterator; - public class QuantilesTest { - @Test - public void testSort() { - Quantiles quantiles = Quantiles.builder() - .quantile(0.99, 0.23) - .quantile(0.5, 0.2) - .quantile(0.95, 0.22) - .build(); - Assert.assertEquals(3, quantiles.size()); - Assert.assertEquals(0.5, quantiles.get(0).getQuantile(), 0); - Assert.assertEquals(0.2, quantiles.get(0).getValue(), 0); - Assert.assertEquals(0.95, quantiles.get(1).getQuantile(), 0); - Assert.assertEquals(0.22, quantiles.get(1).getValue(), 0); - Assert.assertEquals(0.99, quantiles.get(2).getQuantile(), 0); - Assert.assertEquals(0.23, quantiles.get(2).getValue(), 0); - } + @Test + public void testSort() { + Quantiles quantiles = + Quantiles.builder().quantile(0.99, 0.23).quantile(0.5, 0.2).quantile(0.95, 0.22).build(); + Assert.assertEquals(3, quantiles.size()); + Assert.assertEquals(0.5, quantiles.get(0).getQuantile(), 0); + Assert.assertEquals(0.2, quantiles.get(0).getValue(), 0); + Assert.assertEquals(0.95, quantiles.get(1).getQuantile(), 0); + Assert.assertEquals(0.22, quantiles.get(1).getValue(), 0); + Assert.assertEquals(0.99, quantiles.get(2).getQuantile(), 0); + Assert.assertEquals(0.23, quantiles.get(2).getValue(), 0); + } - @Test(expected = UnsupportedOperationException.class) - public void testImmutable() { - Quantiles quantiles = Quantiles.builder() - .quantile(0.99, 0.23) - .quantile(0.5, 0.2) - .quantile(0.95, 0.22) - .build(); - Iterator iterator = quantiles.iterator(); - iterator.next(); - iterator.remove(); - } + @Test(expected = UnsupportedOperationException.class) + public void testImmutable() { + Quantiles quantiles = + Quantiles.builder().quantile(0.99, 0.23).quantile(0.5, 0.2).quantile(0.95, 0.22).build(); + Iterator iterator = quantiles.iterator(); + iterator.next(); + iterator.remove(); + } - @Test - public void testEmpty() { - Assert.assertEquals(0, Quantiles.EMPTY.size()); - } + @Test + public void testEmpty() { + Assert.assertEquals(0, Quantiles.EMPTY.size()); + } - @Test(expected = IllegalArgumentException.class) - public void testDuplicate() { - Quantiles.builder() - .quantile(0.95, 0.23) - .quantile(0.5, 0.2) - .quantile(0.95, 0.22) - .build(); - } + @Test(expected = IllegalArgumentException.class) + public void 
testDuplicate() { + Quantiles.builder().quantile(0.95, 0.23).quantile(0.5, 0.2).quantile(0.95, 0.22).build(); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/SummarySnapshotTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/SummarySnapshotTest.java index b5067b17e..4a4a2a355 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/SummarySnapshotTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/SummarySnapshotTest.java @@ -1,105 +1,111 @@ package io.prometheus.metrics.model.snapshots; +import java.util.concurrent.TimeUnit; import org.junit.Assert; import org.junit.Test; -import java.util.concurrent.TimeUnit; - public class SummarySnapshotTest { - @Test - public void testCompleteGoodCase() { - long createdTimestamp = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1); - long scrapeTimestamp = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(2); - long exemplarTimestamp = System.currentTimeMillis(); - SummarySnapshot snapshot = SummarySnapshot.builder() - .name("latency_seconds") - .help("latency in seconds") - .unit(Unit.SECONDS) - .dataPoint(SummarySnapshot.SummaryDataPointSnapshot.builder() - .createdTimestampMillis(createdTimestamp) - .scrapeTimestampMillis(scrapeTimestamp) - .labels(Labels.of("endpoint", "/")) - .quantiles(Quantiles.builder() - .quantile(0.5, 0.2) - .quantile(0.95, 0.22) - .quantile(0.99, 0.23) - .build()) - .exemplars(Exemplars.builder() - .exemplar(Exemplar.builder() - .value(0.2) - .traceId("abc123") - .spanId("123457") - .timestampMillis(exemplarTimestamp) - .build()) - .exemplar(Exemplar.builder() - .value(0.21) - .traceId("abc124") - .spanId("123458") - .timestampMillis(exemplarTimestamp) - .build()) - .build()) - .count(1093) - .sum(218.6) - .build()) - .dataPoint(SummarySnapshot.SummaryDataPointSnapshot.builder() - .labels(Labels.of("endpoint", "/test")) - .count(1093) - .sum(218.6) - .build()) - .build(); - SnapshotTestUtil.assertMetadata(snapshot, "latency_seconds", "latency in seconds", "seconds"); - Assert.assertEquals(2, snapshot.getDataPoints().size()); - SummarySnapshot.SummaryDataPointSnapshot data = snapshot.getDataPoints().get(0); - Assert.assertEquals(Labels.of("endpoint", "/"), data.getLabels()); - Assert.assertTrue(data.hasCount()); - Assert.assertEquals(1093, data.getCount()); - Assert.assertTrue(data.hasSum()); - Assert.assertEquals(218.6, data.getSum(), 0); - Assert.assertTrue(data.hasCreatedTimestamp()); - Assert.assertEquals(createdTimestamp, data.getCreatedTimestampMillis()); - Assert.assertTrue(data.hasScrapeTimestamp()); - Assert.assertEquals(scrapeTimestamp, data.getScrapeTimestampMillis()); - Quantiles quantiles = data.getQuantiles(); - Assert.assertEquals(3, quantiles.size()); - // quantiles are tested in QuantilesTest already, skipping here. - Assert.assertEquals(2, data.getExemplars().size()); - // exemplars are tested in ExemplarsTest already, skipping here. 
+ @Test + public void testCompleteGoodCase() { + long createdTimestamp = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1); + long scrapeTimestamp = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(2); + long exemplarTimestamp = System.currentTimeMillis(); + SummarySnapshot snapshot = + SummarySnapshot.builder() + .name("latency_seconds") + .help("latency in seconds") + .unit(Unit.SECONDS) + .dataPoint( + SummarySnapshot.SummaryDataPointSnapshot.builder() + .createdTimestampMillis(createdTimestamp) + .scrapeTimestampMillis(scrapeTimestamp) + .labels(Labels.of("endpoint", "/")) + .quantiles( + Quantiles.builder() + .quantile(0.5, 0.2) + .quantile(0.95, 0.22) + .quantile(0.99, 0.23) + .build()) + .exemplars( + Exemplars.builder() + .exemplar( + Exemplar.builder() + .value(0.2) + .traceId("abc123") + .spanId("123457") + .timestampMillis(exemplarTimestamp) + .build()) + .exemplar( + Exemplar.builder() + .value(0.21) + .traceId("abc124") + .spanId("123458") + .timestampMillis(exemplarTimestamp) + .build()) + .build()) + .count(1093) + .sum(218.6) + .build()) + .dataPoint( + SummarySnapshot.SummaryDataPointSnapshot.builder() + .labels(Labels.of("endpoint", "/test")) + .count(1093) + .sum(218.6) + .build()) + .build(); + SnapshotTestUtil.assertMetadata(snapshot, "latency_seconds", "latency in seconds", "seconds"); + Assert.assertEquals(2, snapshot.getDataPoints().size()); + SummarySnapshot.SummaryDataPointSnapshot data = snapshot.getDataPoints().get(0); + Assert.assertEquals(Labels.of("endpoint", "/"), data.getLabels()); + Assert.assertTrue(data.hasCount()); + Assert.assertEquals(1093, data.getCount()); + Assert.assertTrue(data.hasSum()); + Assert.assertEquals(218.6, data.getSum(), 0); + Assert.assertTrue(data.hasCreatedTimestamp()); + Assert.assertEquals(createdTimestamp, data.getCreatedTimestampMillis()); + Assert.assertTrue(data.hasScrapeTimestamp()); + Assert.assertEquals(scrapeTimestamp, data.getScrapeTimestampMillis()); + Quantiles quantiles = data.getQuantiles(); + Assert.assertEquals(3, quantiles.size()); + // quantiles are tested in QuantilesTest already, skipping here. + Assert.assertEquals(2, data.getExemplars().size()); + // exemplars are tested in ExemplarsTest already, skipping here. 
- data = snapshot.getDataPoints().get(1); - Assert.assertFalse(data.hasCreatedTimestamp()); - Assert.assertFalse(data.hasScrapeTimestamp()); - Assert.assertTrue(data.hasCount()); - Assert.assertTrue(data.hasSum()); - } + data = snapshot.getDataPoints().get(1); + Assert.assertFalse(data.hasCreatedTimestamp()); + Assert.assertFalse(data.hasScrapeTimestamp()); + Assert.assertTrue(data.hasCount()); + Assert.assertTrue(data.hasSum()); + } - @Test - public void testMinimal() { - SummarySnapshot snapshot = SummarySnapshot.builder() - .name("size_bytes") - .dataPoint(SummarySnapshot.SummaryDataPointSnapshot.builder() - .count(10) - .sum(12.0) - .build()) - .build(); - Assert.assertEquals(1, snapshot.getDataPoints().size()); - Assert.assertEquals(Labels.EMPTY, snapshot.getDataPoints().get(0).getLabels()); - } + @Test + public void testMinimal() { + SummarySnapshot snapshot = + SummarySnapshot.builder() + .name("size_bytes") + .dataPoint( + SummarySnapshot.SummaryDataPointSnapshot.builder().count(10).sum(12.0).build()) + .build(); + Assert.assertEquals(1, snapshot.getDataPoints().size()); + Assert.assertEquals(Labels.EMPTY, snapshot.getDataPoints().get(0).getLabels()); + } - @Test - public void testEmptySnapshot() { - SummarySnapshot snapshot = SummarySnapshot.builder().name("empty_summary").build(); - Assert.assertEquals(0, snapshot.getDataPoints().size()); - } + @Test + public void testEmptySnapshot() { + SummarySnapshot snapshot = SummarySnapshot.builder().name("empty_summary").build(); + Assert.assertEquals(0, snapshot.getDataPoints().size()); + } - @Test - public void testEmptyData() { - SummarySnapshot.SummaryDataPointSnapshot data = SummarySnapshot.SummaryDataPointSnapshot.builder().build(); - Assert.assertEquals(0, data.getQuantiles().size()); - Assert.assertFalse(data.hasCount()); - Assert.assertFalse(data.hasSum()); - Assert.assertFalse(data.hasCreatedTimestamp()); - Assert.assertFalse(data.hasScrapeTimestamp()); - Assert.assertEquals(0, data.getExemplars().size()); - } + @Test + public void testEmptyData() { + SummarySnapshot.SummaryDataPointSnapshot data = + SummarySnapshot.SummaryDataPointSnapshot.builder().build(); + Assert.assertEquals(0, data.getQuantiles().size()); + Assert.assertFalse(data.hasCount()); + Assert.assertFalse(data.hasSum()); + Assert.assertFalse(data.hasCreatedTimestamp()); + Assert.assertFalse(data.hasScrapeTimestamp()); + Assert.assertEquals(0, data.getExemplars().size()); + } } diff --git a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/UnknownSnapshotTest.java b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/UnknownSnapshotTest.java index a6d7973b5..cdb81c9dc 100644 --- a/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/UnknownSnapshotTest.java +++ b/prometheus-metrics-model/src/test/java/io/prometheus/metrics/model/snapshots/UnknownSnapshotTest.java @@ -5,83 +5,83 @@ public class UnknownSnapshotTest { - @Test - public void testCompleteGoodCase() { - long exemplarTimestamp = System.currentTimeMillis(); - UnknownSnapshot snapshot = UnknownSnapshot.builder() - .name("my_unknown_seconds") - .help("something in seconds") - .unit(Unit.SECONDS) - .dataPoint(UnknownSnapshot.UnknownDataPointSnapshot.builder() - .value(0.3) - .exemplar(Exemplar.builder() - .value(0.12) - .traceId("abc123") - .spanId("123457") - .timestampMillis(exemplarTimestamp) - .build()) - .labels(Labels.builder() - .label("env", "prod") - .build()) - .build() - 
).dataPoint(UnknownSnapshot.UnknownDataPointSnapshot.builder() - .value(0.29) - .labels(Labels.builder() - .label("env", "dev") - .build()) - .build() - ) - .build(); - SnapshotTestUtil.assertMetadata(snapshot, "my_unknown_seconds", "something in seconds", "seconds"); - Assert.assertEquals(2, snapshot.getDataPoints().size()); - UnknownSnapshot.UnknownDataPointSnapshot data = snapshot.getDataPoints().get(1); // env="prod" - Assert.assertEquals(Labels.of("env", "prod"), data.getLabels()); - Assert.assertEquals(0.3, data.getValue(), 0.0); - Assert.assertEquals(0.12, data.getExemplar().getValue(), 0.0); - Assert.assertFalse(data.hasCreatedTimestamp()); - Assert.assertFalse(data.hasScrapeTimestamp()); - } + @Test + public void testCompleteGoodCase() { + long exemplarTimestamp = System.currentTimeMillis(); + UnknownSnapshot snapshot = + UnknownSnapshot.builder() + .name("my_unknown_seconds") + .help("something in seconds") + .unit(Unit.SECONDS) + .dataPoint( + UnknownSnapshot.UnknownDataPointSnapshot.builder() + .value(0.3) + .exemplar( + Exemplar.builder() + .value(0.12) + .traceId("abc123") + .spanId("123457") + .timestampMillis(exemplarTimestamp) + .build()) + .labels(Labels.builder().label("env", "prod").build()) + .build()) + .dataPoint( + UnknownSnapshot.UnknownDataPointSnapshot.builder() + .value(0.29) + .labels(Labels.builder().label("env", "dev").build()) + .build()) + .build(); + SnapshotTestUtil.assertMetadata( + snapshot, "my_unknown_seconds", "something in seconds", "seconds"); + Assert.assertEquals(2, snapshot.getDataPoints().size()); + UnknownSnapshot.UnknownDataPointSnapshot data = snapshot.getDataPoints().get(1); // env="prod" + Assert.assertEquals(Labels.of("env", "prod"), data.getLabels()); + Assert.assertEquals(0.3, data.getValue(), 0.0); + Assert.assertEquals(0.12, data.getExemplar().getValue(), 0.0); + Assert.assertFalse(data.hasCreatedTimestamp()); + Assert.assertFalse(data.hasScrapeTimestamp()); + } - @Test - public void testMinimal() { - UnknownSnapshot snapshot = UnknownSnapshot.builder() - .name("test") - .dataPoint(UnknownSnapshot.UnknownDataPointSnapshot.builder() - .value(1.0) - .build()) - .build(); - Assert.assertEquals(1, snapshot.getDataPoints().size()); - } + @Test + public void testMinimal() { + UnknownSnapshot snapshot = + UnknownSnapshot.builder() + .name("test") + .dataPoint(UnknownSnapshot.UnknownDataPointSnapshot.builder().value(1.0).build()) + .build(); + Assert.assertEquals(1, snapshot.getDataPoints().size()); + } - @Test - public void testEmpty() { - UnknownSnapshot snapshot = UnknownSnapshot.builder().name("test").build(); - Assert.assertEquals(0, snapshot.getDataPoints().size()); - } + @Test + public void testEmpty() { + UnknownSnapshot snapshot = UnknownSnapshot.builder().name("test").build(); + Assert.assertEquals(0, snapshot.getDataPoints().size()); + } - @Test(expected = IllegalArgumentException.class) - public void testNameMissing() { - UnknownSnapshot.builder().build(); - } + @Test(expected = IllegalArgumentException.class) + public void testNameMissing() { + UnknownSnapshot.builder().build(); + } - @Test(expected = IllegalArgumentException.class) - public void testValueMissing() { - UnknownSnapshot.UnknownDataPointSnapshot.builder().build(); - } + @Test(expected = IllegalArgumentException.class) + public void testValueMissing() { + UnknownSnapshot.UnknownDataPointSnapshot.builder().build(); + } - @Test - public void testUnknownDataPointSnapshot() { - Labels labels = Labels.of("k1", "v1"); - Exemplar exemplar = 
Exemplar.builder().value(2.0).build(); + @Test + public void testUnknownDataPointSnapshot() { + Labels labels = Labels.of("k1", "v1"); + Exemplar exemplar = Exemplar.builder().value(2.0).build(); - UnknownSnapshot.UnknownDataPointSnapshot data = new UnknownSnapshot.UnknownDataPointSnapshot(1.0, labels, exemplar); - Assert.assertEquals(1.0, data.getValue(), 0.1); - Assert.assertEquals(labels, data.getLabels()); - Assert.assertEquals(exemplar, data.getExemplar()); + UnknownSnapshot.UnknownDataPointSnapshot data = + new UnknownSnapshot.UnknownDataPointSnapshot(1.0, labels, exemplar); + Assert.assertEquals(1.0, data.getValue(), 0.1); + Assert.assertEquals(labels, data.getLabels()); + Assert.assertEquals(exemplar, data.getExemplar()); - data = new UnknownSnapshot.UnknownDataPointSnapshot(1.0, labels, exemplar, 0L); - Assert.assertEquals(1.0, data.getValue(), 0.1); - Assert.assertEquals(labels, data.getLabels()); - Assert.assertEquals(exemplar, data.getExemplar()); - } + data = new UnknownSnapshot.UnknownDataPointSnapshot(1.0, labels, exemplar, 0L); + Assert.assertEquals(1.0, data.getValue(), 0.1); + Assert.assertEquals(labels, data.getLabels()); + Assert.assertEquals(exemplar, data.getExemplar()); + } } diff --git a/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-opentelemetry/src/main/java/io/prometheus/metrics/shaded/io_opentelemetry/PrometheusMetricsShadedOpenTelemetry.java b/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-opentelemetry/src/main/java/io/prometheus/metrics/shaded/io_opentelemetry/PrometheusMetricsShadedOpenTelemetry.java index 7b0019461..ca831e691 100644 --- a/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-opentelemetry/src/main/java/io/prometheus/metrics/shaded/io_opentelemetry/PrometheusMetricsShadedOpenTelemetry.java +++ b/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-opentelemetry/src/main/java/io/prometheus/metrics/shaded/io_opentelemetry/PrometheusMetricsShadedOpenTelemetry.java @@ -1,9 +1,9 @@ package io.prometheus.metrics.shaded.io_opentelemetry; /** - * This module does not need any source code, however, in order to publish it to Maven central it needs JavaDoc. - *
<p>
- * I'm adding this dummy class here to get JavaDoc so I can publish this module to Maven central. + * This module does not need any source code, however, in order to publish it to Maven central it + * needs JavaDoc. + * + *
<p>
I'm adding this dummy class here to get JavaDoc so I can publish this module to Maven central. */ -public class PrometheusMetricsShadedOpenTelemetry { -} +public class PrometheusMetricsShadedOpenTelemetry {} diff --git a/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-protobuf/src/main/java/io/prometheus/metrics/shaded/com_google_protobuf/PrometheusMetricsShadedProtobuf.java b/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-protobuf/src/main/java/io/prometheus/metrics/shaded/com_google_protobuf/PrometheusMetricsShadedProtobuf.java index 3f63392a7..9d78ee8e1 100644 --- a/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-protobuf/src/main/java/io/prometheus/metrics/shaded/com_google_protobuf/PrometheusMetricsShadedProtobuf.java +++ b/prometheus-metrics-shaded-dependencies/prometheus-metrics-shaded-protobuf/src/main/java/io/prometheus/metrics/shaded/com_google_protobuf/PrometheusMetricsShadedProtobuf.java @@ -1,9 +1,9 @@ package io.prometheus.metrics.shaded.com_google_protobuf; /** - * This module does not need any source code, however, in order to publish it to Maven central it needs JavaDoc. - *
<p>
- * I'm adding this dummy class here to get JavaDoc so I can publish this module to Maven central. + * This module does not need any source code, however, in order to publish it to Maven central it + * needs JavaDoc. + * + *
<p>
I'm adding this dummy class here to get JavaDoc so I can publish this module to Maven central. */ -public class PrometheusMetricsShadedProtobuf { -} +public class PrometheusMetricsShadedProtobuf {} diff --git a/prometheus-metrics-simpleclient-bridge/src/main/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollector.java b/prometheus-metrics-simpleclient-bridge/src/main/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollector.java index e1b5a3113..f1b592eaa 100644 --- a/prometheus-metrics-simpleclient-bridge/src/main/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollector.java +++ b/prometheus-metrics-simpleclient-bridge/src/main/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollector.java @@ -23,25 +23,27 @@ import io.prometheus.metrics.model.snapshots.SummarySnapshot; import io.prometheus.metrics.model.snapshots.Unit; import io.prometheus.metrics.model.snapshots.UnknownSnapshot; - import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; /** - * Bridge from {@code simpleclient} (version 0.16.0 and older) to the new {@code prometheus-metrics} (version 1.0.0 and newer). - *
<p>
- * Usage: The following line will register all metrics from a {@code simpleclient} {@link CollectorRegistry#defaultRegistry} - * to a {@code prometheus-metrics} {@link PrometheusRegistry#defaultRegistry}: + * Bridge from {@code simpleclient} (version 0.16.0 and older) to the new {@code prometheus-metrics} + * (version 1.0.0 and newer). + * + *
<p>
Usage: The following line will register all metrics from a {@code simpleclient} {@link + * CollectorRegistry#defaultRegistry} to a {@code prometheus-metrics} {@link + * PrometheusRegistry#defaultRegistry}: + * *
  * <pre>{@code
  * SimpleclientCollector.builder().register();
  * }</pre>
- * <p>
- * If you have custom registries (not the default registries), use the following snippet:
+ *
+ * <p>If you have custom registries (not the default registries), use the following snippet:
+ *
  * <pre>{@code
  * CollectorRegistry simpleclientRegistry = ...;
  * PrometheusRegistry prometheusRegistry = ...;
@@ -52,354 +54,373 @@
  */
 public class SimpleclientCollector implements MultiCollector {
 
-    private final CollectorRegistry simpleclientRegistry;
+  private final CollectorRegistry simpleclientRegistry;
 
-    private SimpleclientCollector(CollectorRegistry simpleclientRegistry) {
-        this.simpleclientRegistry = simpleclientRegistry;
-    }
+  private SimpleclientCollector(CollectorRegistry simpleclientRegistry) {
+    this.simpleclientRegistry = simpleclientRegistry;
+  }
 
-    @Override
-    public MetricSnapshots collect() {
-        return convert(simpleclientRegistry.metricFamilySamples());
-    }
+  @Override
+  public MetricSnapshots collect() {
+    return convert(simpleclientRegistry.metricFamilySamples());
+  }
 
-    private MetricSnapshots convert(Enumeration<Collector.MetricFamilySamples> samples) {
-        MetricSnapshots.Builder result = MetricSnapshots.builder();
-        while (samples.hasMoreElements()) {
-            Collector.MetricFamilySamples sample = samples.nextElement();
-            switch (sample.type) {
-                case COUNTER:
-                    result.metricSnapshot(convertCounter(sample));
-                    break;
-                case GAUGE:
-                    result.metricSnapshot(convertGauge(sample));
-                    break;
-                case HISTOGRAM:
-                    result.metricSnapshot(convertHistogram(sample, false));
-                    break;
-                case GAUGE_HISTOGRAM:
-                    result.metricSnapshot(convertHistogram(sample, true));
-                    break;
-                case SUMMARY:
-                    result.metricSnapshot(convertSummary(sample));
-                    break;
-                case INFO:
-                    result.metricSnapshot(convertInfo(sample));
-                    break;
-                case STATE_SET:
-                    result.metricSnapshot(convertStateSet(sample));
-                    break;
-                case UNKNOWN:
-                    result.metricSnapshot(convertUnknown(sample));
-                    break;
-                default:
-                    throw new IllegalStateException(sample.type + ": Unexpected metric type");
-            }
-        }
-        return result.build();
+  private MetricSnapshots convert(Enumeration<Collector.MetricFamilySamples> samples) {
+    MetricSnapshots.Builder result = MetricSnapshots.builder();
+    while (samples.hasMoreElements()) {
+      Collector.MetricFamilySamples sample = samples.nextElement();
+      switch (sample.type) {
+        case COUNTER:
+          result.metricSnapshot(convertCounter(sample));
+          break;
+        case GAUGE:
+          result.metricSnapshot(convertGauge(sample));
+          break;
+        case HISTOGRAM:
+          result.metricSnapshot(convertHistogram(sample, false));
+          break;
+        case GAUGE_HISTOGRAM:
+          result.metricSnapshot(convertHistogram(sample, true));
+          break;
+        case SUMMARY:
+          result.metricSnapshot(convertSummary(sample));
+          break;
+        case INFO:
+          result.metricSnapshot(convertInfo(sample));
+          break;
+        case STATE_SET:
+          result.metricSnapshot(convertStateSet(sample));
+          break;
+        case UNKNOWN:
+          result.metricSnapshot(convertUnknown(sample));
+          break;
+        default:
+          throw new IllegalStateException(sample.type + ": Unexpected metric type");
+      }
     }
+    return result.build();
+  }
 
-    private MetricSnapshot convertCounter(Collector.MetricFamilySamples samples) {
-        CounterSnapshot.Builder counter = CounterSnapshot.builder()
-                .name(sanitizeMetricName(samples.name))
-                .help(samples.help)
-                .unit(convertUnit(samples));
-        Map<Labels, CounterSnapshot.CounterDataPointSnapshot.Builder> dataPoints = new HashMap<>();
-        for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
-            Labels labels = Labels.of(sample.labelNames, sample.labelValues);
-            CounterSnapshot.CounterDataPointSnapshot.Builder dataPoint = dataPoints.computeIfAbsent(labels, l -> CounterSnapshot.CounterDataPointSnapshot.builder().labels(labels));
-            if (sample.name.endsWith("_created")) {
-                dataPoint.createdTimestampMillis((long) Unit.secondsToMillis(sample.value));
-            } else {
-                dataPoint.value(sample.value).exemplar(convertExemplar(sample.exemplar));
-                if (sample.timestampMs != null) {
-                    dataPoint.scrapeTimestampMillis(sample.timestampMs);
-                }
-            }
-        }
-        for (CounterSnapshot.CounterDataPointSnapshot.Builder dataPoint : dataPoints.values()) {
-            counter.dataPoint(dataPoint.build());
+  private MetricSnapshot convertCounter(Collector.MetricFamilySamples samples) {
+    CounterSnapshot.Builder counter =
+        CounterSnapshot.builder()
+            .name(sanitizeMetricName(samples.name))
+            .help(samples.help)
+            .unit(convertUnit(samples));
+    Map<Labels, CounterSnapshot.CounterDataPointSnapshot.Builder> dataPoints = new HashMap<>();
+    for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
+      Labels labels = Labels.of(sample.labelNames, sample.labelValues);
+      CounterSnapshot.CounterDataPointSnapshot.Builder dataPoint =
+          dataPoints.computeIfAbsent(
+              labels, l -> CounterSnapshot.CounterDataPointSnapshot.builder().labels(labels));
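+      // simpleclient exposes the created timestamp as a separate "_created" sample in seconds;
+      // all other samples carry the counter value itself.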
+      if (sample.name.endsWith("_created")) {
+        dataPoint.createdTimestampMillis((long) Unit.secondsToMillis(sample.value));
+      } else {
+        dataPoint.value(sample.value).exemplar(convertExemplar(sample.exemplar));
+        if (sample.timestampMs != null) {
+          dataPoint.scrapeTimestampMillis(sample.timestampMs);
         }
-        return counter.build();
+      }
     }
-
-    private MetricSnapshot convertGauge(Collector.MetricFamilySamples samples) {
-        GaugeSnapshot.Builder gauge = GaugeSnapshot.builder()
-                .name(sanitizeMetricName(samples.name))
-                .help(samples.help)
-                .unit(convertUnit(samples));
-        for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
-            GaugeSnapshot.GaugeDataPointSnapshot.Builder dataPoint = GaugeSnapshot.GaugeDataPointSnapshot.builder()
-                    .value(sample.value)
-                    .labels(Labels.of(sample.labelNames, sample.labelValues))
-                    .exemplar(convertExemplar(sample.exemplar));
-            if (sample.timestampMs != null) {
-                dataPoint.scrapeTimestampMillis(sample.timestampMs);
-            }
-            gauge.dataPoint(dataPoint.build());
-        }
-        return gauge.build();
+    for (CounterSnapshot.CounterDataPointSnapshot.Builder dataPoint : dataPoints.values()) {
+      counter.dataPoint(dataPoint.build());
     }
+    return counter.build();
+  }
 
-    private MetricSnapshot convertHistogram(Collector.MetricFamilySamples samples, boolean isGaugeHistogram) {
-        HistogramSnapshot.Builder histogram = HistogramSnapshot.builder()
-                .name(sanitizeMetricName(samples.name))
-                .help(samples.help)
-                .unit(convertUnit(samples))
-                .gaugeHistogram(isGaugeHistogram);
-        Map<Labels, HistogramSnapshot.HistogramDataPointSnapshot.Builder> dataPoints = new HashMap<>();
-        Map<Labels, Map<Double, Long>> cumulativeBuckets = new HashMap<>();
-        Map<Labels, Exemplars.Builder> exemplars = new HashMap<>();
-        for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
-            Labels labels = labelsWithout(sample, "le");
-            dataPoints.computeIfAbsent(labels, l -> HistogramSnapshot.HistogramDataPointSnapshot.builder()
-                    .labels(labels));
-            cumulativeBuckets.computeIfAbsent(labels, l -> new HashMap<>());
-            exemplars.computeIfAbsent(labels, l -> Exemplars.builder());
-            if (sample.name.endsWith("_sum")) {
-                dataPoints.get(labels).sum(sample.value);
-            }
-            if (sample.name.endsWith("_bucket")) {
-                addBucket(cumulativeBuckets.get(labels), sample);
-            }
-            if (sample.name.endsWith("_created")) {
-                dataPoints.get(labels).createdTimestampMillis((long) Unit.secondsToMillis(sample.value));
-            }
-            if (sample.exemplar != null) {
-                exemplars.get(labels).exemplar(convertExemplar(sample.exemplar));
-            }
-            if (sample.timestampMs != null) {
-                dataPoints.get(labels).scrapeTimestampMillis(sample.timestampMs);
-            }
-        }
-        for (Labels labels : dataPoints.keySet()) {
-            histogram.dataPoint(dataPoints.get(labels)
-                    .classicHistogramBuckets(makeBuckets(cumulativeBuckets.get(labels)))
-                    .exemplars(exemplars.get(labels).build())
-                    .build());
-        }
-        return histogram.build();
+  private MetricSnapshot convertGauge(Collector.MetricFamilySamples samples) {
+    GaugeSnapshot.Builder gauge =
+        GaugeSnapshot.builder()
+            .name(sanitizeMetricName(samples.name))
+            .help(samples.help)
+            .unit(convertUnit(samples));
+    for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
+      GaugeSnapshot.GaugeDataPointSnapshot.Builder dataPoint =
+          GaugeSnapshot.GaugeDataPointSnapshot.builder()
+              .value(sample.value)
+              .labels(Labels.of(sample.labelNames, sample.labelValues))
+              .exemplar(convertExemplar(sample.exemplar));
+      if (sample.timestampMs != null) {
+        dataPoint.scrapeTimestampMillis(sample.timestampMs);
+      }
+      gauge.dataPoint(dataPoint.build());
     }
+    return gauge.build();
+  }
 
-    private MetricSnapshot convertSummary(Collector.MetricFamilySamples samples) {
-        SummarySnapshot.Builder summary = SummarySnapshot.builder()
-                .name(sanitizeMetricName(samples.name))
-                .help(samples.help)
-                .unit(convertUnit(samples));
-        Map<Labels, SummarySnapshot.SummaryDataPointSnapshot.Builder> dataPoints = new HashMap<>();
-        Map<Labels, Quantiles.Builder> quantiles = new HashMap<>();
-        Map<Labels, Exemplars.Builder> exemplars = new HashMap<>();
-        for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
-            Labels labels = labelsWithout(sample, "quantile");
-            dataPoints.computeIfAbsent(labels, l -> SummarySnapshot.SummaryDataPointSnapshot.builder()
-                    .labels(labels));
-            quantiles.computeIfAbsent(labels, l -> Quantiles.builder());
-            exemplars.computeIfAbsent(labels, l -> Exemplars.builder());
-            if (sample.name.endsWith("_sum")) {
-                dataPoints.get(labels).sum(sample.value);
-            } else if (sample.name.endsWith("_count")) {
-                dataPoints.get(labels).count((long) sample.value);
-            } else if (sample.name.endsWith("_created")) {
-                dataPoints.get(labels).createdTimestampMillis((long) Unit.secondsToMillis(sample.value));
-            } else {
-                for (int i=0; i<sample.labelNames.size(); i++) {
+    Map<Labels, HistogramSnapshot.HistogramDataPointSnapshot.Builder> dataPoints = new HashMap<>();
+    Map<Labels, Map<Double, Long>> cumulativeBuckets = new HashMap<>();
+    Map<Labels, Exemplars.Builder> exemplars = new HashMap<>();
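+    // Samples are grouped by their label set without the "le" label; each group becomes one
+    // data point, and its cumulative bucket counts are converted in makeBuckets() below.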
+    for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
+      Labels labels = labelsWithout(sample, "le");
+      dataPoints.computeIfAbsent(
+          labels, l -> HistogramSnapshot.HistogramDataPointSnapshot.builder().labels(labels));
+      cumulativeBuckets.computeIfAbsent(labels, l -> new HashMap<>());
+      exemplars.computeIfAbsent(labels, l -> Exemplars.builder());
+      if (sample.name.endsWith("_sum")) {
+        dataPoints.get(labels).sum(sample.value);
+      }
+      if (sample.name.endsWith("_bucket")) {
+        addBucket(cumulativeBuckets.get(labels), sample);
+      }
+      if (sample.name.endsWith("_created")) {
+        dataPoints.get(labels).createdTimestampMillis((long) Unit.secondsToMillis(sample.value));
+      }
+      if (sample.exemplar != null) {
+        exemplars.get(labels).exemplar(convertExemplar(sample.exemplar));
+      }
+      if (sample.timestampMs != null) {
+        dataPoints.get(labels).scrapeTimestampMillis(sample.timestampMs);
+      }
     }
-
-    private MetricSnapshot convertStateSet(Collector.MetricFamilySamples samples) {
-        StateSetSnapshot.Builder stateSet = StateSetSnapshot.builder()
-                .name(sanitizeMetricName(samples.name))
-                .help(samples.help);
-        Map<Labels, StateSetSnapshot.StateSetDataPointSnapshot.Builder> dataPoints = new HashMap<>();
-        for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
-            Labels labels = labelsWithout(sample, sample.name);
-            dataPoints.computeIfAbsent(labels, l -> StateSetSnapshot.StateSetDataPointSnapshot.builder().labels(labels));
-            String stateName = null;
-            for (int i=0; i<sample.labelNames.size(); i++) {
+    Map<Labels, SummarySnapshot.SummaryDataPointSnapshot.Builder> dataPoints = new HashMap<>();
+    Map<Labels, Quantiles.Builder> quantiles = new HashMap<>();
+    Map<Labels, Exemplars.Builder> exemplars = new HashMap<>();
+    for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
+      Labels labels = labelsWithout(sample, "quantile");
+      dataPoints.computeIfAbsent(
+          labels, l -> SummarySnapshot.SummaryDataPointSnapshot.builder().labels(labels));
+      quantiles.computeIfAbsent(labels, l -> Quantiles.builder());
+      exemplars.computeIfAbsent(labels, l -> Exemplars.builder());
+      if (sample.name.endsWith("_sum")) {
+        dataPoints.get(labels).sum(sample.value);
+      } else if (sample.name.endsWith("_count")) {
+        dataPoints.get(labels).count((long) sample.value);
+      } else if (sample.name.endsWith("_created")) {
+        dataPoints.get(labels).createdTimestampMillis((long) Unit.secondsToMillis(sample.value));
+      } else {
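+        // Remaining samples are quantiles; the quantile itself is encoded
+        // in the "quantile" label, and the sample value is the quantile's value.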
+        for (int i = 0; i < sample.labelNames.size(); i++) {
+          if (sample.labelNames.get(i).equals("quantile")) {
+            quantiles
+                .get(labels)
+                .quantile(
+                    new Quantile(Double.parseDouble(sample.labelValues.get(i)), sample.value));
+            break;
+          }
         }
-        return unknown.build();
+      }
+      if (sample.exemplar != null) {
+        exemplars.get(labels).exemplar(convertExemplar(sample.exemplar));
+      }
+      if (sample.timestampMs != null) {
+        dataPoints.get(labels).scrapeTimestampMillis(sample.timestampMs);
+      }
     }
-
-    private String stripSuffix(String name, String suffix) {
-        if (name.endsWith(suffix)) {
-            return name.substring(0, name.length() - suffix.length());
-        } else {
-            return name;
-        }
+    for (Labels labels : dataPoints.keySet()) {
+      summary.dataPoint(
+          dataPoints
+              .get(labels)
+              .quantiles(quantiles.get(labels).build())
+              .exemplars(exemplars.get(labels).build())
+              .build());
     }
+    return summary.build();
+  }
 
-    private Unit convertUnit(Collector.MetricFamilySamples samples) {
-        if (samples.unit != null && !samples.unit.isEmpty()) {
-            return new Unit(samples.unit);
-        } else {
-            return null;
+  private MetricSnapshot convertStateSet(Collector.MetricFamilySamples samples) {
+    StateSetSnapshot.Builder stateSet =
+        StateSetSnapshot.builder().name(sanitizeMetricName(samples.name)).help(samples.help);
+    Map<Labels, StateSetSnapshot.StateSetDataPointSnapshot.Builder> dataPoints = new HashMap<>();
+    for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
+      Labels labels = labelsWithout(sample, sample.name);
+      dataPoints.computeIfAbsent(
+          labels, l -> StateSetSnapshot.StateSetDataPointSnapshot.builder().labels(labels));
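+      // The state name is encoded as a label whose name equals the metric name.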
+      String stateName = null;
+      for (int i = 0; i < sample.labelNames.size(); i++) {
+        if (sample.labelNames.get(i).equals(sample.name)) {
+          stateName = sample.labelValues.get(i);
+          break;
         }
+      }
+      if (stateName == null) {
+        throw new IllegalStateException("Invalid StateSet metric: No state name found.");
+      }
+      dataPoints.get(labels).state(stateName, sample.value == 1.0);
+      if (sample.timestampMs != null) {
+        dataPoints.get(labels).scrapeTimestampMillis(sample.timestampMs);
+      }
+    }
+    for (StateSetSnapshot.StateSetDataPointSnapshot.Builder dataPoint : dataPoints.values()) {
+      stateSet.dataPoint(dataPoint.build());
     }
+    return stateSet.build();
+  }
 
-    private ClassicHistogramBuckets makeBuckets(Map<Double, Long> cumulativeBuckets) {
-        List<Double> upperBounds = new ArrayList<>(cumulativeBuckets.size());
-        upperBounds.addAll(cumulativeBuckets.keySet());
-        Collections.sort(upperBounds);
-        ClassicHistogramBuckets.Builder result = ClassicHistogramBuckets.builder();
-        long previousCount = 0L;
-        for (Double upperBound : upperBounds) {
-            long cumulativeCount = cumulativeBuckets.get(upperBound);
-            result.bucket(upperBound, cumulativeCount - previousCount);
-            previousCount = cumulativeCount;
-        }
-        return result.build();
+  private MetricSnapshot convertUnknown(Collector.MetricFamilySamples samples) {
+    UnknownSnapshot.Builder unknown =
+        UnknownSnapshot.builder()
+            .name(sanitizeMetricName(samples.name))
+            .help(samples.help)
+            .unit(convertUnit(samples));
+    for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
+      UnknownSnapshot.UnknownDataPointSnapshot.Builder dataPoint =
+          UnknownSnapshot.UnknownDataPointSnapshot.builder()
+              .value(sample.value)
+              .labels(Labels.of(sample.labelNames, sample.labelValues))
+              .exemplar(convertExemplar(sample.exemplar));
+      if (sample.timestampMs != null) {
+        dataPoint.scrapeTimestampMillis(sample.timestampMs);
+      }
+      unknown.dataPoint(dataPoint.build());
     }
+    return unknown.build();
+  }
 
-    private void addBucket(Map<Double, Long> buckets, Collector.MetricFamilySamples.Sample sample) {
-        for (int i = 0; i < sample.labelNames.size(); i++) {
-            if (sample.labelNames.get(i).equals("le")) {
-                double upperBound;
-                switch (sample.labelValues.get(i)) {
-                    case "+Inf":
-                        upperBound = Double.POSITIVE_INFINITY;
-                        break;
-                    case "-Inf": // Doesn't make sense as count would always be zero. Catch this anyway.
-                        upperBound = Double.NEGATIVE_INFINITY;
-                        break;
-                    default:
-                        upperBound = Double.parseDouble(sample.labelValues.get(i));
-                }
-                buckets.put(upperBound, (long) sample.value);
-                return;
-            }
-        }
-        throw new IllegalStateException(sample.name + " does not have a le label.");
+  private String stripSuffix(String name, String suffix) {
+    if (name.endsWith(suffix)) {
+      return name.substring(0, name.length() - suffix.length());
+    } else {
+      return name;
     }
+  }
 
+  private Unit convertUnit(Collector.MetricFamilySamples samples) {
+    if (samples.unit != null && !samples.unit.isEmpty()) {
+      return new Unit(samples.unit);
+    } else {
+      return null;
+    }
+  }
 
-    private Labels labelsWithout(Collector.MetricFamilySamples.Sample sample, String excludedLabelName) {
-        Labels.Builder labels = Labels.builder();
-        for (int i = 0; i < sample.labelNames.size(); i++) {
-            if (!sample.labelNames.get(i).equals(excludedLabelName)) {
-                labels.label(sample.labelNames.get(i), sample.labelValues.get(i));
-            }
-        }
-        return labels.build();
+  private ClassicHistogramBuckets makeBuckets(Map<Double, Long> cumulativeBuckets) {
+    List<Double> upperBounds = new ArrayList<>(cumulativeBuckets.size());
+    upperBounds.addAll(cumulativeBuckets.keySet());
+    Collections.sort(upperBounds);
+    ClassicHistogramBuckets.Builder result = ClassicHistogramBuckets.builder();
+    long previousCount = 0L;
+    for (Double upperBound : upperBounds) {
+      long cumulativeCount = cumulativeBuckets.get(upperBound);
+      result.bucket(upperBound, cumulativeCount - previousCount);
+      previousCount = cumulativeCount;
     }
+    return result.build();
+  }
 
-    private MetricSnapshot convertInfo(Collector.MetricFamilySamples samples) {
-        InfoSnapshot.Builder info = InfoSnapshot.builder()
-                .name(sanitizeMetricName(samples.name))
-                .help(samples.help);
-        for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
-            info.dataPoint(InfoSnapshot.InfoDataPointSnapshot.builder()
-                    .labels(Labels.of(sample.labelNames, sample.labelValues))
-                    .build());
+  private void addBucket(Map<Double, Long> buckets, Collector.MetricFamilySamples.Sample sample) {
+    for (int i = 0; i < sample.labelNames.size(); i++) {
+      if (sample.labelNames.get(i).equals("le")) {
+        double upperBound;
+        switch (sample.labelValues.get(i)) {
+          case "+Inf":
+            upperBound = Double.POSITIVE_INFINITY;
+            break;
+          case "-Inf": // Doesn't make sense as count would always be zero. Catch this anyway.
+            upperBound = Double.NEGATIVE_INFINITY;
+            break;
+          default:
+            upperBound = Double.parseDouble(sample.labelValues.get(i));
         }
-        return info.build();
+        buckets.put(upperBound, (long) sample.value);
+        return;
+      }
     }
+    throw new IllegalStateException(sample.name + " does not have a le label.");
+  }
 
-    private Exemplar convertExemplar(io.prometheus.client.exemplars.Exemplar exemplar) {
-        if (exemplar == null) {
-            return null;
-        }
-        Exemplar.Builder result = Exemplar.builder().value(exemplar.getValue());
-        if (exemplar.getTimestampMs() != null) {
-            result.timestampMillis(exemplar.getTimestampMs());
-        }
-        Labels.Builder labels = Labels.builder();
-        for (int i = 0; i < exemplar.getNumberOfLabels(); i++) {
-            labels.label(exemplar.getLabelName(i), exemplar.getLabelValue(i));
-        }
-        return result.labels(labels.build()).build();
+  private Labels labelsWithout(
+      Collector.MetricFamilySamples.Sample sample, String excludedLabelName) {
+    Labels.Builder labels = Labels.builder();
+    for (int i = 0; i < sample.labelNames.size(); i++) {
+      if (!sample.labelNames.get(i).equals(excludedLabelName)) {
+        labels.label(sample.labelNames.get(i), sample.labelValues.get(i));
+      }
     }
+    return labels.build();
+  }
 
-    /**
-     * Currently there are no configuration options for the SimpleclientCollector.
-     * However, we want to follow the pattern to pass the config everywhere so that
-     * we can introduce config options later without the need for API changes.
-     */
-    public static Builder builder(PrometheusProperties config) {
-        return new Builder(config);
+  private MetricSnapshot convertInfo(Collector.MetricFamilySamples samples) {
+    InfoSnapshot.Builder info =
+        InfoSnapshot.builder().name(sanitizeMetricName(samples.name)).help(samples.help);
+    for (Collector.MetricFamilySamples.Sample sample : samples.samples) {
+      info.dataPoint(
+          InfoSnapshot.InfoDataPointSnapshot.builder()
+              .labels(Labels.of(sample.labelNames, sample.labelValues))
+              .build());
     }
+    return info.build();
+  }
 
-    public static Builder builder() {
-        return builder(PrometheusProperties.get());
+  private Exemplar convertExemplar(io.prometheus.client.exemplars.Exemplar exemplar) {
+    if (exemplar == null) {
+      return null;
+    }
+    Exemplar.Builder result = Exemplar.builder().value(exemplar.getValue());
+    if (exemplar.getTimestampMs() != null) {
+      result.timestampMillis(exemplar.getTimestampMs());
     }
+    Labels.Builder labels = Labels.builder();
+    for (int i = 0; i < exemplar.getNumberOfLabels(); i++) {
+      labels.label(exemplar.getLabelName(i), exemplar.getLabelValue(i));
+    }
+    return result.labels(labels.build()).build();
+  }
 
-    public static class Builder {
+  /**
+   * Currently there are no configuration options for the SimpleclientCollector. However, we want to
+   * follow the pattern to pass the config everywhere so that we can introduce config options later
+   * without the need for API changes.
+   */
+  public static Builder builder(PrometheusProperties config) {
+    return new Builder(config);
+  }
 
-        private final PrometheusProperties config;
-        private CollectorRegistry collectorRegistry;
+  public static Builder builder() {
+    return builder(PrometheusProperties.get());
+  }
 
-        private Builder(PrometheusProperties config) {
-            this.config = config;
-        }
+  public static class Builder {
 
-        public Builder collectorRegistry(CollectorRegistry registry) {
-            this.collectorRegistry = registry;
-            return this;
-        }
+    private final PrometheusProperties config;
+    private CollectorRegistry collectorRegistry;
 
-        public SimpleclientCollector build() {
-            return collectorRegistry != null ? new SimpleclientCollector(collectorRegistry) : new SimpleclientCollector(CollectorRegistry.defaultRegistry);
-        }
+    private Builder(PrometheusProperties config) {
+      this.config = config;
+    }
 
-        public SimpleclientCollector register() {
-            return register(PrometheusRegistry.defaultRegistry);
-        }
+    public Builder collectorRegistry(CollectorRegistry registry) {
+      this.collectorRegistry = registry;
+      return this;
+    }
 
-        public SimpleclientCollector register(PrometheusRegistry registry) {
-            SimpleclientCollector result = build();
-            registry.register(result);
-            return result;
-        }
+    public SimpleclientCollector build() {
+      return collectorRegistry != null
+          ? new SimpleclientCollector(collectorRegistry)
+          : new SimpleclientCollector(CollectorRegistry.defaultRegistry);
+    }
+
+    public SimpleclientCollector register() {
+      return register(PrometheusRegistry.defaultRegistry);
+    }
+
+    public SimpleclientCollector register(PrometheusRegistry registry) {
+      SimpleclientCollector result = build();
+      registry.register(result);
+      return result;
     }
+  }
 }
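The javadoc above explains that the builder exists so configuration options can be added later without API changes. As an orientation sketch only (not part of the patch content), minimal usage of the bridge mirrors the test setup further down in this patch:

    // Bridge an existing simpleclient CollectorRegistry into the new PrometheusRegistry.
    CollectorRegistry origRegistry = new CollectorRegistry();
    PrometheusRegistry newRegistry = new PrometheusRegistry();
    SimpleclientCollector.builder()
        .collectorRegistry(origRegistry) // omit to fall back to CollectorRegistry.defaultRegistry
        .register(newRegistry); // register() without arguments uses PrometheusRegistry.defaultRegistry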
diff --git a/prometheus-metrics-simpleclient-bridge/src/test/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollectorTest.java b/prometheus-metrics-simpleclient-bridge/src/test/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollectorTest.java
index 2681ceed7..83e28d982 100644
--- a/prometheus-metrics-simpleclient-bridge/src/test/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollectorTest.java
+++ b/prometheus-metrics-simpleclient-bridge/src/test/java/io/prometheus/metrics/simpleclient/bridge/SimpleclientCollectorTest.java
@@ -10,10 +10,6 @@
 import io.prometheus.client.exporter.common.TextFormat;
 import io.prometheus.metrics.expositionformats.OpenMetricsTextFormatWriter;
 import io.prometheus.metrics.model.registry.PrometheusRegistry;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.StringWriter;
@@ -22,236 +18,255 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 
 public class SimpleclientCollectorTest {
 
-    private CollectorRegistry origRegistry;
-    private PrometheusRegistry newRegistry;
-
-    @Before
-    public void setUp() {
-        origRegistry = new CollectorRegistry();
-        newRegistry = new PrometheusRegistry();
-        SimpleclientCollector.builder()
-                .collectorRegistry(origRegistry)
-                .register(newRegistry);
-    }
-
-    @Test
-    public void testCounterComplete() throws IOException, InterruptedException {
-        Counter counter = Counter.build()
-                .name("service_time_seconds_total")
-                .help("total time spent serving")
-                .labelNames("path", "status")
-                .register(origRegistry);
-        counter.labels("/hello", "200").incWithExemplar(0.8, "trace_id", "12345", "span_id", "abcde");
-        Thread.sleep(3); // make timestamps a bit different
-        counter.labels("/hello", "500").incWithExemplar(2.4, "trace_id", "23446", "span_id", "bcdef");
-
-        Assert.assertEquals(fixTimestamps(sort(origOpenMetrics())), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testCounterMinimal() throws IOException {
+  private CollectorRegistry origRegistry;
+  private PrometheusRegistry newRegistry;
+
+  @Before
+  public void setUp() {
+    origRegistry = new CollectorRegistry();
+    newRegistry = new PrometheusRegistry();
+    SimpleclientCollector.builder().collectorRegistry(origRegistry).register(newRegistry);
+  }
+
+  @Test
+  public void testCounterComplete() throws IOException, InterruptedException {
+    Counter counter =
         Counter.build()
-                .name("events")
-                .help("total number of events")
-                .register(origRegistry);
-
-        Assert.assertEquals(fixTimestamps(sort(origOpenMetrics())), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testGaugeComplete() throws IOException, InterruptedException {
-        Gauge gauge = Gauge.build()
-                .name("disk_usage_ratio")
-                .help("percentage used")
-                .unit("ratio")
-                .labelNames("device")
-                .register(origRegistry);
-        gauge.labels("/dev/sda1").set(0.2);
-        Thread.sleep(3);
-        gauge.labels("/dev/sda2").set(0.7);
-
-        Assert.assertEquals(sort(origOpenMetrics()), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testGaugeMinimal() throws IOException, InterruptedException {
-        Gauge gauge = Gauge.build()
-                .name("temperature_centigrade")
-                .help("temperature")
-                .unit("celsius")
-                .register(origRegistry);
-        gauge.set(22.3);
-
-        Assert.assertEquals(sort(origOpenMetrics()), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testHistogramComplete() throws IOException, InterruptedException {
-        Histogram histogram = Histogram.build()
-                .name("response_size_bytes")
-                .help("response size in Bytes")
-                .labelNames("status")
-                .buckets(64, 256, 512.1)
-                .register(origRegistry);
-        histogram.labels("200").observeWithExemplar(38, "trace_id", "1", "span_id", "2");
-        histogram.labels("200").observeWithExemplar(127, "trace_id", "3", "span_id", "4");
-        histogram.labels("200").observeWithExemplar(130, "trace_id", "5", "span_id", "6");
-        histogram.labels("200").observeWithExemplar(40, "trace_id", "7", "span_id", "8");
-        histogram.labels("200").observeWithExemplar(41, "trace_id", "9", "span_id", "10");
-        Thread.sleep(3); // make timestamps a bit different
-        histogram.labels("500").observeWithExemplar(10000, "trace_id", "11", "span_id", "12");
-
-        Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testHistogramMinimal() throws IOException, InterruptedException {
+            .name("service_time_seconds_total")
+            .help("total time spent serving")
+            .labelNames("path", "status")
+            .register(origRegistry);
+    counter.labels("/hello", "200").incWithExemplar(0.8, "trace_id", "12345", "span_id", "abcde");
+    Thread.sleep(3); // make timestamps a bit different
+    counter.labels("/hello", "500").incWithExemplar(2.4, "trace_id", "23446", "span_id", "bcdef");
+
+    Assert.assertEquals(fixTimestamps(sort(origOpenMetrics())), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testCounterMinimal() throws IOException {
+    Counter.build().name("events").help("total number of events").register(origRegistry);
+
+    Assert.assertEquals(fixTimestamps(sort(origOpenMetrics())), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testGaugeComplete() throws IOException, InterruptedException {
+    Gauge gauge =
+        Gauge.build()
+            .name("disk_usage_ratio")
+            .help("percentage used")
+            .unit("ratio")
+            .labelNames("device")
+            .register(origRegistry);
+    gauge.labels("/dev/sda1").set(0.2);
+    Thread.sleep(3);
+    gauge.labels("/dev/sda2").set(0.7);
+
+    Assert.assertEquals(sort(origOpenMetrics()), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testGaugeMinimal() throws IOException, InterruptedException {
+    Gauge gauge =
+        Gauge.build()
+            .name("temperature_centigrade")
+            .help("temperature")
+            .unit("celsius")
+            .register(origRegistry);
+    gauge.set(22.3);
+
+    Assert.assertEquals(sort(origOpenMetrics()), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testHistogramComplete() throws IOException, InterruptedException {
+    Histogram histogram =
         Histogram.build()
-                .name("request_latency")
-                .help("request latency")
-                .register(origRegistry);
-
-        Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testSummaryComplete() throws IOException, InterruptedException {
-        Summary summary = Summary.build()
-                .name("http_request_duration_seconds")
-                .help("request duration")
-                .labelNames("path", "status")
-                .quantile(0.5, 0.01)
-                .quantile(0.95, 0.01)
-                .quantile(0.99, 0.001)
-                .register(origRegistry);
-        summary.labels("/", "200").observe(0.2);
-        Thread.sleep(3);
-        summary.labels("/info", "200").observe(0.7);
-        summary.labels("/info", "200").observe(0.8);
-        summary.labels("/info", "200").observe(0.9);
-        Thread.sleep(3);
-        summary.labels("/", "500").observe(0.3);
-        summary.labels("/", "500").observe(0.31);
-        summary.labels("/", "500").observe(0.32);
-
-        Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testSummaryMinimal() throws IOException, InterruptedException {
-        Summary summary = Summary.build()
-                .name("request_size")
-                .help("request size")
-                .register(origRegistry);
-
-        Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testInfoComplete() throws IOException, InterruptedException {
-        Info info = Info.build()
-                .name("version")
-                .help("version information")
-                .labelNames("env")
-                .register(origRegistry);
-        info.labels("prod").info("major_version", "12", "minor_version", "3");
-        Thread.sleep(3);
-        info.labels("dev").info("major_version", "13", "minor_version", "1");
-
-        Assert.assertEquals(fixBoolean(sort(origOpenMetrics())), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testInfoMinimal() throws IOException, InterruptedException {
-        Info info = Info.build()
-                .name("jvm")
-                .help("JVM info")
-                .register(origRegistry);
-        info.info("version", "17");
-
-        Assert.assertEquals(fixBoolean(sort(origOpenMetrics())), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testStateSetComplete() throws IOException {
-        Collector stateSet = new Collector() {
-            @Override
-            public List<MetricFamilySamples> collect() {
-                List<MetricFamilySamples.Sample> samples = new ArrayList<>();
-                samples.add(new Collector.MetricFamilySamples.Sample("state", Arrays.asList("env", "state"), Arrays.asList("dev", "state1"), 1.0));
-                samples.add(new Collector.MetricFamilySamples.Sample("state", Arrays.asList("env", "state"), Arrays.asList("dev", "state2"), 0.0));
-                return Collections.singletonList(new Collector.MetricFamilySamples("state", Collector.Type.STATE_SET, "my state", samples));
-            }
+            .name("response_size_bytes")
+            .help("response size in Bytes")
+            .labelNames("status")
+            .buckets(64, 256, 512.1)
+            .register(origRegistry);
+    histogram.labels("200").observeWithExemplar(38, "trace_id", "1", "span_id", "2");
+    histogram.labels("200").observeWithExemplar(127, "trace_id", "3", "span_id", "4");
+    histogram.labels("200").observeWithExemplar(130, "trace_id", "5", "span_id", "6");
+    histogram.labels("200").observeWithExemplar(40, "trace_id", "7", "span_id", "8");
+    histogram.labels("200").observeWithExemplar(41, "trace_id", "9", "span_id", "10");
+    Thread.sleep(3); // make timestamps a bit different
+    histogram.labels("500").observeWithExemplar(10000, "trace_id", "11", "span_id", "12");
+
+    Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testHistogramMinimal() throws IOException, InterruptedException {
+    Histogram.build().name("request_latency").help("request latency").register(origRegistry);
+
+    Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testSummaryComplete() throws IOException, InterruptedException {
+    Summary summary =
+        Summary.build()
+            .name("http_request_duration_seconds")
+            .help("request duration")
+            .labelNames("path", "status")
+            .quantile(0.5, 0.01)
+            .quantile(0.95, 0.01)
+            .quantile(0.99, 0.001)
+            .register(origRegistry);
+    summary.labels("/", "200").observe(0.2);
+    Thread.sleep(3);
+    summary.labels("/info", "200").observe(0.7);
+    summary.labels("/info", "200").observe(0.8);
+    summary.labels("/info", "200").observe(0.9);
+    Thread.sleep(3);
+    summary.labels("/", "500").observe(0.3);
+    summary.labels("/", "500").observe(0.31);
+    summary.labels("/", "500").observe(0.32);
+
+    Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testSummaryMinimal() throws IOException, InterruptedException {
+    Summary summary =
+        Summary.build().name("request_size").help("request size").register(origRegistry);
+
+    Assert.assertEquals(fixCounts(fixTimestamps(sort(origOpenMetrics()))), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testInfoComplete() throws IOException, InterruptedException {
+    Info info =
+        Info.build()
+            .name("version")
+            .help("version information")
+            .labelNames("env")
+            .register(origRegistry);
+    info.labels("prod").info("major_version", "12", "minor_version", "3");
+    Thread.sleep(3);
+    info.labels("dev").info("major_version", "13", "minor_version", "1");
+
+    Assert.assertEquals(fixBoolean(sort(origOpenMetrics())), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testInfoMinimal() throws IOException, InterruptedException {
+    Info info = Info.build().name("jvm").help("JVM info").register(origRegistry);
+    info.info("version", "17");
+
+    Assert.assertEquals(fixBoolean(sort(origOpenMetrics())), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testStateSetComplete() throws IOException {
+    Collector stateSet =
+        new Collector() {
+          @Override
+          public List<MetricFamilySamples> collect() {
+            List<MetricFamilySamples.Sample> samples = new ArrayList<>();
+            samples.add(
+                new Collector.MetricFamilySamples.Sample(
+                    "state", Arrays.asList("env", "state"), Arrays.asList("dev", "state1"), 1.0));
+            samples.add(
+                new Collector.MetricFamilySamples.Sample(
+                    "state", Arrays.asList("env", "state"), Arrays.asList("dev", "state2"), 0.0));
+            return Collections.singletonList(
+                new Collector.MetricFamilySamples(
+                    "state", Collector.Type.STATE_SET, "my state", samples));
+          }
         };
-        origRegistry.register(stateSet);
-
-        Assert.assertEquals(fixBoolean(sort(origOpenMetrics())), sort(newOpenMetrics()));
-    }
-
-    @Test
-    public void testUnknownComplete() throws IOException {
-        Collector unknown = new Collector() {
-            @Override
-            public List<MetricFamilySamples> collect() {
-                List<MetricFamilySamples.Sample> samples = new ArrayList<>();
-                samples.add(new Collector.MetricFamilySamples.Sample("my_unknown_metric_seconds", Arrays.asList("env", "status"), Arrays.asList("dev", "ok"), 3.0));
-                samples.add(new Collector.MetricFamilySamples.Sample("my_unknown_metric_seconds", Arrays.asList("env", "status"), Arrays.asList("prod", "error"), 0.0));
-                return Collections.singletonList(new Collector.MetricFamilySamples("my_unknown_metric_seconds", "seconds", Type.UNKNOWN, "test metric of type unknown", samples));
-            }
+    origRegistry.register(stateSet);
+
+    Assert.assertEquals(fixBoolean(sort(origOpenMetrics())), sort(newOpenMetrics()));
+  }
+
+  @Test
+  public void testUnknownComplete() throws IOException {
+    Collector unknown =
+        new Collector() {
+          @Override
+          public List<MetricFamilySamples> collect() {
+            List<MetricFamilySamples.Sample> samples = new ArrayList<>();
+            samples.add(
+                new Collector.MetricFamilySamples.Sample(
+                    "my_unknown_metric_seconds",
+                    Arrays.asList("env", "status"),
+                    Arrays.asList("dev", "ok"),
+                    3.0));
+            samples.add(
+                new Collector.MetricFamilySamples.Sample(
+                    "my_unknown_metric_seconds",
+                    Arrays.asList("env", "status"),
+                    Arrays.asList("prod", "error"),
+                    0.0));
+            return Collections.singletonList(
+                new Collector.MetricFamilySamples(
+                    "my_unknown_metric_seconds",
+                    "seconds",
+                    Type.UNKNOWN,
+                    "test metric of type unknown",
+                    samples));
+          }
         };
-        origRegistry.register(unknown);
-
-        Assert.assertEquals(sort(origOpenMetrics()), sort(newOpenMetrics()));
-    }
-
-    private String fixBoolean(String s) {
-        return s.replaceAll(" 1.0", " 1").replaceAll(" 0.0", " 0");
-    }
-
-    private String sort(String s) {
-        String[] lines = s.split("\n");
-        Arrays.sort(lines);
-        return String.join("\n", lines);
-    }
-
-    private String fixTimestamps(String s) {
-        // Example of a "_created" timestamp in orig format: 1.694464002939E9
-        // Example of a "_created" timestamp in new format: 1694464002.939
-        // The following regex translates the orig timestamp to the new timestamp
-        return s
-                .replaceAll("1\\.([0-9]{9})([0-9]{3})E9", "1$1.$2")   // Example: 1.694464002939E9
-                .replaceAll("1\\.([0-9]{9})([0-9]{2})E9", "1$1.$20")  // Example: 1.69460725747E9
-                .replaceAll("1\\.([0-9]{9})([0-9])E9", "1$1.$200") // Example: 1.6946072574E9
-                .replaceAll("1\\.([0-9]{9})E9", "1$1.000")  // Example: 1.712332231E9
-                .replaceAll("1\\.([0-9]{8})E9", "1$10.000") // Example: 1.71233242E9
-                .replaceAll("1\\.([0-9]{7})E9", "1$100.000") // Example: 1.7123324E9
-                .replaceAll("1\\.([0-9]{6})E9", "1$1000.000")
-                .replaceAll("1\\.([0-9]{5})E9", "1$10000.000")
-                .replaceAll("1\\.([0-9]{4})E9", "1$100000.000")
-                .replaceAll("1\\.([0-9]{3})E9", "1$1000000.000")
-                .replaceAll("1\\.([0-9]{2})E9", "1$10000000.000");
-    }
-
-    private String fixCounts(String s) {
-        // Example of a "_count" or "_bucket" in orig format: 3.0
-        // Example of a "_count" or "_bucket" in new format: 3
-        // The following regex translates the orig bucket counts to the new bucket counts
-        return s.replaceAll("((_count|_bucket)(\\{[^}]*})? [0-9])\\.0", "$1");
-    }
-
-    private String origOpenMetrics() throws IOException {
-        StringWriter out = new StringWriter();
-        TextFormat.writeOpenMetrics100(out, origRegistry.metricFamilySamples());
-        return out.toString();
-    }
-
-    private String newOpenMetrics() throws IOException {
-        ByteArrayOutputStream out = new ByteArrayOutputStream();
-        OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, false);
-        writer.write(out, newRegistry.scrape());
-        return out.toString(StandardCharsets.UTF_8.name());
-    }
+    origRegistry.register(unknown);
+
+    Assert.assertEquals(sort(origOpenMetrics()), sort(newOpenMetrics()));
+  }
+
+  private String fixBoolean(String s) {
+    return s.replaceAll(" 1.0", " 1").replaceAll(" 0.0", " 0");
+  }
+
+  private String sort(String s) {
+    String[] lines = s.split("\n");
+    Arrays.sort(lines);
+    return String.join("\n", lines);
+  }
+
+  private String fixTimestamps(String s) {
+    // Example of a "_created" timestamp in orig format: 1.694464002939E9
+    // Example of a "_created" timestamp in new format: 1694464002.939
+    // The following regex translates the orig timestamp to the new timestamp
+    return s.replaceAll("1\\.([0-9]{9})([0-9]{3})E9", "1$1.$2") // Example: 1.694464002939E9
+        .replaceAll("1\\.([0-9]{9})([0-9]{2})E9", "1$1.$20") // Example: 1.69460725747E9
+        .replaceAll("1\\.([0-9]{9})([0-9])E9", "1$1.$200") // Example: 1.6946072574E9
+        .replaceAll("1\\.([0-9]{9})E9", "1$1.000") // Example: 1.712332231E9
+        .replaceAll("1\\.([0-9]{8})E9", "1$10.000") // Example: 1.71233242E9
+        .replaceAll("1\\.([0-9]{7})E9", "1$100.000") // Example: 1.7123324E9
+        .replaceAll("1\\.([0-9]{6})E9", "1$1000.000")
+        .replaceAll("1\\.([0-9]{5})E9", "1$10000.000")
+        .replaceAll("1\\.([0-9]{4})E9", "1$100000.000")
+        .replaceAll("1\\.([0-9]{3})E9", "1$1000000.000")
+        .replaceAll("1\\.([0-9]{2})E9", "1$10000000.000");
+  }
+
+  private String fixCounts(String s) {
+    // Example of a "_count" or "_bucket" in orig format: 3.0
+    // Example of a "_count" or "_bucket" in new format: 3
+    // The following regex translates the orig bucket counts to the new bucket counts
+    return s.replaceAll("((_count|_bucket)(\\{[^}]*})? [0-9])\\.0", "$1");
+  }
+
+  private String origOpenMetrics() throws IOException {
+    StringWriter out = new StringWriter();
+    TextFormat.writeOpenMetrics100(out, origRegistry.metricFamilySamples());
+    return out.toString();
+  }
+
+  private String newOpenMetrics() throws IOException {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    OpenMetricsTextFormatWriter writer = new OpenMetricsTextFormatWriter(true, false);
+    writer.write(out, newRegistry.scrape());
+    return out.toString(StandardCharsets.UTF_8.name());
+  }
 }
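The fixTimestamps helper above converts simpleclient's scientific-notation "_created" timestamps into the decimal form produced by the new OpenMetrics writer. A worked example of the first replacement rule, using an illustrative metric name:

    String orig = "some_metric_created 1.694464002939E9";
    String fixed = orig.replaceAll("1\\.([0-9]{9})([0-9]{3})E9", "1$1.$2");
    // fixed == "some_metric_created 1694464002.939"  (group 1 = "694464002", group 2 = "939")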
diff --git a/prometheus-metrics-tracer/prometheus-metrics-tracer-common/src/main/java/io/prometheus/metrics/tracer/common/SpanContext.java b/prometheus-metrics-tracer/prometheus-metrics-tracer-common/src/main/java/io/prometheus/metrics/tracer/common/SpanContext.java
index e71cef98e..7ff30a09f 100644
--- a/prometheus-metrics-tracer/prometheus-metrics-tracer-common/src/main/java/io/prometheus/metrics/tracer/common/SpanContext.java
+++ b/prometheus-metrics-tracer/prometheus-metrics-tracer-common/src/main/java/io/prometheus/metrics/tracer/common/SpanContext.java
@@ -6,19 +6,22 @@ public interface SpanContext {
   String EXEMPLAR_ATTRIBUTE_VALUE = "true";
 
   /**
-   * @return the current trace id, or {@code null} if this call is not happening within a span context.
+   * @return the current trace id, or {@code null} if this call is not happening within a span
+   *     context.
    */
   String getCurrentTraceId();
 
   /**
-   * @return the current span id, or {@code null} if this call is not happening within a span context.
+   * @return the current span id, or {@code null} if this call is not happening within a span
+   *     context.
    */
   String getCurrentSpanId();
-  
+
   /**
-   * @return the state of the current Span. If this value is false a component before in the chain take the decision to not record it. Subsequent calling service have
-   * to respect this value in order not to have partial TraceID with only some Span in it. This value is important to be sure to choose a recorded Trace in Examplar
-   * sampling process
+   * @return the state of the current Span. If this value is false, a component earlier in the
+   *     chain decided not to record it. Subsequent calling services have to respect this value
+   *     in order not to end up with a partial TraceID containing only some of its Spans. This
+   *     value is important for choosing a recorded Trace in the Exemplar sampling process.
    */
   boolean isCurrentSpanSampled();
 
diff --git a/prometheus-metrics-tracer/prometheus-metrics-tracer-initializer/src/main/java/io/prometheus/metrics/tracer/initializer/SpanContextSupplier.java b/prometheus-metrics-tracer/prometheus-metrics-tracer-initializer/src/main/java/io/prometheus/metrics/tracer/initializer/SpanContextSupplier.java
index caecb6055..11d14ed8f 100644
--- a/prometheus-metrics-tracer/prometheus-metrics-tracer-initializer/src/main/java/io/prometheus/metrics/tracer/initializer/SpanContextSupplier.java
+++ b/prometheus-metrics-tracer/prometheus-metrics-tracer-initializer/src/main/java/io/prometheus/metrics/tracer/initializer/SpanContextSupplier.java
@@ -3,43 +3,43 @@
 import io.prometheus.metrics.tracer.common.SpanContext;
 import io.prometheus.metrics.tracer.otel.OpenTelemetrySpanContext;
 import io.prometheus.metrics.tracer.otel_agent.OpenTelemetryAgentSpanContext;
-
 import java.util.concurrent.atomic.AtomicReference;
 
 public class SpanContextSupplier {
 
-    private static final AtomicReference<SpanContext> spanContextRef = new AtomicReference<SpanContext>();
-
-    public static void setSpanContext(SpanContext spanContext) {
-        spanContextRef.set(spanContext);
-    }
-
-    public static boolean hasSpanContext() {
-        return getSpanContext() != null;
+  private static final AtomicReference<SpanContext> spanContextRef =
+      new AtomicReference<SpanContext>();
+
+  public static void setSpanContext(SpanContext spanContext) {
+    spanContextRef.set(spanContext);
+  }
+
+  public static boolean hasSpanContext() {
+    return getSpanContext() != null;
+  }
+
+  public static SpanContext getSpanContext() {
+    return spanContextRef.get();
+  }
+
+  static {
+    try {
+      if (OpenTelemetrySpanContext.isAvailable()) {
+        spanContextRef.set(new OpenTelemetrySpanContext());
+      }
+    } catch (NoClassDefFoundError ignored) {
+      // tracer_otel dependency not found
+    } catch (UnsupportedClassVersionError ignored) {
+      // OpenTelemetry requires Java 8, but client_java might run on Java 6.
     }
-
-    public static SpanContext getSpanContext() {
-        return spanContextRef.get();
-    }
-
-    static {
-        try {
-            if (OpenTelemetrySpanContext.isAvailable()) {
-                spanContextRef.set(new OpenTelemetrySpanContext());
-            }
-        } catch (NoClassDefFoundError ignored) {
-            // tracer_otel dependency not found
-        } catch (UnsupportedClassVersionError ignored) {
-            // OpenTelemetry requires Java 8, but client_java might run on Java 6.
-        }
-        try {
-            if (OpenTelemetryAgentSpanContext.isAvailable()) {
-                spanContextRef.set(new OpenTelemetryAgentSpanContext());
-            }
-        } catch (NoClassDefFoundError ignored) {
-            // tracer_otel_agent dependency not found
-        } catch (UnsupportedClassVersionError ignored) {
-            // OpenTelemetry requires Java 8, but client_java might run on Java 6.
-        }
+    try {
+      if (OpenTelemetryAgentSpanContext.isAvailable()) {
+        spanContextRef.set(new OpenTelemetryAgentSpanContext());
+      }
+    } catch (NoClassDefFoundError ignored) {
+      // tracer_otel_agent dependency not found
+    } catch (UnsupportedClassVersionError ignored) {
+      // OpenTelemetry requires Java 8, but client_java might run on Java 6.
     }
+  }
 }
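Taken together with the reworded isCurrentSpanSampled() contract above, this supplier is what drives exemplar sampling. A hedged sketch of how an exemplar sampler might respect the sampling decision (maybeRecordExemplar is an illustrative helper, not an API from this patch):

    void maybeRecordExemplar(double observedValue) {
      SpanContext spanContext = SpanContextSupplier.getSpanContext();
      if (spanContext == null || !spanContext.isCurrentSpanSampled()) {
        return; // an upstream component decided not to record this trace, so skip the exemplar
      }
      String traceId = spanContext.getCurrentTraceId();
      String spanId = spanContext.getCurrentSpanId();
      if (traceId != null && spanId != null) {
        spanContext.markCurrentSpanAsExemplar();
        // ... build the exemplar with trace_id/span_id labels and observedValue
      }
    }

Applications that do not use OpenTelemetry can install their own implementation via SpanContextSupplier.setSpanContext(...) instead of relying on the auto-detection in the static initializer.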
diff --git a/prometheus-metrics-tracer/prometheus-metrics-tracer-otel-agent/src/main/java/io/prometheus/metrics/tracer/otel_agent/OpenTelemetryAgentSpanContext.java b/prometheus-metrics-tracer/prometheus-metrics-tracer-otel-agent/src/main/java/io/prometheus/metrics/tracer/otel_agent/OpenTelemetryAgentSpanContext.java
index 36cba7076..3c757ed6b 100644
--- a/prometheus-metrics-tracer/prometheus-metrics-tracer-otel-agent/src/main/java/io/prometheus/metrics/tracer/otel_agent/OpenTelemetryAgentSpanContext.java
+++ b/prometheus-metrics-tracer/prometheus-metrics-tracer-otel-agent/src/main/java/io/prometheus/metrics/tracer/otel_agent/OpenTelemetryAgentSpanContext.java
@@ -6,9 +6,9 @@
 import io.prometheus.metrics.tracer.common.SpanContext;
 
 /**
- * This is exactly the same as the {@code OpenTelemetrySpanContextSupplier}.
- * However, the {@code io.opentelemetry.api} package is relocated to
- * {@code io.opentelemetry.javaagent.shaded.io.opentelemetry.api} in the OpenTelemetry agent.
+ * This is exactly the same as the {@code OpenTelemetrySpanContextSupplier}. However, the {@code
+ * io.opentelemetry.api} package is relocated to {@code
+ * io.opentelemetry.javaagent.shaded.io.opentelemetry.api} in the OpenTelemetry agent.
  */
 public class OpenTelemetryAgentSpanContext implements SpanContext {
 
@@ -21,9 +21,11 @@ public static boolean isAvailable() {
       return true;
     } catch (LinkageError ignored) {
       // NoClassDefFoundError:
-      //   Either OpenTelemetry is not present, or it is version 0.9.1 or older when io.opentelemetry.api.trace.Span did not exist.
+      //   Either OpenTelemetry is not present, or it is version 0.9.1 or older when
+      // io.opentelemetry.api.trace.Span did not exist.
       // IncompatibleClassChangeError:
-      //   The application uses an OpenTelemetry version between 0.10.0 and 0.15.0 when SpanContext was a class, and not an interface.
+      //   The application uses an OpenTelemetry version between 0.10.0 and 0.15.0 when SpanContext
+      // was a class, and not an interface.
       return false;
     }
   }
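The javadoc above is the key point of this class: inside the OpenTelemetry javaagent the API package is relocated, so the agent variant must compile against the shaded name. Purely for orientation (comment only, not part of the patch):

    // Relocated name seen when running under the OpenTelemetry javaagent:
    //   io.opentelemetry.javaagent.shaded.io.opentelemetry.api.trace.Span
    // Unshaded name used by the regular OpenTelemetry API dependency:
    //   io.opentelemetry.api.trace.Span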
diff --git a/prometheus-metrics-tracer/prometheus-metrics-tracer-otel/src/main/java/io/prometheus/metrics/tracer/otel/OpenTelemetrySpanContext.java b/prometheus-metrics-tracer/prometheus-metrics-tracer-otel/src/main/java/io/prometheus/metrics/tracer/otel/OpenTelemetrySpanContext.java
index dd338cea2..73f8d1316 100644
--- a/prometheus-metrics-tracer/prometheus-metrics-tracer-otel/src/main/java/io/prometheus/metrics/tracer/otel/OpenTelemetrySpanContext.java
+++ b/prometheus-metrics-tracer/prometheus-metrics-tracer-otel/src/main/java/io/prometheus/metrics/tracer/otel/OpenTelemetrySpanContext.java
@@ -7,7 +7,7 @@
 
 public class OpenTelemetrySpanContext implements SpanContext {
 
-	public static boolean isAvailable() {
+  public static boolean isAvailable() {
     try {
       OpenTelemetrySpanContext test = new OpenTelemetrySpanContext();
       test.getCurrentSpanId();
@@ -16,32 +16,34 @@ public static boolean isAvailable() {
       return true;
     } catch (LinkageError ignored) {
       // NoClassDefFoundError:
-      //   Either OpenTelemetry is not present, or it is version 0.9.1 or older when io.opentelemetry.api.trace.Span did not exist.
+      //   Either OpenTelemetry is not present, or it is version 0.9.1 or older when
+      // io.opentelemetry.api.trace.Span did not exist.
       // IncompatibleClassChangeError:
-      //   The application uses an OpenTelemetry version between 0.10.0 and 0.15.0 when SpanContext was a class, and not an interface.
+      //   The application uses an OpenTelemetry version between 0.10.0 and 0.15.0 when SpanContext
+      // was a class, and not an interface.
       return false;
     }
   }
 
-	@Override
-	public String getCurrentTraceId() {
-		String traceId = Span.current().getSpanContext().getTraceId();
-		return TraceId.isValid(traceId) ? traceId : null;
-	}
+  @Override
+  public String getCurrentTraceId() {
+    String traceId = Span.current().getSpanContext().getTraceId();
+    return TraceId.isValid(traceId) ? traceId : null;
+  }
 
-	@Override
-	public String getCurrentSpanId() {
-		String spanId = Span.current().getSpanContext().getSpanId();
-		return SpanId.isValid(spanId) ? spanId : null;
-	}
+  @Override
+  public String getCurrentSpanId() {
+    String spanId = Span.current().getSpanContext().getSpanId();
+    return SpanId.isValid(spanId) ? spanId : null;
+  }
 
-	@Override
-	public boolean isCurrentSpanSampled() {
-		return Span.current().getSpanContext().isSampled();
-	}
+  @Override
+  public boolean isCurrentSpanSampled() {
+    return Span.current().getSpanContext().isSampled();
+  }
 
-	@Override
-	public void markCurrentSpanAsExemplar() {
-		Span.current().setAttribute(EXEMPLAR_ATTRIBUTE_NAME, EXEMPLAR_ATTRIBUTE_VALUE);
-	}
+  @Override
+  public void markCurrentSpanAsExemplar() {
+    Span.current().setAttribute(EXEMPLAR_ATTRIBUTE_NAME, EXEMPLAR_ATTRIBUTE_VALUE);
+  }
 }