package io.opentelemetry.example.metrics;

import static io.opentelemetry.api.common.AttributeKey.stringKey;

import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.LongHistogram;
import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.exporter.otlp.metrics.OtlpGrpcMetricExporter;
import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.InstrumentSelector;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.View;
import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader;
import java.time.Duration;
import java.util.Random;

/**
 * Configures two histograms to use the base2ExponentialBucketHistogram aggregation: one with the
 * default configuration and one with a custom maxScale.
 */
public class ExponentialHistogramExample {
  private final OpenTelemetry otel;

  public ExponentialHistogramExample(OpenTelemetry otel) {
    this.otel = otel;
  }

  public static void main(String[] args) throws InterruptedException {
    SdkMeterProvider sdkMeterProvider =
        SdkMeterProvider.builder()
            .registerView(
                // Target histograms whose names match this wildcard pattern and apply a custom
                // maxScale of 4. A smaller maxScale starts with coarser (wider) buckets, trading
                // resolution for fewer downscale operations as recorded values spread out.
                InstrumentSelector.builder().setName("*custom_scale*").build(),
                View.builder()
                    .setAggregation(Aggregation.base2ExponentialBucketHistogram(160, 4))
                    .build())
            .registerView(
                // Target this one histogram and use the defaults (maxBuckets: 160, maxScale: 20).
                InstrumentSelector.builder().setName("job.duration").build(),
                View.builder()
                    .setAggregation(Aggregation.base2ExponentialBucketHistogram())
                    .build())
            .registerMetricReader(
                PeriodicMetricReader.builder(OtlpGrpcMetricExporter.builder().build())
                    // Default is 60000ms (60 seconds). Set to 10 seconds for demonstration
                    // purposes only.
                    .setInterval(Duration.ofSeconds(10))
                    .build())
            .build();

    OpenTelemetry sdk = OpenTelemetrySdk.builder().setMeterProvider(sdkMeterProvider).build();

    new ExponentialHistogramExample(sdk).run();
  }

  void run() throws InterruptedException {
    Meter meter = otel.getMeter("io.opentelemetry.example.metrics");

    LongHistogram histogram =
        meter
            .histogramBuilder("job.duration")
            .ofLongs()
            .setDescription("A distribution of job execution time")
            .setUnit("seconds")
            .build();

    LongHistogram customScaleHistogram =
        meter
            .histogramBuilder("job2.custom_scale.duration")
            .ofLongs()
            .setDescription("A distribution of job2's execution time using a custom scale value.")
            .setUnit("seconds")
            .build();

    Random rand = new Random();
    Attributes attrs = Attributes.of(stringKey("job"), "update_database");
    long metricPoint;

    // Record a random duration to both histograms once per second, indefinitely.
    // Note: Random.nextLong(bound) requires Java 17 or later.
    while (true) {
      metricPoint = rand.nextLong(1000);
      histogram.record(metricPoint, attrs);
      customScaleHistogram.record(metricPoint, attrs);
      Thread.sleep(1000);
    }
  }
}
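
For a quick local check without a running OTLP endpoint, the same view configuration can be exercised against an in-memory reader. The sketch below is an illustration only, not part of the example: it assumes the opentelemetry-sdk-testing artifact is on the classpath for InMemoryMetricReader, and the class name ExponentialHistogramVerification is hypothetical. Metrics collected for the viewed instrument should report the EXPONENTIAL_HISTOGRAM data type.

import io.opentelemetry.sdk.OpenTelemetrySdk;
import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.InstrumentSelector;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.View;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;

public class ExponentialHistogramVerification {
  public static void main(String[] args) {
    // Collect metrics in memory instead of exporting them over OTLP.
    InMemoryMetricReader reader = InMemoryMetricReader.create();

    SdkMeterProvider meterProvider =
        SdkMeterProvider.builder()
            .registerView(
                InstrumentSelector.builder().setName("job.duration").build(),
                View.builder()
                    .setAggregation(Aggregation.base2ExponentialBucketHistogram())
                    .build())
            .registerMetricReader(reader)
            .build();

    // Record a single value through the same instrument name the view targets.
    OpenTelemetrySdk.builder()
        .setMeterProvider(meterProvider)
        .build()
        .getMeter("io.opentelemetry.example.metrics")
        .histogramBuilder("job.duration")
        .ofLongs()
        .build()
        .record(42);

    // Each collected MetricData for the viewed instrument should print the
    // EXPONENTIAL_HISTOGRAM type.
    for (MetricData metric : reader.collectAllMetrics()) {
      System.out.println(metric.getName() + " -> " + metric.getType());
    }

    meterProvider.shutdown();
  }
}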