
Commit

wip
hughsimpson committed Aug 22, 2023
1 parent ad2044d commit cf5c6a0
Showing 2 changed files with 72 additions and 19 deletions.
@@ -18,7 +18,7 @@ package kamon.otel
import io.opentelemetry.sdk.common.InstrumentationScopeInfo
import io.opentelemetry.sdk.metrics.data._
import io.opentelemetry.sdk.metrics.internal.data._
import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.{ExponentialHistogramData, ExponentialHistogramPointData, ImmutableExponentialHistogramData}
import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.{ExponentialHistogramBuckets, ExponentialHistogramData, ExponentialHistogramPointData, ImmutableExponentialHistogramData}
import io.opentelemetry.sdk.resources.Resource
import kamon.metric.Instrument.Snapshot
import kamon.metric.{Distribution, MeasurementUnit, MetricSnapshot, PeriodSnapshot}
@@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory

import java.lang.{Double => JDouble, Long => JLong}
import java.time.Instant
import java.util
import java.util.{ArrayList => JArrayList, Collection => JCollection}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
@@ -112,26 +113,36 @@ class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, fro
}

private def getExpoBucketCounts(maxBucketCount: Int)(s: Snapshot[Distribution]) = {
val min = s.value.min
val max = s.value.max
def maxScale(v: JDouble): Int = MetricsConverter.maxScale(maxBucketCount)(v)

val scale = Math.min(maxScale(min.toDouble), maxScale(max.toDouble))
val base = Math.pow(2, Math.pow(2, -scale))
// lower boundary of index 0 is always 0 (inclusive) https://opentelemetry.io/blog/2023/exponential-histograms/#bucket-calculation
val lowerBoundaryIterator: Iterator[Double] = (-maxBucketCount to maxBucketCount).map(i => Math.pow(base, i)).iterator

val counts = ArrayBuffer.newBuilder[JLong]
// Placeholder for the OpenTelemetry buckets view; offset and counts are not implemented yet
val expoBuckets = new ExponentialHistogramBuckets {
def getOffset: Int = ???
def getBucketCounts: util.List[JLong] = ???
def getTotalCount: Long = ???
}
// val boundaryIterator: Iterator[JDouble] = (bucketConfiguration :+ maxDouble).iterator
// var nextBoundary = boundaryIterator.next()
// var inBucketCount = 0L
// for (el <- s.value.bucketsIterator) {
// while (el.value > nextBoundary) {
// nextBoundary = boundaryIterator.next()
// counts += inBucketCount
// inBucketCount = 0L
// }
// inBucketCount += el.frequency
// }
// while (boundaryIterator.hasNext) {
// counts += inBucketCount
// boundaryIterator.next()
// inBucketCount = 0L
// }
// counts += inBucketCount
var nextBoundary = lowerBoundaryIterator.next()
var inBucketCount = 0L
// Walk the Kamon distribution buckets, rolling their frequencies up into the exponential buckets
for (el <- s.value.bucketsIterator) {
while (el.value > nextBoundary) {
nextBoundary = lowerBoundaryIterator.next()
counts += inBucketCount
inBucketCount = 0L
}
inBucketCount += el.frequency
}
// Flush the last accumulated count and pad any remaining boundaries with empty buckets
while (lowerBoundaryIterator.hasNext) {
counts += inBucketCount
lowerBoundaryIterator.next()
inBucketCount = 0L
}
counts += inBucketCount
counts
}
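For orientation (not part of the commit): the boundary arithmetic above hinges on base = 2^(2^-scale), so bucket i has lower boundary base^i. A minimal standalone sketch, with an arbitrarily chosen scale and bucket range:

// Illustrative sketch only; the scale and bucket range here are made up for the example.
object ExpoBoundaryDemo extends App {
  val scale = 3
  val base = Math.pow(2, Math.pow(2, -scale)) // 2^(1/8) ≈ 1.0905
  // lower boundaries for buckets -4 to 4, mirroring (-maxBucketCount to maxBucketCount) above
  val lowerBoundaries = (-4 to 4).map(i => Math.pow(base, i))
  println(lowerBoundaries.map(b => f"$b%.4f").mkString(", "))
  // ≈ 0.7071, 0.7711, 0.8409, 0.9170, 1.0000, 1.0905, 1.1892, 1.2968, 1.4142
}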

@@ -194,6 +205,9 @@ class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, fro
private[otel] object MetricsConverter {
type ExplBucketFn = (String, MeasurementUnit) => Seq[JDouble]
type ExpoBucketFn = (String, MeasurementUnit) => Int
// OpenTelemetry base-2 exponential histograms use scales in the range [-10, 20]
private val minScale = -10
private val maxScale = 20

def convert(resource: Resource, kamonVersion: String, histogramFormat: HistogramFormat,
explicitBucketConfig: ExplBucketFn, exponentialBucketConfig: ExpoBucketFn)(metrics: PeriodSnapshot): JCollection[MetricData] = {
val converter = new WithResourceMetricsConverter(resource, kamonVersion, metrics.from, metrics.to, explicitBucketConfig, exponentialBucketConfig)
@@ -204,4 +218,10 @@ private[otel] object MetricsConverter {

(gauges ++ histograms ++ counters).asJava
}

// Pre-computed (scale, base) pairs from the finest scale (20) down to the coarsest (-10), where base = 2^(2^-scale)
private val bases = (maxScale to minScale by -1).map(scale => (scale, Math.pow(2, Math.pow(2, -scale)))).toArray

// Picks the finest scale at which maxBucketCount exponential buckets still reach v (above 1 for v >= 1, below 1 otherwise)
def maxScale(maxBucketCount: Int)(v: JDouble): Int = {
if (v >= 1) bases.collectFirst { case (scale, base) if Math.pow(base, maxBucketCount) >= v => scale }.getOrElse(minScale)
else bases.collectFirst { case (scale, base) if Math.pow(base, -maxBucketCount) <= v => scale }.getOrElse(minScale)
}
}
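A rough usage sketch of the scale selection above; the sample values and expected scales are my own, and it assumes a caller inside the kamon.otel package since MetricsConverter is package-private:

// Illustrative only: larger values force coarser (smaller) scales for a fixed bucket budget.
val scaleForSmall = MetricsConverter.maxScale(160)(2.0)    // scale 7: 2^(160 / 2^7) = 2^1.25 >= 2
val scaleForLarge = MetricsConverter.maxScale(160)(1.0e12) // scale 2: 2^(160 / 2^2) = 2^40 >= 1e12
assert(scaleForSmall >= scaleForLarge)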
@@ -31,6 +31,7 @@ import java.time.Instant
import java.util.{Collection => JCollection}
import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Random

class OpenTelemetryMetricReporterSpec extends AnyWordSpec
with Matchers with Reconfigure {
@@ -147,6 +148,38 @@ class OpenTelemetryMetricReporterSpec extends AnyWordSpec
points.head.getBoundaries.asScala shouldEqual Seq[JDouble](1d, 2d, 3d, 4d, 10d)
points.head.getCounts.asScala shouldEqual Seq[JDouble](2d, 2d, 3d, 0d, 1d, 1d)
}
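One note on the counts assertion just above: with five explicit boundaries there are six counts, one per interval plus a final overflow bucket, since OpenTelemetry explicit-bucket histograms treat each boundary as an upper, inclusive bound. An illustrative pairing, reusing the test's values:

// Illustrative only: pair each count with the interval it covers.
val boundaries = Seq(1d, 2d, 3d, 4d, 10d)
val bucketCounts = Seq(2L, 2L, 3L, 0L, 1L, 1L)
val intervals = ("-inf" +: boundaries.map(_.toString)).zip(boundaries.map(_.toString) :+ "+inf")
intervals.zip(bucketCounts).foreach { case ((lo, hi), c) => println(s"($lo, $hi]: $c") }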

"calculate sensible scales for values" in {
def randomDouble = Random.nextInt(10) match {
case 0 => 0d
case 1 => Random.nextDouble() * 1e-18
case 2 => Random.nextDouble() * 1e-12
case 3 => Random.nextDouble() * 1e-6
case 4 => Random.nextDouble() * 1e-3
case 5 => Random.nextDouble()
case 6 => Random.nextDouble() * 1e3
case 7 => Random.nextDouble() * 1e6
case 8 => Random.nextDouble() * 1e12
case 9 => Random.nextDouble() * 1e18
}

for (v <- (0 to 100).map(_ => randomDouble); maxBucketCount <- (0 to 10).map(_ => Random.nextInt(320))) {
val scale = MetricsConverter.maxScale(maxBucketCount)(v)
val baseFromScale = Math.pow(2, Math.pow(2, -scale))
val baseFromScale_plus_1 = Math.pow(2, Math.pow(2, -scale - 1))
val maxFromBase = Math.pow(baseFromScale, maxBucketCount)
val minFromBase = 1d / Math.pow(baseFromScale, maxBucketCount)
val maxFromBase_plus_1 = Math.pow(baseFromScale_plus_1, maxBucketCount)
val minFromBase_plus_1 = 1d / Math.pow(baseFromScale_plus_1, maxBucketCount)
// The chosen scale must still cover v (unless it is the -10 fallback),
// and the next finer scale must not (unless we are already at the maximum, 20).
if (v >= 1) {
if (scale != -10) maxFromBase should be >= v
if (scale != 20) maxFromBase_plus_1 should be <= v
} else {
if (scale != -10) minFromBase should be <= v
if (scale != 20) minFromBase_plus_1 should be >= v
}
}
}
}

private def buildHistogramDist(_buckets: Seq[(Long, Long)]): Distribution = {
