We are working on a research project evaluating Akka for the automotive industry, so we need to measure performance. I decided to use Kamon + StatsD + Grafana because it looked easy to get running.
My problem right now is that I am not getting any data from StatsD into Grafana. I also tried recording a histogram manually to generate some data, without success:
Kamon.metrics.histogram("test-histogram").record(100)
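In case the startup order matters: my understanding is that with Kamon 0.6.x nothing gets reported unless Kamon.start() runs before anything is recorded, and the JVM has to stay alive longer than the configured flush-interval before kamon-statsd sends anything. A minimal sketch of what I would expect to work (the object name is just a placeholder):

import kamon.Kamon

object KamonSmokeTest extends App {  // placeholder name, not the real entry point
  Kamon.start()                      // reads application.conf and starts the auto-start modules (statsd, log-reporter, ...)
  Kamon.metrics.histogram("test-histogram").record(100)
  Thread.sleep(15000)                // stay alive past flush-interval = 10 seconds so kamon-statsd gets a chance to flush
  Kamon.shutdown()
}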
When I create a new data source in Grafana, the only URL that works is http://127.0.0.1:8000, no matter what I configure in application.conf. I also tried using the Docker container IP as statsd.hostname, but without success.
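To rule out a pure connectivity problem, a raw packet in StatsD wire format can be sent straight to the configured host/port (diagnostic sketch only, using the 172.17.0.2:8125 from my kamon.statsd section):

import java.net.{DatagramPacket, DatagramSocket, InetAddress}

object StatsdPing extends App {  // placeholder name, only used to test reachability
  // one counter increment in plain StatsD wire format: <name>:<value>|c
  val payload = "statsd-ping.test:1|c".getBytes("UTF-8")
  val socket = new DatagramSocket()
  socket.send(new DatagramPacket(payload, payload.length, InetAddress.getByName("172.17.0.2"), 8125))
  socket.close()
}

If that counter never shows up in Graphite/Grafana either, the problem is most likely Docker networking rather than Kamon.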
Any idea what I am missing here?
My setup is as follows:
application.conf:
include "version"
akka {
loglevel = DEBUG
extensions = ["kamon.statsd.StatsD", "kamon.system.SystemMetrics", "kamon.logreporter.LogReporter"]
actor {
provider = "akka.cluster.ClusterActorRefProvider"
}
remote {
log-remote-lifecycle-events = off
netty.tcp {
//hostname = "132.199.59.153"
//hostname = "169.254.137.212"
hostname = "127.0.0.1"
//hostname = "172.16.32.193"
port = 2552
}
}
cluster {
seed-nodes = [
//"akka.tcp://ActorSystem@132.199.59.153:2552",
//"akka.tcp://ActorSystem@169.254.137.211:2552",
"akka.tcp://ActorSystem@127.0.0.1:2552",
]
}
}
kamon {
metric {
# Time interval for collecting all metrics and send the snapshots to all subscribed actors.
tick-interval = 1 second
# Specify if entities that do not match any include/exclude filter should be tracked.
track-unmatched-entities = yes
filters {
akka-actor {
includes = [ "**" ]
excludes = [ ]
}
akka-dispatcher {
includes = [ "**" ]
excludes = [ ]
}
trace {
includes = [ "**" ]
excludes = [ ]
}
}
}
statsd {
hostname = "172.17.0.2"
port = 8125
flush-interval = 10 seconds
max-packet-size = 1024 bytes
includes {
akka-actor = [ "**" ]
akka-dispatcher = [ "**" ]
trace = [ "**" ]
}
simple-metric-key-generator {
# Application prefix for all metrics pushed to StatsD. The default namespacing scheme for metrics follows
# this pattern:
# application.host.entity.entity-name.metric-name
application = "introduce-kamon-io"
}
}
modules {
kamon-statsd.auto-start = yes
kamon-akka.auto-start = yes
kamon-log-reporter.auto-start = yes
kamon-system-metrics.auto-start = yes
}
}
# Disable legacy metrics in akka-cluster.
//akka.cluster.metrics.enabled=off
# Enable metrics extension in akka-cluster-metrics.
//akka.extensions=["akka.cluster.metrics.ClusterMetricsExtension"]
# Sigar native library extract location during tests.
# Note: use per-jvm-instance folder when running multiple jvm on one host.
//akka.cluster.metrics.native-library-extract-folder=${user.dir}/target/native
build.sbt:
import com.typesafe.sbt.SbtAspectj._

name := "VideoProxy"

version := "1.0"

scalaVersion := "2.12.1"

resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"

val akkaVersion = "2.4.16"
val kamonVersion = "0.6.6"

libraryDependencies ++= Seq(
  "org.bytedeco" % "javacv-platform" % "1.3.1",
  "com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
  "com.typesafe.akka" %% "akka-remote" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-sharding" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-tools" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion,
  "org.aspectj" % "aspectjweaver" % "1.8.1",
  "io.kamon" %% "kamon-core" % kamonVersion,
  "io.kamon" %% "kamon-statsd" % kamonVersion,
  "io.kamon" %% "kamon-akka-2.4" % kamonVersion exclude("io.kamon", "kamon-akka_akka-2.4_2.12"),
  "io.kamon" %% "kamon-akka-remote-2.4" % kamonVersion exclude("io.kamon", "kamon-akka-remote_akka-2.4_2.12"),
  "io.kamon" %% "kamon-log-reporter" % kamonVersion,
  "io.kamon" %% "kamon-system-metrics" % kamonVersion
)

aspectjSettings

javaOptions <++= AspectjKeys.weaverOptions in Aspectj

// when you call "sbt run" aspectj weaving kicks in
fork in run := true
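For completeness, the startup order I am assuming in the application itself (simplified sketch with placeholder names; only the actor system name is fixed, because it has to match the ActorSystem part of the seed-nodes entries):

import akka.actor.ActorSystem
import kamon.Kamon

object Main extends App {                  // placeholder, the real entry point differs
  Kamon.start()                            // start Kamon before the ActorSystem so the AspectJ instrumentation sees the actors
  val system = ActorSystem("ActorSystem")  // loads application.conf; name matches akka.tcp://ActorSystem@127.0.0.1:2552
  // ... top-level actors are created here ...
}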