Skip to content

Commit 7500d29

Browse files
authored
Merge pull request #74 from coroot/collector_endpoint_url
Add base `collector endpoint` URL
2 parents 6b88611 + e68f061 commit 7500d29

File tree

3 files changed

+35
-16
lines changed

3 files changed

+35
-16
lines changed

flags/flags.go

+26-8
Original file line numberDiff line numberDiff line change
@@ -25,14 +25,15 @@ var (
2525
LogPerSecond = kingpin.Flag("log-per-second", "The number of logs per second").Default("10.0").Envar("LOG_PER_SECOND").Float64()
2626
LogBurst = kingpin.Flag("log-burst", "The maximum number of tokens that can be consumed in a single call to allow").Default("100").Envar("LOG_BURST").Int()
2727

28-
MetricsEndpoint = kingpin.Flag("metrics-endpoint", "The URL of the endpoint to send metrics to").Envar("METRICS_ENDPOINT").URL()
29-
TracesEndpoint = kingpin.Flag("traces-endpoint", "The URL of the endpoint to send traces to").Envar("TRACES_ENDPOINT").URL()
30-
LogsEndpoint = kingpin.Flag("logs-endpoint", "The URL of the endpoint to send logs to").Envar("LOGS_ENDPOINT").URL()
31-
ProfilesEndpoint = kingpin.Flag("profiles-endpoint", "The URL of the endpoint to send profiles to").Envar("PROFILES_ENDPOINT").URL()
32-
ApiKey = kingpin.Flag("api-key", "Coroot API key").Envar("API_KEY").String()
33-
ScrapeInterval = kingpin.Flag("scrape-interval", "How often to gather metrics from the agent").Default("15s").Envar("SCRAPE_INTERVAL").Duration()
34-
35-
WalDir = kingpin.Flag("wal-dir", "Path to where the agent stores data (e.g. the metrics Write-Ahead Log)").Default("/tmp/coroot-node-agent").Envar("WAL_DIR").String()
28+
CollectorEndpoint = kingpin.Flag("collector-endpoint", "A base endpoint URL for metrics, traces, logs, and profiles").Envar("COLLECTOR_ENDPOINT").URL()
29+
ApiKey = kingpin.Flag("api-key", "Coroot API key").Envar("API_KEY").String()
30+
MetricsEndpoint = kingpin.Flag("metrics-endpoint", "The URL of the endpoint to send metrics to").Envar("METRICS_ENDPOINT").URL()
31+
TracesEndpoint = kingpin.Flag("traces-endpoint", "The URL of the endpoint to send traces to").Envar("TRACES_ENDPOINT").URL()
32+
LogsEndpoint = kingpin.Flag("logs-endpoint", "The URL of the endpoint to send logs to").Envar("LOGS_ENDPOINT").URL()
33+
ProfilesEndpoint = kingpin.Flag("profiles-endpoint", "The URL of the endpoint to send profiles to").Envar("PROFILES_ENDPOINT").URL()
34+
35+
ScrapeInterval = kingpin.Flag("scrape-interval", "How often to gather metrics from the agent").Default("15s").Envar("SCRAPE_INTERVAL").Duration()
36+
WalDir = kingpin.Flag("wal-dir", "Path to where the agent stores data (e.g. the metrics Write-Ahead Log)").Default("/tmp/coroot-node-agent").Envar("WAL_DIR").String()
3637
)
3738

3839
func GetString(fl *string) string {
@@ -46,6 +47,23 @@ func init() {
4647
if strings.HasSuffix(os.Args[0], ".test") {
4748
return
4849
}
50+
4951
kingpin.HelpFlag.Short('h').Hidden()
5052
kingpin.Parse()
53+
54+
if *CollectorEndpoint != nil {
55+
u := *CollectorEndpoint
56+
if *MetricsEndpoint == nil {
57+
*MetricsEndpoint = u.JoinPath("/v1/metrics")
58+
}
59+
if *TracesEndpoint == nil {
60+
*TracesEndpoint = u.JoinPath("/v1/traces")
61+
}
62+
if *LogsEndpoint == nil {
63+
*LogsEndpoint = u.JoinPath("/v1/logs")
64+
}
65+
if *ProfilesEndpoint == nil {
66+
*ProfilesEndpoint = u.JoinPath("/v1/profiles")
67+
}
68+
}
5169
}

profiling/profiling.go

+2-2
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,6 @@ func Init(hostId, hostName string) chan<- containers.ProcessInfo {
5454
constLabels = labels.Labels{
5555
{Name: "host.name", Value: hostName},
5656
{Name: "host.id", Value: hostId},
57-
{Name: "profile.source", Value: "ebpf"},
5857
}
5958

6059
reg := prometheus.NewRegistry()
@@ -167,14 +166,15 @@ func upload(b *pprof.ProfileBuilder) error {
167166
}
168167
u.RawQuery = q.Encode()
169168

169+
b.Profile.SampleType[0].Type = "ebpf:cpu:nanoseconds"
170170
b.Profile.DurationNanos = CollectInterval.Nanoseconds()
171171
body := bytes.NewBuffer(nil)
172172
_, err := b.Write(body)
173173
if err != nil {
174174
return err
175175
}
176176

177-
req, err := http.NewRequest("POST", u.String(), body)
177+
req, err := http.NewRequest(http.MethodPost, u.String(), body)
178178
if err != nil {
179179
return err
180180
}

prom/agent.go

+7-6
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import (
55

66
"github.com/coroot/coroot-node-agent/common"
77
"github.com/coroot/coroot-node-agent/flags"
8+
"github.com/go-kit/log/level"
89
"github.com/prometheus/client_golang/prometheus"
910
promConfig "github.com/prometheus/common/config"
1011
"github.com/prometheus/common/model"
@@ -25,12 +26,12 @@ const (
2526
)
2627

2728
func StartAgent(machineId string) error {
28-
l := Logger{}
29+
logger := level.NewFilter(Logger{}, level.AllowInfo())
2930

3031
if *flags.MetricsEndpoint == nil {
3132
return nil
3233
}
33-
klog.Infoln("Metrics remote write endpoint:", (*flags.MetricsEndpoint).String())
34+
klog.Infoln("metrics remote write endpoint:", (*flags.MetricsEndpoint).String())
3435
cfg := config.DefaultConfig
3536
cfg.GlobalConfig.ScrapeInterval = model.Duration(*flags.ScrapeInterval)
3637
cfg.GlobalConfig.ScrapeTimeout = model.Duration(*flags.ScrapeInterval)
@@ -54,22 +55,22 @@ func StartAgent(machineId string) error {
5455
opts := agent.DefaultOptions()
5556
localStorage := &readyStorage{stats: tsdb.NewDBStats()}
5657
scraper := &readyScrapeManager{}
57-
remoteStorage := remote.NewStorage(l, prometheus.DefaultRegisterer, localStorage.StartTime, *flags.WalDir, RemoteFlushDeadline, scraper)
58-
fanoutStorage := storage.NewFanout(l, localStorage, remoteStorage)
58+
remoteStorage := remote.NewStorage(logger, prometheus.DefaultRegisterer, localStorage.StartTime, *flags.WalDir, RemoteFlushDeadline, scraper)
59+
fanoutStorage := storage.NewFanout(logger, localStorage, remoteStorage)
5960

6061
if err := remoteStorage.ApplyConfig(&cfg); err != nil {
6162
return err
6263
}
6364

64-
scrapeManager, err := scrape.NewManager(nil, l, fanoutStorage, prometheus.DefaultRegisterer)
65+
scrapeManager, err := scrape.NewManager(nil, logger, fanoutStorage, prometheus.DefaultRegisterer)
6566
if err != nil {
6667
return err
6768
}
6869
if err = scrapeManager.ApplyConfig(&cfg); err != nil {
6970
return err
7071
}
7172
scraper.Set(scrapeManager)
72-
db, err := agent.Open(l, prometheus.DefaultRegisterer, remoteStorage, *flags.WalDir, opts)
73+
db, err := agent.Open(logger, prometheus.DefaultRegisterer, remoteStorage, *flags.WalDir, opts)
7374
if err != nil {
7475
return err
7576
}

0 commit comments

Comments (0)